diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index 37336a56d76c2..55d104b182698 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -125,11 +125,6 @@ fn main() { cmd.arg(format!("-Clinker={}", target_linker)); } - // Pass down incremental directory, if any. - if let Ok(dir) = env::var("RUSTC_INCREMENTAL") { - cmd.arg(format!("-Zincremental={}", dir)); - } - let crate_name = args.windows(2) .find(|a| &*a[0] == "--crate-name") .unwrap(); diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 93c3694bc0cac..ecf9c0a75903e 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -602,6 +602,7 @@ def build_bootstrap(self): env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \ (os.pathsep + env["LIBRARY_PATH"]) \ if "LIBRARY_PATH" in env else "" + env["RUSTFLAGS"] = "-Cdebuginfo=2" env["PATH"] = os.path.join(self.bin_root(), "bin") + \ os.pathsep + env["PATH"] if not os.path.isfile(self.cargo()): diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 7655097681ba2..79058984b1352 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -26,6 +26,7 @@ use util::{exe, libdir, add_lib_path}; use {Build, Mode}; use cache::{INTERNER, Interned, Cache}; use check; +use test; use flags::Subcommand; use doc; use tool; @@ -230,6 +231,7 @@ impl<'a> ShouldRun<'a> { #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Kind { Build, + Check, Test, Bench, Dist, @@ -251,13 +253,13 @@ impl<'a> Builder<'a> { tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient, tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy, native::Llvm, tool::Rustfmt, tool::Miri), - Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest, - check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Rustdoc, - check::Linkcheck, check::Cargotest, check::Cargo, check::Rls, check::Docs, - check::ErrorIndex, check::Distcheck, check::Rustfmt, check::Miri, check::Clippy, - check::RustdocJS), - - Kind::Bench => describe!(check::Crate, check::CrateLibrustc), + Kind::Check => describe!(check::Std, check::Test, check::Rustc), + Kind::Test => describe!(test::Tidy, test::Bootstrap, test::DefaultCompiletest, + test::HostCompiletest, test::Crate, test::CrateLibrustc, test::Rustdoc, + test::Linkcheck, test::Cargotest, test::Cargo, test::Rls, test::Docs, + test::ErrorIndex, test::Distcheck, test::Rustfmt, test::Miri, test::Clippy, + test::RustdocJS), + Kind::Bench => describe!(test::Crate, test::CrateLibrustc), Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook, doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon, doc::Reference, doc::Rustdoc, doc::RustByExample, doc::CargoBook), @@ -304,6 +306,7 @@ impl<'a> Builder<'a> { pub fn run(build: &Build) { let (kind, paths) = match build.config.cmd { Subcommand::Build { ref paths } => (Kind::Build, &paths[..]), + Subcommand::Check { ref paths } => (Kind::Check, &paths[..]), Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]), Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]), Subcommand::Bench { ref paths, .. 
} => (Kind::Bench, &paths[..]), @@ -493,13 +496,14 @@ impl<'a> Builder<'a> { cargo.env("RUSTC_CODEGEN_UNITS", n.to_string()); } + if let Some(host_linker) = self.build.linker(compiler.host) { cargo.env("RUSTC_HOST_LINKER", host_linker); } if let Some(target_linker) = self.build.linker(target) { cargo.env("RUSTC_TARGET_LINKER", target_linker); } - if cmd != "build" { + if cmd != "build" && cmd != "check" { cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build))); } @@ -566,8 +570,7 @@ impl<'a> Builder<'a> { // not guaranteeing correctness across builds if the compiler // is changing under your feet.` if self.config.incremental && compiler.stage == 0 { - let incr_dir = self.incremental_dir(compiler); - cargo.env("RUSTC_INCREMENTAL", incr_dir); + cargo.env("CARGO_INCREMENTAL", "1"); } if let Some(ref on_fail) = self.config.on_fail { diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 5faec27943847..0bc82c4f9f2c2 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -1,4 +1,4 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,1535 +8,156 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Implementation of the test-related targets of the build system. -//! -//! This file implements the various regression test suites that we execute on -//! our CI. +//! Implementation of compiling the compiler and standard library, in "check" mode. -use std::collections::HashSet; -use std::env; -use std::ffi::OsString; -use std::iter; -use std::fmt; -use std::fs::{self, File}; -use std::path::{PathBuf, Path}; -use std::process::Command; -use std::io::Read; - -use build_helper::{self, output}; - -use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step}; -use cache::{INTERNER, Interned}; -use compile; -use dist; -use native; -use tool::{self, Tool}; -use util::{self, dylib_path, dylib_path_var}; -use {Build, Mode}; -use toolstate::ToolState; - -const ADB_TEST_DIR: &str = "/data/tmp/work"; - -/// The two modes of the test runner; tests or benchmarks. 
-#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] -pub enum TestKind { - /// Run `cargo test` - Test, - /// Run `cargo bench` - Bench, -} - -impl TestKind { - // Return the cargo subcommand for this test kind - fn subcommand(self) -> &'static str { - match self { - TestKind::Test => "test", - TestKind::Bench => "bench", - } - } -} - -impl fmt::Display for TestKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match *self { - TestKind::Test => "Testing", - TestKind::Bench => "Benchmarking", - }) - } -} - -fn try_run(build: &Build, cmd: &mut Command) -> bool { - if !build.fail_fast { - if !build.try_run(cmd) { - let mut failures = build.delayed_failures.borrow_mut(); - failures.push(format!("{:?}", cmd)); - return false; - } - } else { - build.run(cmd); - } - true -} - -fn try_run_quiet(build: &Build, cmd: &mut Command) { - if !build.fail_fast { - if !build.try_run_quiet(cmd) { - let mut failures = build.delayed_failures.borrow_mut(); - failures.push(format!("{:?}", cmd)); - } - } else { - build.run_quiet(cmd); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Linkcheck { - host: Interned<String>, -} - -impl Step for Linkcheck { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. - /// - /// This tool in `src/tools` will verify the validity of all our links in the - /// documentation to ensure we don't have a bunch of dead ones. - fn run(self, builder: &Builder) { - let build = builder.build; - let host = self.host; - - println!("Linkcheck ({})", host); - - builder.default_doc(None); - - let _time = util::timeit(); - try_run(build, builder.tool_cmd(Tool::Linkchecker) - .arg(build.out.join(host).join("doc"))); - } - - fn should_run(run: ShouldRun) -> ShouldRun { - let builder = run.builder; - run.path("src/tools/linkchecker").default_condition(builder.build.config.docs) - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Linkcheck { host: run.target }); - } -} +use compile::{run_cargo, std_cargo, test_cargo, rustc_cargo, add_to_sysroot}; +use builder::{RunConfig, Builder, ShouldRun, Step}; +use {Build, Compiler, Mode}; +use cache::Interned; +use std::path::PathBuf; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Cargotest { - stage: u32, - host: Interned<String>, -} - -impl Step for Cargotest { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/tools/cargotest") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Cargotest { - stage: run.builder.top_stage, - host: run.target, - }); - } - - /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. - /// - /// This tool in `src/tools` will check out a few Rust projects and run `cargo - /// test` to ensure that we don't regress the test suites there. - fn run(self, builder: &Builder) { - let build = builder.build; - let compiler = builder.compiler(self.stage, self.host); - builder.ensure(compile::Rustc { compiler, target: compiler.host }); - - // Note that this is a short, cryptic, and not scoped directory name. This - // is currently to minimize the length of path on Windows where we otherwise - // quickly run into path name limit constraints. 
- let out_dir = build.out.join("ct"); - t!(fs::create_dir_all(&out_dir)); - - let _time = util::timeit(); - let mut cmd = builder.tool_cmd(Tool::CargoTest); - try_run(build, cmd.arg(&build.initial_cargo) - .arg(&out_dir) - .env("RUSTC", builder.rustc(compiler)) - .env("RUSTDOC", builder.rustdoc(compiler.host))); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Cargo { - stage: u32, - host: Interned<String>, -} - -impl Step for Cargo { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/tools/cargo") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Cargo { - stage: run.builder.top_stage, - host: run.target, - }); - } - - /// Runs `cargo test` for `cargo` packaged with Rust. - fn run(self, builder: &Builder) { - let build = builder.build; - let compiler = builder.compiler(self.stage, self.host); - - builder.ensure(tool::Cargo { compiler, target: self.host }); - let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml")); - if !build.fail_fast { - cargo.arg("--no-fail-fast"); - } - - // Don't build tests dynamically, just a pain to work with - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - - // Don't run cross-compile tests, we may not have cross-compiled libstd libs - // available. - cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); - - try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler))); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Rls { - stage: u32, - host: Interned<String>, -} - -impl Step for Rls { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/tools/rls") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Rls { - stage: run.builder.top_stage, - host: run.target, - }); - } - - /// Runs `cargo test` for the rls. - fn run(self, builder: &Builder) { - let build = builder.build; - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - builder.ensure(tool::Rls { compiler, target: self.host }); - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - host, - "test", - "src/tools/rls"); - - // Don't build tests dynamically, just a pain to work with - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - - builder.add_rustc_lib_path(compiler, &mut cargo); - - if try_run(build, &mut cargo) { - build.save_toolstate("rls", ToolState::TestPass); - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Rustfmt { - stage: u32, - host: Interned<String>, -} - -impl Step for Rustfmt { - type Output = (); - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/tools/rustfmt") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Rustfmt { - stage: run.builder.top_stage, - host: run.target, - }); - } - - /// Runs `cargo test` for rustfmt. 
- fn run(self, builder: &Builder) { - let build = builder.build; - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - builder.ensure(tool::Rustfmt { compiler, target: self.host }); - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - host, - "test", - "src/tools/rustfmt"); - - // Don't build tests dynamically, just a pain to work with - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - - builder.add_rustc_lib_path(compiler, &mut cargo); - - if try_run(build, &mut cargo) { - build.save_toolstate("rustfmt", ToolState::TestPass); - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Miri { - stage: u32, - host: Interned<String>, -} - -impl Step for Miri { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - let test_miri = run.builder.build.config.test_miri; - run.path("src/tools/miri").default_condition(test_miri) - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Miri { - stage: run.builder.top_stage, - host: run.target, - }); - } - - /// Runs `cargo test` for miri. - fn run(self, builder: &Builder) { - let build = builder.build; - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - if let Some(miri) = builder.ensure(tool::Miri { compiler, target: self.host }) { - let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml")); - - // Don't build tests dynamically, just a pain to work with - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - // miri tests need to know about the stage sysroot - cargo.env("MIRI_SYSROOT", builder.sysroot(compiler)); - cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); - cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); - cargo.env("MIRI_PATH", miri); - - builder.add_rustc_lib_path(compiler, &mut cargo); - - if try_run(build, &mut cargo) { - build.save_toolstate("miri", ToolState::TestPass); - } - } else { - eprintln!("failed to test miri: could not build"); - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Clippy { - stage: u32, - host: Interned<String>, -} - -impl Step for Clippy { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = false; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/tools/clippy") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Clippy { - stage: run.builder.top_stage, - host: run.target, - }); - } - - /// Runs `cargo test` for clippy. 
- fn run(self, builder: &Builder) { - let build = builder.build; - let stage = self.stage; - let host = self.host; - let compiler = builder.compiler(stage, host); - - if let Some(clippy) = builder.ensure(tool::Clippy { compiler, target: self.host }) { - let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml")); - - // Don't build tests dynamically, just a pain to work with - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - // clippy tests need to know about the stage sysroot - cargo.env("SYSROOT", builder.sysroot(compiler)); - cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); - cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); - let host_libs = builder.stage_out(compiler, Mode::Tool).join(builder.cargo_dir()); - cargo.env("HOST_LIBS", host_libs); - // clippy tests need to find the driver - cargo.env("CLIPPY_DRIVER_PATH", clippy); - - builder.add_rustc_lib_path(compiler, &mut cargo); - - if try_run(build, &mut cargo) { - build.save_toolstate("clippy-driver", ToolState::TestPass); - } - } else { - eprintln!("failed to test clippy: could not build"); - } - } -} - -fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString { - // Configure PATH to find the right rustc. NB. we have to use PATH - // and not RUSTC because the Cargo test suite has tests that will - // fail if rustc is not spelled `rustc`. - let path = builder.sysroot(compiler).join("bin"); - let old_path = env::var_os("PATH").unwrap_or_default(); - env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") -} - -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -pub struct RustdocJS { - pub host: Interned<String>, +pub struct Std { pub target: Interned<String>, } -impl Step for RustdocJS { +impl Step for Std { type Output = (); const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/test/rustdoc-js") + run.path("src/libstd").krate("std") } fn make_run(run: RunConfig) { - run.builder.ensure(RustdocJS { - host: run.host, + run.builder.ensure(Std { target: run.target, }); } - fn run(self, builder: &Builder) { - if let Some(ref nodejs) = builder.config.nodejs { - let mut command = Command::new(nodejs); - command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]); - builder.ensure(::doc::Std { - target: self.target, - stage: builder.top_stage, - }); - builder.run(&mut command); - } else { - println!("No nodejs found, skipping \"src/test/rustdoc-js\" tests"); - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Tidy { - host: Interned<String>, -} - -impl Step for Tidy { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - const ONLY_BUILD: bool = true; - - /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler. - /// - /// This tool in `src/tools` checks up on various bits and pieces of style and - /// otherwise just implements a few lint-like checks that are specific to the - /// compiler itself. 
- fn run(self, builder: &Builder) { - let build = builder.build; - let host = self.host; - - let _folder = build.fold_output(|| "tidy"); - println!("tidy check ({})", host); - let mut cmd = builder.tool_cmd(Tool::Tidy); - cmd.arg(build.src.join("src")); - if !build.config.vendor { - cmd.arg("--no-vendor"); - } - if build.config.quiet_tests { - cmd.arg("--quiet"); - } - try_run(build, &mut cmd); - } - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/tools/tidy") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Tidy { - host: run.builder.build.build, - }); - } -} - -fn testdir(build: &Build, host: Interned<String>) -> PathBuf { - build.out.join(host).join("test") -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -struct Test { - path: &'static str, - mode: &'static str, - suite: &'static str, -} - -static DEFAULT_COMPILETESTS: &[Test] = &[ - Test { path: "src/test/ui", mode: "ui", suite: "ui" }, - Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" }, - Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" }, - Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" }, - Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" }, - Test { - path: "src/test/run-pass-valgrind", - mode: "run-pass-valgrind", - suite: "run-pass-valgrind" - }, - Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" }, - Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" }, - Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" }, - Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" }, - - // What this runs varies depending on the native platform being apple - Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" }, -]; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct DefaultCompiletest { - compiler: Compiler, - target: Interned<String>, - mode: &'static str, - suite: &'static str, -} - -impl Step for DefaultCompiletest { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(mut run: ShouldRun) -> ShouldRun { - for test in DEFAULT_COMPILETESTS { - run = run.path(test.path); - } - run - } - - fn make_run(run: RunConfig) { - let compiler = run.builder.compiler(run.builder.top_stage, run.host); - - let test = run.path.map(|path| { - DEFAULT_COMPILETESTS.iter().find(|&&test| { - path.ends_with(test.path) - }).unwrap_or_else(|| { - panic!("make_run in compile test to receive test path, received {:?}", path); - }) - }); - - if let Some(test) = test { - run.builder.ensure(DefaultCompiletest { - compiler, - target: run.target, - mode: test.mode, - suite: test.suite, - }); - } else { - for test in DEFAULT_COMPILETESTS { - run.builder.ensure(DefaultCompiletest { - compiler, - target: run.target, - mode: test.mode, - suite: test.suite - }); - } - } - } - - fn run(self, builder: &Builder) { - builder.ensure(Compiletest { - compiler: self.compiler, - target: self.target, - mode: self.mode, - suite: self.suite, - }) - } -} - -// Also default, but host-only. 
-static HOST_COMPILETESTS: &[Test] = &[ - Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" }, - Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" }, - Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" }, - Test { - path: "src/test/compile-fail-fulldeps", - mode: "compile-fail", - suite: "compile-fail-fulldeps", - }, - Test { - path: "src/test/incremental-fulldeps", - mode: "incremental", - suite: "incremental-fulldeps", - }, - Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" }, - Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" }, - - Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" }, - Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" }, - Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" }, - Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" }, - Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" }, - Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" }, -]; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct HostCompiletest { - compiler: Compiler, - target: Interned<String>, - mode: &'static str, - suite: &'static str, -} - -impl Step for HostCompiletest { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(mut run: ShouldRun) -> ShouldRun { - for test in HOST_COMPILETESTS { - run = run.path(test.path); - } - run - } - - fn make_run(run: RunConfig) { - let compiler = run.builder.compiler(run.builder.top_stage, run.host); - - let test = run.path.map(|path| { - HOST_COMPILETESTS.iter().find(|&&test| { - path.ends_with(test.path) - }).unwrap_or_else(|| { - panic!("make_run in compile test to receive test path, received {:?}", path); - }) - }); - - if let Some(test) = test { - run.builder.ensure(HostCompiletest { - compiler, - target: run.target, - mode: test.mode, - suite: test.suite, - }); - } else { - for test in HOST_COMPILETESTS { - if test.mode == "pretty" { - continue; - } - run.builder.ensure(HostCompiletest { - compiler, - target: run.target, - mode: test.mode, - suite: test.suite - }); - } - } - } - - fn run(self, builder: &Builder) { - builder.ensure(Compiletest { - compiler: self.compiler, - target: self.target, - mode: self.mode, - suite: self.suite, - }) - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -struct Compiletest { - compiler: Compiler, - target: Interned<String>, - mode: &'static str, - suite: &'static str, -} - -impl Step for Compiletest { - type Output = (); - - fn should_run(run: ShouldRun) -> ShouldRun { - run.never() - } - - /// Executes the `compiletest` tool to run a suite of tests. - /// - /// Compiles all tests with `compiler` for `target` with the specified - /// compiletest `mode` and `suite` arguments. For example `mode` can be - /// "run-pass" or `suite` can be something like `debuginfo`. fn run(self, builder: &Builder) { let build = builder.build; - let compiler = self.compiler; let target = self.target; - let mode = self.mode; - let suite = self.suite; - - // Skip codegen tests if they aren't enabled in configuration. 
- if !build.config.codegen_tests && suite == "codegen" { - return; - } - - if suite == "debuginfo" { - // Skip debuginfo tests on MSVC - if build.build.contains("msvc") { - return; - } - - if mode == "debuginfo-XXX" { - return if build.build.contains("apple") { - builder.ensure(Compiletest { - mode: "debuginfo-lldb", - ..self - }); - } else { - builder.ensure(Compiletest { - mode: "debuginfo-gdb", - ..self - }); - }; - } - - builder.ensure(dist::DebuggerScripts { - sysroot: builder.sysroot(compiler), - host: target - }); - } - - if suite.ends_with("fulldeps") || - // FIXME: Does pretty need librustc compiled? Note that there are - // fulldeps test suites with mode = pretty as well. - mode == "pretty" || - mode == "rustdoc" || - mode == "run-make" { - builder.ensure(compile::Rustc { compiler, target }); - } - - builder.ensure(compile::Test { compiler, target }); - builder.ensure(native::TestHelpers { target }); - builder.ensure(RemoteCopyLibs { compiler, target }); - - let _folder = build.fold_output(|| format!("test_{}", suite)); - println!("Check compiletest suite={} mode={} ({} -> {})", - suite, mode, &compiler.host, target); - let mut cmd = builder.tool_cmd(Tool::Compiletest); - - // compiletest currently has... a lot of arguments, so let's just pass all - // of them! - - cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler)); - cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target)); - cmd.arg("--rustc-path").arg(builder.rustc(compiler)); - - // Avoid depending on rustdoc when we don't need it. - if mode == "rustdoc" || mode == "run-make" { - cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host)); - } - - cmd.arg("--src-base").arg(build.src.join("src/test").join(suite)); - cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite)); - cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target)); - cmd.arg("--mode").arg(mode); - cmd.arg("--target").arg(target); - cmd.arg("--host").arg(&*compiler.host); - cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build)); - - if let Some(ref nodejs) = build.config.nodejs { - cmd.arg("--nodejs").arg(nodejs); - } - - let mut flags = vec!["-Crpath".to_string()]; - if build.config.rust_optimize_tests { - flags.push("-O".to_string()); - } - if build.config.rust_debuginfo_tests { - flags.push("-g".to_string()); - } - flags.push("-Zmiri -Zunstable-options".to_string()); - flags.push(build.config.cmd.rustc_args().join(" ")); - - if let Some(linker) = build.linker(target) { - cmd.arg("--linker").arg(linker); - } - - let hostflags = flags.clone(); - cmd.arg("--host-rustcflags").arg(hostflags.join(" ")); - - let mut targetflags = flags.clone(); - targetflags.push(format!("-Lnative={}", - build.test_helpers_out(target).display())); - cmd.arg("--target-rustcflags").arg(targetflags.join(" ")); - - cmd.arg("--docck-python").arg(build.python()); - - if build.build.ends_with("apple-darwin") { - // Force /usr/bin/python on macOS for LLDB tests because we're loading the - // LLDB plugin's compiled module which only works with the system python - // (namely not Homebrew-installed python) - cmd.arg("--lldb-python").arg("/usr/bin/python"); - } else { - cmd.arg("--lldb-python").arg(build.python()); - } - - if let Some(ref gdb) = build.config.gdb { - cmd.arg("--gdb").arg(gdb); - } - if let Some(ref vers) = build.lldb_version { - cmd.arg("--lldb-version").arg(vers); - } - if let Some(ref dir) = build.lldb_python_dir { - cmd.arg("--lldb-python-dir").arg(dir); - } - - cmd.args(&build.config.cmd.test_args()); 
- - if build.is_verbose() { - cmd.arg("--verbose"); - } - - if build.config.quiet_tests { - cmd.arg("--quiet"); - } - - if build.config.llvm_enabled { - let llvm_config = build.llvm_config(target); - let llvm_version = output(Command::new(&llvm_config).arg("--version")); - cmd.arg("--llvm-version").arg(llvm_version); - if !build.is_rust_llvm(target) { - cmd.arg("--system-llvm"); - } - - // Only pass correct values for these flags for the `run-make` suite as it - // requires that a C++ compiler was configured which isn't always the case. - if suite == "run-make" { - let llvm_components = output(Command::new(&llvm_config).arg("--components")); - let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags")); - cmd.arg("--cc").arg(build.cc(target)) - .arg("--cxx").arg(build.cxx(target).unwrap()) - .arg("--cflags").arg(build.cflags(target).join(" ")) - .arg("--llvm-components").arg(llvm_components.trim()) - .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim()); - if let Some(ar) = build.ar(target) { - cmd.arg("--ar").arg(ar); - } - } - } - if suite == "run-make" && !build.config.llvm_enabled { - println!("Ignoring run-make test suite as they generally dont work without LLVM"); - return; - } + let compiler = builder.compiler(0, build.build); - if suite != "run-make" { - cmd.arg("--cc").arg("") - .arg("--cxx").arg("") - .arg("--cflags").arg("") - .arg("--llvm-components").arg("") - .arg("--llvm-cxxflags").arg(""); - } + let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage)); + println!("Checking std artifacts ({} -> {})", &compiler.host, target); - if build.remote_tested(target) { - cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient)); - } - - // Running a C compiler on MSVC requires a few env vars to be set, to be - // sure to set them here. - // - // Note that if we encounter `PATH` we make sure to append to our own `PATH` - // rather than stomp over it. 
- if target.contains("msvc") { - for &(ref k, ref v) in build.cc[&target].env() { - if k != "PATH" { - cmd.env(k, v); - } - } - } - cmd.env("RUSTC_BOOTSTRAP", "1"); - build.add_rust_test_threads(&mut cmd); - - if build.config.sanitizers { - cmd.env("SANITIZER_SUPPORT", "1"); - } - - if build.config.profiler { - cmd.env("PROFILER_SUPPORT", "1"); - } - - cmd.arg("--adb-path").arg("adb"); - cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); - if target.contains("android") { - // Assume that cc for this target comes from the android sysroot - cmd.arg("--android-cross-path") - .arg(build.cc(target).parent().unwrap().parent().unwrap()); - } else { - cmd.arg("--android-cross-path").arg(""); - } - - build.ci_env.force_coloring_in_ci(&mut cmd); - - let _time = util::timeit(); - try_run(build, &mut cmd); + let out_dir = build.stage_out(compiler, Mode::Libstd); + build.clear_if_dirty(&out_dir, &builder.rustc(compiler)); + let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "check"); + std_cargo(build, &compiler, target, &mut cargo); + run_cargo(build, + &mut cargo, + &libstd_stamp(build, compiler, target), + true); + let libdir = builder.sysroot_libdir(compiler, target); + add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target)); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Docs { - compiler: Compiler, +pub struct Rustc { + pub target: Interned<String>, } -impl Step for Docs { +impl Step for Rustc { type Output = (); - const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/doc") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Docs { - compiler: run.builder.compiler(run.builder.top_stage, run.host), - }); - } - - /// Run `rustdoc --test` for all documentation in `src/doc`. - /// - /// This will run all tests in our markdown documentation (e.g. the book) - /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to - /// `compiler`. - fn run(self, builder: &Builder) { - let build = builder.build; - let compiler = self.compiler; - - builder.ensure(compile::Test { compiler, target: compiler.host }); - - // Do a breadth-first traversal of the `src/doc` directory and just run - // tests for all files that end in `*.md` - let mut stack = vec![build.src.join("src/doc")]; - let _time = util::timeit(); - let _folder = build.fold_output(|| "test_docs"); - - while let Some(p) = stack.pop() { - if p.is_dir() { - stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); - continue - } - - if p.extension().and_then(|s| s.to_str()) != Some("md") { - continue; - } - - // The nostarch directory in the book is for no starch, and so isn't - // guaranteed to build. We don't care if it doesn't build, so skip it. - if p.to_str().map_or(false, |p| p.contains("nostarch")) { - continue; - } - - markdown_test(builder, compiler, &p); - } - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ErrorIndex { - compiler: Compiler, -} - -impl Step for ErrorIndex { - type Output = (); const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/tools/error_index_generator") + run.path("src/librustc").krate("rustc-main") } fn make_run(run: RunConfig) { - run.builder.ensure(ErrorIndex { - compiler: run.builder.compiler(run.builder.top_stage, run.host), - }); - } - - /// Run the error index generator tool to execute the tests located in the error - /// index. 
- /// - /// The `error_index_generator` tool lives in `src/tools` and is used to - /// generate a markdown file from the error indexes of the code base which is - /// then passed to `rustdoc --test`. - fn run(self, builder: &Builder) { - let build = builder.build; - let compiler = self.compiler; - - builder.ensure(compile::Std { compiler, target: compiler.host }); - - let _folder = build.fold_output(|| "test_error_index"); - println!("Testing error-index stage{}", compiler.stage); - - let dir = testdir(build, compiler.host); - t!(fs::create_dir_all(&dir)); - let output = dir.join("error-index.md"); - - let _time = util::timeit(); - build.run(builder.tool_cmd(Tool::ErrorIndex) - .arg("markdown") - .arg(&output) - .env("CFG_BUILD", &build.build) - .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir())); - - markdown_test(builder, compiler, &output); - } -} - -fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) { - let build = builder.build; - let mut file = t!(File::open(markdown)); - let mut contents = String::new(); - t!(file.read_to_string(&mut contents)); - if !contents.contains("```") { - return; - } - - println!("doc tests for: {}", markdown.display()); - let mut cmd = builder.rustdoc_cmd(compiler.host); - build.add_rust_test_threads(&mut cmd); - cmd.arg("--test"); - cmd.arg(markdown); - cmd.env("RUSTC_BOOTSTRAP", "1"); - - let test_args = build.config.cmd.test_args().join(" "); - cmd.arg("--test-args").arg(test_args); - - if build.config.quiet_tests { - try_run_quiet(build, &mut cmd); - } else { - try_run(build, &mut cmd); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct CrateLibrustc { - compiler: Compiler, - target: Interned<String>, - test_kind: TestKind, - krate: Option<Interned<String>>, -} - -impl Step for CrateLibrustc { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.krate("rustc-main") - } - - fn make_run(run: RunConfig) { - let builder = run.builder; - let compiler = builder.compiler(builder.top_stage, run.host); - - let make = |name: Option<Interned<String>>| { - let test_kind = if builder.kind == Kind::Test { - TestKind::Test - } else if builder.kind == Kind::Bench { - TestKind::Bench - } else { - panic!("unexpected builder.kind in crate: {:?}", builder.kind); - }; - - builder.ensure(CrateLibrustc { - compiler, - target: run.target, - test_kind, - krate: name, - }); - }; - - if let Some(path) = run.path { - for (name, krate_path) in builder.crates("rustc-main") { - if path.ends_with(krate_path) { - make(Some(name)); - } - } - } else { - make(None); - } - } - - - fn run(self, builder: &Builder) { - builder.ensure(Crate { - compiler: self.compiler, - target: self.target, - mode: Mode::Librustc, - test_kind: self.test_kind, - krate: self.krate, + run.builder.ensure(Rustc { + target: run.target, }); } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Crate { - compiler: Compiler, - target: Interned<String>, - mode: Mode, - test_kind: TestKind, - krate: Option<Interned<String>>, -} - -impl Step for Crate { - type Output = (); - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.krate("std").krate("test") - } - fn make_run(run: RunConfig) { - let builder = run.builder; - let compiler = builder.compiler(builder.top_stage, run.host); - - let make = |mode: Mode, name: Option<Interned<String>>| { - let test_kind = if builder.kind == Kind::Test { - TestKind::Test - } else if builder.kind == 
Kind::Bench { - TestKind::Bench - } else { - panic!("unexpected builder.kind in crate: {:?}", builder.kind); - }; - - builder.ensure(Crate { - compiler, - target: run.target, - mode, - test_kind, - krate: name, - }); - }; - - if let Some(path) = run.path { - for (name, krate_path) in builder.crates("std") { - if path.ends_with(krate_path) { - make(Mode::Libstd, Some(name)); - } - } - for (name, krate_path) in builder.crates("test") { - if path.ends_with(krate_path) { - make(Mode::Libtest, Some(name)); - } - } - } else { - make(Mode::Libstd, None); - make(Mode::Libtest, None); - } - } - - /// Run all unit tests plus documentation tests for an entire crate DAG defined - /// by a `Cargo.toml` - /// - /// This is what runs tests for crates like the standard library, compiler, etc. - /// It essentially is the driver for running `cargo test`. + /// Build the compiler. /// - /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` - /// arguments, and those arguments are discovered from `cargo metadata`. + /// This will build the compiler for a particular stage of the build using + /// the `compiler` targeting the `target` architecture. The artifacts + /// created will also be linked into the sysroot directory. fn run(self, builder: &Builder) { let build = builder.build; - let compiler = self.compiler; + let compiler = builder.compiler(0, build.build); let target = self.target; - let mode = self.mode; - let test_kind = self.test_kind; - let krate = self.krate; - - builder.ensure(compile::Test { compiler, target }); - builder.ensure(RemoteCopyLibs { compiler, target }); - - // If we're not doing a full bootstrap but we're testing a stage2 version of - // libstd, then what we're actually testing is the libstd produced in - // stage1. Reflect that here by updating the compiler that we're working - // with automatically. - let compiler = if build.force_use_stage1(compiler, target) { - builder.compiler(1, compiler.host) - } else { - compiler.clone() - }; - - let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand()); - let (name, root) = match mode { - Mode::Libstd => { - compile::std_cargo(build, &compiler, target, &mut cargo); - ("libstd", "std") - } - Mode::Libtest => { - compile::test_cargo(build, &compiler, target, &mut cargo); - ("libtest", "test") - } - Mode::Librustc => { - builder.ensure(compile::Rustc { compiler, target }); - compile::rustc_cargo(build, target, &mut cargo); - ("librustc", "rustc-main") - } - _ => panic!("can only test libraries"), - }; - let root = INTERNER.intern_string(String::from(root)); - let _folder = build.fold_output(|| { - format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name) - }); - println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage, - &compiler.host, target); - - // Build up the base `cargo test` command. - // - // Pass in some standard flags then iterate over the graph we've discovered - // in `cargo metadata` with the maps above and figure out what `-p` - // arguments need to get passed. - if test_kind.subcommand() == "test" && !build.fail_fast { - cargo.arg("--no-fail-fast"); - } - - match krate { - Some(krate) => { - cargo.arg("-p").arg(krate); - } - None => { - let mut visited = HashSet::new(); - let mut next = vec![root]; - while let Some(name) = next.pop() { - // Right now jemalloc and the sanitizer crates are - // target-specific crate in the sense that it's not present - // on all platforms. 
Custom skip it here for now, but if we - // add more this probably wants to get more generalized. - // - // Also skip `build_helper` as it's not compiled normally - // for target during the bootstrap and it's just meant to be - // a helper crate, not tested. If it leaks through then it - // ends up messing with various mtime calculations and such. - if !name.contains("jemalloc") && - *name != *"build_helper" && - !(name.starts_with("rustc_") && name.ends_with("san")) && - name != "dlmalloc" { - cargo.arg("-p").arg(&format!("{}:0.0.0", name)); - } - for dep in build.crates[&name].deps.iter() { - if visited.insert(dep) { - next.push(*dep); - } - } - } - } - } - - // The tests are going to run with the *target* libraries, so we need to - // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. - // - // Note that to run the compiler we need to run with the *host* libraries, - // but our wrapper scripts arrange for that to be the case anyway. - let mut dylib_path = dylib_path(); - dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target))); - cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - cargo.arg("--"); - cargo.args(&build.config.cmd.test_args()); + let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage)); + println!("Checking compiler artifacts ({} -> {})", &compiler.host, target); - if build.config.quiet_tests { - cargo.arg("--quiet"); - } + let stage_out = builder.stage_out(compiler, Mode::Librustc); + build.clear_if_dirty(&stage_out, &libstd_stamp(build, compiler, target)); + build.clear_if_dirty(&stage_out, &libtest_stamp(build, compiler, target)); - let _time = util::timeit(); - - if target.contains("emscripten") { - cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), - build.config.nodejs.as_ref().expect("nodejs not configured")); - } else if target.starts_with("wasm32") { - // On the wasm32-unknown-unknown target we're using LTO which is - // incompatible with `-C prefer-dynamic`, so disable that here - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - - let node = build.config.nodejs.as_ref() - .expect("nodejs not configured"); - let runner = format!("{} {}/src/etc/wasm32-shim.js", - node.display(), - build.src.display()); - cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner); - } else if build.remote_tested(target) { - cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), - format!("{} run", - builder.tool_exe(Tool::RemoteTestClient).display())); - } - try_run(build, &mut cargo); + let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "check"); + rustc_cargo(build, target, &mut cargo); + run_cargo(build, + &mut cargo, + &librustc_stamp(build, compiler, target), + true); + let libdir = builder.sysroot_libdir(compiler, target); + add_to_sysroot(&libdir, &librustc_stamp(build, compiler, target)); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Rustdoc { - host: Interned<String>, - test_kind: TestKind, +pub struct Test { + pub target: Interned<String>, } -impl Step for Rustdoc { +impl Step for Test { type Output = (); const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/librustdoc").path("src/tools/rustdoc") + run.path("src/libtest").krate("test") } fn make_run(run: RunConfig) { - let builder = run.builder; - - let test_kind = if builder.kind == Kind::Test { - TestKind::Test - } else if builder.kind == Kind::Bench { - TestKind::Bench - } else { - panic!("unexpected builder.kind 
in crate: {:?}", builder.kind); - }; - - builder.ensure(Rustdoc { - host: run.host, - test_kind, + run.builder.ensure(Test { + target: run.target, }); } fn run(self, builder: &Builder) { let build = builder.build; - let test_kind = self.test_kind; - - let compiler = builder.compiler(builder.top_stage, self.host); - let target = compiler.host; - - let mut cargo = tool::prepare_tool_cargo(builder, - compiler, - target, - test_kind.subcommand(), - "src/tools/rustdoc"); - let _folder = build.fold_output(|| { - format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage) - }); - println!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage, - &compiler.host, target); - - if test_kind.subcommand() == "test" && !build.fail_fast { - cargo.arg("--no-fail-fast"); - } - - cargo.arg("-p").arg("rustdoc:0.0.0"); - - cargo.arg("--"); - cargo.args(&build.config.cmd.test_args()); - - if build.config.quiet_tests { - cargo.arg("--quiet"); - } - - let _time = util::timeit(); + let target = self.target; + let compiler = builder.compiler(0, build.build); - try_run(build, &mut cargo); + let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage)); + println!("Checking test artifacts ({} -> {})", &compiler.host, target); + let out_dir = build.stage_out(compiler, Mode::Libtest); + build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target)); + let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "check"); + test_cargo(build, &compiler, target, &mut cargo); + run_cargo(build, + &mut cargo, + &libtest_stamp(build, compiler, target), + true); + let libdir = builder.sysroot_libdir(compiler, target); + add_to_sysroot(&libdir, &libtest_stamp(build, compiler, target)); } } -fn envify(s: &str) -> String { - s.chars().map(|c| { - match c { - '-' => '_', - c => c, - } - }).flat_map(|c| c.to_uppercase()).collect() +/// Cargo's output path for the standard library in a given stage, compiled +/// by a particular compiler for the specified target. +pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf { + build.cargo_out(compiler, Mode::Libstd, target).join(".libstd-check.stamp") } -/// Some test suites are run inside emulators or on remote devices, and most -/// of our test binaries are linked dynamically which means we need to ship -/// the standard library and such to the emulator ahead of time. This step -/// represents this and is a dependency of all test suites. -/// -/// Most of the time this is a noop. For some steps such as shipping data to -/// QEMU we have to build our own tools so we've got conditional dependencies -/// on those programs as well. Note that the remote test client is built for -/// the build target (us) and the server is built for the target. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct RemoteCopyLibs { - compiler: Compiler, - target: Interned<String>, +/// Cargo's output path for libtest in a given stage, compiled by a particular +/// compiler for the specified target. 
+pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf { + build.cargo_out(compiler, Mode::Libtest, target).join(".libtest-check.stamp") } -impl Step for RemoteCopyLibs { - type Output = (); - - fn should_run(run: ShouldRun) -> ShouldRun { - run.never() - } - - fn run(self, builder: &Builder) { - let build = builder.build; - let compiler = self.compiler; - let target = self.target; - if !build.remote_tested(target) { - return - } - - builder.ensure(compile::Test { compiler, target }); - - println!("REMOTE copy libs to emulator ({})", target); - t!(fs::create_dir_all(build.out.join("tmp"))); - - let server = builder.ensure(tool::RemoteTestServer { compiler, target }); - - // Spawn the emulator and wait for it to come online - let tool = builder.tool_exe(Tool::RemoteTestClient); - let mut cmd = Command::new(&tool); - cmd.arg("spawn-emulator") - .arg(target) - .arg(&server) - .arg(build.out.join("tmp")); - if let Some(rootfs) = build.qemu_rootfs(target) { - cmd.arg(rootfs); - } - build.run(&mut cmd); - - // Push all our dylibs to the emulator - for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) { - let f = t!(f); - let name = f.file_name().into_string().unwrap(); - if util::is_dylib(&name) { - build.run(Command::new(&tool) - .arg("push") - .arg(f.path())); - } - } - } +/// Cargo's output path for librustc in a given stage, compiled by a particular +/// compiler for the specified target. +pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf { + build.cargo_out(compiler, Mode::Librustc, target).join(".librustc-check.stamp") } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Distcheck; - -impl Step for Distcheck { - type Output = (); - const ONLY_BUILD: bool = true; - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("distcheck") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Distcheck); - } - - /// Run "distcheck", a 'make check' from a tarball - fn run(self, builder: &Builder) { - let build = builder.build; - - println!("Distcheck"); - let dir = build.out.join("tmp").join("distcheck"); - let _ = fs::remove_dir_all(&dir); - t!(fs::create_dir_all(&dir)); - - // Guarantee that these are built before we begin running. 
- builder.ensure(dist::PlainSourceTarball); - builder.ensure(dist::Src); - - let mut cmd = Command::new("tar"); - cmd.arg("-xzf") - .arg(builder.ensure(dist::PlainSourceTarball)) - .arg("--strip-components=1") - .current_dir(&dir); - build.run(&mut cmd); - build.run(Command::new("./configure") - .args(&build.config.configure_args) - .arg("--enable-vendor") - .current_dir(&dir)); - build.run(Command::new(build_helper::make(&build.build)) - .arg("check") - .current_dir(&dir)); - - // Now make sure that rust-src has all of libstd's dependencies - println!("Distcheck rust-src"); - let dir = build.out.join("tmp").join("distcheck-src"); - let _ = fs::remove_dir_all(&dir); - t!(fs::create_dir_all(&dir)); - - let mut cmd = Command::new("tar"); - cmd.arg("-xzf") - .arg(builder.ensure(dist::Src)) - .arg("--strip-components=1") - .current_dir(&dir); - build.run(&mut cmd); - - let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml"); - build.run(Command::new(&build.initial_cargo) - .arg("generate-lockfile") - .arg("--manifest-path") - .arg(&toml) - .current_dir(&dir)); - } -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct Bootstrap; - -impl Step for Bootstrap { - type Output = (); - const DEFAULT: bool = true; - const ONLY_HOSTS: bool = true; - const ONLY_BUILD: bool = true; - - /// Test the build system itself - fn run(self, builder: &Builder) { - let build = builder.build; - let mut cmd = Command::new(&build.initial_cargo); - cmd.arg("test") - .current_dir(build.src.join("src/bootstrap")) - .env("CARGO_TARGET_DIR", build.out.join("bootstrap")) - .env("RUSTC_BOOTSTRAP", "1") - .env("RUSTC", &build.initial_rustc); - if !build.fail_fast { - cmd.arg("--no-fail-fast"); - } - cmd.arg("--").args(&build.config.cmd.test_args()); - try_run(build, &mut cmd); - } - - fn should_run(run: ShouldRun) -> ShouldRun { - run.path("src/bootstrap") - } - - fn make_run(run: RunConfig) { - run.builder.ensure(Bootstrap); - } -} diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index c6adfc7ffae47..21bbd82dd333a 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -108,7 +108,8 @@ impl Step for Std { std_cargo(build, &compiler, target, &mut cargo); run_cargo(build, &mut cargo, - &libstd_stamp(build, compiler, target)); + &libstd_stamp(build, compiler, target), + false); builder.ensure(StdLink { compiler: builder.compiler(compiler.stage, build.build), @@ -360,7 +361,8 @@ impl Step for Test { test_cargo(build, &compiler, target, &mut cargo); run_cargo(build, &mut cargo, - &libtest_stamp(build, compiler, target)); + &libtest_stamp(build, compiler, target), + false); builder.ensure(TestLink { compiler: builder.compiler(compiler.stage, build.build), @@ -488,7 +490,8 @@ impl Step for Rustc { rustc_cargo(build, target, &mut cargo); run_cargo(build, &mut cargo, - &librustc_stamp(build, compiler, target)); + &librustc_stamp(build, compiler, target), + false); builder.ensure(RustcLink { compiler: builder.compiler(compiler.stage, build.build), @@ -755,7 +758,7 @@ impl Step for Assemble { /// /// For a particular stage this will link the file listed in `stamp` into the /// `sysroot_dst` provided. 
-fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) { +pub fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) { t!(fs::create_dir_all(&sysroot_dst)); for path in read_stamp_file(stamp) { copy(&path, &sysroot_dst.join(path.file_name().unwrap())); @@ -785,7 +788,7 @@ fn stderr_isatty() -> bool { } } -fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) { +pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: bool) { // Instruct Cargo to give us json messages on stdout, critically leaving // stderr as piped so we can get those pretty colors. cargo.arg("--message-format").arg("json") @@ -836,7 +839,8 @@ fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) { // Skip files like executables if !filename.ends_with(".rlib") && !filename.ends_with(".lib") && - !is_dylib(&filename) { + !is_dylib(&filename) && + !(is_check && filename.ends_with(".rmeta")) { continue } diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 0816c4dfe3d07..478e496078add 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -48,6 +48,9 @@ pub enum Subcommand { Build { paths: Vec<PathBuf>, }, + Check { + paths: Vec<PathBuf>, + }, Doc { paths: Vec<PathBuf>, }, @@ -88,6 +91,7 @@ Usage: x.py <subcommand> [options] [<paths>...] Subcommands: build Compile either the compiler or libraries + check Compile either the compiler or libraries, using cargo check test Build and run some test suites bench Build and run some benchmarks doc Build documentation @@ -128,6 +132,7 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`"); // there on out. let subcommand = args.iter().find(|&s| (s == "build") + || (s == "check") || (s == "test") || (s == "bench") || (s == "doc") @@ -217,6 +222,21 @@ Arguments: arguments would), and then use the compiler built in stage 0 to build src/libtest and its dependencies. Once this is done, build/$ARCH/stage1 contains a usable compiler."); + } + "check" => { + subcommand_help.push_str("\n +Arguments: + This subcommand accepts a number of paths to directories to the crates + and/or artifacts to compile. For example: + + ./x.py check src/libcore + ./x.py check src/libcore src/libproc_macro + + If no arguments are passed then the complete artifacts are compiled: std, test, and rustc. Note + also that since we use `cargo check`, by default this will automatically enable incremental + compilation, so there's no need to pass it separately, though it won't hurt. We also completely + ignore the stage passed, as there's no way to compile in non-stage 0 without actually building + the compiler."); } "test" => { subcommand_help.push_str("\n @@ -286,6 +306,9 @@ Arguments: "build" => { Subcommand::Build { paths: paths } } + "check" => { + Subcommand::Check { paths: paths } + } "test" => { Subcommand::Test { paths, diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 3738828a4baed..a6a5ba6772390 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -150,6 +150,7 @@ use util::{exe, libdir, OutputFolder, CiEnv}; mod cc_detect; mod channel; mod check; +mod test; mod clean; mod compile; mod metadata; @@ -449,12 +450,6 @@ impl Build { out } - /// Get the directory for incremental by-products when using the - /// given compiler. - fn incremental_dir(&self, compiler: Compiler) -> PathBuf { - self.out.join(&*compiler.host).join(format!("stage{}-incremental", compiler.stage)) - } - /// Returns the root directory for all output generated in a particular /// stage when running with a particular host compiler. 
/// @@ -776,7 +771,11 @@ impl Build { fn release(&self, num: &str) -> String { match &self.config.channel[..] { "stable" => num.to_string(), - "beta" => format!("{}-beta.{}", num, self.beta_prerelease_version()), + "beta" => if self.rust_info.is_git() { + format!("{}-beta.{}", num, self.beta_prerelease_version()) + } else { + format!("{}-beta", num) + }, "nightly" => format!("{}-nightly", num), _ => format!("{}-dev", num), } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs new file mode 100644 index 0000000000000..5faec27943847 --- /dev/null +++ b/src/bootstrap/test.rs @@ -0,0 +1,1542 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Implementation of the test-related targets of the build system. +//! +//! This file implements the various regression test suites that we execute on +//! our CI. + +use std::collections::HashSet; +use std::env; +use std::ffi::OsString; +use std::iter; +use std::fmt; +use std::fs::{self, File}; +use std::path::{PathBuf, Path}; +use std::process::Command; +use std::io::Read; + +use build_helper::{self, output}; + +use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step}; +use cache::{INTERNER, Interned}; +use compile; +use dist; +use native; +use tool::{self, Tool}; +use util::{self, dylib_path, dylib_path_var}; +use {Build, Mode}; +use toolstate::ToolState; + +const ADB_TEST_DIR: &str = "/data/tmp/work"; + +/// The two modes of the test runner; tests or benchmarks. +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] +pub enum TestKind { + /// Run `cargo test` + Test, + /// Run `cargo bench` + Bench, +} + +impl TestKind { + // Return the cargo subcommand for this test kind + fn subcommand(self) -> &'static str { + match self { + TestKind::Test => "test", + TestKind::Bench => "bench", + } + } +} + +impl fmt::Display for TestKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match *self { + TestKind::Test => "Testing", + TestKind::Bench => "Benchmarking", + }) + } +} + +fn try_run(build: &Build, cmd: &mut Command) -> bool { + if !build.fail_fast { + if !build.try_run(cmd) { + let mut failures = build.delayed_failures.borrow_mut(); + failures.push(format!("{:?}", cmd)); + return false; + } + } else { + build.run(cmd); + } + true +} + +fn try_run_quiet(build: &Build, cmd: &mut Command) { + if !build.fail_fast { + if !build.try_run_quiet(cmd) { + let mut failures = build.delayed_failures.borrow_mut(); + failures.push(format!("{:?}", cmd)); + } + } else { + build.run_quiet(cmd); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Linkcheck { + host: Interned<String>, +} + +impl Step for Linkcheck { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` will verify the validity of all our links in the + /// documentation to ensure we don't have a bunch of dead ones. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let host = self.host; + + println!("Linkcheck ({})", host); + + builder.default_doc(None); + + let _time = util::timeit(); + try_run(build, builder.tool_cmd(Tool::Linkchecker) + .arg(build.out.join(host).join("doc"))); + } + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/tools/linkchecker").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Linkcheck { host: run.target }); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Cargotest { + stage: u32, + host: Interned<String>, +} + +impl Step for Cargotest { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/cargotest") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Cargotest { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` will check out a few Rust projects and run `cargo + /// test` to ensure that we don't regress the test suites there. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = builder.compiler(self.stage, self.host); + builder.ensure(compile::Rustc { compiler, target: compiler.host }); + + // Note that this is a short, cryptic, and not scoped directory name. This + // is currently to minimize the length of path on Windows where we otherwise + // quickly run into path name limit constraints. + let out_dir = build.out.join("ct"); + t!(fs::create_dir_all(&out_dir)); + + let _time = util::timeit(); + let mut cmd = builder.tool_cmd(Tool::CargoTest); + try_run(build, cmd.arg(&build.initial_cargo) + .arg(&out_dir) + .env("RUSTC", builder.rustc(compiler)) + .env("RUSTDOC", builder.rustdoc(compiler.host))); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Cargo { + stage: u32, + host: Interned<String>, +} + +impl Step for Cargo { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/cargo") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Cargo { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Runs `cargo test` for `cargo` packaged with Rust. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = builder.compiler(self.stage, self.host); + + builder.ensure(tool::Cargo { compiler, target: self.host }); + let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test"); + cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml")); + if !build.fail_fast { + cargo.arg("--no-fail-fast"); + } + + // Don't build tests dynamically, just a pain to work with + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + // Don't run cross-compile tests, we may not have cross-compiled libstd libs + // available. + cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); + + try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler))); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rls { + stage: u32, + host: Interned<String>, +} + +impl Step for Rls { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/rls") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rls { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Runs `cargo test` for the rls. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + builder.ensure(tool::Rls { compiler, target: self.host }); + let mut cargo = tool::prepare_tool_cargo(builder, + compiler, + host, + "test", + "src/tools/rls"); + + // Don't build tests dynamically, just a pain to work with + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + builder.add_rustc_lib_path(compiler, &mut cargo); + + if try_run(build, &mut cargo) { + build.save_toolstate("rls", ToolState::TestPass); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rustfmt { + stage: u32, + host: Interned<String>, +} + +impl Step for Rustfmt { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/rustfmt") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rustfmt { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Runs `cargo test` for rustfmt. + fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + builder.ensure(tool::Rustfmt { compiler, target: self.host }); + let mut cargo = tool::prepare_tool_cargo(builder, + compiler, + host, + "test", + "src/tools/rustfmt"); + + // Don't build tests dynamically, just a pain to work with + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + builder.add_rustc_lib_path(compiler, &mut cargo); + + if try_run(build, &mut cargo) { + build.save_toolstate("rustfmt", ToolState::TestPass); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Miri { + stage: u32, + host: Interned<String>, +} + +impl Step for Miri { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let test_miri = run.builder.build.config.test_miri; + run.path("src/tools/miri").default_condition(test_miri) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Miri { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Runs `cargo test` for miri. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + if let Some(miri) = builder.ensure(tool::Miri { compiler, target: self.host }) { + let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test"); + cargo.arg("--manifest-path").arg(build.src.join("src/tools/miri/Cargo.toml")); + + // Don't build tests dynamically, just a pain to work with + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + // miri tests need to know about the stage sysroot + cargo.env("MIRI_SYSROOT", builder.sysroot(compiler)); + cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); + cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); + cargo.env("MIRI_PATH", miri); + + builder.add_rustc_lib_path(compiler, &mut cargo); + + if try_run(build, &mut cargo) { + build.save_toolstate("miri", ToolState::TestPass); + } + } else { + eprintln!("failed to test miri: could not build"); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Clippy { + stage: u32, + host: Interned<String>, +} + +impl Step for Clippy { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = false; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/clippy") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Clippy { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Runs `cargo test` for clippy. + fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + if let Some(clippy) = builder.ensure(tool::Clippy { compiler, target: self.host }) { + let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test"); + cargo.arg("--manifest-path").arg(build.src.join("src/tools/clippy/Cargo.toml")); + + // Don't build tests dynamically, just a pain to work with + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + // clippy tests need to know about the stage sysroot + cargo.env("SYSROOT", builder.sysroot(compiler)); + cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); + cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); + let host_libs = builder.stage_out(compiler, Mode::Tool).join(builder.cargo_dir()); + cargo.env("HOST_LIBS", host_libs); + // clippy tests need to find the driver + cargo.env("CLIPPY_DRIVER_PATH", clippy); + + builder.add_rustc_lib_path(compiler, &mut cargo); + + if try_run(build, &mut cargo) { + build.save_toolstate("clippy-driver", ToolState::TestPass); + } + } else { + eprintln!("failed to test clippy: could not build"); + } + } +} + +fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString { + // Configure PATH to find the right rustc. NB. we have to use PATH + // and not RUSTC because the Cargo test suite has tests that will + // fail if rustc is not spelled `rustc`. 
+ let path = builder.sysroot(compiler).join("bin"); + let old_path = env::var_os("PATH").unwrap_or_default(); + env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RustdocJS { + pub host: Interned<String>, + pub target: Interned<String>, +} + +impl Step for RustdocJS { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/test/rustdoc-js") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(RustdocJS { + host: run.host, + target: run.target, + }); + } + + fn run(self, builder: &Builder) { + if let Some(ref nodejs) = builder.config.nodejs { + let mut command = Command::new(nodejs); + command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]); + builder.ensure(::doc::Std { + target: self.target, + stage: builder.top_stage, + }); + builder.run(&mut command); + } else { + println!("No nodejs found, skipping \"src/test/rustdoc-js\" tests"); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Tidy { + host: Interned<String>, +} + +impl Step for Tidy { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD: bool = true; + + /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` checks up on various bits and pieces of style and + /// otherwise just implements a few lint-like checks that are specific to the + /// compiler itself. + fn run(self, builder: &Builder) { + let build = builder.build; + let host = self.host; + + let _folder = build.fold_output(|| "tidy"); + println!("tidy check ({})", host); + let mut cmd = builder.tool_cmd(Tool::Tidy); + cmd.arg(build.src.join("src")); + if !build.config.vendor { + cmd.arg("--no-vendor"); + } + if build.config.quiet_tests { + cmd.arg("--quiet"); + } + try_run(build, &mut cmd); + } + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/tidy") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Tidy { + host: run.builder.build.build, + }); + } +} + +fn testdir(build: &Build, host: Interned<String>) -> PathBuf { + build.out.join(host).join("test") +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +struct Test { + path: &'static str, + mode: &'static str, + suite: &'static str, +} + +static DEFAULT_COMPILETESTS: &[Test] = &[ + Test { path: "src/test/ui", mode: "ui", suite: "ui" }, + Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" }, + Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" }, + Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" }, + Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" }, + Test { + path: "src/test/run-pass-valgrind", + mode: "run-pass-valgrind", + suite: "run-pass-valgrind" + }, + Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" }, + Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" }, + Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" }, + Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" }, + + // What this runs varies depending on the native platform being apple + Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" }, +]; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct DefaultCompiletest { + compiler: Compiler, + target: Interned<String>, + 
mode: &'static str, + suite: &'static str, +} + +impl Step for DefaultCompiletest { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(mut run: ShouldRun) -> ShouldRun { + for test in DEFAULT_COMPILETESTS { + run = run.path(test.path); + } + run + } + + fn make_run(run: RunConfig) { + let compiler = run.builder.compiler(run.builder.top_stage, run.host); + + let test = run.path.map(|path| { + DEFAULT_COMPILETESTS.iter().find(|&&test| { + path.ends_with(test.path) + }).unwrap_or_else(|| { + panic!("make_run in compile test to receive test path, received {:?}", path); + }) + }); + + if let Some(test) = test { + run.builder.ensure(DefaultCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite, + }); + } else { + for test in DEFAULT_COMPILETESTS { + run.builder.ensure(DefaultCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite + }); + } + } + } + + fn run(self, builder: &Builder) { + builder.ensure(Compiletest { + compiler: self.compiler, + target: self.target, + mode: self.mode, + suite: self.suite, + }) + } +} + +// Also default, but host-only. +static HOST_COMPILETESTS: &[Test] = &[ + Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" }, + Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" }, + Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" }, + Test { + path: "src/test/compile-fail-fulldeps", + mode: "compile-fail", + suite: "compile-fail-fulldeps", + }, + Test { + path: "src/test/incremental-fulldeps", + mode: "incremental", + suite: "incremental-fulldeps", + }, + Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" }, + Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" }, + + Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" }, + Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" }, + Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" }, + Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" }, + Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" }, + Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" }, +]; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct HostCompiletest { + compiler: Compiler, + target: Interned<String>, + mode: &'static str, + suite: &'static str, +} + +impl Step for HostCompiletest { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(mut run: ShouldRun) -> ShouldRun { + for test in HOST_COMPILETESTS { + run = run.path(test.path); + } + run + } + + fn make_run(run: RunConfig) { + let compiler = run.builder.compiler(run.builder.top_stage, run.host); + + let test = run.path.map(|path| { + HOST_COMPILETESTS.iter().find(|&&test| { + path.ends_with(test.path) + }).unwrap_or_else(|| { + panic!("make_run in compile test to receive test path, received {:?}", path); + }) + }); + + if let Some(test) = test { + run.builder.ensure(HostCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite, + }); + } else { + for test in HOST_COMPILETESTS { + if test.mode == "pretty" { + continue; + } + run.builder.ensure(HostCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite + }); + } + } + } + + fn run(self, builder: &Builder) { + builder.ensure(Compiletest { + compiler: 
self.compiler, + target: self.target, + mode: self.mode, + suite: self.suite, + }) + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +struct Compiletest { + compiler: Compiler, + target: Interned<String>, + mode: &'static str, + suite: &'static str, +} + +impl Step for Compiletest { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + /// Executes the `compiletest` tool to run a suite of tests. + /// + /// Compiles all tests with `compiler` for `target` with the specified + /// compiletest `mode` and `suite` arguments. For example `mode` can be + /// "run-pass" or `suite` can be something like `debuginfo`. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + let mode = self.mode; + let suite = self.suite; + + // Skip codegen tests if they aren't enabled in configuration. + if !build.config.codegen_tests && suite == "codegen" { + return; + } + + if suite == "debuginfo" { + // Skip debuginfo tests on MSVC + if build.build.contains("msvc") { + return; + } + + if mode == "debuginfo-XXX" { + return if build.build.contains("apple") { + builder.ensure(Compiletest { + mode: "debuginfo-lldb", + ..self + }); + } else { + builder.ensure(Compiletest { + mode: "debuginfo-gdb", + ..self + }); + }; + } + + builder.ensure(dist::DebuggerScripts { + sysroot: builder.sysroot(compiler), + host: target + }); + } + + if suite.ends_with("fulldeps") || + // FIXME: Does pretty need librustc compiled? Note that there are + // fulldeps test suites with mode = pretty as well. + mode == "pretty" || + mode == "rustdoc" || + mode == "run-make" { + builder.ensure(compile::Rustc { compiler, target }); + } + + builder.ensure(compile::Test { compiler, target }); + builder.ensure(native::TestHelpers { target }); + builder.ensure(RemoteCopyLibs { compiler, target }); + + let _folder = build.fold_output(|| format!("test_{}", suite)); + println!("Check compiletest suite={} mode={} ({} -> {})", + suite, mode, &compiler.host, target); + let mut cmd = builder.tool_cmd(Tool::Compiletest); + + // compiletest currently has... a lot of arguments, so let's just pass all + // of them! + + cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler)); + cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target)); + cmd.arg("--rustc-path").arg(builder.rustc(compiler)); + + // Avoid depending on rustdoc when we don't need it. 
+ if mode == "rustdoc" || mode == "run-make" { + cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host)); + } + + cmd.arg("--src-base").arg(build.src.join("src/test").join(suite)); + cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite)); + cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target)); + cmd.arg("--mode").arg(mode); + cmd.arg("--target").arg(target); + cmd.arg("--host").arg(&*compiler.host); + cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build)); + + if let Some(ref nodejs) = build.config.nodejs { + cmd.arg("--nodejs").arg(nodejs); + } + + let mut flags = vec!["-Crpath".to_string()]; + if build.config.rust_optimize_tests { + flags.push("-O".to_string()); + } + if build.config.rust_debuginfo_tests { + flags.push("-g".to_string()); + } + flags.push("-Zmiri -Zunstable-options".to_string()); + flags.push(build.config.cmd.rustc_args().join(" ")); + + if let Some(linker) = build.linker(target) { + cmd.arg("--linker").arg(linker); + } + + let hostflags = flags.clone(); + cmd.arg("--host-rustcflags").arg(hostflags.join(" ")); + + let mut targetflags = flags.clone(); + targetflags.push(format!("-Lnative={}", + build.test_helpers_out(target).display())); + cmd.arg("--target-rustcflags").arg(targetflags.join(" ")); + + cmd.arg("--docck-python").arg(build.python()); + + if build.build.ends_with("apple-darwin") { + // Force /usr/bin/python on macOS for LLDB tests because we're loading the + // LLDB plugin's compiled module which only works with the system python + // (namely not Homebrew-installed python) + cmd.arg("--lldb-python").arg("/usr/bin/python"); + } else { + cmd.arg("--lldb-python").arg(build.python()); + } + + if let Some(ref gdb) = build.config.gdb { + cmd.arg("--gdb").arg(gdb); + } + if let Some(ref vers) = build.lldb_version { + cmd.arg("--lldb-version").arg(vers); + } + if let Some(ref dir) = build.lldb_python_dir { + cmd.arg("--lldb-python-dir").arg(dir); + } + + cmd.args(&build.config.cmd.test_args()); + + if build.is_verbose() { + cmd.arg("--verbose"); + } + + if build.config.quiet_tests { + cmd.arg("--quiet"); + } + + if build.config.llvm_enabled { + let llvm_config = build.llvm_config(target); + let llvm_version = output(Command::new(&llvm_config).arg("--version")); + cmd.arg("--llvm-version").arg(llvm_version); + if !build.is_rust_llvm(target) { + cmd.arg("--system-llvm"); + } + + // Only pass correct values for these flags for the `run-make` suite as it + // requires that a C++ compiler was configured which isn't always the case. 
+ if suite == "run-make" { + let llvm_components = output(Command::new(&llvm_config).arg("--components")); + let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags")); + cmd.arg("--cc").arg(build.cc(target)) + .arg("--cxx").arg(build.cxx(target).unwrap()) + .arg("--cflags").arg(build.cflags(target).join(" ")) + .arg("--llvm-components").arg(llvm_components.trim()) + .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim()); + if let Some(ar) = build.ar(target) { + cmd.arg("--ar").arg(ar); + } + } + } + if suite == "run-make" && !build.config.llvm_enabled { + println!("Ignoring run-make test suite as they generally dont work without LLVM"); + return; + } + + if suite != "run-make" { + cmd.arg("--cc").arg("") + .arg("--cxx").arg("") + .arg("--cflags").arg("") + .arg("--llvm-components").arg("") + .arg("--llvm-cxxflags").arg(""); + } + + if build.remote_tested(target) { + cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient)); + } + + // Running a C compiler on MSVC requires a few env vars to be set, to be + // sure to set them here. + // + // Note that if we encounter `PATH` we make sure to append to our own `PATH` + // rather than stomp over it. + if target.contains("msvc") { + for &(ref k, ref v) in build.cc[&target].env() { + if k != "PATH" { + cmd.env(k, v); + } + } + } + cmd.env("RUSTC_BOOTSTRAP", "1"); + build.add_rust_test_threads(&mut cmd); + + if build.config.sanitizers { + cmd.env("SANITIZER_SUPPORT", "1"); + } + + if build.config.profiler { + cmd.env("PROFILER_SUPPORT", "1"); + } + + cmd.arg("--adb-path").arg("adb"); + cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); + if target.contains("android") { + // Assume that cc for this target comes from the android sysroot + cmd.arg("--android-cross-path") + .arg(build.cc(target).parent().unwrap().parent().unwrap()); + } else { + cmd.arg("--android-cross-path").arg(""); + } + + build.ci_env.force_coloring_in_ci(&mut cmd); + + let _time = util::timeit(); + try_run(build, &mut cmd); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Docs { + compiler: Compiler, +} + +impl Step for Docs { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/doc") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Docs { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + }); + } + + /// Run `rustdoc --test` for all documentation in `src/doc`. + /// + /// This will run all tests in our markdown documentation (e.g. the book) + /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to + /// `compiler`. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + + builder.ensure(compile::Test { compiler, target: compiler.host }); + + // Do a breadth-first traversal of the `src/doc` directory and just run + // tests for all files that end in `*.md` + let mut stack = vec![build.src.join("src/doc")]; + let _time = util::timeit(); + let _folder = build.fold_output(|| "test_docs"); + + while let Some(p) = stack.pop() { + if p.is_dir() { + stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); + continue + } + + if p.extension().and_then(|s| s.to_str()) != Some("md") { + continue; + } + + // The nostarch directory in the book is for no starch, and so isn't + // guaranteed to build. We don't care if it doesn't build, so skip it. 
+ if p.to_str().map_or(false, |p| p.contains("nostarch")) { + continue; + } + + markdown_test(builder, compiler, &p); + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ErrorIndex { + compiler: Compiler, +} + +impl Step for ErrorIndex { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/error_index_generator") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(ErrorIndex { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + }); + } + + /// Run the error index generator tool to execute the tests located in the error + /// index. + /// + /// The `error_index_generator` tool lives in `src/tools` and is used to + /// generate a markdown file from the error indexes of the code base which is + /// then passed to `rustdoc --test`. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + + builder.ensure(compile::Std { compiler, target: compiler.host }); + + let _folder = build.fold_output(|| "test_error_index"); + println!("Testing error-index stage{}", compiler.stage); + + let dir = testdir(build, compiler.host); + t!(fs::create_dir_all(&dir)); + let output = dir.join("error-index.md"); + + let _time = util::timeit(); + build.run(builder.tool_cmd(Tool::ErrorIndex) + .arg("markdown") + .arg(&output) + .env("CFG_BUILD", &build.build) + .env("RUSTC_ERROR_METADATA_DST", build.extended_error_dir())); + + markdown_test(builder, compiler, &output); + } +} + +fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) { + let build = builder.build; + let mut file = t!(File::open(markdown)); + let mut contents = String::new(); + t!(file.read_to_string(&mut contents)); + if !contents.contains("```") { + return; + } + + println!("doc tests for: {}", markdown.display()); + let mut cmd = builder.rustdoc_cmd(compiler.host); + build.add_rust_test_threads(&mut cmd); + cmd.arg("--test"); + cmd.arg(markdown); + cmd.env("RUSTC_BOOTSTRAP", "1"); + + let test_args = build.config.cmd.test_args().join(" "); + cmd.arg("--test-args").arg(test_args); + + if build.config.quiet_tests { + try_run_quiet(build, &mut cmd); + } else { + try_run(build, &mut cmd); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CrateLibrustc { + compiler: Compiler, + target: Interned<String>, + test_kind: TestKind, + krate: Option<Interned<String>>, +} + +impl Step for CrateLibrustc { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.krate("rustc-main") + } + + fn make_run(run: RunConfig) { + let builder = run.builder; + let compiler = builder.compiler(builder.top_stage, run.host); + + let make = |name: Option<Interned<String>>| { + let test_kind = if builder.kind == Kind::Test { + TestKind::Test + } else if builder.kind == Kind::Bench { + TestKind::Bench + } else { + panic!("unexpected builder.kind in crate: {:?}", builder.kind); + }; + + builder.ensure(CrateLibrustc { + compiler, + target: run.target, + test_kind, + krate: name, + }); + }; + + if let Some(path) = run.path { + for (name, krate_path) in builder.crates("rustc-main") { + if path.ends_with(krate_path) { + make(Some(name)); + } + } + } else { + make(None); + } + } + + + fn run(self, builder: &Builder) { + builder.ensure(Crate { + compiler: self.compiler, + target: self.target, + mode: Mode::Librustc, + test_kind: self.test_kind, + krate: self.krate, + }); + } +} + 
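
[Note for reviewers: every suite moved into the new src/bootstrap/test.rs, above and below this point, follows the same `Step` pattern: a small value type identifies the work, `should_run` declares the paths that select it, `make_run` builds the step from the command line, and `run` does the work after calling `builder.ensure(...)` on its dependencies, which deduplicates repeated requests. The following standalone sketch is illustrative only -- `CrateTest` and this cut-down `Builder` are not the bootstrap API, which also has `should_run`/`make_run`, the `ONLY_HOSTS` knobs, and a proper step cache -- but it shows the memoization idea behind `ensure`:

    use std::collections::HashSet;

    // A step is a plain value; equal values mean "the same work".
    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
    struct CrateTest {
        krate: String,
        stage: u32,
    }

    struct Builder {
        done: HashSet<CrateTest>,
    }

    impl Builder {
        // Run the step unless an identical one has already run. This is why
        // steps can freely `ensure` their dependencies without redoing work.
        fn ensure(&mut self, step: CrateTest) {
            if self.done.insert(step.clone()) {
                println!("cargo test -p {} (stage{})", step.krate, step.stage);
            }
        }
    }

    fn main() {
        let mut b = Builder { done: HashSet::new() };
        b.ensure(CrateTest { krate: "std".into(), stage: 1 });
        b.ensure(CrateTest { krate: "std".into(), stage: 1 }); // no-op: already done
    }
]
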
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Crate { + compiler: Compiler, + target: Interned<String>, + mode: Mode, + test_kind: TestKind, + krate: Option<Interned<String>>, +} + +impl Step for Crate { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.krate("std").krate("test") + } + + fn make_run(run: RunConfig) { + let builder = run.builder; + let compiler = builder.compiler(builder.top_stage, run.host); + + let make = |mode: Mode, name: Option<Interned<String>>| { + let test_kind = if builder.kind == Kind::Test { + TestKind::Test + } else if builder.kind == Kind::Bench { + TestKind::Bench + } else { + panic!("unexpected builder.kind in crate: {:?}", builder.kind); + }; + + builder.ensure(Crate { + compiler, + target: run.target, + mode, + test_kind, + krate: name, + }); + }; + + if let Some(path) = run.path { + for (name, krate_path) in builder.crates("std") { + if path.ends_with(krate_path) { + make(Mode::Libstd, Some(name)); + } + } + for (name, krate_path) in builder.crates("test") { + if path.ends_with(krate_path) { + make(Mode::Libtest, Some(name)); + } + } + } else { + make(Mode::Libstd, None); + make(Mode::Libtest, None); + } + } + + /// Run all unit tests plus documentation tests for an entire crate DAG defined + /// by a `Cargo.toml` + /// + /// This is what runs tests for crates like the standard library, compiler, etc. + /// It essentially is the driver for running `cargo test`. + /// + /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` + /// arguments, and those arguments are discovered from `cargo metadata`. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + let mode = self.mode; + let test_kind = self.test_kind; + let krate = self.krate; + + builder.ensure(compile::Test { compiler, target }); + builder.ensure(RemoteCopyLibs { compiler, target }); + + // If we're not doing a full bootstrap but we're testing a stage2 version of + // libstd, then what we're actually testing is the libstd produced in + // stage1. Reflect that here by updating the compiler that we're working + // with automatically. + let compiler = if build.force_use_stage1(compiler, target) { + builder.compiler(1, compiler.host) + } else { + compiler.clone() + }; + + let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand()); + let (name, root) = match mode { + Mode::Libstd => { + compile::std_cargo(build, &compiler, target, &mut cargo); + ("libstd", "std") + } + Mode::Libtest => { + compile::test_cargo(build, &compiler, target, &mut cargo); + ("libtest", "test") + } + Mode::Librustc => { + builder.ensure(compile::Rustc { compiler, target }); + compile::rustc_cargo(build, target, &mut cargo); + ("librustc", "rustc-main") + } + _ => panic!("can only test libraries"), + }; + let root = INTERNER.intern_string(String::from(root)); + let _folder = build.fold_output(|| { + format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name) + }); + println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage, + &compiler.host, target); + + // Build up the base `cargo test` command. + // + // Pass in some standard flags then iterate over the graph we've discovered + // in `cargo metadata` with the maps above and figure out what `-p` + // arguments need to get passed. 
+ if test_kind.subcommand() == "test" && !build.fail_fast { + cargo.arg("--no-fail-fast"); + } + + match krate { + Some(krate) => { + cargo.arg("-p").arg(krate); + } + None => { + let mut visited = HashSet::new(); + let mut next = vec![root]; + while let Some(name) = next.pop() { + // Right now jemalloc and the sanitizer crates are + // target-specific crate in the sense that it's not present + // on all platforms. Custom skip it here for now, but if we + // add more this probably wants to get more generalized. + // + // Also skip `build_helper` as it's not compiled normally + // for target during the bootstrap and it's just meant to be + // a helper crate, not tested. If it leaks through then it + // ends up messing with various mtime calculations and such. + if !name.contains("jemalloc") && + *name != *"build_helper" && + !(name.starts_with("rustc_") && name.ends_with("san")) && + name != "dlmalloc" { + cargo.arg("-p").arg(&format!("{}:0.0.0", name)); + } + for dep in build.crates[&name].deps.iter() { + if visited.insert(dep) { + next.push(*dep); + } + } + } + } + } + + // The tests are going to run with the *target* libraries, so we need to + // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. + // + // Note that to run the compiler we need to run with the *host* libraries, + // but our wrapper scripts arrange for that to be the case anyway. + let mut dylib_path = dylib_path(); + dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target))); + cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + + cargo.arg("--"); + cargo.args(&build.config.cmd.test_args()); + + if build.config.quiet_tests { + cargo.arg("--quiet"); + } + + let _time = util::timeit(); + + if target.contains("emscripten") { + cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), + build.config.nodejs.as_ref().expect("nodejs not configured")); + } else if target.starts_with("wasm32") { + // On the wasm32-unknown-unknown target we're using LTO which is + // incompatible with `-C prefer-dynamic`, so disable that here + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + let node = build.config.nodejs.as_ref() + .expect("nodejs not configured"); + let runner = format!("{} {}/src/etc/wasm32-shim.js", + node.display(), + build.src.display()); + cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner); + } else if build.remote_tested(target) { + cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), + format!("{} run", + builder.tool_exe(Tool::RemoteTestClient).display())); + } + try_run(build, &mut cargo); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rustdoc { + host: Interned<String>, + test_kind: TestKind, +} + +impl Step for Rustdoc { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/librustdoc").path("src/tools/rustdoc") + } + + fn make_run(run: RunConfig) { + let builder = run.builder; + + let test_kind = if builder.kind == Kind::Test { + TestKind::Test + } else if builder.kind == Kind::Bench { + TestKind::Bench + } else { + panic!("unexpected builder.kind in crate: {:?}", builder.kind); + }; + + builder.ensure(Rustdoc { + host: run.host, + test_kind, + }); + } + + fn run(self, builder: &Builder) { + let build = builder.build; + let test_kind = self.test_kind; + + let compiler = builder.compiler(builder.top_stage, self.host); + let target = compiler.host; + + let mut cargo = tool::prepare_tool_cargo(builder, + compiler, 
+ target, + test_kind.subcommand(), + "src/tools/rustdoc"); + let _folder = build.fold_output(|| { + format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage) + }); + println!("{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage, + &compiler.host, target); + + if test_kind.subcommand() == "test" && !build.fail_fast { + cargo.arg("--no-fail-fast"); + } + + cargo.arg("-p").arg("rustdoc:0.0.0"); + + cargo.arg("--"); + cargo.args(&build.config.cmd.test_args()); + + if build.config.quiet_tests { + cargo.arg("--quiet"); + } + + let _time = util::timeit(); + + try_run(build, &mut cargo); + } +} + +fn envify(s: &str) -> String { + s.chars().map(|c| { + match c { + '-' => '_', + c => c, + } + }).flat_map(|c| c.to_uppercase()).collect() +} + +/// Some test suites are run inside emulators or on remote devices, and most +/// of our test binaries are linked dynamically which means we need to ship +/// the standard library and such to the emulator ahead of time. This step +/// represents this and is a dependency of all test suites. +/// +/// Most of the time this is a noop. For some steps such as shipping data to +/// QEMU we have to build our own tools so we've got conditional dependencies +/// on those programs as well. Note that the remote test client is built for +/// the build target (us) and the server is built for the target. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RemoteCopyLibs { + compiler: Compiler, + target: Interned<String>, +} + +impl Step for RemoteCopyLibs { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + if !build.remote_tested(target) { + return + } + + builder.ensure(compile::Test { compiler, target }); + + println!("REMOTE copy libs to emulator ({})", target); + t!(fs::create_dir_all(build.out.join("tmp"))); + + let server = builder.ensure(tool::RemoteTestServer { compiler, target }); + + // Spawn the emulator and wait for it to come online + let tool = builder.tool_exe(Tool::RemoteTestClient); + let mut cmd = Command::new(&tool); + cmd.arg("spawn-emulator") + .arg(target) + .arg(&server) + .arg(build.out.join("tmp")); + if let Some(rootfs) = build.qemu_rootfs(target) { + cmd.arg(rootfs); + } + build.run(&mut cmd); + + // Push all our dylibs to the emulator + for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) { + let f = t!(f); + let name = f.file_name().into_string().unwrap(); + if util::is_dylib(&name) { + build.run(Command::new(&tool) + .arg("push") + .arg(f.path())); + } + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Distcheck; + +impl Step for Distcheck { + type Output = (); + const ONLY_BUILD: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("distcheck") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Distcheck); + } + + /// Run "distcheck", a 'make check' from a tarball + fn run(self, builder: &Builder) { + let build = builder.build; + + println!("Distcheck"); + let dir = build.out.join("tmp").join("distcheck"); + let _ = fs::remove_dir_all(&dir); + t!(fs::create_dir_all(&dir)); + + // Guarantee that these are built before we begin running. 
+ builder.ensure(dist::PlainSourceTarball); + builder.ensure(dist::Src); + + let mut cmd = Command::new("tar"); + cmd.arg("-xzf") + .arg(builder.ensure(dist::PlainSourceTarball)) + .arg("--strip-components=1") + .current_dir(&dir); + build.run(&mut cmd); + build.run(Command::new("./configure") + .args(&build.config.configure_args) + .arg("--enable-vendor") + .current_dir(&dir)); + build.run(Command::new(build_helper::make(&build.build)) + .arg("check") + .current_dir(&dir)); + + // Now make sure that rust-src has all of libstd's dependencies + println!("Distcheck rust-src"); + let dir = build.out.join("tmp").join("distcheck-src"); + let _ = fs::remove_dir_all(&dir); + t!(fs::create_dir_all(&dir)); + + let mut cmd = Command::new("tar"); + cmd.arg("-xzf") + .arg(builder.ensure(dist::Src)) + .arg("--strip-components=1") + .current_dir(&dir); + build.run(&mut cmd); + + let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml"); + build.run(Command::new(&build.initial_cargo) + .arg("generate-lockfile") + .arg("--manifest-path") + .arg(&toml) + .current_dir(&dir)); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Bootstrap; + +impl Step for Bootstrap { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD: bool = true; + + /// Test the build system itself + fn run(self, builder: &Builder) { + let build = builder.build; + let mut cmd = Command::new(&build.initial_cargo); + cmd.arg("test") + .current_dir(build.src.join("src/bootstrap")) + .env("CARGO_TARGET_DIR", build.out.join("bootstrap")) + .env("RUSTC_BOOTSTRAP", "1") + .env("RUSTC", &build.initial_rustc); + if !build.fail_fast { + cmd.arg("--no-fail-fast"); + } + cmd.arg("--").args(&build.config.cmd.test_args()); + try_run(build, &mut cmd); + } + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/bootstrap") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Bootstrap); + } +} diff --git a/src/ci/docker/dist-x86_64-freebsd/Dockerfile b/src/ci/docker/dist-x86_64-freebsd/Dockerfile index 7483d395622c8..f9f5b7062f8a4 100644 --- a/src/ci/docker/dist-x86_64-freebsd/Dockerfile +++ b/src/ci/docker/dist-x86_64-freebsd/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:16.04 +FROM ubuntu:18.04 RUN apt-get update && apt-get install -y --no-install-recommends \ clang \ diff --git a/src/libcompiler_builtins b/src/libcompiler_builtins index 0ba07e49264a5..0a95675bab808 160000 --- a/src/libcompiler_builtins +++ b/src/libcompiler_builtins @@ -1 +1 @@ -Subproject commit 0ba07e49264a54cb5bbd4856fcea083bb3fbec15 +Subproject commit 0a95675bab808c49f86208bacc89c5d9c53ac43f diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 14f54fbffac2e..1de9091b5df7d 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -638,6 +638,7 @@ define_dep_nodes!( <'tcx> [input] TargetFeaturesWhitelist, [] TargetFeaturesEnabled(DefId), + [] InstanceDefSizeEstimate { instance_def: InstanceDef<'tcx> }, ); trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug { diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index ce35e6552ca83..97cf9b01410b1 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -43,7 +43,6 @@ use syntax::abi::Abi; use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute}; -use syntax::codemap::Spanned; use syntax_pos::Span; use hir::*; use hir::def::Def; @@ -336,6 +335,9 @@ pub trait Visitor<'v> : Sized { fn visit_variant(&mut self, 
v: &'v Variant, g: &'v Generics, item_id: NodeId) { walk_variant(self, v, g, item_id) } + fn visit_label(&mut self, label: &'v Label) { + walk_label(self, label) + } fn visit_lifetime(&mut self, lifetime: &'v Lifetime) { walk_lifetime(self, lifetime) } @@ -370,18 +372,6 @@ pub trait Visitor<'v> : Sized { } } -pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: Option<Name>) { - if let Some(name) = opt_name { - visitor.visit_name(span, name); - } -} - -pub fn walk_opt_sp_name<'v, V: Visitor<'v>>(visitor: &mut V, opt_sp_name: &Option<Spanned<Name>>) { - if let Some(ref sp_name) = *opt_sp_name { - visitor.visit_name(sp_name.span, sp_name.node); - } -} - /// Walks the contents of a crate. See also `Crate::visit_all_items`. pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) { visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID); @@ -420,6 +410,10 @@ pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) { walk_list!(visitor, visit_ty, &local.ty); } +pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) { + visitor.visit_name(label.span, label.name); +} + pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) { visitor.visit_id(lifetime.id); match lifetime.name { @@ -452,7 +446,9 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { match item.node { ItemExternCrate(opt_name) => { visitor.visit_id(item.id); - walk_opt_name(visitor, item.span, opt_name) + if let Some(name) = opt_name { + visitor.visit_name(item.span, name); + } } ItemUse(ref path, _) => { visitor.visit_id(item.id); @@ -993,14 +989,14 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(if_block); walk_list!(visitor, visit_expr, optional_else); } - ExprWhile(ref subexpression, ref block, ref opt_sp_name) => { + ExprWhile(ref subexpression, ref block, ref opt_label) => { + walk_list!(visitor, visit_label, opt_label); visitor.visit_expr(subexpression); visitor.visit_block(block); - walk_opt_sp_name(visitor, opt_sp_name); } - ExprLoop(ref block, ref opt_sp_name, _) => { + ExprLoop(ref block, ref opt_label, _) => { + walk_list!(visitor, visit_label, opt_label); visitor.visit_block(block); - walk_opt_sp_name(visitor, opt_sp_name); } ExprMatch(ref subexpression, ref arms, _) => { visitor.visit_expr(subexpression); @@ -1036,28 +1032,28 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { ExprPath(ref qpath) => { visitor.visit_qpath(qpath, expression.id, expression.span); } - ExprBreak(label, ref opt_expr) => { - label.ident.map(|ident| { - match label.target_id { + ExprBreak(ref destination, ref opt_expr) => { + if let Some(ref label) = destination.label { + visitor.visit_label(label); + match destination.target_id { ScopeTarget::Block(node_id) | ScopeTarget::Loop(LoopIdResult::Ok(node_id)) => visitor.visit_def_mention(Def::Label(node_id)), ScopeTarget::Loop(LoopIdResult::Err(_)) => {}, }; - visitor.visit_name(ident.span, ident.node.name); - }); + } walk_list!(visitor, visit_expr, opt_expr); } - ExprAgain(label) => { - label.ident.map(|ident| { - match label.target_id { + ExprAgain(ref destination) => { + if let Some(ref label) = destination.label { + visitor.visit_label(label); + match destination.target_id { ScopeTarget::Block(_) => bug!("can't `continue` to a non-loop block"), ScopeTarget::Loop(LoopIdResult::Ok(node_id)) => visitor.visit_def_mention(Def::Label(node_id)), ScopeTarget::Loop(LoopIdResult::Err(_)) => {}, }; - 
visitor.visit_name(ident.span, ident.node.name); - }); + } } ExprRet(ref optional_expression) => { walk_list!(visitor, visit_expr, optional_expression); diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index e76f39a8fb100..f2f420c91dd1a 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -773,22 +773,22 @@ impl<'a> LoweringContext<'a> { *self.name_map.entry(ident).or_insert_with(|| Symbol::from_ident(ident)) } - fn lower_opt_sp_ident(&mut self, o_id: Option<Spanned<Ident>>) -> Option<Spanned<Name>> { - o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name)) + fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> { + label.map(|label| hir::Label { name: label.ident.name, span: label.span }) } - fn lower_loop_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>) + fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination { match destination { - Some((id, label_ident)) => { + Some((id, label)) => { let target = if let Def::Label(loop_id) = self.expect_full_def(id) { hir::LoopIdResult::Ok(self.lower_node_id(loop_id).node_id) } else { hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel) }; hir::Destination { - ident: Some(label_ident), + label: self.lower_label(Some(label)), target_id: hir::ScopeTarget::Loop(target), } }, @@ -798,7 +798,7 @@ impl<'a> LoweringContext<'a> { .map(|innermost_loop_id| *innermost_loop_id); hir::Destination { - ident: None, + label: None, target_id: hir::ScopeTarget::Loop( loop_id.map(|id| Ok(self.lower_node_id(id).node_id)) .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)) @@ -2751,17 +2751,17 @@ impl<'a> LoweringContext<'a> { hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt) } - ExprKind::While(ref cond, ref body, opt_ident) => { + ExprKind::While(ref cond, ref body, opt_label) => { self.with_loop_scope(e.id, |this| hir::ExprWhile( this.with_loop_condition_scope(|this| P(this.lower_expr(cond))), this.lower_block(body, false), - this.lower_opt_sp_ident(opt_ident))) + this.lower_label(opt_label))) } - ExprKind::Loop(ref body, opt_ident) => { + ExprKind::Loop(ref body, opt_label) => { self.with_loop_scope(e.id, |this| hir::ExprLoop(this.lower_block(body, false), - this.lower_opt_sp_ident(opt_ident), + this.lower_label(opt_label), hir::LoopSource::Loop)) } ExprKind::Catch(ref body) => { @@ -2837,8 +2837,8 @@ impl<'a> LoweringContext<'a> { (&None, &Some(..), Closed) => "RangeToInclusive", (&Some(..), &Some(..), Closed) => "RangeInclusive", (_, &None, Closed) => - panic!(self.diagnostic().span_fatal( - e.span, "inclusive range with no end")), + self.diagnostic().span_fatal( + e.span, "inclusive range with no end").raise(), }; let fields = @@ -2877,30 +2877,30 @@ impl<'a> LoweringContext<'a> { hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional, ImplTraitContext::Disallowed)) } - ExprKind::Break(opt_ident, ref opt_expr) => { - let label_result = if self.is_in_loop_condition && opt_ident.is_none() { + ExprKind::Break(opt_label, ref opt_expr) => { + let destination = if self.is_in_loop_condition && opt_label.is_none() { hir::Destination { - ident: opt_ident, + label: None, target_id: hir::ScopeTarget::Loop( Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()), } } else { - self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident))) + self.lower_loop_destination(opt_label.map(|label| (e.id, label))) }; hir::ExprBreak( - label_result, + destination, opt_expr.as_ref().map(|x| P(self.lower_expr(x)))) } - 
ExprKind::Continue(opt_ident) => + ExprKind::Continue(opt_label) => hir::ExprAgain( - if self.is_in_loop_condition && opt_ident.is_none() { + if self.is_in_loop_condition && opt_label.is_none() { hir::Destination { - ident: opt_ident, + label: None, target_id: hir::ScopeTarget::Loop(Err( hir::LoopIdError::UnlabeledCfInWhileCondition).into()), } } else { - self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident))) + self.lower_loop_destination(opt_label.map(|label| (e.id, label))) }), ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))), ExprKind::InlineAsm(ref asm) => { @@ -3000,7 +3000,7 @@ impl<'a> LoweringContext<'a> { // Desugar ExprWhileLet // From: `[opt_ident]: while let <pat> = <sub_expr> <body>` - ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => { + ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_label) => { // to: // // [opt_ident]: loop { @@ -3041,7 +3041,7 @@ impl<'a> LoweringContext<'a> { // `[opt_ident]: loop { ... }` let loop_block = P(self.block_expr(P(match_expr))); - let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident), + let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label), hir::LoopSource::WhileLet); // add attributes to the outer returned expr node loop_expr @@ -3049,7 +3049,7 @@ impl<'a> LoweringContext<'a> { // Desugar ExprForLoop // From: `[opt_ident]: for <pat> in <head> <body>` - ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => { + ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => { // to: // // { @@ -3150,7 +3150,7 @@ impl<'a> LoweringContext<'a> { None)); // `[opt_ident]: loop { ... }` - let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident), + let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label), hir::LoopSource::ForLoop); let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id); let loop_expr = P(hir::Expr { @@ -3270,7 +3270,7 @@ impl<'a> LoweringContext<'a> { e.span, hir::ExprBreak( hir::Destination { - ident: None, + label: None, target_id: hir::ScopeTarget::Block(catch_node), }, Some(from_err_expr) diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 8921fecf1b886..2854b9da1476f 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -34,7 +34,7 @@ use util::nodemap::{NodeMap, FxHashSet}; use syntax_pos::{Span, DUMMY_SP}; use syntax::codemap::{self, Spanned}; use syntax::abi::Abi; -use syntax::ast::{self, Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect}; +use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; use syntax::ext::hygiene::SyntaxContext; use syntax::ptr::P; @@ -172,6 +172,18 @@ pub const DUMMY_HIR_ID: HirId = HirId { pub const DUMMY_ITEM_LOCAL_ID: ItemLocalId = ItemLocalId(!0); +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] +pub struct Label { + pub name: Name, + pub span: Span, +} + +impl fmt::Debug for Label { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "label({:?})", self.name) + } +} + #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, @@ -1276,11 +1288,11 @@ pub enum Expr_ { /// A while loop, with an optional label /// /// `'label: while expr { block }` - ExprWhile(P<Expr>, P<Block>, Option<Spanned<Name>>), + ExprWhile(P<Expr>, P<Block>, Option<Label>), /// Conditionless loop (can be exited with break, continue, or return) /// /// `'label: 
loop { block }` - ExprLoop(P<Block>, Option<Spanned<Name>>, LoopSource), + ExprLoop(P<Block>, Option<Label>, LoopSource), /// A `match` block, with a source that indicates whether or not it is /// the result of a desugaring, and if so, which kind. ExprMatch(P<Expr>, HirVec<Arm>, MatchSource), @@ -1459,7 +1471,7 @@ impl ScopeTarget { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct Destination { // This is `Some(_)` iff there is an explicit user-specified `label - pub ident: Option<Spanned<Ident>>, + pub label: Option<Label>, // These errors are caught and then reported during the diagnostics pass in // librustc_passes/loops.rs diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 4cfa7a470a4fa..30c1ad01d1401 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -1337,9 +1337,9 @@ impl<'a> State<'a> { hir::ExprIf(ref test, ref blk, ref elseopt) => { self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?; } - hir::ExprWhile(ref test, ref blk, opt_sp_name) => { - if let Some(sp_name) = opt_sp_name { - self.print_name(sp_name.node)?; + hir::ExprWhile(ref test, ref blk, opt_label) => { + if let Some(label) = opt_label { + self.print_name(label.name)?; self.word_space(":")?; } self.head("while")?; @@ -1347,9 +1347,9 @@ impl<'a> State<'a> { self.s.space()?; self.print_block(&blk)?; } - hir::ExprLoop(ref blk, opt_sp_name, _) => { - if let Some(sp_name) = opt_sp_name { - self.print_name(sp_name.node)?; + hir::ExprLoop(ref blk, opt_label, _) => { + if let Some(label) = opt_label { + self.print_name(label.name)?; self.word_space(":")?; } self.head("loop")?; @@ -1424,11 +1424,11 @@ impl<'a> State<'a> { hir::ExprPath(ref qpath) => { self.print_qpath(qpath, true)? } - hir::ExprBreak(label, ref opt_expr) => { + hir::ExprBreak(destination, ref opt_expr) => { self.s.word("break")?; self.s.space()?; - if let Some(label_ident) = label.ident { - self.print_name(label_ident.node.name)?; + if let Some(label) = destination.label { + self.print_name(label.name)?; self.s.space()?; } if let Some(ref expr) = *opt_expr { @@ -1436,11 +1436,11 @@ impl<'a> State<'a> { self.s.space()?; } } - hir::ExprAgain(label) => { + hir::ExprAgain(destination) => { self.s.word("continue")?; self.s.space()?; - if let Some(label_ident) = label.ident { - self.print_name(label_ident.node.name)?; + if let Some(label) = destination.label { + self.print_name(label.name)?; self.s.space()? } } diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index 59d9db48bdc63..7dca96f94e655 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -148,6 +148,11 @@ impl_stable_hash_for!(enum hir::LifetimeName { Name(name) }); +impl_stable_hash_for!(struct hir::Label { + span, + name +}); + impl_stable_hash_for!(struct hir::Lifetime { id, span, @@ -619,7 +624,7 @@ impl_stable_hash_for!(enum hir::CaptureClause { impl_stable_hash_for_spanned!(usize); impl_stable_hash_for!(struct hir::Destination { - ident, + label, target_id }); diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 72740dd40be29..68d81a2dee352 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -82,7 +82,7 @@ pub type VarOrigins = IndexVec<RegionVid, RegionVariableOrigin>; /// Describes constraints between the region variables and other /// regions, as well as other conditions that must be verified, or /// assumptions that can be made. 
-#[derive(Default)] +#[derive(Debug, Default)] pub struct RegionConstraintData<'tcx> { /// Constraints of the form `A <= B`, where either `A` or `B` can /// be a region variable (or neither, as it happens). diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 935dfd75dd8b7..944d770516375 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -1018,7 +1018,7 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body) { fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> { match ex.node { hir::ExprWhile(.., Some(label)) | hir::ExprLoop(_, Some(label), _) => { - Some((label.node, label.span)) + Some((label.name, label.span)) } _ => None, } diff --git a/src/librustc/mir/mono.rs b/src/librustc/mir/mono.rs index efdf4066815f4..49e5c0dc21f9e 100644 --- a/src/librustc/mir/mono.rs +++ b/src/librustc/mir/mono.rs @@ -10,7 +10,7 @@ use syntax::ast::NodeId; use syntax::symbol::InternedString; -use ty::Instance; +use ty::{Instance, TyCtxt}; use util::nodemap::FxHashMap; use rustc_data_structures::base_n; use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult, @@ -25,6 +25,21 @@ pub enum MonoItem<'tcx> { GlobalAsm(NodeId), } +impl<'tcx> MonoItem<'tcx> { + pub fn size_estimate<'a>(&self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) -> usize { + match *self { + MonoItem::Fn(instance) => { + // Estimate the size of a function based on how many statements + // it contains. + tcx.instance_def_size_estimate(instance.def) + }, + // Conservatively estimate the size of a static declaration + // or assembly to be 1. + MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1, + } + } +} + impl<'tcx> HashStable<StableHashingContext<'tcx>> for MonoItem<'tcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'tcx>, @@ -52,6 +67,7 @@ pub struct CodegenUnit<'tcx> { /// as well as the crate name and disambiguator. name: InternedString, items: FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)>, + size_estimate: Option<usize>, } #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -101,6 +117,7 @@ impl<'tcx> CodegenUnit<'tcx> { CodegenUnit { name: name, items: FxHashMap(), + size_estimate: None, } } @@ -131,6 +148,24 @@ impl<'tcx> CodegenUnit<'tcx> { let hash = hash & ((1u128 << 80) - 1); base_n::encode(hash, base_n::CASE_INSENSITIVE) } + + pub fn estimate_size<'a>(&mut self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) { + // Estimate the size of a codegen unit as (approximately) the number of MIR + // statements it corresponds to. + self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum()); + } + + pub fn size_estimate(&self) -> usize { + // Should only be called if `estimate_size` has previously been called. 
+ self.size_estimate.expect("estimate_size must be called before getting a size_estimate") + } + + pub fn modify_size_estimate(&mut self, delta: usize) { + assert!(self.size_estimate.is_some()); + if let Some(size_estimate) = self.size_estimate { + self.size_estimate = Some(size_estimate + delta); + } + } } impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> { @@ -140,6 +175,8 @@ impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> { let CodegenUnit { ref items, name, + // The size estimate is not relevant to the hash + size_estimate: _, } = *self; name.hash_stable(hcx, hasher); diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index da119ba45694d..b9546143a054b 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -72,6 +72,26 @@ pub enum OptLevel { SizeMin, // -Oz } +#[derive(Clone, Copy, PartialEq, Hash)] +pub enum Lto { + /// Don't do any LTO whatsoever + No, + + /// Do a full crate graph LTO. The flavor is determined by the compiler + /// (currently the default is "fat"). + Yes, + + /// Do a full crate graph LTO with ThinLTO + Thin, + + /// Do a local graph LTO with ThinLTO (only relevant for multiple codegen + /// units). + ThinLocal, + + /// Do a full crate graph LTO with "fat" LTO + Fat, +} + #[derive(Clone, Copy, PartialEq, Hash)] pub enum DebugInfoLevel { NoDebugInfo, @@ -389,7 +409,7 @@ top_level_options!( // commands like `--emit llvm-ir` which they're often incompatible with // if we otherwise use the defaults of rustc. cli_forced_codegen_units: Option<usize> [UNTRACKED], - cli_forced_thinlto: Option<bool> [UNTRACKED], + cli_forced_thinlto_off: bool [UNTRACKED], } ); @@ -590,7 +610,7 @@ pub fn basic_options() -> Options { debug_assertions: true, actually_rustdoc: false, cli_forced_codegen_units: None, - cli_forced_thinlto: None, + cli_forced_thinlto_off: false, } } @@ -780,11 +800,13 @@ macro_rules! options { Some("crate=integer"); pub const parse_unpretty: Option<&'static str> = Some("`string` or `string=string`"); + pub const parse_lto: Option<&'static str> = + Some("one of `thin`, `fat`, or omitted"); } #[allow(dead_code)] mod $mod_set { - use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer}; + use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer, Lto}; use rustc_back::{LinkerFlavor, PanicStrategy, RelroLevel}; use std::path::PathBuf; @@ -978,6 +1000,16 @@ macro_rules! options { _ => false, } } + + fn parse_lto(slot: &mut Lto, v: Option<&str>) -> bool { + *slot = match v { + None => Lto::Yes, + Some("thin") => Lto::Thin, + Some("fat") => Lto::Fat, + Some(_) => return false, + }; + true + } } ) } @@ -994,7 +1026,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options, "extra arguments to append to the linker invocation (space separated)"), link_dead_code: bool = (false, parse_bool, [UNTRACKED], "don't let linker strip dead code (turning it on can be used for code coverage)"), - lto: bool = (false, parse_bool, [TRACKED], + lto: Lto = (Lto::No, parse_lto, [TRACKED], "perform LLVM link-time optimizations"), target_cpu: Option<String> = (None, parse_opt_string, [TRACKED], "select target processor (rustc --print target-cpus for details)"), @@ -1135,6 +1167,8 @@ options! 
{DebuggingOptions, DebuggingSetter, basic_debugging_options, "treat all errors that occur as bugs"), external_macro_backtrace: bool = (false, parse_bool, [UNTRACKED], "show macro backtraces even for non-local macros"), + teach: bool = (false, parse_bool, [TRACKED], + "show extended diagnostic help"), continue_parse_after_error: bool = (false, parse_bool, [TRACKED], "attempt to recover from parse errors (experimental)"), incremental: Option<String> = (None, parse_opt_string, [UNTRACKED], @@ -1333,7 +1367,7 @@ pub fn build_target_config(opts: &Options, sp: &Handler) -> Config { sp.struct_fatal(&format!("Error loading target specification: {}", e)) .help("Use `--print target-list` for a list of built-in targets") .emit(); - panic!(FatalError); + FatalError.raise(); } }; @@ -1341,8 +1375,8 @@ pub fn build_target_config(opts: &Options, sp: &Handler) -> Config { "16" => (ast::IntTy::I16, ast::UintTy::U16), "32" => (ast::IntTy::I32, ast::UintTy::U32), "64" => (ast::IntTy::I64, ast::UintTy::U64), - w => panic!(sp.fatal(&format!("target specification was invalid: \ - unrecognized target-pointer-width {}", w))), + w => sp.fatal(&format!("target specification was invalid: \ + unrecognized target-pointer-width {}", w)).raise(), }; Config { @@ -1632,8 +1666,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) let mut debugging_opts = build_debugging_options(matches, error_format); if !debugging_opts.unstable_options && error_format == ErrorOutputType::Json(true) { - early_error(ErrorOutputType::Json(false), - "--error-format=pretty-json is unstable"); + early_error(ErrorOutputType::Json(false), "--error-format=pretty-json is unstable"); } let mut output_types = BTreeMap::new(); @@ -1677,7 +1710,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) let mut cg = build_codegen_options(matches, error_format); let mut codegen_units = cg.codegen_units; - let mut thinlto = None; + let mut disable_thinlto = false; // Issue #30063: if user requests llvm-related output to one // particular path, disable codegen-units. 
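The new `parse_lto` turns `-C lto` from a boolean into the `Lto` enum. A self-contained sketch of that flag-to-flavor mapping, using a local copy of the enum purely for illustration (not the real `session::config::Lto`):

#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum Lto { No, Yes, Thin, Fat }

// Mirrors `parse_lto`: a bare `-C lto` means "compiler picks the flavor"
// (`Yes`), `thin`/`fat` select explicitly, anything else is rejected.
// `Lto::No` is the default when the flag is absent altogether.
fn parse_lto(value: Option<&str>) -> Result<Lto, ()> {
    match value {
        None => Ok(Lto::Yes),
        Some("thin") => Ok(Lto::Thin),
        Some("fat") => Ok(Lto::Fat),
        Some(_) => Err(()),
    }
}

fn main() {
    assert_eq!(parse_lto(None), Ok(Lto::Yes));
    assert_eq!(parse_lto(Some("thin")), Ok(Lto::Thin));
    assert!(parse_lto(Some("bogus")).is_err());
}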
@@ -1699,12 +1732,12 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) } early_warn(error_format, "resetting to default -C codegen-units=1"); codegen_units = Some(1); - thinlto = Some(false); + disable_thinlto = true; } } _ => { codegen_units = Some(1); - thinlto = Some(false); + disable_thinlto = true; } } } @@ -1734,7 +1767,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) (&None, &None) => None, }.map(|m| PathBuf::from(m)); - if cg.lto && incremental.is_some() { + if cg.lto != Lto::No && incremental.is_some() { early_error(error_format, "can't perform LTO when compiling incrementally"); } @@ -1934,7 +1967,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) debug_assertions, actually_rustdoc: false, cli_forced_codegen_units: codegen_units, - cli_forced_thinlto: thinlto, + cli_forced_thinlto_off: disable_thinlto, }, cfg) } @@ -2052,7 +2085,7 @@ mod dep_tracking { use std::hash::Hash; use std::path::PathBuf; use std::collections::hash_map::DefaultHasher; - use super::{Passes, CrateType, OptLevel, DebugInfoLevel, + use super::{Passes, CrateType, OptLevel, DebugInfoLevel, Lto, OutputTypes, Externs, ErrorOutputType, Sanitizer}; use syntax::feature_gate::UnstableFeatures; use rustc_back::{PanicStrategy, RelroLevel}; @@ -2107,6 +2140,7 @@ mod dep_tracking { impl_dep_tracking_hash_via_hash!(RelroLevel); impl_dep_tracking_hash_via_hash!(Passes); impl_dep_tracking_hash_via_hash!(OptLevel); + impl_dep_tracking_hash_via_hash!(Lto); impl_dep_tracking_hash_via_hash!(DebugInfoLevel); impl_dep_tracking_hash_via_hash!(UnstableFeatures); impl_dep_tracking_hash_via_hash!(Externs); @@ -2180,6 +2214,7 @@ mod tests { use lint; use middle::cstore; use session::config::{build_configuration, build_session_options_and_crate_config}; + use session::config::Lto; use session::build_session; use std::collections::{BTreeMap, BTreeSet}; use std::iter::FromIterator; @@ -2656,7 +2691,7 @@ mod tests { // Make sure changing a [TRACKED] option changes the hash opts = reference.clone(); - opts.cg.lto = true; + opts.cg.lto = Lto::Fat; assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash()); opts = reference.clone(); diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 36f716a4a7694..2765239d5e649 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -250,7 +250,7 @@ impl Session { } pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! { - panic!(self.diagnostic().span_fatal(sp, msg)) + self.diagnostic().span_fatal(sp, msg).raise() } pub fn span_fatal_with_code<S: Into<MultiSpan>>( &self, @@ -258,10 +258,10 @@ impl Session { msg: &str, code: DiagnosticId, ) -> ! { - panic!(self.diagnostic().span_fatal_with_code(sp, msg, code)) + self.diagnostic().span_fatal_with_code(sp, msg, code).raise() } pub fn fatal(&self, msg: &str) -> ! { - panic!(self.diagnostic().fatal(msg)) + self.diagnostic().fatal(msg).raise() } pub fn span_err_or_warn<S: Into<MultiSpan>>(&self, is_warning: bool, sp: S, msg: &str) { if is_warning { @@ -498,9 +498,65 @@ impl Session { self.use_mir() } - pub fn lto(&self) -> bool { - self.opts.cg.lto || self.target.target.options.requires_lto + /// Calculates the flavor of LTO to use for this compilation. + pub fn lto(&self) -> config::Lto { + // If our target has codegen requirements ignore the command line + if self.target.target.options.requires_lto { + return config::Lto::Fat + } + + // If the user specified something, return that. 
If they only said `-C + // lto` and we've for whatever reason forced off ThinLTO via the CLI, + // then ensure we can't use a ThinLTO. + match self.opts.cg.lto { + config::Lto::No => {} + config::Lto::Yes if self.opts.cli_forced_thinlto_off => { + return config::Lto::Fat + } + other => return other, + } + + // Ok at this point the target doesn't require anything and the user + // hasn't asked for anything. Our next decision is whether or not + // we enable "auto" ThinLTO where we use multiple codegen units and + // then do ThinLTO over those codegen units. The logic below will + // either return `No` or `ThinLocal`. + + // If processing command line options determined that we're incompatible + // with ThinLTO (e.g. `-C lto --emit llvm-ir`) then return that option. + if self.opts.cli_forced_thinlto_off { + return config::Lto::No + } + + // If `-Z thinlto` specified process that, but note that this is mostly + // a deprecated option now that `-C lto=thin` exists. + if let Some(enabled) = self.opts.debugging_opts.thinlto { + if enabled { + return config::Lto::ThinLocal + } else { + return config::Lto::No + } + } + + // If there's only one codegen unit and LTO isn't enabled then there's + // no need for ThinLTO so just return false. + if self.codegen_units() == 1 { + return config::Lto::No + } + + // Right now ThinLTO isn't compatible with incremental compilation. + if self.opts.incremental.is_some() { + return config::Lto::No + } + + // Now we're in "defaults" territory. By default we enable ThinLTO for + // optimized compiles (anything greater than O0). + match self.opts.optimize { + config::OptLevel::No => config::Lto::No, + _ => config::Lto::ThinLocal, + } } + /// Returns the panic strategy for this compile session. If the user explicitly selected one /// using '-C panic', use that, otherwise use the panic strategy defined by the target. pub fn panic_strategy(&self) -> PanicStrategy { @@ -805,36 +861,8 @@ impl Session { 16 } - /// Returns whether ThinLTO is enabled for this compilation - pub fn thinlto(&self) -> bool { - // If processing command line options determined that we're incompatible - // with ThinLTO (e.g. `-C lto --emit llvm-ir`) then return that option. - if let Some(enabled) = self.opts.cli_forced_thinlto { - return enabled - } - - // If explicitly specified, use that with the next highest priority - if let Some(enabled) = self.opts.debugging_opts.thinlto { - return enabled - } - - // If there's only one codegen unit and LTO isn't enabled then there's - // no need for ThinLTO so just return false. - if self.codegen_units() == 1 && !self.lto() { - return false - } - - // Right now ThinLTO isn't compatible with incremental compilation. - if self.opts.incremental.is_some() { - return false - } - - // Now we're in "defaults" territory. By default we enable ThinLTO for - // optimized compiles (anything greater than O0). 
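The precedence in the new `Session::lto` reads: a target that requires LTO always gets fat LTO; an explicit `-C lto` request wins next (downgraded to fat if ThinLTO was forced off on the command line); then the deprecated `-Z thinlto` flag; and finally the automatic heuristics enable local ThinLTO only for optimized, non-incremental, multi-codegen-unit builds. A condensed sketch of that decision order as a pure function over hypothetical option fields, not the real Session API:

#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq)]
enum Lto { No, Yes, Thin, ThinLocal, Fat }

struct Opts {
    target_requires_lto: bool,
    cli_lto: Lto,                 // from -C lto[=thin|fat]
    cli_forced_thinlto_off: bool, // e.g. set for `-C lto --emit llvm-ir`
    z_thinlto: Option<bool>,      // -Z thinlto, superseded by -C lto=thin
    codegen_units: usize,
    incremental: bool,
    optimized: bool,              // anything above -O0
}

fn lto_flavor(o: &Opts) -> Lto {
    // Target requirements trump everything else.
    if o.target_requires_lto { return Lto::Fat; }
    // An explicit command-line request is honored next.
    match o.cli_lto {
        Lto::No => {}
        Lto::Yes if o.cli_forced_thinlto_off => return Lto::Fat,
        other => return other,
    }
    if o.cli_forced_thinlto_off { return Lto::No; }
    // The deprecated -Z thinlto flag still takes effect if given.
    if let Some(enabled) = o.z_thinlto {
        return if enabled { Lto::ThinLocal } else { Lto::No };
    }
    // Default heuristics for automatic local ThinLTO.
    if o.codegen_units == 1 || o.incremental || !o.optimized {
        return Lto::No;
    }
    Lto::ThinLocal
}

fn main() {
    let opts = Opts {
        target_requires_lto: false,
        cli_lto: Lto::No,
        cli_forced_thinlto_off: false,
        z_thinlto: None,
        codegen_units: 16,
        incremental: false,
        optimized: true,
    };
    // With nothing requested explicitly, an optimized multi-CGU build
    // defaults to local ThinLTO.
    assert!(lto_flavor(&opts) == Lto::ThinLocal);
}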
- match self.opts.optimize { - config::OptLevel::No => false, - _ => true, - } + pub fn teach(&self, code: &DiagnosticId) -> bool { + self.opts.debugging_opts.teach && !self.parse_sess.span_diagnostic.code_emitted(code) } } @@ -919,7 +947,7 @@ pub fn build_session_(sopts: config::Options, let host = match Target::search(config::host_triple()) { Ok(t) => t, Err(e) => { - panic!(span_diagnostic.fatal(&format!("Error loading host specification: {}", e))); + span_diagnostic.fatal(&format!("Error loading host specification: {}", e)).raise(); } }; let target_cfg = config::build_target_config(&sopts, &span_diagnostic); @@ -945,7 +973,7 @@ pub fn build_session_(sopts: config::Options, let working_dir = match env::current_dir() { Ok(dir) => dir, Err(e) => { - panic!(p_s.span_diagnostic.fatal(&format!("Current directory is invalid: {}", e))) + p_s.span_diagnostic.fatal(&format!("Current directory is invalid: {}", e)).raise() } }; let working_dir = file_path_mapping.map_prefix(working_dir); @@ -1076,7 +1104,7 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! { }; let handler = errors::Handler::with_emitter(true, false, emitter); handler.emit(&MultiSpan::new(), msg, errors::Level::Fatal); - panic!(errors::FatalError); + errors::FatalError.raise(); } pub fn early_warn(output: config::ErrorOutputType, msg: &str) { diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 067340ecacc00..42200a3a44728 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -794,48 +794,56 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } fn get_fn_like_arguments(&self, node: hir::map::Node) -> (Span, Vec<ArgKind>) { - if let hir::map::NodeExpr(&hir::Expr { - node: hir::ExprClosure(_, ref _decl, id, span, _), - .. - }) = node { - (self.tcx.sess.codemap().def_span(span), self.tcx.hir.body(id).arguments.iter() - .map(|arg| { - if let hir::Pat { - node: hir::PatKind::Tuple(args, _), - span, - .. - } = arg.pat.clone().into_inner() { - ArgKind::Tuple( + match node { + hir::map::NodeExpr(&hir::Expr { + node: hir::ExprClosure(_, ref _decl, id, span, _), + .. + }) => { + (self.tcx.sess.codemap().def_span(span), self.tcx.hir.body(id).arguments.iter() + .map(|arg| { + if let hir::Pat { + node: hir::PatKind::Tuple(args, _), span, - args.iter().map(|pat| { - let snippet = self.tcx.sess.codemap() - .span_to_snippet(pat.span).unwrap(); - (snippet, "_".to_owned()) - }).collect::<Vec<_>>(), - ) - } else { - let name = self.tcx.sess.codemap().span_to_snippet(arg.pat.span).unwrap(); - ArgKind::Arg(name, "_".to_owned()) - } - }) - .collect::<Vec<ArgKind>>()) - } else if let hir::map::NodeItem(&hir::Item { - span, - node: hir::ItemFn(ref decl, ..), - .. - }) = node { - (self.tcx.sess.codemap().def_span(span), decl.inputs.iter() - .map(|arg| match arg.clone().into_inner().node { - hir::TyTup(ref tys) => ArgKind::Tuple( - arg.span, - tys.iter() - .map(|_| ("_".to_owned(), "_".to_owned())) - .collect::<Vec<_>>(), - ), - _ => ArgKind::Arg("_".to_owned(), "_".to_owned()) - }).collect::<Vec<ArgKind>>()) - } else { - panic!("non-FnLike node found: {:?}", node); + .. 
+ } = arg.pat.clone().into_inner() { + ArgKind::Tuple( + span, + args.iter().map(|pat| { + let snippet = self.tcx.sess.codemap() + .span_to_snippet(pat.span).unwrap(); + (snippet, "_".to_owned()) + }).collect::<Vec<_>>(), + ) + } else { + let name = self.tcx.sess.codemap() + .span_to_snippet(arg.pat.span).unwrap(); + ArgKind::Arg(name, "_".to_owned()) + } + }) + .collect::<Vec<ArgKind>>()) + } + hir::map::NodeItem(&hir::Item { + span, + node: hir::ItemFn(ref decl, ..), + .. + }) | + hir::map::NodeImplItem(&hir::ImplItem { + span, + node: hir::ImplItemKind::Method(hir::MethodSig { ref decl, .. }, _), + .. + }) => { + (self.tcx.sess.codemap().def_span(span), decl.inputs.iter() + .map(|arg| match arg.clone().into_inner().node { + hir::TyTup(ref tys) => ArgKind::Tuple( + arg.span, + tys.iter() + .map(|_| ("_".to_owned(), "_".to_owned())) + .collect::<Vec<_>>(), + ), + _ => ArgKind::Arg("_".to_owned(), "_".to_owned()) + }).collect::<Vec<ArgKind>>()) + } + _ => panic!("non-FnLike node found: {:?}", node), } } diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index 3342d13dd6e5f..ae539f07336d5 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -101,7 +101,7 @@ pub struct MismatchedProjectionTypes<'tcx> { pub err: ty::error::TypeError<'tcx> } -#[derive(PartialEq, Eq, Debug)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)] enum ProjectionTyCandidate<'tcx> { // from a where-clause in the env or object type ParamEnv(ty::PolyProjectionPredicate<'tcx>), @@ -293,9 +293,23 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, Reveal::UserFacing => ty, Reveal::All => { + let recursion_limit = self.tcx().sess.recursion_limit.get(); + if self.depth >= recursion_limit { + let obligation = Obligation::with_depth( + self.cause.clone(), + recursion_limit, + self.param_env, + ty, + ); + self.selcx.infcx().report_overflow_error(&obligation, true); + } + let generic_ty = self.tcx().type_of(def_id); let concrete_ty = generic_ty.subst(self.tcx(), substs); - self.fold_ty(concrete_ty) + self.depth += 1; + let folded_ty = self.fold_ty(concrete_ty); + self.depth -= 1; + folded_ty } } } @@ -824,21 +838,12 @@ fn project_type<'cx, 'gcx, 'tcx>( // Drop duplicates. // // Note: `candidates.vec` seems to be on the critical path of the - // compiler. Replacing it with an hash set was also tried, which would - // render the following dedup unnecessary. It led to cleaner code but - // prolonged compiling time of `librustc` from 5m30s to 6m in one test, or - // ~9% performance lost. - if candidates.vec.len() > 1 { - let mut i = 0; - while i < candidates.vec.len() { - let has_dup = (0..i).any(|j| candidates.vec[i] == candidates.vec[j]); - if has_dup { - candidates.vec.swap_remove(i); - } else { - i += 1; - } - } - } + // compiler. Replacing it with an HashSet was also tried, which would + // render the following dedup unnecessary. The original comment indicated + // that it was 9% slower, but that data is now obsolete and a new + // benchmark should be performed. + candidates.vec.sort_unstable(); + candidates.vec.dedup(); // Prefer where-clauses. As in select, if there are multiple // candidates, we prefer where-clause candidates over impls. 
This diff --git a/src/librustc/ty/inhabitedness/mod.rs b/src/librustc/ty/inhabitedness/mod.rs index 0072512464a0e..93e4cd9adf888 100644 --- a/src/librustc/ty/inhabitedness/mod.rs +++ b/src/librustc/ty/inhabitedness/mod.rs @@ -262,10 +262,11 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { })) }, TyArray(ty, len) => { - if len.val.to_const_int().and_then(|i| i.to_u64()) == Some(0) { - DefIdForest::empty() - } else { - ty.uninhabited_from(visited, tcx) + match len.val.to_const_int().and_then(|i| i.to_u64()) { + // If the array is definitely non-empty, it's uninhabited if + // the type of its elements is uninhabited. + Some(n) if n != 0 => ty.uninhabited_from(visited, tcx), + _ => DefIdForest::empty() } } TyRef(_, ref tm) => { diff --git a/src/librustc/ty/maps/config.rs b/src/librustc/ty/maps/config.rs index 8dedcb24c2fb6..eb07876b05f26 100644 --- a/src/librustc/ty/maps/config.rs +++ b/src/librustc/ty/maps/config.rs @@ -617,8 +617,8 @@ impl<'tcx> QueryDescription<'tcx> for queries::optimized_mir<'tcx> { } fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: SerializedDepNodeIndex) - -> Option<Self::Value> { + id: SerializedDepNodeIndex) + -> Option<Self::Value> { let mir: Option<::mir::Mir<'tcx>> = tcx.on_disk_query_result_cache .try_load_query_result(tcx, id); mir.map(|x| tcx.alloc_mir(x)) @@ -637,6 +637,27 @@ impl<'tcx> QueryDescription<'tcx> for queries::target_features_whitelist<'tcx> { } } +impl<'tcx> QueryDescription<'tcx> for queries::instance_def_size_estimate<'tcx> { + fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String { + format!("estimating size for `{}`", tcx.item_path_str(def.def_id())) + } +} + +impl<'tcx> QueryDescription<'tcx> for queries::generics_of<'tcx> { + #[inline] + fn cache_on_disk(def_id: Self::Key) -> bool { + def_id.is_local() + } + + fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: SerializedDepNodeIndex) + -> Option<Self::Value> { + let generics: Option<ty::Generics> = tcx.on_disk_query_result_cache + .try_load_query_result(tcx, id); + generics.map(|x| tcx.alloc_generics(x)) + } +} + macro_rules! impl_disk_cacheable_query( ($query_name:ident, |$key:tt| $cond:expr) => { impl<'tcx> QueryDescription<'tcx> for queries::$query_name<'tcx> { @@ -662,3 +683,6 @@ impl_disk_cacheable_query!(mir_const_qualif, |def_id| def_id.is_local()); impl_disk_cacheable_query!(check_match, |def_id| def_id.is_local()); impl_disk_cacheable_query!(contains_extern_indicator, |_| true); impl_disk_cacheable_query!(def_symbol_name, |_| true); +impl_disk_cacheable_query!(type_of, |def_id| def_id.is_local()); +impl_disk_cacheable_query!(predicates_of, |def_id| def_id.is_local()); +impl_disk_cacheable_query!(used_trait_imports, |def_id| def_id.is_local()); diff --git a/src/librustc/ty/maps/mod.rs b/src/librustc/ty/maps/mod.rs index e7e92b8a4288f..6c79f6a62fa0b 100644 --- a/src/librustc/ty/maps/mod.rs +++ b/src/librustc/ty/maps/mod.rs @@ -365,6 +365,9 @@ define_maps! { <'tcx> target_features_whitelist_node(CrateNum) -> Rc<FxHashSet<String>>, [] fn target_features_enabled: TargetFeaturesEnabled(DefId) -> Rc<Vec<String>>, + // Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning. 
+ [] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>) + -> usize, } ////////////////////////////////////////////////////////////////////// @@ -514,3 +517,10 @@ fn substitute_normalize_and_test_predicates_node<'tcx>(key: (DefId, &'tcx Substs fn target_features_whitelist_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> { DepConstructor::TargetFeaturesWhitelist } + +fn instance_def_size_estimate_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>) + -> DepConstructor<'tcx> { + DepConstructor::InstanceDefSizeEstimate { + instance_def + } +} diff --git a/src/librustc/ty/maps/on_disk_cache.rs b/src/librustc/ty/maps/on_disk_cache.rs index 4e2421dad2161..56ed0f9106f30 100644 --- a/src/librustc/ty/maps/on_disk_cache.rs +++ b/src/librustc/ty/maps/on_disk_cache.rs @@ -204,8 +204,12 @@ impl<'sess> OnDiskCache<'sess> { let enc = &mut encoder; let qri = &mut query_result_index; - // Encode TypeckTables + encode_query_results::<type_of, _>(tcx, enc, qri)?; + encode_query_results::<generics_of, _>(tcx, enc, qri)?; + encode_query_results::<predicates_of, _>(tcx, enc, qri)?; + encode_query_results::<used_trait_imports, _>(tcx, enc, qri)?; encode_query_results::<typeck_tables_of, _>(tcx, enc, qri)?; + encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?; encode_query_results::<optimized_mir, _>(tcx, enc, qri)?; encode_query_results::<unsafety_check_result, _>(tcx, enc, qri)?; encode_query_results::<borrowck, _>(tcx, enc, qri)?; @@ -215,7 +219,6 @@ impl<'sess> OnDiskCache<'sess> { encode_query_results::<const_is_rvalue_promotable_to_static, _>(tcx, enc, qri)?; encode_query_results::<contains_extern_indicator, _>(tcx, enc, qri)?; encode_query_results::<symbol_name, _>(tcx, enc, qri)?; - encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?; encode_query_results::<check_match, _>(tcx, enc, qri)?; } diff --git a/src/librustc/ty/maps/plumbing.rs b/src/librustc/ty/maps/plumbing.rs index d670ecc2691ae..c9eebc3d2a0a7 100644 --- a/src/librustc/ty/maps/plumbing.rs +++ b/src/librustc/ty/maps/plumbing.rs @@ -761,6 +761,7 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, DepKind::EraseRegionsTy | DepKind::NormalizeTy | DepKind::SubstituteNormalizeAndTestPredicates | + DepKind::InstanceDefSizeEstimate | // This one should never occur in this context DepKind::Null => { @@ -982,4 +983,8 @@ impl_load_from_cache!( ConstIsRvaluePromotableToStatic => const_is_rvalue_promotable_to_static, ContainsExternIndicator => contains_extern_indicator, CheckMatch => check_match, + TypeOfItem => type_of, + GenericsOfItem => generics_of, + PredicatesOfItem => predicates_of, + UsedTraitImports => used_trait_imports, ); diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index b3acfb7fac8a3..63df1179af228 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -17,7 +17,7 @@ pub use self::fold::TypeFoldable; use hir::{map as hir_map, FreevarMap, TraitMap}; use hir::def::{Def, CtorKind, ExportMap}; -use hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; +use hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use hir::map::DefPathData; use hir::svh::Svh; use ich::Fingerprint; @@ -39,8 +39,8 @@ use util::nodemap::{NodeSet, DefIdMap, FxHashMap, FxHashSet}; use serialize::{self, Encodable, Encoder}; use std::cell::RefCell; -use std::collections::BTreeMap; use std::cmp; +use std::cmp::Ordering; use std::fmt; use std::hash::{Hash, Hasher}; use std::iter::FromIterator; @@ -499,6 
+499,20 @@ impl<'tcx> Hash for TyS<'tcx> { } } +impl<'tcx> Ord for TyS<'tcx> { + #[inline] + fn cmp(&self, other: &TyS<'tcx>) -> Ordering { + // (self as *const _).cmp(other as *const _) + (self as *const TyS<'tcx>).cmp(&(other as *const TyS<'tcx>)) + } +} +impl<'tcx> PartialOrd for TyS<'tcx> { + #[inline] + fn partial_cmp(&self, other: &TyS<'tcx>) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + impl<'tcx> TyS<'tcx> { pub fn is_primitive_ty(&self) -> bool { match self.sty { @@ -568,6 +582,19 @@ impl<T> PartialEq for Slice<T> { } impl<T> Eq for Slice<T> {} +impl<T> Ord for Slice<T> { + #[inline] + fn cmp(&self, other: &Slice<T>) -> Ordering { + (&self.0 as *const [T]).cmp(&(&other.0 as *const [T])) + } +} +impl<T> PartialOrd for Slice<T> { + #[inline] + fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + impl<T> Hash for Slice<T> { fn hash<H: Hasher>(&self, s: &mut H) { (self.as_ptr(), self.len()).hash(s) @@ -758,9 +785,8 @@ pub struct Generics { pub regions: Vec<RegionParameterDef>, pub types: Vec<TypeParameterDef>, - /// Reverse map to each `TypeParameterDef`'s `index` field, from - /// `def_id.index` (`def_id.krate` is the same as the item's). - pub type_param_to_index: BTreeMap<DefIndex, u32>, + /// Reverse map to each `TypeParameterDef`'s `index` field + pub type_param_to_index: FxHashMap<DefId, u32>, pub has_self: bool, pub has_late_bound_regions: Option<Span>, @@ -1103,7 +1129,7 @@ pub type PolySubtypePredicate<'tcx> = ty::Binder<SubtypePredicate<'tcx>>; /// equality between arbitrary types. Processing an instance of /// Form #2 eventually yields one of these `ProjectionPredicate` /// instances to normalize the LHS. -#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)] pub struct ProjectionPredicate<'tcx> { pub projection_ty: ProjectionTy<'tcx>, pub ty: Ty<'tcx>, @@ -2695,6 +2721,20 @@ fn crate_hash<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx.hir.crate_hash } +fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + instance_def: InstanceDef<'tcx>) + -> usize { + match instance_def { + InstanceDef::Item(..) | + InstanceDef::DropGlue(..) => { + let mir = tcx.instance_mir(instance_def); + mir.basic_blocks().iter().map(|bb| bb.statements.len()).sum() + }, + // Estimate the size of other compiler-generated shims to be 1. + _ => 1 + } +} + pub fn provide(providers: &mut ty::maps::Providers) { context::provide(providers); erase_regions::provide(providers); @@ -2712,6 +2752,7 @@ pub fn provide(providers: &mut ty::maps::Providers) { original_crate_name, crate_hash, trait_impls_of: trait_def::trait_impls_of_provider, + instance_def_size_estimate, ..*providers }; } diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index b6ba7896497b4..db7e4fe45ef76 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -638,7 +638,7 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> { /// erase, or otherwise "discharge" these bound regions, we change the /// type from `Binder<T>` to just `T` (see /// e.g. `liberate_late_bound_regions`). -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct Binder<T>(pub T); impl<T> Binder<T> { @@ -738,7 +738,7 @@ impl<T> Binder<T> { /// Represents the projection of an associated type. 
In explicit UFCS /// form this would be written `<T as Trait<..>>::N`. -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct ProjectionTy<'tcx> { /// The parameters of the associated item. pub substs: &'tcx Substs<'tcx>, diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index 80b113dfdf5a5..7c167f69ebd8c 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -29,7 +29,7 @@ use std::mem; /// To reduce memory usage, a `Kind` is a interned pointer, /// with the lowest 2 bits being reserved for a tag to /// indicate the type (`Ty` or `Region`) it points to. -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Kind<'tcx> { ptr: NonZero<usize>, marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>)> diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index b65b18d0caa8c..2e860f940a7a7 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -320,8 +320,8 @@ pub struct TargetOptions { /// Relocation model to use in object file. Corresponds to `llc /// -relocation-model=$relocation_model`. Defaults to "pic". pub relocation_model: String, - /// Code model to use. Corresponds to `llc -code-model=$code_model`. Defaults to "default". - pub code_model: String, + /// Code model to use. Corresponds to `llc -code-model=$code_model`. + pub code_model: Option<String>, /// TLS model to use. Options are "global-dynamic" (default), "local-dynamic", "initial-exec" /// and "local-exec". This is similar to the -ftls-model option in GCC/Clang. pub tls_model: String, @@ -483,7 +483,7 @@ impl Default for TargetOptions { only_cdylib: false, executables: false, relocation_model: "pic".to_string(), - code_model: "default".to_string(), + code_model: None, tls_model: "global-dynamic".to_string(), disable_redzone: false, eliminate_frame_pointer: true, @@ -736,7 +736,7 @@ impl Target { key!(only_cdylib, bool); key!(executables, bool); key!(relocation_model); - key!(code_model); + key!(code_model, optional); key!(tls_model); key!(disable_redzone, bool); key!(eliminate_frame_pointer, bool); diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index e737328b75ea6..cdb50a0ae4850 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -87,11 +87,11 @@ use std::env; use std::ffi::OsString; use std::io::{self, Read, Write}; use std::iter::repeat; +use std::panic; use std::path::PathBuf; use std::process::{self, Command, Stdio}; use std::rc::Rc; use std::str; -use std::sync::{Arc, Mutex}; use std::thread; use syntax::ast; @@ -168,7 +168,7 @@ pub fn run<F>(run_compiler: F) -> isize handler.emit(&MultiSpan::new(), "aborting due to previous error(s)", errors::Level::Fatal); - exit_on_err(); + panic::resume_unwind(Box::new(errors::FatalErrorMarker)); } } } @@ -1228,27 +1228,16 @@ pub fn in_rustc_thread<F, R>(f: F) -> Result<R, Box<Any + Send>> /// The diagnostic emitter yielded to the procedure should be used for reporting /// errors of the compiler. 
pub fn monitor<F: FnOnce() + Send + 'static>(f: F) { - struct Sink(Arc<Mutex<Vec<u8>>>); - impl Write for Sink { - fn write(&mut self, data: &[u8]) -> io::Result<usize> { - Write::write(&mut *self.0.lock().unwrap(), data) - } - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } - } - - let data = Arc::new(Mutex::new(Vec::new())); - let err = Sink(data.clone()); - let result = in_rustc_thread(move || { - io::set_panic(Some(box err)); f() }); if let Err(value) = result { // Thread panicked without emitting a fatal diagnostic - if !value.is::<errors::FatalError>() { + if !value.is::<errors::FatalErrorMarker>() { + // Emit a newline + eprintln!(""); + let emitter = Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto, None, @@ -1273,22 +1262,12 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) { ¬e, errors::Level::Note); } - - eprintln!("{}", str::from_utf8(&data.lock().unwrap()).unwrap()); } - exit_on_err(); + panic::resume_unwind(Box::new(errors::FatalErrorMarker)); } } -fn exit_on_err() -> ! { - // Panic so the process returns a failure code, but don't pollute the - // output with some unnecessary panic messages, we've already - // printed everything that we needed to. - io::set_panic(Some(box io::sink())); - panic!(); -} - #[cfg(stage0)] pub fn diagnostics_registry() -> errors::registry::Registry { use errors::registry::Registry; diff --git a/src/librustc_errors/diagnostic.rs b/src/librustc_errors/diagnostic.rs index 8da4321fa5b71..2e654fe9929a6 100644 --- a/src/librustc_errors/diagnostic.rs +++ b/src/librustc_errors/diagnostic.rs @@ -27,7 +27,7 @@ pub struct Diagnostic { pub suggestions: Vec<CodeSuggestion>, } -#[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] +#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum DiagnosticId { Error(String), Lint(String), @@ -281,6 +281,10 @@ impl Diagnostic { self } + pub fn get_code(&self) -> Option<DiagnosticId> { + self.code.clone() + } + pub fn message(&self) -> String { self.message.iter().map(|i| i.0.to_owned()).collect::<String>() } diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 1fb673815eea3..3d50c95d3f4f9 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -19,6 +19,7 @@ #![cfg_attr(unix, feature(libc))] #![feature(conservative_impl_trait)] #![feature(i128_type)] +#![feature(optin_builtin_traits)] extern crate term; #[cfg(unix)] @@ -44,6 +45,7 @@ use std::rc::Rc; use std::{error, fmt}; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering::SeqCst; +use std::panic; mod diagnostic; mod diagnostic_builder; @@ -201,6 +203,18 @@ impl CodeSuggestion { #[must_use] pub struct FatalError; +pub struct FatalErrorMarker; + +// Don't implement Send on FatalError. This makes it impossible to panic!(FatalError). +// We don't want to invoke the panic handler and print a backtrace for fatal errors. +impl !Send for FatalError {} + +impl FatalError { + pub fn raise(self) -> ! { + panic::resume_unwind(Box::new(FatalErrorMarker)) + } +} + impl fmt::Display for FatalError { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(f, "parser fatal error") @@ -245,6 +259,11 @@ pub struct Handler { delayed_span_bug: RefCell<Option<Diagnostic>>, tracked_diagnostics: RefCell<Option<Vec<Diagnostic>>>, + // This set contains the `DiagnosticId` of all emitted diagnostics to avoid + // emitting the same diagnostic with extended help (`--teach`) twice, which + // would be uneccessary repetition. 
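`FatalError::raise` replaces the old `panic!(FatalError)` calls: it resumes unwinding with a marker payload, which does not run the panic hook, and `monitor` in the driver checks for that marker to tell an already-reported fatal error apart from a genuine compiler panic. A minimal standalone sketch of the pattern, with local types rather than the `librustc_errors` ones:

use std::panic;

// Marker payload: unwinding with this type aborts compilation without the
// usual panic message or backtrace.
struct FatalErrorMarker;

struct FatalError;

impl FatalError {
    fn raise(self) -> ! {
        // `resume_unwind` does not invoke the panic hook.
        panic::resume_unwind(Box::new(FatalErrorMarker))
    }
}

fn main() {
    let result = panic::catch_unwind(|| {
        // Pretend a fatal diagnostic was already emitted here.
        FatalError.raise();
    });
    if let Err(payload) = result {
        // The catcher inspects the payload type, as `monitor` does, to decide
        // whether an internal-compiler-error report is warranted.
        assert!(payload.is::<FatalErrorMarker>());
    }
}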
+ tracked_diagnostic_codes: RefCell<FxHashSet<DiagnosticId>>, + // This set contains a hash of every diagnostic that has been emitted by // this handler. These hashes is used to avoid emitting the same error // twice. @@ -303,6 +322,7 @@ impl Handler { continue_after_error: Cell::new(true), delayed_span_bug: RefCell::new(None), tracked_diagnostics: RefCell::new(None), + tracked_diagnostic_codes: RefCell::new(FxHashSet()), emitted_diagnostics: RefCell::new(FxHashSet()), } } @@ -539,7 +559,7 @@ impl Handler { } } - panic!(self.fatal(&s)); + self.fatal(&s).raise(); } pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) { if lvl == Warning && !self.flags.can_emit_warnings { @@ -575,6 +595,14 @@ impl Handler { (ret, diagnostics) } + /// `true` if a diagnostic with this code has already been emitted in this handler. + /// + /// Used to suppress emitting the same error multiple times with extended explanation when + /// calling `-Zteach`. + pub fn code_emitted(&self, code: &DiagnosticId) -> bool { + self.tracked_diagnostic_codes.borrow().contains(code) + } + fn emit_db(&self, db: &DiagnosticBuilder) { let diagnostic = &**db; @@ -582,6 +610,10 @@ impl Handler { list.push(diagnostic.clone()); } + if let Some(ref code) = diagnostic.code { + self.tracked_diagnostic_codes.borrow_mut().insert(code.clone()); + } + let diagnostic_hash = { use std::hash::Hash; let mut hasher = StableHasher::new(); diff --git a/src/librustc_llvm/ffi.rs b/src/librustc_llvm/ffi.rs index f51e51a88b10f..99e43a2ddf98d 100644 --- a/src/librustc_llvm/ffi.rs +++ b/src/librustc_llvm/ffi.rs @@ -299,12 +299,11 @@ pub enum RelocMode { #[repr(C)] pub enum CodeModel { Other, - Default, - JITDefault, Small, Kernel, Medium, Large, + None, } /// LLVMRustDiagnosticKind @@ -331,7 +330,6 @@ pub enum DiagnosticKind { pub enum ArchiveKind { Other, K_GNU, - K_MIPS64, K_BSD, K_COFF, } @@ -498,6 +496,10 @@ pub mod debuginfo { const FlagStaticMember = (1 << 12); const FlagLValueReference = (1 << 13); const FlagRValueReference = (1 << 14); + const FlagExternalTypeRef = (1 << 15); + const FlagIntroducedVirtual = (1 << 18); + const FlagBitField = (1 << 19); + const FlagNoReturn = (1 << 20); const FlagMainSubprogram = (1 << 21); } } diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index c75a026a0f8b9..8dcf7444dd18f 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -105,7 +105,6 @@ impl FromStr for ArchiveKind { fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "gnu" => Ok(ArchiveKind::K_GNU), - "mips64" => Ok(ArchiveKind::K_MIPS64), "bsd" => Ok(ArchiveKind::K_BSD), "coff" => Ok(ArchiveKind::K_COFF), _ => Err(()), diff --git a/src/librustc_mir/borrow_check/nll/type_check/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/mod.rs index 901b73c610e3b..9dcd4435580ab 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/mod.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/mod.rs @@ -681,6 +681,8 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { let data = self.infcx.take_and_reset_region_constraints(); if !data.is_empty() { + debug!("fully_perform_op: constraints generated at {:?} are {:#?}", + locations, data); self.constraints .outlives_sets .push(OutlivesSet { locations, data }); @@ -1539,6 +1541,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { where T: fmt::Debug + TypeFoldable<'tcx>, { + debug!("normalize(value={:?}, location={:?})", value, location); self.fully_perform_op(location.at_self(), |this| { let mut selcx = traits::SelectionContext::new(this.infcx); let cause = 
this.misc(this.last_span); diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs index e899cc072e072..806d787c84522 100644 --- a/src/librustc_mir/monomorphize/partitioning.rs +++ b/src/librustc_mir/monomorphize/partitioning.rs @@ -115,6 +115,7 @@ use syntax::ast::NodeId; use syntax::symbol::{Symbol, InternedString}; use rustc::mir::mono::MonoItem; use monomorphize::item::{MonoItemExt, InstantiationMode}; +use core::usize; pub use rustc::mir::mono::CodegenUnit; @@ -224,6 +225,8 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let mut initial_partitioning = place_root_translation_items(tcx, trans_items); + initial_partitioning.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx)); + debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter()); // If the partitioning should produce a fixed count of codegen units, merge @@ -241,6 +244,8 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let mut post_inlining = place_inlined_translation_items(initial_partitioning, inlining_map); + post_inlining.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx)); + debug_dump(tcx, "POST INLINING:", post_inlining.codegen_units.iter()); // Next we try to make as many symbols "internal" as possible, so LLVM has @@ -422,14 +427,13 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning< codegen_units.sort_by_key(|cgu| cgu.name().clone()); // Merge the two smallest codegen units until the target size is reached. - // Note that "size" is estimated here rather inaccurately as the number of - // translation items in a given unit. This could be improved on. while codegen_units.len() > target_cgu_count { // Sort small cgus to the back - codegen_units.sort_by_key(|cgu| -(cgu.items().len() as i64)); + codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate()); let mut smallest = codegen_units.pop().unwrap(); let second_smallest = codegen_units.last_mut().unwrap(); + second_smallest.modify_size_estimate(smallest.size_estimate()); for (k, v) in smallest.items_mut().drain() { second_smallest.items_mut().insert(k, v); } diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 3f49128d2e823..6971033c8994b 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -141,14 +141,6 @@ impl<'a> AstValidator<'a> { impl<'a> Visitor<'a> for AstValidator<'a> { fn visit_expr(&mut self, expr: &'a Expr) { match expr.node { - ExprKind::While(.., Some(ident)) | - ExprKind::Loop(_, Some(ident)) | - ExprKind::WhileLet(.., Some(ident)) | - ExprKind::ForLoop(.., Some(ident)) | - ExprKind::Break(Some(ident), _) | - ExprKind::Continue(Some(ident)) => { - self.check_label(ident.node, ident.span); - } ExprKind::InlineAsm(..) if !self.session.target.target.options.allow_asm => { span_err!(self.session, expr.span, E0472, "asm! 
is unsupported on this target"); } @@ -211,6 +203,11 @@ impl<'a> Visitor<'a> for AstValidator<'a> { visit::walk_use_tree(self, use_tree, id); } + fn visit_label(&mut self, label: &'a Label) { + self.check_label(label.ident, label.span); + visit::walk_label(self, label); + } + fn visit_lifetime(&mut self, lifetime: &'a Lifetime) { self.check_lifetime(lifetime); visit::walk_lifetime(self, lifetime); diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 55c7e5f392416..557ff887a3ef2 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -55,7 +55,7 @@ use syntax::attr; use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind}; use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, GenericParam, Generics}; use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind}; -use syntax::ast::{Local, Mutability, Pat, PatKind, Path}; +use syntax::ast::{Label, Local, Mutability, Pat, PatKind, Path}; use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind}; use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue}; use syntax::parse::token; @@ -2045,7 +2045,7 @@ impl<'a> Resolver<'a> { segments: vec![], span: use_tree.span, }; - self.resolve_use_tree(item, use_tree, &path); + self.resolve_use_tree(item.id, use_tree, &path); } ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) | ItemKind::GlobalAsm(_) => { @@ -2056,7 +2056,7 @@ impl<'a> Resolver<'a> { } } - fn resolve_use_tree(&mut self, item: &Item, use_tree: &ast::UseTree, prefix: &Path) { + fn resolve_use_tree(&mut self, id: NodeId, use_tree: &ast::UseTree, prefix: &Path) { match use_tree.kind { ast::UseTreeKind::Nested(ref items) => { let path = Path { @@ -2070,10 +2070,10 @@ impl<'a> Resolver<'a> { if items.len() == 0 { // Resolve prefix of an import with empty braces (issue #28388). - self.smart_resolve_path(item.id, None, &path, PathSource::ImportPrefix); + self.smart_resolve_path(id, None, &path, PathSource::ImportPrefix); } else { - for &(ref tree, _) in items { - self.resolve_use_tree(item, tree, &path); + for &(ref tree, nested_id) in items { + self.resolve_use_tree(nested_id, tree, &path); } } } @@ -3415,13 +3415,13 @@ impl<'a> Resolver<'a> { } } - fn with_resolved_label<F>(&mut self, label: Option<SpannedIdent>, id: NodeId, f: F) + fn with_resolved_label<F>(&mut self, label: Option<Label>, id: NodeId, f: F) where F: FnOnce(&mut Resolver) { if let Some(label) = label { let def = Def::Label(id); self.with_label_rib(|this| { - this.label_ribs.last_mut().unwrap().bindings.insert(label.node, def); + this.label_ribs.last_mut().unwrap().bindings.insert(label.ident, def); f(this); }); } else { @@ -3429,7 +3429,7 @@ impl<'a> Resolver<'a> { } } - fn resolve_labeled_block(&mut self, label: Option<SpannedIdent>, id: NodeId, block: &Block) { + fn resolve_labeled_block(&mut self, label: Option<Label>, id: NodeId, block: &Block) { self.with_resolved_label(label, id, |this| this.visit_block(block)); } @@ -3452,19 +3452,19 @@ impl<'a> Resolver<'a> { } ExprKind::Break(Some(label), _) | ExprKind::Continue(Some(label)) => { - match self.search_label(label.node, |rib, id| rib.bindings.get(&id).cloned()) { + match self.search_label(label.ident, |rib, id| rib.bindings.get(&id).cloned()) { None => { // Search again for close matches... 
// Picks the first label that is "close enough", which is not necessarily // the closest match - let close_match = self.search_label(label.node, |rib, ident| { + let close_match = self.search_label(label.ident, |rib, ident| { let names = rib.bindings.iter().map(|(id, _)| &id.name); find_best_match_for_name(names, &*ident.name.as_str(), None) }); self.record_def(expr.id, err_path_resolution()); resolve_error(self, label.span, - ResolutionError::UndeclaredLabel(&label.node.name.as_str(), + ResolutionError::UndeclaredLabel(&label.ident.name.as_str(), close_match)); } Some(def @ Def::Label(_)) => { diff --git a/src/librustc_trans/abi.rs b/src/librustc_trans/abi.rs index 5079ce7752354..9cabd9356e9bf 100644 --- a/src/librustc_trans/abi.rs +++ b/src/librustc_trans/abi.rs @@ -871,6 +871,31 @@ impl<'a, 'tcx> FnType<'tcx> { match arg.layout.abi { layout::Abi::Aggregate { .. } => {} + + // This is a fun case! The gist of what this is doing is + // that we want callers and callees to always agree on the + // ABI of how they pass SIMD arguments. If we were to *not* + // make these arguments indirect then they'd be immediates + // in LLVM, which means that they'd used whatever the + // appropriate ABI is for the callee and the caller. That + // means, for example, if the caller doesn't have AVX + // enabled but the callee does, then passing an AVX argument + // across this boundary would cause corrupt data to show up. + // + // This problem is fixed by unconditionally passing SIMD + // arguments through memory between callers and callees + // which should get them all to agree on ABI regardless of + // target feature sets. Some more information about this + // issue can be found in #44367. + // + // Note that the platform intrinsic ABI is exempt here as + // that's how we connect up to LLVM and it's unstable + // anyway, we control all calls to it in libstd. + layout::Abi::Vector { .. } if abi != Abi::PlatformIntrinsic => { + arg.make_indirect(); + return + } + _ => return } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 923e5549927de..f050edcd513b9 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -16,7 +16,7 @@ use super::rpath::RPathConfig; use super::rpath; use metadata::METADATA_FILENAME; use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, PrintRequest}; -use rustc::session::config::RUST_CGU_EXT; +use rustc::session::config::{RUST_CGU_EXT, Lto}; use rustc::session::filesearch; use rustc::session::search_paths::PathKind; use rustc::session::Session; @@ -503,7 +503,8 @@ fn link_staticlib(sess: &Session, }); ab.add_rlib(path, &name.as_str(), - sess.lto() && !ignored_for_lto(sess, &trans.crate_info, cnum), + is_full_lto_enabled(sess) && + !ignored_for_lto(sess, &trans.crate_info, cnum), skip_object_files).unwrap(); all_native_libs.extend(trans.crate_info.native_libraries[&cnum].iter().cloned()); @@ -1211,7 +1212,8 @@ fn add_upstream_rust_crates(cmd: &mut Linker, lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib) }); - if (!sess.lto() || ignored_for_lto(sess, &trans.crate_info, cnum)) && + if (!is_full_lto_enabled(sess) || + ignored_for_lto(sess, &trans.crate_info, cnum)) && crate_type != config::CrateTypeDylib && !skip_native { cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath)); @@ -1264,7 +1266,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker, // file, then we don't need the object file as it's part of the // LTO module. 
Note that `#![no_builtins]` is excluded from LTO, // though, so we let that object file slide. - let skip_because_lto = sess.lto() && + let skip_because_lto = is_full_lto_enabled(sess) && is_rust_object && (sess.target.target.options.no_builtins || !trans.crate_info.is_no_builtins.contains(&cnum)); @@ -1301,7 +1303,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker, fn add_dynamic_crate(cmd: &mut Linker, sess: &Session, cratepath: &Path) { // If we're performing LTO, then it should have been previously required // that all upstream rust dependencies were available in an rlib format. - assert!(!sess.lto()); + assert!(!is_full_lto_enabled(sess)); // Just need to tell the linker about where the library lives and // what its name is @@ -1409,3 +1411,13 @@ fn link_binaryen(sess: &Session, e)); } } + +fn is_full_lto_enabled(sess: &Session) -> bool { + match sess.lto() { + Lto::Yes | + Lto::Thin | + Lto::Fat => true, + Lto::No | + Lto::ThinLocal => false, + } +} diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index b612247ffcd42..9ff5bcf7a33ca 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -18,7 +18,7 @@ use llvm::{ModuleRef, TargetMachineRef, True, False}; use llvm; use rustc::hir::def_id::LOCAL_CRATE; use rustc::middle::exported_symbols::SymbolExportLevel; -use rustc::session::config; +use rustc::session::config::{self, Lto}; use rustc::util::common::time; use time_graph::Timeline; use {ModuleTranslation, ModuleLlvm, ModuleKind, ModuleSource}; @@ -95,25 +95,22 @@ impl LtoModuleTranslation { } } -pub enum LTOMode { - WholeCrateGraph, - JustThisCrate, -} - pub(crate) fn run(cgcx: &CodegenContext, - modules: Vec<ModuleTranslation>, - mode: LTOMode, - timeline: &mut Timeline) + modules: Vec<ModuleTranslation>, + timeline: &mut Timeline) -> Result<Vec<LtoModuleTranslation>, FatalError> { let diag_handler = cgcx.create_diag_handler(); - let export_threshold = match mode { - LTOMode::WholeCrateGraph => { + let export_threshold = match cgcx.lto { + // We're just doing LTO for our one crate + Lto::ThinLocal => SymbolExportLevel::Rust, + + // We're doing LTO for the entire crate graph + Lto::Yes | Lto::Fat | Lto::Thin => { symbol_export::crates_export_threshold(&cgcx.crate_types) } - LTOMode::JustThisCrate => { - SymbolExportLevel::Rust - } + + Lto::No => panic!("didn't request LTO but we're doing LTO"), }; let symbol_filter = &|&(ref name, _, level): &(String, _, SymbolExportLevel)| { @@ -140,7 +137,7 @@ pub(crate) fn run(cgcx: &CodegenContext, // We save off all the bytecode and LLVM module ids for later processing // with either fat or thin LTO let mut upstream_modules = Vec::new(); - if let LTOMode::WholeCrateGraph = mode { + if cgcx.lto != Lto::ThinLocal { if cgcx.opts.cg.prefer_dynamic { diag_handler.struct_err("cannot prefer dynamic linking when performing LTO") .note("only 'staticlib', 'bin', and 'cdylib' outputs are \ @@ -186,13 +183,16 @@ pub(crate) fn run(cgcx: &CodegenContext, } let arr = symbol_white_list.iter().map(|c| c.as_ptr()).collect::<Vec<_>>(); - match mode { - LTOMode::WholeCrateGraph if !cgcx.thinlto => { + match cgcx.lto { + Lto::Yes | // `-C lto` == fat LTO by default + Lto::Fat => { fat_lto(cgcx, &diag_handler, modules, upstream_modules, &arr, timeline) } - _ => { + Lto::Thin | + Lto::ThinLocal => { thin_lto(&diag_handler, modules, upstream_modules, &arr, timeline) } + Lto::No => unreachable!(), } } diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index a013af7a4600e..8afa63a5e9735 
100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -11,6 +11,7 @@ use back::bytecode::{self, RLIB_BYTECODE_EXTENSION}; use back::lto::{self, ModuleBuffer, ThinBuffer}; use back::link::{self, get_linker, remove}; +use back::command::Command; use back::linker::LinkerInfo; use back::symbol_export::ExportedSymbols; use base; @@ -18,8 +19,8 @@ use consts; use rustc_incremental::{save_trans_partition, in_incr_comp_dir}; use rustc::dep_graph::{DepGraph, WorkProductFileKind}; use rustc::middle::cstore::{LinkMeta, EncodedMetadata}; -use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses, - AllPasses, Sanitizer}; +use rustc::session::config::{self, OutputFilenames, OutputType, Passes, SomePasses, + AllPasses, Sanitizer, Lto}; use rustc::session::Session; use rustc::util::nodemap::FxHashMap; use rustc_back::LinkerFlavor; @@ -32,7 +33,7 @@ use CrateInfo; use rustc::hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc::ty::TyCtxt; use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry}; -use rustc::util::fs::{link_or_copy, rename_or_copy_remove}; +use rustc::util::fs::{link_or_copy}; use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId}; use errors::emitter::{Emitter}; use syntax::attr; @@ -68,8 +69,7 @@ pub const RELOC_MODEL_ARGS : [(&'static str, llvm::RelocMode); 7] = [ ("ropi-rwpi", llvm::RelocMode::ROPI_RWPI), ]; -pub const CODE_GEN_MODEL_ARGS : [(&'static str, llvm::CodeModel); 5] = [ - ("default", llvm::CodeModel::Default), +pub const CODE_GEN_MODEL_ARGS: &[(&str, llvm::CodeModel)] = &[ ("small", llvm::CodeModel::Small), ("kernel", llvm::CodeModel::Kernel), ("medium", llvm::CodeModel::Medium), @@ -155,7 +155,7 @@ fn get_llvm_opt_size(optimize: config::OptLevel) -> llvm::CodeGenOptSize { pub fn create_target_machine(sess: &Session) -> TargetMachineRef { target_machine_factory(sess)().unwrap_or_else(|err| { - panic!(llvm_err(sess.diagnostic(), err)) + llvm_err(sess.diagnostic(), err).raise() }) } @@ -170,20 +170,23 @@ pub fn target_machine_factory(sess: &Session) let ffunction_sections = sess.target.target.options.function_sections; let fdata_sections = ffunction_sections; - let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => &s, - None => &sess.target.target.options.code_model, - }; - - let code_model = match CODE_GEN_MODEL_ARGS.iter().find( - |&&arg| arg.0 == code_model_arg) { - Some(x) => x.1, - _ => { - sess.err(&format!("{:?} is not a valid code model", - code_model_arg)); - sess.abort_if_errors(); - bug!(); + let code_model_arg = sess.opts.cg.code_model.as_ref().or( + sess.target.target.options.code_model.as_ref(), + ); + + let code_model = match code_model_arg { + Some(s) => { + match CODE_GEN_MODEL_ARGS.iter().find(|arg| arg.0 == s) { + Some(x) => x.1, + _ => { + sess.err(&format!("{:?} is not a valid code model", + code_model_arg)); + sess.abort_if_errors(); + bug!(); + } + } } + None => llvm::CodeModel::None, }; let singlethread = sess.target.target.options.singlethread; @@ -258,6 +261,7 @@ pub struct ModuleConfig { // make the object file bitcode. Provides easy compatibility with // emscripten's ecc compiler, when used as the linker. 
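With `TargetOptions::code_model` now an `Option<String>` and the "default" entry removed from `CODE_GEN_MODEL_ARGS`, the selection order becomes: explicit `-C code-model`, else the target's default, else hand LLVM `CodeModel::None` and let it choose. A small sketch of that lookup with a hypothetical `CodeModel` enum standing in for the FFI type:

#[derive(Clone, Copy, Debug, PartialEq)]
enum CodeModel { None, Small, Kernel, Medium, Large }

const CODE_GEN_MODEL_ARGS: &[(&str, CodeModel)] = &[
    ("small", CodeModel::Small),
    ("kernel", CodeModel::Kernel),
    ("medium", CodeModel::Medium),
    ("large", CodeModel::Large),
];

// The CLI value wins over the target default; unknown names are reported
// instead of silently falling back.
fn pick_code_model(cli: Option<&str>, target_default: Option<&str>)
                   -> Result<CodeModel, String> {
    match cli.or(target_default) {
        Some(s) => CODE_GEN_MODEL_ARGS
            .iter()
            .find(|&&(name, _)| name == s)
            .map(|&(_, model)| model)
            .ok_or_else(|| format!("{:?} is not a valid code model", s)),
        None => Ok(CodeModel::None),
    }
}

fn main() {
    assert_eq!(pick_code_model(Some("kernel"), None), Ok(CodeModel::Kernel));
    assert_eq!(pick_code_model(None, Some("small")), Ok(CodeModel::Small));
    assert_eq!(pick_code_model(None, None), Ok(CodeModel::None));
    // "default" is no longer a recognized name.
    assert!(pick_code_model(Some("default"), None).is_err());
}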
obj_is_bitcode: bool, + no_integrated_as: bool, } impl ModuleConfig { @@ -275,6 +279,7 @@ impl ModuleConfig { emit_asm: false, emit_obj: false, obj_is_bitcode: false, + no_integrated_as: false, no_verify: false, no_prepopulate_passes: false, @@ -313,13 +318,18 @@ impl ModuleConfig { } } +/// Assembler name and command used by codegen when no_integrated_as is enabled +struct AssemblerCommand { + name: PathBuf, + cmd: Command, +} + /// Additional resources used by optimize_and_codegen (not module specific) #[derive(Clone)] pub struct CodegenContext { // Resouces needed when running LTO pub time_passes: bool, - pub lto: bool, - pub thinlto: bool, + pub lto: Lto, pub no_landing_pads: bool, pub save_temps: bool, pub fewer_names: bool, @@ -356,6 +366,8 @@ pub struct CodegenContext { // A reference to the TimeGraph so we can register timings. None means that // measuring is disabled. time_graph: Option<TimeGraph>, + // The assembler command if no_integrated_as option is enabled, None otherwise + assembler_cmd: Option<Arc<AssemblerCommand>>, } impl CodegenContext { @@ -576,13 +588,8 @@ fn generate_lto_work(cgcx: &CodegenContext, TRANS_WORK_PACKAGE_KIND, "generate lto") }).unwrap_or(Timeline::noop()); - let mode = if cgcx.lto { - lto::LTOMode::WholeCrateGraph - } else { - lto::LTOMode::JustThisCrate - }; - let lto_modules = lto::run(cgcx, modules, mode, &mut timeline) - .unwrap_or_else(|e| panic!(e)); + let lto_modules = lto::run(cgcx, modules, &mut timeline) + .unwrap_or_else(|e| e.raise()); lto_modules.into_iter().map(|module| { let cost = module.cost(); @@ -639,13 +646,17 @@ unsafe fn codegen(cgcx: &CodegenContext, !cgcx.crate_types.contains(&config::CrateTypeRlib) && mtrans.kind == ModuleKind::Regular; + // If we don't have the integrated assembler, then we need to emit asm + // from LLVM and use `gcc` to create the object file. + let asm_to_obj = config.emit_obj && config.no_integrated_as; + // Change what we write and cleanup based on whether obj files are // just llvm bitcode. In that case write bitcode, and possibly // delete the bitcode if it wasn't requested. 
Don't generate the // machine code, instead copy the .o file from the .bc let write_bc = config.emit_bc || (config.obj_is_bitcode && !asm2wasm); let rm_bc = !config.emit_bc && config.obj_is_bitcode && !asm2wasm; - let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm; + let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm && !asm_to_obj; let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode && !asm2wasm; let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); @@ -725,13 +736,13 @@ unsafe fn codegen(cgcx: &CodegenContext, timeline.record("ir"); } - if config.emit_asm || (asm2wasm && config.emit_obj) { + if config.emit_asm || (asm2wasm && config.emit_obj) || asm_to_obj { let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); // We can't use the same module for asm and binary output, because that triggers // various errors like invalid IR or broken binaries, so we might have to clone the // module to produce the asm output - let llmod = if config.emit_obj { + let llmod = if config.emit_obj && !asm2wasm { llvm::LLVMCloneModule(llmod) } else { llmod @@ -740,7 +751,7 @@ unsafe fn codegen(cgcx: &CodegenContext, write_output_file(diag_handler, tm, cpm, llmod, &path, llvm::FileType::AssemblyFile) })?; - if config.emit_obj { + if config.emit_obj && !asm2wasm { llvm::LLVMDisposeModule(llmod); } timeline.record("asm"); @@ -760,6 +771,14 @@ unsafe fn codegen(cgcx: &CodegenContext, llvm::FileType::ObjectFile) })?; timeline.record("obj"); + } else if asm_to_obj { + let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); + run_assembler(cgcx, diag_handler, &assembly, &obj_out); + timeline.record("asm_to_obj"); + + if !config.emit_asm && !cgcx.save_temps { + drop(fs::remove_file(&assembly)); + } } Ok(()) @@ -841,7 +860,6 @@ pub fn start_async_translation(tcx: TyCtxt, total_cgus: usize) -> OngoingCrateTranslation { let sess = tcx.sess; - let crate_output = tcx.output_filenames(LOCAL_CRATE); let crate_name = tcx.crate_name(LOCAL_CRATE); let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins"); let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs, @@ -855,19 +873,9 @@ pub fn start_async_translation(tcx: TyCtxt, subsystem.to_string() }); - let no_integrated_as = tcx.sess.opts.cg.no_integrated_as || - (tcx.sess.target.target.options.no_integrated_as && - (crate_output.outputs.contains_key(&OutputType::Object) || - crate_output.outputs.contains_key(&OutputType::Exe))); let linker_info = LinkerInfo::new(tcx); let crate_info = CrateInfo::new(tcx); - let output_types_override = if no_integrated_as { - OutputTypes::new(&[(OutputType::Assembly, None)]) - } else { - sess.opts.output_types.clone() - }; - // Figure out what we actually need to build. 
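Before these flags get wired into the module configs just below, here is the decision table the codegen() hunk above now computes, as a minimal sketch with plain bools standing in for the ModuleConfig fields (asm2wasm and obj_is_bitcode are the pre-existing wasm/emscripten special cases; nothing here is rustc API):

// (write_bc, rm_bc, write_obj, asm_to_obj) as derived in codegen();
// `asm_to_obj` is the new path that emits assembly and hands it to an
// external assembler when the integrated assembler is disabled.
fn output_plan(emit_bc: bool, emit_obj: bool, obj_is_bitcode: bool,
               asm2wasm: bool, no_integrated_as: bool)
               -> (bool, bool, bool, bool) {
    let asm_to_obj = emit_obj && no_integrated_as;
    let write_bc = emit_bc || (obj_is_bitcode && !asm2wasm);
    let rm_bc = !emit_bc && obj_is_bitcode && !asm2wasm;
    let write_obj = emit_obj && !obj_is_bitcode && !asm2wasm && !asm_to_obj;
    (write_bc, rm_bc, write_obj, asm_to_obj)
}

fn main() {
    // -C no-integrated-as with a plain object output: emit asm, then assemble it.
    assert_eq!(output_plan(false, true, false, false, true),
               (false, false, false, true));
}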
let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone()); let mut metadata_config = ModuleConfig::new(vec![]); @@ -913,7 +921,10 @@ pub fn start_async_translation(tcx: TyCtxt, allocator_config.emit_bc_compressed = true; } - for output_type in output_types_override.keys() { + modules_config.no_integrated_as = tcx.sess.opts.cg.no_integrated_as || + tcx.sess.target.target.options.no_integrated_as; + + for output_type in sess.opts.output_types.keys() { match *output_type { OutputType::Bitcode => { modules_config.emit_bc = true; } OutputType::LlvmAssembly => { modules_config.emit_ir = true; } @@ -976,7 +987,6 @@ pub fn start_async_translation(tcx: TyCtxt, metadata, windows_subsystem, linker_info, - no_integrated_as, crate_info, time_graph, @@ -1280,28 +1290,51 @@ fn execute_work_item(cgcx: &CodegenContext, unsafe { optimize(cgcx, &diag_handler, &mtrans, config, timeline)?; - let lto = cgcx.lto; + // After we've done the initial round of optimizations we need to + // decide whether to synchronously codegen this module or ship it + // back to the coordinator thread for further LTO processing (which + // has to wait for all the initial modules to be optimized). + // + // Here we dispatch based on the `cgcx.lto` and kind of module we're + // translating... + let needs_lto = match cgcx.lto { + Lto::No => false, + + // Here we've got a full crate graph LTO requested. We ignore + // this, however, if the crate type is only an rlib as there's + // no full crate graph to process, that'll happen later. + // + // This use case currently comes up primarily for targets that + // require LTO so the request for LTO is always unconditionally + // passed down to the backend, but we don't actually want to do + // anything about it yet until we've got a final product. + Lto::Yes | Lto::Fat | Lto::Thin => { + cgcx.crate_types.len() != 1 || + cgcx.crate_types[0] != config::CrateTypeRlib + } - let auto_thin_lto = - cgcx.thinlto && - cgcx.total_cgus > 1 && - mtrans.kind != ModuleKind::Allocator; + // When we're automatically doing ThinLTO for multi-codegen-unit + // builds we don't actually want to LTO the allocator modules if + // it shows up. This is due to various linker shenanigans that + // we'll encounter later. + // + // Additionally here's where we also factor in the current LLVM + // version. If it doesn't support ThinLTO we skip this. + Lto::ThinLocal => { + mtrans.kind != ModuleKind::Allocator && + llvm::LLVMRustThinLTOAvailable() + } + }; - // If we're a metadata module we never participate in LTO. - // - // If LTO was explicitly requested on the command line, we always - // LTO everything else. - // - // If LTO *wasn't* explicitly requested and we're not a metdata - // module, then we may automatically do ThinLTO if we've got - // multiple codegen units. Note, however, that the allocator module - // doesn't participate here automatically because of linker - // shenanigans later on. - if mtrans.kind == ModuleKind::Metadata || (!lto && !auto_thin_lto) { + // Metadata modules never participate in LTO regardless of the lto + // settings. 
+ let needs_lto = needs_lto && mtrans.kind != ModuleKind::Metadata; + + if needs_lto { + Ok(WorkItemResult::NeedsLTO(mtrans)) + } else { let module = codegen(cgcx, &diag_handler, mtrans, config, timeline)?; Ok(WorkItemResult::Compiled(module)) - } else { - Ok(WorkItemResult::NeedsLTO(mtrans)) } } } @@ -1377,28 +1410,25 @@ fn start_executing_work(tcx: TyCtxt, each_linked_rlib_for_lto.push((cnum, path.to_path_buf())); })); - let crate_types = sess.crate_types.borrow(); - let only_rlib = crate_types.len() == 1 && - crate_types[0] == config::CrateTypeRlib; - let wasm_import_memory = attr::contains_name(&tcx.hir.krate().attrs, "wasm_import_memory"); + let assembler_cmd = if modules_config.no_integrated_as { + // HACK: currently we use linker (gcc) as our assembler + let (name, mut cmd, _) = get_linker(sess); + cmd.args(&sess.target.target.options.asm_args); + Some(Arc::new(AssemblerCommand { + name, + cmd, + })) + } else { + None + }; + let cgcx = CodegenContext { crate_types: sess.crate_types.borrow().clone(), each_linked_rlib_for_lto, - // If we're only building an rlibc then allow the LTO flag to be passed - // but don't actually do anything, the full LTO will happen later - lto: sess.lto() && !only_rlib, - - // Enable ThinLTO if requested, but only if the target we're compiling - // for doesn't require full LTO. Some targets require one LLVM module - // (they effectively don't have a linker) so it's up to us to use LTO to - // link everything together. - thinlto: sess.thinlto() && - !sess.target.target.options.requires_lto && - unsafe { llvm::LLVMRustThinLTOAvailable() }, - + lto: sess.lto(), no_landing_pads: sess.no_landing_pads(), fewer_names: sess.fewer_names(), save_temps: sess.opts.cg.save_temps, @@ -1423,6 +1453,7 @@ fn start_executing_work(tcx: TyCtxt, binaryen_linker: tcx.sess.linker_flavor() == LinkerFlavor::Binaryen, debuginfo: tcx.sess.opts.debuginfo, wasm_import_memory: wasm_import_memory, + assembler_cmd, }; // This is the "main loop" of parallel work happening for parallel codegen. 
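Since this Lto dispatch is the heart of the change to execute_work_item, here is the same decision compressed into a free function. The enums are simplified stand-ins for the real rustc types, and the crate-type and LLVM-capability checks are reduced to plain bools:

enum Lto { No, Yes, Fat, Thin, ThinLocal }
#[derive(PartialEq)]
enum ModuleKind { Regular, Metadata, Allocator }

fn needs_lto(lto: Lto, kind: ModuleKind,
             only_rlib: bool, thin_lto_available: bool) -> bool {
    let requested = match lto {
        Lto::No => false,
        // Full crate-graph LTO is deferred while the only product is an rlib;
        // the real work happens once a final artifact is linked.
        Lto::Yes | Lto::Fat | Lto::Thin => !only_rlib,
        // Automatic local ThinLTO skips the allocator module and needs
        // ThinLTO support in the LLVM we were built against.
        Lto::ThinLocal => kind != ModuleKind::Allocator && thin_lto_available,
    };
    // Metadata modules never participate in LTO, whatever was requested.
    requested && kind != ModuleKind::Metadata
}

fn main() {
    assert!(!needs_lto(Lto::Yes, ModuleKind::Regular, true, true));
    assert!(needs_lto(Lto::ThinLocal, ModuleKind::Regular, true, true));
    assert!(!needs_lto(Lto::ThinLocal, ModuleKind::Allocator, true, true));
}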
@@ -1931,15 +1962,14 @@ fn spawn_work(cgcx: CodegenContext, work: WorkItem) { }); } -pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { - let (pname, mut cmd, _) = get_linker(sess); - - for arg in &sess.target.target.options.asm_args { - cmd.arg(arg); - } +pub fn run_assembler(cgcx: &CodegenContext, handler: &Handler, assembly: &Path, object: &Path) { + let assembler = cgcx.assembler_cmd + .as_ref() + .expect("cgcx.assembler_cmd is missing?"); - cmd.arg("-c").arg("-o").arg(&outputs.path(OutputType::Object)) - .arg(&outputs.temp_path(OutputType::Assembly, None)); + let pname = &assembler.name; + let mut cmd = assembler.cmd.clone(); + cmd.arg("-c").arg("-o").arg(object).arg(assembly); debug!("{:?}", cmd); match cmd.output() { @@ -1948,18 +1978,18 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { let mut note = prog.stderr.clone(); note.extend_from_slice(&prog.stdout); - sess.struct_err(&format!("linking with `{}` failed: {}", - pname.display(), - prog.status)) + handler.struct_err(&format!("linking with `{}` failed: {}", + pname.display(), + prog.status)) .note(&format!("{:?}", &cmd)) .note(str::from_utf8(¬e[..]).unwrap()) .emit(); - sess.abort_if_errors(); + handler.abort_if_errors(); } }, Err(e) => { - sess.err(&format!("could not exec the linker `{}`: {}", pname.display(), e)); - sess.abort_if_errors(); + handler.err(&format!("could not exec the linker `{}`: {}", pname.display(), e)); + handler.abort_if_errors(); } } } @@ -2133,7 +2163,6 @@ pub struct OngoingCrateTranslation { metadata: EncodedMetadata, windows_subsystem: Option<String>, linker_info: LinkerInfo, - no_integrated_as: bool, crate_info: CrateInfo, time_graph: Option<TimeGraph>, coordinator_send: Sender<Box<Any + Send>>, @@ -2189,26 +2218,6 @@ impl OngoingCrateTranslation { metadata_module: compiled_modules.metadata_module, }; - if self.no_integrated_as { - run_assembler(sess, &self.output_filenames); - - // HACK the linker expects the object file to be named foo.0.o but - // `run_assembler` produces an object named just foo.o. Rename it if we - // are going to build an executable - if sess.opts.output_types.contains_key(&OutputType::Exe) { - let f = self.output_filenames.path(OutputType::Object); - rename_or_copy_remove(&f, - f.with_file_name(format!("{}.0.o", - f.file_stem().unwrap().to_string_lossy()))).unwrap(); - } - - // Remove assembly source, unless --save-temps was specified - if !sess.opts.cg.save_temps { - fs::remove_file(&self.output_filenames - .temp_path(OutputType::Assembly, None)).unwrap(); - } - } - trans } diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index 734ad8f3929ed..466a86e7ea558 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -78,7 +78,7 @@ use std::ffi::CString; use std::str; use std::sync::Arc; use std::time::{Instant, Duration}; -use std::i32; +use std::{i32, usize}; use std::iter; use std::sync::mpsc; use syntax_pos::Span; @@ -823,12 +823,10 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module); // We sort the codegen units by size. This way we can schedule work for LLVM - // a bit more efficiently. Note that "size" is defined rather crudely at the - // moment as it is just the number of TransItems in the CGU, not taking into - // account the size of each TransItem. + // a bit more efficiently. 
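The hunk just below switches the codegen-unit ordering key from item count to the new size_estimate(); the usize::MAX - size key is simply how sort_by_key is made to sort descending. A tiny stand-alone illustration of that trick:

fn main() {
    // Largest first, mirroring `usize::MAX - cgu.size_estimate()`.
    let mut estimates: Vec<usize> = vec![10, 300, 42];
    estimates.sort_by_key(|&s| usize::MAX - s);
    assert_eq!(estimates, vec![300, 42, 10]);
}

Using std::cmp::Reverse(s) as the key would express the same intent.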
let codegen_units = { let mut codegen_units = codegen_units; - codegen_units.sort_by_key(|cgu| -(cgu.items().len() as isize)); + codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate()); codegen_units }; diff --git a/src/librustc_trans/cabi_x86_64.rs b/src/librustc_trans/cabi_x86_64.rs index 6db18bfecf258..62bac8469ce4b 100644 --- a/src/librustc_trans/cabi_x86_64.rs +++ b/src/librustc_trans/cabi_x86_64.rs @@ -16,9 +16,11 @@ use context::CodegenCx; use rustc::ty::layout::{self, TyLayout, Size}; -#[derive(Clone, Copy, PartialEq, Debug)] +/// Classification of "eightbyte" components. +// NB: the order of the variants is from general to specific, +// such that `unify(a, b)` is the "smaller" of `a` and `b`. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] enum Class { - None, Int, Sse, SseUp @@ -32,29 +34,10 @@ const LARGEST_VECTOR_SIZE: usize = 512; const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64; fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>) - -> Result<[Class; MAX_EIGHTBYTES], Memory> { - fn unify(cls: &mut [Class], - off: Size, - c: Class) { - let i = (off.bytes() / 8) as usize; - let to_write = match (cls[i], c) { - (Class::None, _) => c, - (_, Class::None) => return, - - (Class::Int, _) | - (_, Class::Int) => Class::Int, - - (Class::Sse, _) | - (_, Class::Sse) => Class::Sse, - - (Class::SseUp, Class::SseUp) => Class::SseUp - }; - cls[i] = to_write; - } - + -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory> { fn classify<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, layout: TyLayout<'tcx>, - cls: &mut [Class], + cls: &mut [Option<Class>], off: Size) -> Result<(), Memory> { if !off.is_abi_aligned(layout.align) { @@ -64,31 +47,20 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>) return Ok(()); } - match layout.abi { - layout::Abi::Uninhabited => {} + let mut c = match layout.abi { + layout::Abi::Uninhabited => return Ok(()), layout::Abi::Scalar(ref scalar) => { - let reg = match scalar.value { + match scalar.value { layout::Int(..) | layout::Pointer => Class::Int, layout::F32 | layout::F64 => Class::Sse - }; - unify(cls, off, reg); - } - - layout::Abi::Vector { ref element, count } => { - unify(cls, off, Class::Sse); - - // everything after the first one is the upper - // half of a register. - let stride = element.value.size(cx); - for i in 1..count { - let field_off = off + stride * i; - unify(cls, field_off, Class::SseUp); } } + layout::Abi::Vector { .. } => Class::Sse, + layout::Abi::ScalarPair(..) | layout::Abi::Aggregate { .. } => { match layout.variants { @@ -97,12 +69,26 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>) let field_off = off + layout.fields.offset(i); classify(cx, layout.field(cx, i), cls, field_off)?; } + return Ok(()); } layout::Variants::Tagged { .. } | layout::Variants::NicheFilling { .. } => return Err(Memory), } } + }; + + // Fill in `cls` for scalars (Int/Sse) and vectors (Sse). + let first = (off.bytes() / 8) as usize; + let last = ((off.bytes() + layout.size.bytes() - 1) / 8) as usize; + for cls in &mut cls[first..=last] { + *cls = Some(cls.map_or(c, |old| old.min(c))); + + // Everything after the first Sse "eightbyte" + // component is the upper half of a register. 
+ if c == Class::Sse { + c = Class::SseUp; + } } Ok(()) @@ -113,23 +99,23 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>) return Err(Memory); } - let mut cls = [Class::None; MAX_EIGHTBYTES]; + let mut cls = [None; MAX_EIGHTBYTES]; classify(cx, arg.layout, &mut cls, Size::from_bytes(0))?; if n > 2 { - if cls[0] != Class::Sse { + if cls[0] != Some(Class::Sse) { return Err(Memory); } - if cls[1..n].iter().any(|&c| c != Class::SseUp) { + if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) { return Err(Memory); } } else { let mut i = 0; while i < n { - if cls[i] == Class::SseUp { - cls[i] = Class::Sse; - } else if cls[i] == Class::Sse { + if cls[i] == Some(Class::SseUp) { + cls[i] = Some(Class::Sse); + } else if cls[i] == Some(Class::Sse) { i += 1; - while i != n && cls[i] == Class::SseUp { i += 1; } + while i != n && cls[i] == Some(Class::SseUp) { i += 1; } } else { i += 1; } @@ -139,14 +125,14 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>) Ok(cls) } -fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> { +fn reg_component(cls: &[Option<Class>], i: &mut usize, size: Size) -> Option<Reg> { if *i >= cls.len() { return None; } match cls[*i] { - Class::None => None, - Class::Int => { + None => None, + Some(Class::Int) => { *i += 1; Some(match size.bytes() { 1 => Reg::i8(), @@ -156,8 +142,10 @@ fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> { _ => Reg::i64() }) } - Class::Sse => { - let vec_len = 1 + cls[*i+1..].iter().take_while(|&&c| c == Class::SseUp).count(); + Some(Class::Sse) => { + let vec_len = 1 + cls[*i+1..].iter() + .take_while(|&&c| c == Some(Class::SseUp)) + .count(); *i += vec_len; Some(if vec_len == 1 { match size.bytes() { @@ -171,20 +159,20 @@ fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> { } }) } - c => bug!("reg_component: unhandled class {:?}", c) + Some(c) => bug!("reg_component: unhandled class {:?}", c) } } -fn cast_target(cls: &[Class], size: Size) -> CastTarget { +fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget { let mut i = 0; let lo = reg_component(cls, &mut i, size).unwrap(); let offset = Size::from_bytes(8) * (i as u64); - let target = if size <= offset { - CastTarget::from(lo) - } else { - let hi = reg_component(cls, &mut i, size - offset).unwrap(); - CastTarget::Pair(lo, hi) - }; + let mut target = CastTarget::from(lo); + if size > offset { + if let Some(hi) = reg_component(cls, &mut i, size - offset) { + target = CastTarget::Pair(lo, hi); + } + } assert_eq!(reg_component(cls, &mut i, Size::from_bytes(0)), None); target } @@ -194,44 +182,48 @@ pub fn compute_abi_info<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, fty: &mut FnType<'tc let mut sse_regs = 8; // XMM0-7 let mut x86_64_ty = |arg: &mut ArgType<'tcx>, is_arg: bool| { - let cls = classify_arg(cx, arg); + let mut cls_or_mem = classify_arg(cx, arg); let mut needed_int = 0; let mut needed_sse = 0; - let in_mem = match cls { - Err(Memory) => true, - Ok(ref cls) if is_arg => { - for &c in cls { + if is_arg { + if let Ok(cls) = cls_or_mem { + for &c in &cls { match c { - Class::Int => needed_int += 1, - Class::Sse => needed_sse += 1, + Some(Class::Int) => needed_int += 1, + Some(Class::Sse) => needed_sse += 1, _ => {} } } - arg.layout.is_aggregate() && - (int_regs < needed_int || sse_regs < needed_sse) + if arg.layout.is_aggregate() { + if int_regs < needed_int || sse_regs < needed_sse { + cls_or_mem = Err(Memory); + } + } } - Ok(_) => false - }; + } - if in_mem { - if is_arg { 
- arg.make_indirect_byval(); - } else { - // `sret` parameter thus one less integer register available - arg.make_indirect(); - int_regs -= 1; + match cls_or_mem { + Err(Memory) => { + if is_arg { + arg.make_indirect_byval(); + } else { + // `sret` parameter thus one less integer register available + arg.make_indirect(); + int_regs -= 1; + } } - } else { - // split into sized chunks passed individually - int_regs -= needed_int; - sse_regs -= needed_sse; - - if arg.layout.is_aggregate() { - let size = arg.layout.size; - arg.cast_to(cast_target(cls.as_ref().unwrap(), size)) - } else { - arg.extend_integer_width_to(32); + Ok(ref cls) => { + // split into sized chunks passed individually + int_regs -= needed_int; + sse_regs -= needed_sse; + + if arg.layout.is_aggregate() { + let size = arg.layout.size; + arg.cast_to(cast_target(cls, size)) + } else { + arg.extend_integer_width_to(32); + } } } }; diff --git a/src/librustc_trans/debuginfo/mod.rs b/src/librustc_trans/debuginfo/mod.rs index b46e12d9d5b67..9071eb776d529 100644 --- a/src/librustc_trans/debuginfo/mod.rs +++ b/src/librustc_trans/debuginfo/mod.rs @@ -270,6 +270,9 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, } None => {} }; + if sig.output().is_never() { + flags = flags | DIFlags::FlagNoReturn; + } let fn_metadata = unsafe { llvm::LLVMRustDIBuilderCreateFunction( diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index b367eb6548d01..da01592d9118a 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -487,16 +487,18 @@ fn arg_local_refs<'a, 'tcx>(bx: &Builder<'a, 'tcx>, // The Rust ABI passes indirect variables using a pointer and a manual copy, so we // need to insert a deref here, but the C ABI uses a pointer and a copy using the // byval attribute, for which LLVM does the deref itself, so we must not add it. + // Starting with D31439 in LLVM 5, it *always* does the deref itself. 
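Stepping back to the cabi_x86_64 hunk above: with the Class variants ordered from general to specific and the old None variant modelled by Option, the hand-written unify() collapses into a min(). A reduced sketch of just that merge step (the enum mirrors the patch; everything around it is simplified):

// Int < Sse < SseUp by declaration order, so the "more general" class wins min().
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class { Int, Sse, SseUp }

fn unify(slot: &mut Option<Class>, new: Class) {
    *slot = Some(slot.map_or(new, |old| old.min(new)));
}

fn main() {
    let mut slot = None;
    unify(&mut slot, Class::SseUp);
    assert_eq!(slot, Some(Class::SseUp));
    unify(&mut slot, Class::Sse);   // Sse is more general than SseUp
    assert_eq!(slot, Some(Class::Sse));
    unify(&mut slot, Class::Int);   // Int beats any Sse classification
    assert_eq!(slot, Some(Class::Int));
}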
let mut variable_access = VariableAccess::DirectVariable { alloca: place.llval }; - - if let PassMode::Indirect(ref attrs) = arg.mode { - if !attrs.contains(ArgAttribute::ByVal) { - variable_access = VariableAccess::IndirectVariable { - alloca: place.llval, - address_operations: &deref_op, - }; + if unsafe { llvm::LLVMRustVersionMajor() < 5 } { + if let PassMode::Indirect(ref attrs) = arg.mode { + if !attrs.contains(ArgAttribute::ByVal) { + variable_access = VariableAccess::IndirectVariable { + alloca: place.llval, + address_operations: &deref_op, + }; + } } } diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 4616b4cf80c97..1139ea5fbd364 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -979,7 +979,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { let item_id = tcx.hir.get_parent_node(node_id); let item_def_id = tcx.hir.local_def_id(item_id); let generics = tcx.generics_of(item_def_id); - let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id).index]; + let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id)]; tcx.mk_param(index, tcx.hir.name(node_id)) } Def::SelfTy(_, Some(def_id)) => { @@ -1206,22 +1206,27 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { let output = bare_fn_ty.output(); let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output); for br in late_bound_in_ret.difference(&late_bound_in_args) { - let br_name = match *br { - ty::BrNamed(_, name) => name, - _ => { - span_bug!( - decl.output.span(), - "anonymous bound region {:?} in return but not args", - br); - } + let lifetime_name = match *br { + ty::BrNamed(_, name) => format!("lifetime `{}`,", name), + ty::BrAnon(_) | ty::BrFresh(_) | ty::BrEnv => format!("an anonymous lifetime"), }; - struct_span_err!(tcx.sess, - decl.output.span(), - E0581, - "return type references lifetime `{}`, \ - which does not appear in the fn input types", - br_name) - .emit(); + let mut err = struct_span_err!(tcx.sess, + decl.output.span(), + E0581, + "return type references {} \ + which is not constrained by the fn input types", + lifetime_name); + if let ty::BrAnon(_) = *br { + // The only way for an anonymous lifetime to wind up + // in the return type but **also** be unconstrained is + // if it only appears in "associated types" in the + // input. See #47511 for an example. In this case, + // though we can easily give a hint that ought to be + // relevant. 
+ err.note("lifetimes appearing in an associated type \ + are not considered constrained"); + } + err.emit(); } bare_fn_ty diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs index 992a510f71311..48bd7b14fc96a 100644 --- a/src/librustc_typeck/check/cast.rs +++ b/src/librustc_typeck/check/cast.rs @@ -281,10 +281,12 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { .emit(); } CastError::SizedUnsizedCast => { - type_error_struct!(fcx.tcx.sess, self.span, self.expr_ty, E0607, - "cannot cast thin pointer `{}` to fat pointer `{}`", - self.expr_ty, - fcx.ty_to_string(self.cast_ty)).emit(); + use structured_errors::{SizedUnsizedCastError, StructuredDiagnostic}; + SizedUnsizedCastError::new(&fcx.tcx.sess, + self.span, + self.expr_ty, + fcx.ty_to_string(self.cast_ty)) + .diagnostic().emit(); } CastError::UnknownCastPtrKind | CastError::UnknownExprPtrKind => { diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 5b7eca7a30113..6147743437b8e 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -103,6 +103,7 @@ use rustc::ty::maps::Providers; use rustc::ty::util::{Representability, IntTypeExt}; use rustc::ty::layout::LayoutOf; use errors::{DiagnosticBuilder, DiagnosticId}; + use require_c_abi_if_variadic; use session::{CompileIncomplete, config, Session}; use TypeAndSubsts; @@ -1636,7 +1637,7 @@ impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> { let item_id = tcx.hir.ty_param_owner(node_id); let item_def_id = tcx.hir.local_def_id(item_id); let generics = tcx.generics_of(item_def_id); - let index = generics.type_param_to_index[&def_id.index]; + let index = generics.type_param_to_index[&def_id]; ty::GenericPredicates { parent: None, predicates: self.param_env.caller_bounds.iter().filter(|predicate| { @@ -2599,9 +2600,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // arguments which we skipped above. if variadic { fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) { - type_error_struct!(s, span, t, E0617, - "can't pass `{}` to variadic function, cast to `{}`", - t, cast_ty).emit(); + use structured_errors::{VariadicError, StructuredDiagnostic}; + VariadicError::new(s, span, t, cast_ty).diagnostic().emit(); } for arg in args.iter().skip(expected_arg_count) { diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 8d428c860c50a..7a91827faef83 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -40,8 +40,6 @@ use util::nodemap::FxHashMap; use rustc_const_math::ConstInt; -use std::collections::BTreeMap; - use syntax::{abi, ast}; use syntax::codemap::Spanned; use syntax::symbol::{Symbol, keywords}; @@ -240,7 +238,7 @@ fn type_param_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let param_owner = tcx.hir.ty_param_owner(param_id); let param_owner_def_id = tcx.hir.local_def_id(param_owner); let generics = tcx.generics_of(param_owner_def_id); - let index = generics.type_param_to_index[&def_id.index]; + let index = generics.type_param_to_index[&def_id]; let ty = tcx.mk_param(index, tcx.hir.ty_param_name(param_id)); // Don't look for bounds where the type parameter isn't in scope. 
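For reference, E0581 is the diagnostic the astconv hunk above rewords: it fires when a bare fn type's return type mentions a late-bound lifetime that none of the inputs constrain. A hedged illustration of the named-lifetime shape, intended to be rejected by the compiler (the anonymous-lifetime case from #47511 is the one that additionally gets the new note about associated types; it is not reproduced here):

fn main() {
    // Rejected with E0581: "return type references lifetime `'a`, which is
    // not constrained by the fn input types".
    let _f: for<'a> fn() -> &'a i32;
}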
@@ -1024,10 +1022,9 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, }); } - let mut type_param_to_index = BTreeMap::new(); - for param in &types { - type_param_to_index.insert(param.def_id.index, param.index); - } + let type_param_to_index = types.iter() + .map(|param| (param.def_id, param.index)) + .collect(); tcx.alloc_generics(ty::Generics { parent: parent_def_id, diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 75a74e1069c31..bd7e200d620e6 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -123,16 +123,17 @@ use std::iter; // registered before they are used. mod diagnostics; +mod astconv; mod check; mod check_unused; -mod astconv; +mod coherence; mod collect; mod constrained_type_params; +mod structured_errors; mod impl_wf_check; -mod coherence; +mod namespace; mod outlives; mod variance; -mod namespace; pub struct TypeAndSubsts<'tcx> { substs: &'tcx Substs<'tcx>, diff --git a/src/librustc_typeck/structured_errors.rs b/src/librustc_typeck/structured_errors.rs new file mode 100644 index 0000000000000..afcdc7575a3cb --- /dev/null +++ b/src/librustc_typeck/structured_errors.rs @@ -0,0 +1,150 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use rustc::session::Session; +use syntax_pos::Span; +use errors::{DiagnosticId, DiagnosticBuilder}; +use rustc::ty::{Ty, TypeFoldable}; + +pub trait StructuredDiagnostic<'tcx> { + fn session(&self) -> &Session; + + fn code(&self) -> DiagnosticId; + + fn common(&self) -> DiagnosticBuilder<'tcx>; + + fn diagnostic(&self) -> DiagnosticBuilder<'tcx> { + let err = self.common(); + if self.session().teach(&self.code()) { + self.extended(err) + } else { + self.regular(err) + } + } + + fn regular(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { + err + } + + fn extended(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { + err + } +} + +pub struct VariadicError<'tcx> { + sess: &'tcx Session, + span: Span, + t: Ty<'tcx>, + cast_ty: &'tcx str, +} + +impl<'tcx> VariadicError<'tcx> { + pub fn new(sess: &'tcx Session, + span: Span, + t: Ty<'tcx>, + cast_ty: &'tcx str) -> VariadicError<'tcx> { + VariadicError { sess, span, t, cast_ty } + } +} + +impl<'tcx> StructuredDiagnostic<'tcx> for VariadicError<'tcx> { + fn session(&self) -> &Session { self.sess } + + fn code(&self) -> DiagnosticId { + __diagnostic_used!(E0617); + DiagnosticId::Error("E0617".to_owned()) + } + + fn common(&self) -> DiagnosticBuilder<'tcx> { + let mut err = if self.t.references_error() { + self.sess.diagnostic().struct_dummy() + } else { + self.sess.struct_span_fatal_with_code( + self.span, + &format!("can't pass `{}` to variadic function", self.t), + self.code(), + ) + }; + if let Ok(snippet) = self.sess.codemap().span_to_snippet(self.span) { + err.span_suggestion(self.span, + &format!("cast the value to `{}`", self.cast_ty), + format!("{} as {}", snippet, self.cast_ty)); + } else { + err.help(&format!("cast the value to `{}`", self.cast_ty)); + } + err + } + + fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { + err.note(&format!("certain types, like 
`{}`, must be cast before passing them to a \ + variadic function, because of arcane ABI rules dictated by the C \ + standard", + self.t)); + err + } +} + +pub struct SizedUnsizedCastError<'tcx> { + sess: &'tcx Session, + span: Span, + expr_ty: Ty<'tcx>, + cast_ty: String, +} + +impl<'tcx> SizedUnsizedCastError<'tcx> { + pub fn new(sess: &'tcx Session, + span: Span, + expr_ty: Ty<'tcx>, + cast_ty: String) -> SizedUnsizedCastError<'tcx> { + SizedUnsizedCastError { sess, span, expr_ty, cast_ty } + } +} + +impl<'tcx> StructuredDiagnostic<'tcx> for SizedUnsizedCastError<'tcx> { + fn session(&self) -> &Session { self.sess } + + fn code(&self) -> DiagnosticId { + __diagnostic_used!(E0607); + DiagnosticId::Error("E0607".to_owned()) + } + + fn common(&self) -> DiagnosticBuilder<'tcx> { + if self.expr_ty.references_error() { + self.sess.diagnostic().struct_dummy() + } else { + self.sess.struct_span_fatal_with_code( + self.span, + &format!("cannot cast thin pointer `{}` to fat pointer `{}`", + self.expr_ty, + self.cast_ty), + self.code(), + ) + } + } + + fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> { + err.help( + "Thin pointers are \"simple\" pointers: they are purely a reference to a +memory address. + +Fat pointers are pointers referencing \"Dynamically Sized Types\" (also +called DST). DST don't have a statically known size, therefore they can +only exist behind some kind of pointers that contain additional +information. Slices and trait objects are DSTs. In the case of slices, +the additional information the fat pointer holds is their size. + +To fix this error, don't try to cast directly between thin and fat +pointers. + +For more information about casts, take a look at The Book: +https://doc.rust-lang.org/book/first-edition/casting-between-types.html"); + err + } +} diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs index 0ed9b14b9d1d4..f7e10a4a47d37 100644 --- a/src/librustc_typeck/variance/constraints.rs +++ b/src/librustc_typeck/variance/constraints.rs @@ -14,16 +14,12 @@ //! We walk the set of items and, for each member, generate new constraints. use hir::def_id::DefId; -use rustc::dep_graph::{DepGraphSafe, DepKind, DepNodeColor}; -use rustc::ich::StableHashingContext; use rustc::ty::subst::Substs; use rustc::ty::{self, Ty, TyCtxt}; use syntax::ast; use rustc::hir; use rustc::hir::itemlikevisit::ItemLikeVisitor; -use rustc_data_structures::stable_hasher::StableHashingContextProvider; - use super::terms::*; use super::terms::VarianceTerm::*; @@ -132,50 +128,11 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> { } } -impl<'a, 'tcx> StableHashingContextProvider for ConstraintContext<'a, 'tcx> { - type ContextType = StableHashingContext<'tcx>; - - fn create_stable_hashing_context(&self) -> Self::ContextType { - self.terms_cx.tcx.create_stable_hashing_context() - } -} - -impl<'a, 'tcx> DepGraphSafe for ConstraintContext<'a, 'tcx> {} - impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { fn visit_node_helper(&mut self, id: ast::NodeId) { let tcx = self.terms_cx.tcx; let def_id = tcx.hir.local_def_id(id); - - // Encapsulate constructing the constraints into a task we can - // reference later. This can go away once the red-green - // algorithm is in place. - // - // See README.md for a detailed discussion - // on dep-graph management. 
- let dep_node = def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints); - - if let Some(DepNodeColor::Green(_)) = tcx.dep_graph.node_color(&dep_node) { - // If the corresponding node has already been marked as green, the - // appropriate portion of the DepGraph has already been loaded from - // the previous graph, so we don't do any dep-tracking. Since we - // don't cache any values though, we still have to re-run the - // computation. - tcx.dep_graph.with_ignore(|| { - self.build_constraints_for_item(def_id); - }); - } else { - tcx.dep_graph.with_task(dep_node, - self, - def_id, - visit_item_task); - } - - fn visit_item_task<'a, 'tcx>(ccx: &mut ConstraintContext<'a, 'tcx>, - def_id: DefId) - { - ccx.build_constraints_for_item(def_id); - } + self.build_constraints_for_item(def_id); } fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs index 418d2b9467096..003ee6f25995d 100644 --- a/src/librustc_typeck/variance/mod.rs +++ b/src/librustc_typeck/variance/mod.rs @@ -12,7 +12,6 @@ //! parameters. See README.md for details. use arena; -use rustc::dep_graph::DepKind; use rustc::hir; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::ty::{self, CrateVariancesMap, TyCtxt}; @@ -95,9 +94,6 @@ fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId) // Everything else must be inferred. let crate_map = tcx.crate_variances(LOCAL_CRATE); - let dep_node = item_def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints); - tcx.dep_graph.read(dep_node); - crate_map.variances.get(&item_def_id) .unwrap_or(&crate_map.empty_variance) .clone() diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 78b24a8d18ab4..0929b833c1965 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -659,7 +659,8 @@ pub struct Attributes { pub other_attrs: Vec<ast::Attribute>, pub cfg: Option<Rc<Cfg>>, pub span: Option<syntax_pos::Span>, - pub links: Vec<(String, DefId)>, + /// map from Rust paths to resolved defs and potential URL fragments + pub links: Vec<(String, DefId, Option<String>)>, } impl Attributes { @@ -820,8 +821,12 @@ impl Attributes { /// Cache must be populated before call pub fn links(&self) -> Vec<(String, String)> { use html::format::href; - self.links.iter().filter_map(|&(ref s, did)| { - if let Some((href, ..)) = href(did) { + self.links.iter().filter_map(|&(ref s, did, ref fragment)| { + if let Some((mut href, ..)) = href(did) { + if let Some(ref fragment) = *fragment { + href.push_str("#"); + href.push_str(fragment); + } Some((s.clone(), href)) } else { None @@ -843,10 +848,8 @@ impl AttributesExt for Attributes { /// they exist in both namespaces (structs and modules) fn value_ns_kind(def: Def, path_str: &str) -> Option<(&'static str, String)> { match def { - // structs and mods exist in both namespaces. skip them - Def::StructCtor(..) | Def::Mod(..) => None, - Def::Variant(..) | Def::VariantCtor(..) - => Some(("variant", format!("{}()", path_str))), + // structs, variants, and mods exist in both namespaces. skip them + Def::StructCtor(..) | Def::Mod(..) | Def::Variant(..) | Def::VariantCtor(..) => None, Def::Fn(..) => Some(("function", format!("{}()", path_str))), Def::Method(..) 
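To make the rustdoc changes above concrete: doc-comment links can now point at enum variants and associated items, and resolve() hands back an optional URL fragment alongside the Def so the rendered href lands on the right anchor. An illustrative doc comment (the linked paths are stand-ins, not tests from the patch):

/// Methods resolve through the new associated-item fallback and carry a
/// fragment such as `#method.push`, e.g. [`Vec::push`]; enum variants such as
/// [`Option::None`] resolve to the parent enum's page plus a variant fragment
/// instead of being reported as plain value-namespace items.
pub struct IntraDocLinkDemo;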
@@ -880,10 +883,10 @@ fn ambiguity_error(cx: &DocContext, attrs: &Attributes, let sp = attrs.doc_strings.first() .map_or(DUMMY_SP, |a| a.span()); cx.sess() - .struct_span_err(sp, - &format!("`{}` is both {} {} and {} {}", - path_str, article1, kind1, - article2, kind2)) + .struct_span_warn(sp, + &format!("`{}` is both {} {} and {} {}", + path_str, article1, kind1, + article2, kind2)) .help(&format!("try `{}` if you want to select the {}, \ or `{}` if you want to \ select the {}", @@ -892,21 +895,114 @@ fn ambiguity_error(cx: &DocContext, attrs: &Attributes, .emit(); } +/// Given an enum variant's def, return the def of its enum and the associated fragment +fn handle_variant(cx: &DocContext, def: Def) -> Result<(Def, Option<String>), ()> { + use rustc::ty::DefIdTree; + + let parent = if let Some(parent) = cx.tcx.parent(def.def_id()) { + parent + } else { + return Err(()) + }; + let parent_def = Def::Enum(parent); + let variant = cx.tcx.expect_variant_def(def); + Ok((parent_def, Some(format!("{}.v", variant.name)))) +} + /// Resolve a given string as a path, along with whether or not it is -/// in the value namespace -fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<hir::Path, ()> { +/// in the value namespace. Also returns an optional URL fragment in the case +/// of variants and methods +fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option<String>), ()> { // In case we're in a module, try to resolve the relative // path if let Some(id) = cx.mod_ids.borrow().last() { - cx.resolver.borrow_mut() - .with_scope(*id, |resolver| { - resolver.resolve_str_path_error(DUMMY_SP, - &path_str, is_val) - }) + let result = cx.resolver.borrow_mut() + .with_scope(*id, + |resolver| { + resolver.resolve_str_path_error(DUMMY_SP, + &path_str, is_val) + }); + + if let Ok(result) = result { + // In case this is a trait item, skip the + // early return and try looking for the trait + let value = match result.def { + Def::Method(_) | Def::AssociatedConst(_) => true, + Def::AssociatedTy(_) => false, + Def::Variant(_) => return handle_variant(cx, result.def), + // not a trait item, just return what we found + _ => return Ok((result.def, None)) + }; + + if value != is_val { + return Err(()) + } + } else { + // If resolution failed, it may still be a method + // because methods are not handled by the resolver + // If so, bail when we're not looking for a value + if !is_val { + return Err(()) + } + } + + // Try looking for methods and associated items + let mut split = path_str.rsplitn(2, "::"); + let mut item_name = if let Some(first) = split.next() { + first + } else { + return Err(()) + }; + + let mut path = if let Some(second) = split.next() { + second + } else { + return Err(()) + }; + + let ty = cx.resolver.borrow_mut() + .with_scope(*id, + |resolver| { + resolver.resolve_str_path_error(DUMMY_SP, + &path, false) + })?; + match ty.def { + Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => { + let item = cx.tcx.inherent_impls(did).iter() + .flat_map(|imp| cx.tcx.associated_items(*imp)) + .find(|item| item.name == item_name); + if let Some(item) = item { + if item.kind == ty::AssociatedKind::Method && is_val { + Ok((ty.def, Some(format!("method.{}", item_name)))) + } else { + Err(()) + } + } else { + Err(()) + } + } + Def::Trait(did) => { + let item = cx.tcx.associated_item_def_ids(did).iter() + .map(|item| cx.tcx.associated_item(*item)) + .find(|item| item.name == item_name); + if let Some(item) = item { + let kind = match item.kind { + 
ty::AssociatedKind::Const if is_val => "associatedconstant", + ty::AssociatedKind::Type if !is_val => "associatedtype", + ty::AssociatedKind::Method if is_val => "tymethod", + _ => return Err(()) + }; + + Ok((ty.def, Some(format!("{}.{}", kind, item_name)))) + } else { + Err(()) + } + } + _ => Err(()) + } + } else { - // FIXME(Manishearth) this branch doesn't seem to ever be hit, really - cx.resolver.borrow_mut() - .resolve_str_path_error(DUMMY_SP, &path_str, is_val) + Err(()) } } @@ -955,7 +1051,7 @@ impl Clean<Attributes> for [ast::Attribute] { if UnstableFeatures::from_environment().is_nightly_build() { let dox = attrs.collapsed_doc_value().unwrap_or_else(String::new); for link in markdown_links(&dox, cx.render_type) { - let def = { + let (def, fragment) = { let mut kind = PathKind::Unknown; let path_str = if let Some(prefix) = ["struct@", "enum@", "type@", @@ -965,7 +1061,8 @@ impl Clean<Attributes> for [ast::Attribute] { link.trim_left_matches(prefix) } else if let Some(prefix) = ["const@", "static@", - "value@", "function@", "mod@", "fn@", "module@"] + "value@", "function@", "mod@", + "fn@", "module@", "method@"] .iter().find(|p| link.starts_with(**p)) { kind = PathKind::Value; link.trim_left_matches(prefix) @@ -993,8 +1090,8 @@ impl Clean<Attributes> for [ast::Attribute] { match kind { PathKind::Value => { - if let Ok(path) = resolve(cx, path_str, true) { - path.def + if let Ok(def) = resolve(cx, path_str, true) { + def } else { // this could just be a normal link or a broken link // we could potentially check if something is @@ -1003,8 +1100,8 @@ impl Clean<Attributes> for [ast::Attribute] { } } PathKind::Type => { - if let Ok(path) = resolve(cx, path_str, false) { - path.def + if let Ok(def) = resolve(cx, path_str, false) { + def } else { // this could just be a normal link continue; @@ -1013,42 +1110,42 @@ impl Clean<Attributes> for [ast::Attribute] { PathKind::Unknown => { // try everything! if let Some(macro_def) = macro_resolve(cx, path_str) { - if let Ok(type_path) = resolve(cx, path_str, false) { + if let Ok(type_def) = resolve(cx, path_str, false) { let (type_kind, article, type_disambig) - = type_ns_kind(type_path.def, path_str); + = type_ns_kind(type_def.0, path_str); ambiguity_error(cx, &attrs, path_str, article, type_kind, &type_disambig, "a", "macro", &format!("macro@{}", path_str)); continue; - } else if let Ok(value_path) = resolve(cx, path_str, true) { + } else if let Ok(value_def) = resolve(cx, path_str, true) { let (value_kind, value_disambig) - = value_ns_kind(value_path.def, path_str) + = value_ns_kind(value_def.0, path_str) .expect("struct and mod cases should have been \ caught in previous branch"); ambiguity_error(cx, &attrs, path_str, "a", value_kind, &value_disambig, "a", "macro", &format!("macro@{}", path_str)); } - macro_def - } else if let Ok(type_path) = resolve(cx, path_str, false) { + (macro_def, None) + } else if let Ok(type_def) = resolve(cx, path_str, false) { // It is imperative we search for not-a-value first // Otherwise we will find struct ctors for when we are looking // for structs, and the link won't work. 
// if there is something in both namespaces - if let Ok(value_path) = resolve(cx, path_str, true) { - let kind = value_ns_kind(value_path.def, path_str); + if let Ok(value_def) = resolve(cx, path_str, true) { + let kind = value_ns_kind(value_def.0, path_str); if let Some((value_kind, value_disambig)) = kind { let (type_kind, article, type_disambig) - = type_ns_kind(type_path.def, path_str); + = type_ns_kind(type_def.0, path_str); ambiguity_error(cx, &attrs, path_str, article, type_kind, &type_disambig, "a", value_kind, &value_disambig); continue; } } - type_path.def - } else if let Ok(value_path) = resolve(cx, path_str, true) { - value_path.def + type_def + } else if let Ok(value_def) = resolve(cx, path_str, true) { + value_def } else { // this could just be a normal link continue; @@ -1056,7 +1153,7 @@ impl Clean<Attributes> for [ast::Attribute] { } PathKind::Macro => { if let Some(def) = macro_resolve(cx, path_str) { - def + (def, None) } else { continue } @@ -1066,7 +1163,7 @@ impl Clean<Attributes> for [ast::Attribute] { let id = register_def(cx, def); - attrs.links.push((link, id)); + attrs.links.push((link, id, fragment)); } cx.sess().abort_if_errors(); diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index b7862181119a8..1fb8f106cac03 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -1243,6 +1243,16 @@ impl DocFolder for Cache { _ => self.stripped_mod, }; + // If the impl is from a masked crate or references something from a + // masked crate then remove it completely. + if let clean::ImplItem(ref i) = item.inner { + if self.masked_crates.contains(&item.def_id.krate) || + i.trait_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) || + i.for_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) { + return None; + } + } + // Register any generics to their corresponding string. This is used // when pretty-printing types. if let Some(generics) = item.inner.generics() { @@ -1257,14 +1267,10 @@ impl DocFolder for Cache { // Collect all the implementors of traits. if let clean::ImplItem(ref i) = item.inner { - if !self.masked_crates.contains(&item.def_id.krate) { - if let Some(did) = i.trait_.def_id() { - if i.for_.def_id().map_or(true, |d| !self.masked_crates.contains(&d.krate)) { - self.implementors.entry(did).or_insert(vec![]).push(Impl { - impl_item: item.clone(), - }); - } - } + if let Some(did) = i.trait_.def_id() { + self.implementors.entry(did).or_insert(vec![]).push(Impl { + impl_item: item.clone(), + }); } } @@ -1427,24 +1433,20 @@ impl DocFolder for Cache { // Note: matching twice to restrict the lifetime of the `i` borrow. let mut dids = FxHashSet(); if let clean::Item { inner: clean::ImplItem(ref i), .. } = item { - let masked_trait = i.trait_.def_id().map_or(false, - |d| self.masked_crates.contains(&d.krate)); - if !masked_trait { - match i.for_ { - clean::ResolvedPath { did, .. } | - clean::BorrowedRef { - type_: box clean::ResolvedPath { did, .. }, .. - } => { - dids.insert(did); - } - ref t => { - let did = t.primitive_type().and_then(|t| { - self.primitive_locations.get(&t).cloned() - }); + match i.for_ { + clean::ResolvedPath { did, .. } | + clean::BorrowedRef { + type_: box clean::ResolvedPath { did, .. }, .. 
+ } => { + dids.insert(did); + } + ref t => { + let did = t.primitive_type().and_then(|t| { + self.primitive_locations.get(&t).cloned() + }); - if let Some(did) = did { - dids.insert(did); - } + if let Some(did) = did { + dids.insert(did); } } } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index d6e26057ea81c..73810b3fe81d7 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -33,6 +33,18 @@ use std::fmt; use std::rc::Rc; use std::u32; +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] +pub struct Label { + pub ident: Ident, + pub span: Span, +} + +impl fmt::Debug for Label { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "label({:?})", self.ident) + } +} + #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, @@ -1078,23 +1090,23 @@ pub enum ExprKind { /// A while loop, with an optional label /// /// `'label: while expr { block }` - While(P<Expr>, P<Block>, Option<SpannedIdent>), + While(P<Expr>, P<Block>, Option<Label>), /// A while-let loop, with an optional label /// /// `'label: while let pat = expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. - WhileLet(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>), + WhileLet(P<Pat>, P<Expr>, P<Block>, Option<Label>), /// A for loop, with an optional label /// /// `'label: for pat in expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. - ForLoop(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>), + ForLoop(P<Pat>, P<Expr>, P<Block>, Option<Label>), /// Conditionless loop (can be exited with break, continue, or return) /// /// `'label: loop { block }` - Loop(P<Block>, Option<SpannedIdent>), + Loop(P<Block>, Option<Label>), /// A `match` block. Match(P<Expr>, Vec<Arm>), /// A closure (for example, `move |a, b, c| a + b + c`) @@ -1133,9 +1145,9 @@ pub enum ExprKind { /// A referencing operation (`&a` or `&mut a`) AddrOf(Mutability, P<Expr>), /// A `break`, with an optional label to break, and an optional expression - Break(Option<SpannedIdent>, Option<P<Expr>>), + Break(Option<Label>, Option<P<Expr>>), /// A `continue`, with an optional label - Continue(Option<SpannedIdent>), + Continue(Option<Label>), /// A `return`, with an optional value to be returned Ret(Option<P<Expr>>), diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 612d8501fb2af..025aa94ce06f5 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -786,7 +786,7 @@ impl<'a> ExtCtxt<'a> { /// substitute; we never hit resolve/type-checking so the dummy /// value doesn't have to match anything) pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! 
{ - panic!(self.parse_sess.span_diagnostic.span_fatal(sp, msg)); + self.parse_sess.span_diagnostic.span_fatal(sp, msg).raise(); } /// Emit `msg` attached to `sp`, without immediately stopping diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 11988a8f89d70..44a073545a730 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -455,7 +455,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { suggested_limit)); err.emit(); self.cx.trace_macros_diag(); - panic!(FatalError); + FatalError.raise(); } Some(result) diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 6b08448107a9f..ec4e6ced1b273 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -116,9 +116,10 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::T while self.p.token != token::Eof { match panictry!(self.p.parse_item()) { Some(item) => ret.push(item), - None => panic!(self.p.diagnostic().span_fatal(self.p.span, + None => self.p.diagnostic().span_fatal(self.p.span, &format!("expected item, found `{}`", - self.p.this_token_to_string()))) + self.p.this_token_to_string())) + .raise() } } Some(ret) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 124477620c27f..3e3c1618fffb2 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -573,7 +573,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { Some(i) => token::NtItem(i), None => { p.fatal("expected an item keyword").emit(); - panic!(FatalError); + FatalError.raise(); } }, "block" => token::NtBlock(panictry!(p.parse_block())), @@ -581,7 +581,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { Some(s) => token::NtStmt(s), None => { p.fatal("expected a statement").emit(); - panic!(FatalError); + FatalError.raise(); } }, "pat" => token::NtPat(panictry!(p.parse_pat())), @@ -597,7 +597,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { let token_str = pprust::token_to_string(&p.token); p.fatal(&format!("expected ident, found {}", &token_str[..])).emit(); - panic!(FatalError) + FatalError.raise() } }, "path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index d86603e94e9df..9efb4faa63535 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -222,10 +222,10 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item) Success(m) => m, Failure(sp, tok) => { let s = parse_failure_msg(tok); - panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s)); + sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); } Error(sp, s) => { - panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s)); + sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); } }; diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 5a7b53153fd61..8512e215ca765 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -1954,7 +1954,7 @@ impl FeatureChecker { .span_note(ca_span, "`#![feature(custom_attribute)]` declared here") .emit(); - panic!(FatalError); + FatalError.raise(); } if let (Some(span), None) = (self.copy_closures, self.clone_closures) { @@ -1963,7 +1963,7 @@ impl FeatureChecker { .span_note(span, 
"`#![feature(copy_closures)]` declared here") .emit(); - panic!(FatalError); + FatalError.raise(); } } } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 1e605ba3ecdfb..0f8fe57e380e5 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -193,6 +193,10 @@ pub trait Folder : Sized { noop_fold_macro_def(def, self) } + fn fold_label(&mut self, label: Label) -> Label { + noop_fold_label(label, self) + } + fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime { noop_fold_lifetime(l, self) } @@ -696,6 +700,13 @@ pub fn noop_fold_generic_params<T: Folder>( params.move_map(|p| fld.fold_generic_param(p)) } +pub fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label { + Label { + ident: fld.fold_ident(label.ident), + span: fld.new_span(label.span), + } +} + pub fn noop_fold_lifetime<T: Folder>(l: Lifetime, fld: &mut T) -> Lifetime { Lifetime { id: fld.new_id(l.id), @@ -1206,30 +1217,26 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mu folder.fold_block(tr), fl.map(|x| folder.fold_expr(x))) } - ExprKind::While(cond, body, opt_ident) => { + ExprKind::While(cond, body, opt_label) => { ExprKind::While(folder.fold_expr(cond), folder.fold_block(body), - opt_ident.map(|label| respan(folder.new_span(label.span), - folder.fold_ident(label.node)))) + opt_label.map(|label| folder.fold_label(label))) } - ExprKind::WhileLet(pat, expr, body, opt_ident) => { + ExprKind::WhileLet(pat, expr, body, opt_label) => { ExprKind::WhileLet(folder.fold_pat(pat), folder.fold_expr(expr), folder.fold_block(body), - opt_ident.map(|label| respan(folder.new_span(label.span), - folder.fold_ident(label.node)))) + opt_label.map(|label| folder.fold_label(label))) } - ExprKind::ForLoop(pat, iter, body, opt_ident) => { + ExprKind::ForLoop(pat, iter, body, opt_label) => { ExprKind::ForLoop(folder.fold_pat(pat), folder.fold_expr(iter), folder.fold_block(body), - opt_ident.map(|label| respan(folder.new_span(label.span), - folder.fold_ident(label.node)))) + opt_label.map(|label| folder.fold_label(label))) } - ExprKind::Loop(body, opt_ident) => { + ExprKind::Loop(body, opt_label) => { ExprKind::Loop(folder.fold_block(body), - opt_ident.map(|label| respan(folder.new_span(label.span), - folder.fold_ident(label.node)))) + opt_label.map(|label| folder.fold_label(label))) } ExprKind::Match(expr, arms) => { ExprKind::Match(folder.fold_expr(expr), @@ -1278,15 +1285,13 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mu }); ExprKind::Path(qself, folder.fold_path(path)) } - ExprKind::Break(opt_ident, opt_expr) => { - ExprKind::Break(opt_ident.map(|label| respan(folder.new_span(label.span), - folder.fold_ident(label.node))), + ExprKind::Break(opt_label, opt_expr) => { + ExprKind::Break(opt_label.map(|label| folder.fold_label(label)), opt_expr.map(|e| folder.fold_expr(e))) } - ExprKind::Continue(opt_ident) => ExprKind::Continue(opt_ident.map(|label| - respan(folder.new_span(label.span), - folder.fold_ident(label.node))) - ), + ExprKind::Continue(opt_label) => { + ExprKind::Continue(opt_label.map(|label| folder.fold_label(label))) + } ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))), ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| { InlineAsm { diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index d7f7ff554db4b..3b4c5da10f20b 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -54,7 +54,7 @@ macro_rules! 
panictry { Ok(e) => e, Err(mut e) => { e.emit(); - panic!(FatalError); + FatalError.raise() } } }) diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 49362f0779921..63aa5d28ce8dc 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -265,7 +265,7 @@ fn read_block_comment(rdr: &mut StringReader, while level > 0 { debug!("=== block comment level {}", level); if rdr.is_eof() { - panic!(rdr.fatal("unterminated block comment")); + rdr.fatal("unterminated block comment").raise(); } if rdr.ch_is('\n') { trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 9828995362a35..b95c91548d00b 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -90,7 +90,7 @@ impl<'a> StringReader<'a> { Ok(tok) => tok, Err(_) => { self.emit_fatal_errors(); - panic!(FatalError); + FatalError.raise(); } } } @@ -191,7 +191,7 @@ impl<'a> StringReader<'a> { let mut sr = StringReader::new_raw(sess, filemap); if sr.advance_token().is_err() { sr.emit_fatal_errors(); - panic!(FatalError); + FatalError.raise(); } sr } @@ -216,7 +216,7 @@ impl<'a> StringReader<'a> { if sr.advance_token().is_err() { sr.emit_fatal_errors(); - panic!(FatalError); + FatalError.raise(); } sr } @@ -647,7 +647,7 @@ impl<'a> StringReader<'a> { "unterminated block comment" }; let last_bpos = self.pos; - panic!(self.fatal_span_(start_bpos, last_bpos, msg)); + self.fatal_span_(start_bpos, last_bpos, msg).raise(); } let n = self.ch.unwrap(); match n { @@ -808,9 +808,9 @@ impl<'a> StringReader<'a> { for _ in 0..n_digits { if self.is_eof() { let last_bpos = self.pos; - panic!(self.fatal_span_(start_bpos, - last_bpos, - "unterminated numeric character escape")); + self.fatal_span_(start_bpos, + last_bpos, + "unterminated numeric character escape").raise(); } if self.ch_is(delim) { let last_bpos = self.pos; @@ -1025,9 +1025,9 @@ impl<'a> StringReader<'a> { } }, None => { - panic!(self.fatal_span_(start_bpos, - self.pos, - "unterminated unicode escape (found EOF)")); + self.fatal_span_(start_bpos, + self.pos, + "unterminated unicode escape (found EOF)").raise(); } } self.bump(); @@ -1283,9 +1283,9 @@ impl<'a> StringReader<'a> { // lifetimes shouldn't end with a single quote // if we find one, then this is an invalid character literal if self.ch_is('\'') { - panic!(self.fatal_span_verbose( - start_with_quote, self.next_pos, - String::from("character literal may only contain one codepoint"))); + self.fatal_span_verbose(start_with_quote, self.next_pos, + String::from("character literal may only contain one codepoint")) + .raise(); } @@ -1332,9 +1332,8 @@ impl<'a> StringReader<'a> { break; } } - panic!(self.fatal_span_verbose( - start_with_quote, pos, - String::from("character literal may only contain one codepoint"))); + self.fatal_span_verbose(start_with_quote, pos, + String::from("character literal may only contain one codepoint")).raise(); } let id = if valid { @@ -1364,9 +1363,9 @@ impl<'a> StringReader<'a> { while !self.ch_is('"') { if self.is_eof() { let last_bpos = self.pos; - panic!(self.fatal_span_(start_bpos, - last_bpos, - "unterminated double quote string")); + self.fatal_span_(start_bpos, + last_bpos, + "unterminated double quote string").raise(); } let ch_start = self.pos; @@ -1399,15 +1398,15 @@ impl<'a> StringReader<'a> { if self.is_eof() { let last_bpos = self.pos; - panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw 
string")); + self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise(); } else if !self.ch_is('"') { let last_bpos = self.pos; let curr_char = self.ch.unwrap(); - panic!(self.fatal_span_char(start_bpos, - last_bpos, - "found invalid character; only `#` is allowed \ - in raw string delimitation", - curr_char)); + self.fatal_span_char(start_bpos, + last_bpos, + "found invalid character; only `#` is allowed \ + in raw string delimitation", + curr_char).raise(); } self.bump(); let content_start_bpos = self.pos; @@ -1416,7 +1415,7 @@ impl<'a> StringReader<'a> { 'outer: loop { if self.is_eof() { let last_bpos = self.pos; - panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string")); + self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise(); } // if self.ch_is('"') { // content_end_bpos = self.pos; @@ -1573,9 +1572,9 @@ impl<'a> StringReader<'a> { // character before position `start` are an // ascii single quote and ascii 'b'. let pos = self.pos; - panic!(self.fatal_span_verbose(start - BytePos(2), - pos, - "unterminated byte constant".to_string())); + self.fatal_span_verbose(start - BytePos(2), + pos, + "unterminated byte constant".to_string()).raise(); } let id = if valid { @@ -1599,7 +1598,7 @@ impl<'a> StringReader<'a> { while !self.ch_is('"') { if self.is_eof() { let pos = self.pos; - panic!(self.fatal_span_(start, pos, "unterminated double quote byte string")); + self.fatal_span_(start, pos, "unterminated double quote byte string").raise(); } let ch_start = self.pos; @@ -1631,15 +1630,15 @@ impl<'a> StringReader<'a> { if self.is_eof() { let pos = self.pos; - panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string")); + self.fatal_span_(start_bpos, pos, "unterminated raw string").raise(); } else if !self.ch_is('"') { let pos = self.pos; let ch = self.ch.unwrap(); - panic!(self.fatal_span_char(start_bpos, + self.fatal_span_char(start_bpos, pos, "found invalid character; only `#` is allowed in raw \ string delimitation", - ch)); + ch).raise(); } self.bump(); let content_start_bpos = self.pos; @@ -1648,7 +1647,7 @@ impl<'a> StringReader<'a> { match self.ch { None => { let pos = self.pos; - panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string")) + self.fatal_span_(start_bpos, pos, "unterminated raw string").raise() } Some('"') => { content_end_bpos = self.pos; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index a9b1e4aaa6006..b671f81c2a84b 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -212,8 +212,8 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) Err(e) => { let msg = format!("couldn't read {:?}: {}", path.display(), e); match spanopt { - Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, &msg)), - None => panic!(sess.span_diagnostic.fatal(&msg)) + Some(sp) => sess.span_diagnostic.span_fatal(sp, &msg).raise(), + None => sess.span_diagnostic.fatal(&msg).raise() } } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 8213d604b91b2..d393cab471850 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -23,7 +23,7 @@ use ast::{Field, FnDecl}; use ast::{ForeignItem, ForeignItemKind, FunctionRetTy}; use ast::GenericParam; use ast::{Ident, ImplItem, IsAuto, Item, ItemKind}; -use ast::{Lifetime, LifetimeDef, Lit, LitKind, UintTy}; +use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind, UintTy}; use ast::Local; use ast::MacStmtStyle; use ast::Mac_; @@ -1325,15 +1325,17 @@ impl<'a> Parser<'a> { 
self.check_keyword(keywords::Extern) } - fn get_label(&mut self) -> ast::Ident { - match self.token { + fn eat_label(&mut self) -> Option<Label> { + let ident = match self.token { token::Lifetime(ref ident) => *ident, token::Interpolated(ref nt) => match nt.0 { token::NtLifetime(lifetime) => lifetime.ident, - _ => self.bug("not a lifetime"), + _ => return None, }, - _ => self.bug("not a lifetime"), - } + _ => return None, + }; + self.bump(); + Some(Label { ident, span: self.prev_span }) } /// parse a TyKind::BareFn type: @@ -2317,11 +2319,8 @@ impl<'a> Parser<'a> { let lo = self.prev_span; return self.parse_while_expr(None, lo, attrs); } - if self.token.is_lifetime() { - let label = Spanned { node: self.get_label(), - span: self.span }; - let lo = self.span; - self.bump(); + if let Some(label) = self.eat_label() { + let lo = label.span; self.expect(&token::Colon)?; if self.eat_keyword(keywords::While) { return self.parse_while_expr(Some(label), lo, attrs) @@ -2339,16 +2338,8 @@ impl<'a> Parser<'a> { return self.parse_loop_expr(None, lo, attrs); } if self.eat_keyword(keywords::Continue) { - let ex = if self.token.is_lifetime() { - let ex = ExprKind::Continue(Some(Spanned{ - node: self.get_label(), - span: self.span - })); - self.bump(); - ex - } else { - ExprKind::Continue(None) - }; + let label = self.eat_label(); + let ex = ExprKind::Continue(label); let hi = self.prev_span; return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } @@ -2376,16 +2367,7 @@ impl<'a> Parser<'a> { ex = ExprKind::Ret(None); } } else if self.eat_keyword(keywords::Break) { - let lt = if self.token.is_lifetime() { - let spanned_lt = Spanned { - node: self.get_label(), - span: self.span - }; - self.bump(); - Some(spanned_lt) - } else { - None - }; + let label = self.eat_label(); let e = if self.token.can_begin_expr() && !(self.token == token::OpenDelim(token::Brace) && self.restrictions.contains( @@ -2394,7 +2376,7 @@ impl<'a> Parser<'a> { } else { None }; - ex = ExprKind::Break(lt, e); + ex = ExprKind::Break(label, e); hi = self.prev_span; } else if self.eat_keyword(keywords::Yield) { if self.token.can_begin_expr() { @@ -3291,7 +3273,7 @@ impl<'a> Parser<'a> { } /// Parse a 'for' .. 
'in' expression ('for' token already eaten) - pub fn parse_for_expr(&mut self, opt_ident: Option<ast::SpannedIdent>, + pub fn parse_for_expr(&mut self, opt_label: Option<Label>, span_lo: Span, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { // Parse: `for <src_pat> in <src_expr> <src_loop_block>` @@ -3309,25 +3291,25 @@ impl<'a> Parser<'a> { attrs.extend(iattrs); let hi = self.prev_span; - Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_ident), attrs)) + Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs)) } /// Parse a 'while' or 'while let' expression ('while' token already eaten) - pub fn parse_while_expr(&mut self, opt_ident: Option<ast::SpannedIdent>, + pub fn parse_while_expr(&mut self, opt_label: Option<Label>, span_lo: Span, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { if self.token.is_keyword(keywords::Let) { - return self.parse_while_let_expr(opt_ident, span_lo, attrs); + return self.parse_while_let_expr(opt_label, span_lo, attrs); } let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); let span = span_lo.to(body.span); - return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs)); + return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs)); } /// Parse a 'while let' expression ('while' token already eaten) - pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::SpannedIdent>, + pub fn parse_while_let_expr(&mut self, opt_label: Option<Label>, span_lo: Span, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { self.expect_keyword(keywords::Let)?; @@ -3337,17 +3319,17 @@ impl<'a> Parser<'a> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); let span = span_lo.to(body.span); - return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); + return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_label), attrs)); } // parse `loop {...}`, `loop` token already eaten - pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::SpannedIdent>, + pub fn parse_loop_expr(&mut self, opt_label: Option<Label>, span_lo: Span, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); let span = span_lo.to(body.span); - Ok(self.mk_expr(span, ExprKind::Loop(body, opt_ident), attrs)) + Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs)) } /// Parse a `do catch {...}` expression (`do catch` token already eaten) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 345c592a01100..ae459c668aae4 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -2104,9 +2104,9 @@ impl<'a> State<'a> { ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => { self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?; } - ast::ExprKind::While(ref test, ref blk, opt_ident) => { - if let Some(ident) = opt_ident { - self.print_ident(ident.node)?; + ast::ExprKind::While(ref test, ref blk, opt_label) => { + if let Some(label) = opt_label { + self.print_ident(label.ident)?; self.word_space(":")?; } self.head("while")?; @@ -2114,9 +2114,9 @@ impl<'a> State<'a> { self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } - ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => { - if let Some(ident) = opt_ident { - self.print_ident(ident.node)?; + 
ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_label) => { + if let Some(label) = opt_label { + self.print_ident(label.ident)?; self.word_space(":")?; } self.head("while let")?; @@ -2127,9 +2127,9 @@ impl<'a> State<'a> { self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } - ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => { - if let Some(ident) = opt_ident { - self.print_ident(ident.node)?; + ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => { + if let Some(label) = opt_label { + self.print_ident(label.ident)?; self.word_space(":")?; } self.head("for")?; @@ -2140,9 +2140,9 @@ impl<'a> State<'a> { self.s.space()?; self.print_block_with_attrs(blk, attrs)?; } - ast::ExprKind::Loop(ref blk, opt_ident) => { - if let Some(ident) = opt_ident { - self.print_ident(ident.node)?; + ast::ExprKind::Loop(ref blk, opt_label) => { + if let Some(label) = opt_label { + self.print_ident(label.ident)?; self.word_space(":")?; } self.head("loop")?; @@ -2238,11 +2238,11 @@ impl<'a> State<'a> { ast::ExprKind::Path(Some(ref qself), ref path) => { self.print_qpath(path, qself, true)? } - ast::ExprKind::Break(opt_ident, ref opt_expr) => { + ast::ExprKind::Break(opt_label, ref opt_expr) => { self.s.word("break")?; self.s.space()?; - if let Some(ident) = opt_ident { - self.print_ident(ident.node)?; + if let Some(label) = opt_label { + self.print_ident(label.ident)?; self.s.space()?; } if let Some(ref expr) = *opt_expr { @@ -2250,11 +2250,11 @@ impl<'a> State<'a> { self.s.space()?; } } - ast::ExprKind::Continue(opt_ident) => { + ast::ExprKind::Continue(opt_label) => { self.s.word("continue")?; self.s.space()?; - if let Some(ident) = opt_ident { - self.print_ident(ident.node)?; + if let Some(label) = opt_label { + self.print_ident(label.ident)?; self.s.space()? } } diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index eff7dd57f08af..e73550d0719a4 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -123,7 +123,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { match i.node { ast::ItemKind::Fn(_, ast::Unsafety::Unsafe, _, _, _, _) => { let diag = self.cx.span_diagnostic; - panic!(diag.span_fatal(i.span, "unsafe functions cannot be used for tests")); + diag.span_fatal(i.span, "unsafe functions cannot be used for tests").raise(); } _ => { debug!("this is a test function"); diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 8aeacf79cee7d..bbb123dab2868 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -101,6 +101,9 @@ pub trait Visitor<'ast>: Sized { fn visit_variant(&mut self, v: &'ast Variant, g: &'ast Generics, item_id: NodeId) { walk_variant(self, v, g, item_id) } + fn visit_label(&mut self, label: &'ast Label) { + walk_label(self, label) + } fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) { walk_lifetime(self, lifetime) } @@ -163,25 +166,6 @@ macro_rules! 
walk_list { } } -pub fn walk_opt_name<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_name: Option<Name>) { - if let Some(name) = opt_name { - visitor.visit_name(span, name); - } -} - -pub fn walk_opt_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_ident: Option<Ident>) { - if let Some(ident) = opt_ident { - visitor.visit_ident(span, ident); - } -} - -pub fn walk_opt_sp_ident<'a, V: Visitor<'a>>(visitor: &mut V, - opt_sp_ident: &Option<Spanned<Ident>>) { - if let Some(ref sp_ident) = *opt_sp_ident { - visitor.visit_ident(sp_ident.span, sp_ident.node); - } -} - pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, ident: Ident) { visitor.visit_name(span, ident.name); } @@ -204,6 +188,10 @@ pub fn walk_local<'a, V: Visitor<'a>>(visitor: &mut V, local: &'a Local) { walk_list!(visitor, visit_expr, &local.init); } +pub fn walk_label<'a, V: Visitor<'a>>(visitor: &mut V, label: &'a Label) { + visitor.visit_ident(label.span, label.ident); +} + pub fn walk_lifetime<'a, V: Visitor<'a>>(visitor: &mut V, lifetime: &'a Lifetime) { visitor.visit_ident(lifetime.span, lifetime.ident); } @@ -226,7 +214,9 @@ pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) { visitor.visit_ident(item.span, item.ident); match item.node { ItemKind::ExternCrate(opt_name) => { - walk_opt_name(visitor, item.span, opt_name) + if let Some(name) = opt_name { + visitor.visit_name(item.span, name); + } } ItemKind::Use(ref use_tree) => { visitor.visit_use_tree(use_tree, item.id, false) @@ -622,7 +612,9 @@ pub fn walk_struct_def<'a, V: Visitor<'a>>(visitor: &mut V, struct_definition: & pub fn walk_struct_field<'a, V: Visitor<'a>>(visitor: &mut V, struct_field: &'a StructField) { visitor.visit_vis(&struct_field.vis); - walk_opt_ident(visitor, struct_field.span, struct_field.ident); + if let Some(ident) = struct_field.ident { + visitor.visit_ident(struct_field.span, ident); + } visitor.visit_ty(&struct_field.ty); walk_list!(visitor, visit_attribute, &struct_field.attrs); } @@ -708,10 +700,10 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { visitor.visit_block(if_block); walk_list!(visitor, visit_expr, optional_else); } - ExprKind::While(ref subexpression, ref block, ref opt_sp_ident) => { + ExprKind::While(ref subexpression, ref block, ref opt_label) => { + walk_list!(visitor, visit_label, opt_label); visitor.visit_expr(subexpression); visitor.visit_block(block); - walk_opt_sp_ident(visitor, opt_sp_ident); } ExprKind::IfLet(ref pattern, ref subexpression, ref if_block, ref optional_else) => { visitor.visit_pat(pattern); @@ -719,21 +711,21 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { visitor.visit_block(if_block); walk_list!(visitor, visit_expr, optional_else); } - ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => { + ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_label) => { + walk_list!(visitor, visit_label, opt_label); visitor.visit_pat(pattern); visitor.visit_expr(subexpression); visitor.visit_block(block); - walk_opt_sp_ident(visitor, opt_sp_ident); } - ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => { + ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_label) => { + walk_list!(visitor, visit_label, opt_label); visitor.visit_pat(pattern); visitor.visit_expr(subexpression); visitor.visit_block(block); - walk_opt_sp_ident(visitor, opt_sp_ident); } - ExprKind::Loop(ref block, ref opt_sp_ident) => { + 
ExprKind::Loop(ref block, ref opt_label) => { + walk_list!(visitor, visit_label, opt_label); visitor.visit_block(block); - walk_opt_sp_ident(visitor, opt_sp_ident); } ExprKind::Match(ref subexpression, ref arms) => { visitor.visit_expr(subexpression); @@ -775,12 +767,12 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { } visitor.visit_path(path, expression.id) } - ExprKind::Break(ref opt_sp_ident, ref opt_expr) => { - walk_opt_sp_ident(visitor, opt_sp_ident); + ExprKind::Break(ref opt_label, ref opt_expr) => { + walk_list!(visitor, visit_label, opt_label); walk_list!(visitor, visit_expr, opt_expr); } - ExprKind::Continue(ref opt_sp_ident) => { - walk_opt_sp_ident(visitor, opt_sp_ident); + ExprKind::Continue(ref opt_label) => { + walk_list!(visitor, visit_label, opt_label); } ExprKind::Ret(ref optional_expression) => { walk_list!(visitor, visit_expr, optional_expression); diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index f375847e705bc..22e78e9b426b2 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -92,7 +92,7 @@ impl MultiItemModifier for ProcMacroDerive { } err.emit(); - panic!(FatalError); + FatalError.raise(); } }; @@ -103,13 +103,13 @@ impl MultiItemModifier for ProcMacroDerive { // fail if there have been errors emitted Ok(_) if ecx.parse_sess.span_diagnostic.err_count() > error_count_before => { ecx.struct_span_fatal(span, msg).emit(); - panic!(FatalError); + FatalError.raise(); } Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(), Err(_) => { // FIXME: handle this better ecx.struct_span_fatal(span, msg).emit(); - panic!(FatalError); + FatalError.raise(); } } }) diff --git a/src/libsyntax_ext/proc_macro_impl.rs b/src/libsyntax_ext/proc_macro_impl.rs index 5fcedbf50c60f..12400e363f4b0 100644 --- a/src/libsyntax_ext/proc_macro_impl.rs +++ b/src/libsyntax_ext/proc_macro_impl.rs @@ -51,7 +51,7 @@ impl base::AttrProcMacro for AttrProcMacro { } err.emit(); - panic!(FatalError); + FatalError.raise(); } } } @@ -86,7 +86,7 @@ impl base::ProcMacro for BangProcMacro { } err.emit(); - panic!(FatalError); + FatalError.raise(); } } } diff --git a/src/llvm b/src/llvm index 2717444753318..bc344d5bc23c6 160000 --- a/src/llvm +++ b/src/llvm @@ -1 +1 @@ -Subproject commit 2717444753318e461e0c3b30dacd03ffbac96903 +Subproject commit bc344d5bc23c61ff9baf82d268a0edf199933cc3 diff --git a/src/rustllvm/ArchiveWrapper.cpp b/src/rustllvm/ArchiveWrapper.cpp index b110013ceaed3..93157cd681942 100644 --- a/src/rustllvm/ArchiveWrapper.cpp +++ b/src/rustllvm/ArchiveWrapper.cpp @@ -42,7 +42,6 @@ struct RustArchiveIterator { enum class LLVMRustArchiveKind { Other, GNU, - MIPS64, BSD, COFF, }; @@ -51,8 +50,6 @@ static Archive::Kind fromRust(LLVMRustArchiveKind Kind) { switch (Kind) { case LLVMRustArchiveKind::GNU: return Archive::K_GNU; - case LLVMRustArchiveKind::MIPS64: - return Archive::K_MIPS64; case LLVMRustArchiveKind::BSD: return Archive::K_BSD; case LLVMRustArchiveKind::COFF: @@ -235,9 +232,16 @@ LLVMRustWriteArchive(char *Dst, size_t NumMembers, Members.push_back(std::move(*MOrErr)); } } - auto Pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false); - if (!Pair.second) + auto Result = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false); +#if LLVM_VERSION_GE(6, 0) + if (!Result) return LLVMRustResult::Success; - LLVMRustSetLastError(Pair.second.message().c_str()); + LLVMRustSetLastError(toString(std::move(Result)).c_str()); +#else + if 
(!Result.second) + return LLVMRustResult::Success; + LLVMRustSetLastError(Result.second.message().c_str()); +#endif + return LLVMRustResult::Failure; } diff --git a/src/rustllvm/PassWrapper.cpp b/src/rustllvm/PassWrapper.cpp index 54a73a04bfa9e..b2f1229891d26 100644 --- a/src/rustllvm/PassWrapper.cpp +++ b/src/rustllvm/PassWrapper.cpp @@ -23,9 +23,15 @@ #include "llvm/Support/FileSystem.h" #include "llvm/Support/Host.h" #include "llvm/Target/TargetMachine.h" -#include "llvm/Target/TargetSubtargetInfo.h" #include "llvm/Transforms/IPO/PassManagerBuilder.h" +#if LLVM_VERSION_GE(6, 0) +#include "llvm/CodeGen/TargetSubtargetInfo.h" +#include "llvm/IR/IntrinsicInst.h" +#else +#include "llvm/Target/TargetSubtargetInfo.h" +#endif + #if LLVM_VERSION_GE(4, 0) #include "llvm/Transforms/IPO/AlwaysInliner.h" #include "llvm/Transforms/IPO/FunctionImport.h" @@ -210,20 +216,15 @@ extern "C" bool LLVMRustHasFeature(LLVMTargetMachineRef TM, enum class LLVMRustCodeModel { Other, - Default, - JITDefault, Small, Kernel, Medium, Large, + None, }; static CodeModel::Model fromRust(LLVMRustCodeModel Model) { switch (Model) { - case LLVMRustCodeModel::Default: - return CodeModel::Default; - case LLVMRustCodeModel::JITDefault: - return CodeModel::JITDefault; case LLVMRustCodeModel::Small: return CodeModel::Small; case LLVMRustCodeModel::Kernel: @@ -360,7 +361,6 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine( bool TrapUnreachable, bool Singlethread) { - auto CM = fromRust(RustCM); auto OptLevel = fromRust(RustOptLevel); auto RM = fromRust(RustReloc); @@ -399,6 +399,13 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine( Options.ThreadModel = ThreadModel::Single; } +#if LLVM_VERSION_GE(6, 0) + Optional<CodeModel::Model> CM; +#else + CodeModel::Model CM = CodeModel::Model::Default; +#endif + if (RustCM != LLVMRustCodeModel::None) + CM = fromRust(RustCM); TargetMachine *TM = TheTarget->createTargetMachine( Trip.getTriple(), RealCPU, Feature, Options, RM, CM, OptLevel); return wrap(TM); diff --git a/src/rustllvm/RustWrapper.cpp b/src/rustllvm/RustWrapper.cpp index 0fe533d447bc3..611d63f6a4d14 100644 --- a/src/rustllvm/RustWrapper.cpp +++ b/src/rustllvm/RustWrapper.cpp @@ -315,7 +315,11 @@ extern "C" void LLVMRustRemoveFunctionAttributes(LLVMValueRef Fn, // enable fpmath flag UnsafeAlgebra extern "C" void LLVMRustSetHasUnsafeAlgebra(LLVMValueRef V) { if (auto I = dyn_cast<Instruction>(unwrap<Value>(V))) { +#if LLVM_VERSION_GE(6, 0) + I->setFast(true); +#else I->setHasUnsafeAlgebra(true); +#endif } } @@ -457,9 +461,13 @@ enum class LLVMRustDIFlags : uint32_t { FlagStaticMember = (1 << 12), FlagLValueReference = (1 << 13), FlagRValueReference = (1 << 14), - FlagMainSubprogram = (1 << 21), + FlagExternalTypeRef = (1 << 15), + FlagIntroducedVirtual = (1 << 18), + FlagBitField = (1 << 19), + FlagNoReturn = (1 << 20), + FlagMainSubprogram = (1 << 21), // Do not add values that are not supported by the minimum LLVM - // version we support! + // version we support! 
see llvm/include/llvm/IR/DebugInfoFlags.def }; inline LLVMRustDIFlags operator&(LLVMRustDIFlags A, LLVMRustDIFlags B) { @@ -544,7 +552,19 @@ static unsigned fromRust(LLVMRustDIFlags Flags) { if (isSet(Flags & LLVMRustDIFlags::FlagRValueReference)) { Result |= DINode::DIFlags::FlagRValueReference; } + if (isSet(Flags & LLVMRustDIFlags::FlagExternalTypeRef)) { + Result |= DINode::DIFlags::FlagExternalTypeRef; + } + if (isSet(Flags & LLVMRustDIFlags::FlagIntroducedVirtual)) { + Result |= DINode::DIFlags::FlagIntroducedVirtual; + } + if (isSet(Flags & LLVMRustDIFlags::FlagBitField)) { + Result |= DINode::DIFlags::FlagBitField; + } #if LLVM_RUSTLLVM || LLVM_VERSION_GE(4, 0) + if (isSet(Flags & LLVMRustDIFlags::FlagNoReturn)) { + Result |= DINode::DIFlags::FlagNoReturn; + } if (isSet(Flags & LLVMRustDIFlags::FlagMainSubprogram)) { Result |= DINode::DIFlags::FlagMainSubprogram; } diff --git a/src/rustllvm/llvm-rebuild-trigger b/src/rustllvm/llvm-rebuild-trigger index 7f54d9276bfc7..2635ca73303e7 100644 --- a/src/rustllvm/llvm-rebuild-trigger +++ b/src/rustllvm/llvm-rebuild-trigger @@ -1,4 +1,4 @@ # If this file is modified, then llvm will be (optionally) cleaned and then rebuilt. # The actual contents of this file do not matter, but to trigger a change on the # build bots then the contents should be changed so git updates the mtime. -2017-11-08 +2018-01-25 diff --git a/src/test/codegen/noreturnflag.rs b/src/test/codegen/noreturnflag.rs new file mode 100644 index 0000000000000..24a5a4e44cb29 --- /dev/null +++ b/src/test/codegen/noreturnflag.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength +// min-llvm-version 4.0 + +// compile-flags: -g -C no-prepopulate-passes + +// CHECK: {{.*}}DISubprogram{{.*}}name: "foo"{{.*}}DIFlagNoReturn + +fn foo() -> ! 
{ + loop {} +} + +pub fn main() { + foo(); +} diff --git a/src/test/codegen/x86_mmx.rs b/src/test/codegen/x86_mmx.rs index bedda63bbff31..dc9f63c35db2e 100644 --- a/src/test/codegen/x86_mmx.rs +++ b/src/test/codegen/x86_mmx.rs @@ -22,9 +22,7 @@ pub struct i8x8(u64); #[no_mangle] pub fn a(a: &mut i8x8, b: i8x8) -> i8x8 { - // CHECK-LABEL: define x86_mmx @a(x86_mmx*{{.*}}, x86_mmx{{.*}}) - // CHECK: store x86_mmx %b, x86_mmx* %a - // CHECK: ret x86_mmx %b + // CHECK-LABEL: define void @a(x86_mmx*{{.*}}, x86_mmx*{{.*}}, x86_mmx*{{.*}}) *a = b; return b } diff --git a/src/test/compile-fail/E0617.rs b/src/test/compile-fail/E0617.rs index 7b769ff4ae2e0..9375fd9cade35 100644 --- a/src/test/compile-fail/E0617.rs +++ b/src/test/compile-fail/E0617.rs @@ -17,16 +17,22 @@ extern { fn main() { unsafe { printf(::std::ptr::null(), 0f32); - //~^ ERROR can't pass `f32` to variadic function, cast to `c_double` [E0617] + //~^ ERROR can't pass `f32` to variadic function + //~| HELP cast the value to `c_double` printf(::std::ptr::null(), 0i8); - //~^ ERROR can't pass `i8` to variadic function, cast to `c_int` [E0617] + //~^ ERROR can't pass `i8` to variadic function + //~| HELP cast the value to `c_int` printf(::std::ptr::null(), 0i16); - //~^ ERROR can't pass `i16` to variadic function, cast to `c_int` [E0617] + //~^ ERROR can't pass `i16` to variadic function + //~| HELP cast the value to `c_int` printf(::std::ptr::null(), 0u8); - //~^ ERROR can't pass `u8` to variadic function, cast to `c_uint` [E0617] + //~^ ERROR can't pass `u8` to variadic function + //~| HELP cast the value to `c_uint` printf(::std::ptr::null(), 0u16); - //~^ ERROR can't pass `u16` to variadic function, cast to `c_uint` [E0617] + //~^ ERROR can't pass `u16` to variadic function + //~| HELP cast the value to `c_uint` printf(::std::ptr::null(), printf); - //~^ ERROR can't pass `unsafe extern "C" fn(*const i8, ...) {printf}` to variadic function, cast to `unsafe extern "C" fn(*const i8, ...)` [E0617] + //~^ ERROR can't pass `unsafe extern "C" fn(*const i8, ...) {printf}` to variadic function + //~| HELP cast the value to `unsafe extern "C" fn(*const i8, ...)` } } diff --git a/src/test/compile-fail/dep-graph-variance-alias.rs b/src/test/compile-fail/dep-graph-variance-alias.rs index 9b621a13fc484..18cfd00180428 100644 --- a/src/test/compile-fail/dep-graph-variance-alias.rs +++ b/src/test/compile-fail/dep-graph-variance-alias.rs @@ -23,7 +23,7 @@ struct Foo<T> { f: T } -#[rustc_if_this_changed] +#[rustc_if_this_changed(Krate)] type TypeAlias<T> = Foo<T>; #[rustc_then_this_would_need(ItemVariances)] //~ ERROR OK diff --git a/src/test/compile-fail/empty-never-array.rs b/src/test/compile-fail/empty-never-array.rs new file mode 100644 index 0000000000000..53b24e1731932 --- /dev/null +++ b/src/test/compile-fail/empty-never-array.rs @@ -0,0 +1,27 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
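// A note on the empty-never-array compile-fail test that follows: although `!`
// has no values, the array type `[!; 0]` has exactly one value (the empty
// array), so the `Helper::T(T, [!; 0])` variant is still inhabited. Matching
// only `Helper::U(u)` in a `let` is therefore refutable, which is exactly the
// "refutable pattern in local binding: `T(_, _)` not covered" error the test
// expects; accepting it would let this `transmute` convert between arbitrary
// types.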
+ +#![feature(never_type)] + +enum Helper<T, U> { + T(T, [!; 0]), + #[allow(dead_code)] + U(U), +} + +fn transmute<T, U>(t: T) -> U { + let Helper::U(u) = Helper::T(t, []); + //~^ ERROR refutable pattern in local binding: `T(_, _)` not covered + u +} + +fn main() { + println!("{:?}", transmute::<&str, (*const u8, u64)>("type safety")); +} diff --git a/src/test/compile-fail/impl-trait/infinite-impl-trait-issue-38064.rs b/src/test/compile-fail/impl-trait/infinite-impl-trait-issue-38064.rs new file mode 100644 index 0000000000000..abde9689bd6b6 --- /dev/null +++ b/src/test/compile-fail/impl-trait/infinite-impl-trait-issue-38064.rs @@ -0,0 +1,39 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test that attempts to construct infinite types via impl trait fail +// in a graceful way. +// +// Regression test for #38064. + +// error-pattern:overflow evaluating the requirement `impl Quux` + +#![feature(conservative_impl_trait)] + +trait Quux {} + +fn foo() -> impl Quux { + struct Foo<T>(T); + impl<T> Quux for Foo<T> {} + Foo(bar()) +} + +fn bar() -> impl Quux { + struct Bar<T>(T); + impl<T> Quux for Bar<T> {} + Bar(foo()) +} + +// effectively: +// struct Foo(Bar); +// struct Bar(Foo); +// should produce an error about infinite size + +fn main() { foo(); } diff --git a/src/test/compile-fail/issue-32201.rs b/src/test/compile-fail/issue-32201.rs index bcc53df68a323..bf9f8ecbc8097 100644 --- a/src/test/compile-fail/issue-32201.rs +++ b/src/test/compile-fail/issue-32201.rs @@ -17,6 +17,7 @@ fn bar(_: *const u8) {} fn main() { unsafe { foo(0, bar); - //~^ ERROR can't pass `fn(*const u8) {bar}` to variadic function, cast to `fn(*const u8)` + //~^ ERROR can't pass `fn(*const u8) {bar}` to variadic function + //~| HELP cast the value to `fn(*const u8)` } } diff --git a/src/test/compile-fail/panic-runtime/auxiliary/runtime-depending-on-panic-runtime.rs b/src/test/compile-fail/panic-runtime/auxiliary/depends.rs similarity index 100% rename from src/test/compile-fail/panic-runtime/auxiliary/runtime-depending-on-panic-runtime.rs rename to src/test/compile-fail/panic-runtime/auxiliary/depends.rs diff --git a/src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs b/src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs index 0681f991067b1..7cfdacbd983c1 100644 --- a/src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs +++ b/src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs @@ -9,7 +9,7 @@ // except according to those terms. 
// aux-build:needs-panic-runtime.rs -// aux-build:runtime-depending-on-panic-runtime.rs +// aux-build:depends.rs // error-pattern:cannot depend on a crate that needs a panic runtime -extern crate runtime_depending_on_panic_runtime; +extern crate depends; diff --git a/src/test/run-make/codegen-options-parsing/Makefile b/src/test/run-make/codegen-options-parsing/Makefile index 81e06043c87ae..fda96a8b1fb5e 100644 --- a/src/test/run-make/codegen-options-parsing/Makefile +++ b/src/test/run-make/codegen-options-parsing/Makefile @@ -16,11 +16,11 @@ all: $(RUSTC) -C extra-filename=foo dummy.rs 2>&1 #Option taking no argument $(RUSTC) -C lto= dummy.rs 2>&1 | \ - $(CGREP) 'codegen option `lto` takes no value' + $(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or' $(RUSTC) -C lto=1 dummy.rs 2>&1 | \ - $(CGREP) 'codegen option `lto` takes no value' + $(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or' $(RUSTC) -C lto=foo dummy.rs 2>&1 | \ - $(CGREP) 'codegen option `lto` takes no value' + $(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or' $(RUSTC) -C lto dummy.rs # Should not link dead code... diff --git a/src/test/run-make/no-integrated-as/Makefile b/src/test/run-make/no-integrated-as/Makefile new file mode 100644 index 0000000000000..78e3025b99ad4 --- /dev/null +++ b/src/test/run-make/no-integrated-as/Makefile @@ -0,0 +1,7 @@ +-include ../tools.mk + +all: +ifeq ($(TARGET),x86_64-unknown-linux-gnu) + $(RUSTC) hello.rs -C no_integrated_as + $(call RUN,hello) +endif diff --git a/src/test/run-make/no-integrated-as/hello.rs b/src/test/run-make/no-integrated-as/hello.rs new file mode 100644 index 0000000000000..68e7f6d94d139 --- /dev/null +++ b/src/test/run-make/no-integrated-as/hello.rs @@ -0,0 +1,13 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
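// Context for the new no-integrated-as run-make test whose source follows:
// `-C no_integrated_as` asks rustc to emit a .s file and call an external
// assembler instead of LLVM's integrated assembler, and the Makefile only
// exercises it on x86_64-unknown-linux-gnu, where a system assembler can be
// assumed to exist.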
+ +fn main() { + println!("Hello, world!"); +} diff --git a/src/test/run-pass/align-with-extern-c-fn.rs b/src/test/run-pass/align-with-extern-c-fn.rs index db25960df4c87..15e3b4b03eb27 100644 --- a/src/test/run-pass/align-with-extern-c-fn.rs +++ b/src/test/run-pass/align-with-extern-c-fn.rs @@ -14,10 +14,10 @@ #![feature(attr_literals)] #[repr(align(16))] -pub struct A { - y: i64, -} +pub struct A(i64); pub extern "C" fn foo(x: A) {} -fn main() {} +fn main() { + foo(A(0)); +} diff --git a/src/test/run-pass/auxiliary/trait_inheritance_cross_trait_call_xc_aux.rs b/src/test/run-pass/auxiliary/trait_xc_call_aux.rs similarity index 100% rename from src/test/run-pass/auxiliary/trait_inheritance_cross_trait_call_xc_aux.rs rename to src/test/run-pass/auxiliary/trait_xc_call_aux.rs diff --git a/src/test/run-pass/conditional-compile-arch.rs b/src/test/run-pass/conditional-compile-arch.rs index 3d8bf9333fd2a..1efbfb927fa75 100644 --- a/src/test/run-pass/conditional-compile-arch.rs +++ b/src/test/run-pass/conditional-compile-arch.rs @@ -28,6 +28,9 @@ pub fn main() { } #[cfg(target_arch = "mips64")] pub fn main() { } +#[cfg(target_arch = "powerpc")] +pub fn main() { } + #[cfg(target_arch = "powerpc64")] pub fn main() { } diff --git a/src/test/run-pass/fat-lto.rs b/src/test/run-pass/fat-lto.rs new file mode 100644 index 0000000000000..453eede261cc0 --- /dev/null +++ b/src/test/run-pass/fat-lto.rs @@ -0,0 +1,17 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Clto=fat +// no-prefer-dynamic + +fn main() { + println!("hello!"); +} + diff --git a/src/test/run-pass/intrinsic-alignment.rs b/src/test/run-pass/intrinsic-alignment.rs index c7aa78e768f48..5a6a6e0163ede 100644 --- a/src/test/run-pass/intrinsic-alignment.rs +++ b/src/test/run-pass/intrinsic-alignment.rs @@ -19,7 +19,8 @@ mod rusti { } } -#[cfg(any(target_os = "cloudabi", +#[cfg(any(target_os = "android", + target_os = "cloudabi", target_os = "dragonfly", target_os = "emscripten", target_os = "freebsd", @@ -80,15 +81,3 @@ mod m { } } } - -#[cfg(target_os = "android")] -mod m { - #[main] - #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] - pub fn main() { - unsafe { - assert_eq!(::rusti::pref_align_of::<u64>(), 8); - assert_eq!(::rusti::min_align_of::<u64>(), 8); - } - } -} diff --git a/src/test/run-pass/issue-38763.rs b/src/test/run-pass/issue-38763.rs new file mode 100644 index 0000000000000..01cc8265a399f --- /dev/null +++ b/src/test/run-pass/issue-38763.rs @@ -0,0 +1,23 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
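// The align-with-extern-c-fn.rs change above makes the test actually call
// `foo`, so the extern "C" ABI path for the over-aligned argument is
// exercised. A rough standalone version of the same idea, assuming a current
// compiler where `repr(align)` no longer needs the `attr_literals` gate:

use std::mem;

#[repr(align(16))]
pub struct A(i64);

pub extern "C" fn foo(_x: A) {}

fn main() {
    // The 8-byte payload is padded out to the 16-byte alignment requirement.
    assert_eq!(mem::align_of::<A>(), 16);
    assert_eq!(mem::size_of::<A>(), 16);
    foo(A(0));
}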
+ +// ignore-emscripten + +#![feature(i128_type)] + +#[repr(C)] +pub struct Foo(i128); + +#[no_mangle] +pub extern "C" fn foo(x: Foo) -> Foo { x } + +fn main() { + foo(Foo(1)); +} diff --git a/src/test/run-pass/issue-47364.rs b/src/test/run-pass/issue-47364.rs new file mode 100644 index 0000000000000..2847ac2a0ba43 --- /dev/null +++ b/src/test/run-pass/issue-47364.rs @@ -0,0 +1,66 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -C codegen-units=8 -O + +fn main() { + nom_sql::selection(b"x "); +} + +pub enum Err<P>{ + Position(P), + NodePosition(u32), +} + +pub enum IResult<I,O> { + Done(I,O), + Error(Err<I>), + Incomplete(u32, u64) +} + +pub fn multispace<T: Copy>(input: T) -> ::IResult<i8, i8> { + ::IResult::Done(0, 0) +} + +mod nom_sql { + fn where_clause(i: &[u8]) -> ::IResult<&[u8], Option<String>> { + let X = match ::multispace(i) { + ::IResult::Done(..) => ::IResult::Done(i, None::<String>), + _ => ::IResult::Error(::Err::NodePosition(0)), + }; + match X { + ::IResult::Done(_, _) => ::IResult::Done(i, None), + _ => X + } + } + + pub fn selection(i: &[u8]) { + let Y = match { + match { + where_clause(i) + } { + ::IResult::Done(_, o) => ::IResult::Done(i, Some(o)), + ::IResult::Error(_) => ::IResult::Done(i, None), + _ => ::IResult::Incomplete(0, 0), + } + } { + ::IResult::Done(z, _) => ::IResult::Done(z, None::<String>), + _ => return () + }; + match Y { + ::IResult::Done(x, _) => { + let bytes = b"; "; + let len = x.len(); + bytes[len]; + } + _ => () + } + } +} diff --git a/src/test/run-pass/issue-47673.rs b/src/test/run-pass/issue-47673.rs new file mode 100644 index 0000000000000..92f54a44f63c9 --- /dev/null +++ b/src/test/run-pass/issue-47673.rs @@ -0,0 +1,16 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
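// The issue-47673 regression test that follows exercises `use_nested_groups`
// with the degenerate form `use {{}, {}};`. For context, the usual shape the
// feature enables is grouping several paths under a shared prefix in one
// `use` item; a minimal sketch (no gate needed on compilers where the feature
// has since been stabilized):

use std::{
    collections::{HashMap, HashSet},
    fmt::Write,
};

fn main() {
    let map: HashMap<&str, u32> = HashMap::new();
    let set: HashSet<u32> = HashSet::new();
    let mut out = String::new();
    write!(out, "{} entries, {} members", map.len(), set.len()).unwrap();
    println!("{}", out);
}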
+ +#![feature(use_nested_groups)] +#![allow(unused_import)] + +use {{}, {}}; + +fn main() {} diff --git a/src/test/run-pass/rec-align-u64.rs b/src/test/run-pass/rec-align-u64.rs index 3ff8961166d96..85c1b2adb79ec 100644 --- a/src/test/run-pass/rec-align-u64.rs +++ b/src/test/run-pass/rec-align-u64.rs @@ -38,7 +38,8 @@ struct Outer { } -#[cfg(any(target_os = "cloudabi", +#[cfg(any(target_os = "android", + target_os = "cloudabi", target_os = "dragonfly", target_os = "emscripten", target_os = "freebsd", @@ -85,15 +86,6 @@ mod m { } } -#[cfg(target_os = "android")] -mod m { - #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] - pub mod m { - pub fn align() -> usize { 8 } - pub fn size() -> usize { 16 } - } -} - pub fn main() { unsafe { let x = Outer {c8: 22, t: Inner {c64: 44}}; diff --git a/src/test/run-pass/signal-alternate-stack-cleanup.rs b/src/test/run-pass/signal-alternate-stack-cleanup.rs index 508fbe80ca4be..eae431aa4044d 100644 --- a/src/test/run-pass/signal-alternate-stack-cleanup.rs +++ b/src/test/run-pass/signal-alternate-stack-cleanup.rs @@ -35,7 +35,7 @@ fn main() { unsafe { // Install signal hander that runs on alternate signal stack. let mut action: sigaction = std::mem::zeroed(); - action.sa_flags = SA_SIGINFO | SA_ONSTACK; + action.sa_flags = (SA_ONSTACK | SA_SIGINFO) as _; action.sa_sigaction = signal_handler as sighandler_t; sigaction(SIGWINCH, &action, std::ptr::null_mut()); diff --git a/src/test/run-pass/simd-target-feature-mixup.rs b/src/test/run-pass/simd-target-feature-mixup.rs new file mode 100644 index 0000000000000..2c9ef59709dbf --- /dev/null +++ b/src/test/run-pass/simd-target-feature-mixup.rs @@ -0,0 +1,188 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-emscripten + +#![feature(repr_simd, target_feature, cfg_target_feature)] + +use std::process::{Command, ExitStatus}; +use std::env; + +fn main() { + if let Some(level) = env::args().nth(1) { + return test::main(&level) + } + + let me = env::current_exe().unwrap(); + for level in ["sse", "avx", "avx512"].iter() { + let status = Command::new(&me).arg(level).status().unwrap(); + if status.success() { + println!("success with {}", level); + continue + } + + // We don't actually know if our computer has the requisite target features + // for the test below. Testing for that will get added to libstd later so + // for now just asume sigill means this is a machine that can't run this test. 
+ if is_sigill(status) { + println!("sigill with {}, assuming spurious", level); + continue + } + panic!("invalid status at {}: {}", level, status); + } +} + +#[cfg(unix)] +fn is_sigill(status: ExitStatus) -> bool { + use std::os::unix::prelude::*; + status.signal() == Some(4) +} + +#[cfg(windows)] +fn is_sigill(status: ExitStatus) -> bool { + status.code() == Some(0xc000001d) +} + +#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] +#[allow(bad_style)] +mod test { + // An SSE type + #[repr(simd)] + #[derive(PartialEq, Debug, Clone, Copy)] + struct __m128i(u64, u64); + + // An AVX type + #[repr(simd)] + #[derive(PartialEq, Debug, Clone, Copy)] + struct __m256i(u64, u64, u64, u64); + + // An AVX-512 type + #[repr(simd)] + #[derive(PartialEq, Debug, Clone, Copy)] + struct __m512i(u64, u64, u64, u64, u64, u64, u64, u64); + + pub fn main(level: &str) { + unsafe { + main_normal(level); + main_sse(level); + if level == "sse" { + return + } + main_avx(level); + if level == "avx" { + return + } + main_avx512(level); + } + } + + macro_rules! mains { + ($( + $(#[$attr:meta])* + unsafe fn $main:ident(level: &str) { + ... + } + )*) => ($( + $(#[$attr])* + unsafe fn $main(level: &str) { + let m128 = __m128i(1, 2); + let m256 = __m256i(3, 4, 5, 6); + let m512 = __m512i(7, 8, 9, 10, 11, 12, 13, 14); + assert_eq!(id_sse_128(m128), m128); + assert_eq!(id_sse_256(m256), m256); + assert_eq!(id_sse_512(m512), m512); + + if level == "sse" { + return + } + assert_eq!(id_avx_128(m128), m128); + assert_eq!(id_avx_256(m256), m256); + assert_eq!(id_avx_512(m512), m512); + + if level == "avx" { + return + } + assert_eq!(id_avx512_128(m128), m128); + assert_eq!(id_avx512_256(m256), m256); + assert_eq!(id_avx512_512(m512), m512); + } + )*) + } + + mains! { + unsafe fn main_normal(level: &str) { ... } + #[target_feature(enable = "sse2")] + unsafe fn main_sse(level: &str) { ... } + #[target_feature(enable = "avx")] + unsafe fn main_avx(level: &str) { ... } + #[target_feature(enable = "avx512bw")] + unsafe fn main_avx512(level: &str) { ... 
} + } + + + #[target_feature(enable = "sse2")] + unsafe fn id_sse_128(a: __m128i) -> __m128i { + assert_eq!(a, __m128i(1, 2)); + a.clone() + } + + #[target_feature(enable = "sse2")] + unsafe fn id_sse_256(a: __m256i) -> __m256i { + assert_eq!(a, __m256i(3, 4, 5, 6)); + a.clone() + } + + #[target_feature(enable = "sse2")] + unsafe fn id_sse_512(a: __m512i) -> __m512i { + assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14)); + a.clone() + } + + #[target_feature(enable = "avx")] + unsafe fn id_avx_128(a: __m128i) -> __m128i { + assert_eq!(a, __m128i(1, 2)); + a.clone() + } + + #[target_feature(enable = "avx")] + unsafe fn id_avx_256(a: __m256i) -> __m256i { + assert_eq!(a, __m256i(3, 4, 5, 6)); + a.clone() + } + + #[target_feature(enable = "avx")] + unsafe fn id_avx_512(a: __m512i) -> __m512i { + assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14)); + a.clone() + } + + #[target_feature(enable = "avx512bw")] + unsafe fn id_avx512_128(a: __m128i) -> __m128i { + assert_eq!(a, __m128i(1, 2)); + a.clone() + } + + #[target_feature(enable = "avx512bw")] + unsafe fn id_avx512_256(a: __m256i) -> __m256i { + assert_eq!(a, __m256i(3, 4, 5, 6)); + a.clone() + } + + #[target_feature(enable = "avx512bw")] + unsafe fn id_avx512_512(a: __m512i) -> __m512i { + assert_eq!(a, __m512i(7, 8, 9, 10, 11, 12, 13, 14)); + a.clone() + } +} + +#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))] +mod test { + pub fn main(level: &str) {} +} diff --git a/src/test/run-pass/thinlto/all-crates.rs b/src/test/run-pass/thinlto/all-crates.rs new file mode 100644 index 0000000000000..772a9ec8293e4 --- /dev/null +++ b/src/test/run-pass/thinlto/all-crates.rs @@ -0,0 +1,17 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Clto=thin +// no-prefer-dynamic +// min-llvm-version 4.0 + +fn main() { + println!("hello!"); +} diff --git a/src/test/run-pass/thinlto/thin-lto-inlines2.rs b/src/test/run-pass/thinlto/thin-lto-inlines2.rs index 0e8ad08a5f680..6020f72415dad 100644 --- a/src/test/run-pass/thinlto/thin-lto-inlines2.rs +++ b/src/test/run-pass/thinlto/thin-lto-inlines2.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z thinlto -C codegen-units=8 -O -C lto +// compile-flags: -C codegen-units=8 -O -C lto=thin // aux-build:thin-lto-inlines-aux.rs // min-llvm-version 4.0 // no-prefer-dynamic diff --git a/src/test/run-pass/trait-inheritance-cross-trait-call-xc.rs b/src/test/run-pass/trait-inheritance-cross-trait-call-xc.rs index e1610c1db07fd..3e68bea57c660 100644 --- a/src/test/run-pass/trait-inheritance-cross-trait-call-xc.rs +++ b/src/test/run-pass/trait-inheritance-cross-trait-call-xc.rs @@ -8,10 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
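// The simd-target-feature-mixup.rs test above re-runs itself at several
// feature levels and checks that SIMD values survive calls between functions
// with different `#[target_feature]` attributes. A minimal sketch of the
// attribute itself (leaving out the test's self-exec harness), assuming an
// x86_64 target where sse2 is part of the baseline:

#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "sse2")]
unsafe fn sum_sse2(a: u64, b: u64) -> u64 {
    // Inside this body the compiler is allowed to assume sse2 is available.
    a + b
}

#[cfg(target_arch = "x86_64")]
fn main() {
    // sse2 is guaranteed on x86_64, so the unsafe call is fine in practice.
    unsafe { assert_eq!(sum_sse2(40, 2), 42); }
}

#[cfg(not(target_arch = "x86_64"))]
fn main() {}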
-// aux-build:trait_inheritance_cross_trait_call_xc_aux.rs +// aux-build:trait_xc_call_aux.rs -extern crate trait_inheritance_cross_trait_call_xc_aux as aux; +extern crate trait_xc_call_aux as aux; use aux::Foo; diff --git a/src/test/rustdoc-js/from_u.js b/src/test/rustdoc-js/from_u.js index 0296788f7a0f5..34b98a758df9d 100644 --- a/src/test/rustdoc-js/from_u.js +++ b/src/test/rustdoc-js/from_u.js @@ -15,7 +15,5 @@ const EXPECTED = { { 'path': 'std::char', 'name': 'from_u32' }, { 'path': 'std::str', 'name': 'from_utf8' }, { 'path': 'std::string::String', 'name': 'from_utf8' }, - { 'path': 'std::i32', 'name': 'from_unsigned' }, - { 'path': 'std::i128', 'name': 'from_unsigned' }, ], }; diff --git a/src/test/rustdoc/auxiliary/masked.rs b/src/test/rustdoc/auxiliary/masked.rs new file mode 100644 index 0000000000000..e0d53a72220f9 --- /dev/null +++ b/src/test/rustdoc/auxiliary/masked.rs @@ -0,0 +1,20 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[derive(Clone)] +pub struct MaskedStruct; + +pub trait MaskedTrait { + fn masked_method(); +} + +impl MaskedTrait for String { + fn masked_method() {} +} diff --git a/src/test/rustdoc/intra-links.rs b/src/test/rustdoc/intra-links.rs index aa6f553875441..4726323e11cef 100644 --- a/src/test/rustdoc/intra-links.rs +++ b/src/test/rustdoc/intra-links.rs @@ -10,7 +10,13 @@ // @has intra_links/index.html // @has - '//a/@href' '../intra_links/struct.ThisType.html' +// @has - '//a/@href' '../intra_links/struct.ThisType.html#method.this_method' // @has - '//a/@href' '../intra_links/enum.ThisEnum.html' +// @has - '//a/@href' '../intra_links/enum.ThisEnum.html#ThisVariant.v' +// @has - '//a/@href' '../intra_links/trait.ThisTrait.html' +// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#tymethod.this_associated_method' +// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#associatedtype.ThisAssociatedType' +// @has - '//a/@href' '../intra_links/trait.ThisTrait.html#associatedconstant.THIS_ASSOCIATED_CONST' // @has - '//a/@href' '../intra_links/trait.ThisTrait.html' // @has - '//a/@href' '../intra_links/type.ThisAlias.html' // @has - '//a/@href' '../intra_links/union.ThisUnion.html' @@ -23,8 +29,13 @@ //! In this crate we would like to link to: //! //! * [`ThisType`](ThisType) +//! * [`ThisType::this_method`](ThisType::this_method) //! * [`ThisEnum`](ThisEnum) +//! * [`ThisEnum::ThisVariant`](ThisEnum::ThisVariant) //! * [`ThisTrait`](ThisTrait) +//! * [`ThisTrait::this_associated_method`](ThisTrait::this_associated_method) +//! * [`ThisTrait::ThisAssociatedType`](ThisTrait::ThisAssociatedType) +//! * [`ThisTrait::THIS_ASSOCIATED_CONST`](ThisTrait::THIS_ASSOCIATED_CONST) //! * [`ThisAlias`](ThisAlias) //! * [`ThisUnion`](ThisUnion) //! * [`this_function`](this_function()) @@ -45,8 +56,16 @@ macro_rules! 
this_macro { } pub struct ThisType; + +impl ThisType { + pub fn this_method() {} +} pub enum ThisEnum { ThisVariant, } -pub trait ThisTrait {} +pub trait ThisTrait { + type ThisAssociatedType; + const THIS_ASSOCIATED_CONST: u8; + fn this_associated_method(); +} pub type ThisAlias = Result<(), ()>; pub union ThisUnion { this_field: usize, } diff --git a/src/test/rustdoc/masked.rs b/src/test/rustdoc/masked.rs new file mode 100644 index 0000000000000..1f398da84e57e --- /dev/null +++ b/src/test/rustdoc/masked.rs @@ -0,0 +1,40 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:masked.rs + +#![feature(doc_masked)] + +#![crate_name = "foo"] + +#[doc(masked)] +extern crate masked; + +// @!has 'search-index.js' 'masked_method' + +// @!has 'foo/struct.String.html' 'MaskedTrait' +// @!has 'foo/struct.String.html' 'masked_method' +pub use std::string::String; + +// @!has 'foo/trait.Clone.html' 'MaskedStruct' +pub use std::clone::Clone; + +// @!has 'foo/struct.MyStruct.html' 'MaskedTrait' +// @!has 'foo/struct.MyStruct.html' 'masked_method' +pub struct MyStruct; + +impl masked::MaskedTrait for MyStruct { + fn masked_method() {} +} + +// @!has 'foo/trait.MyTrait.html' 'MaskedStruct' +pub trait MyTrait {} + +impl MyTrait for masked::MaskedStruct {} diff --git a/src/test/ui/issue-47511.rs b/src/test/ui/issue-47511.rs new file mode 100644 index 0000000000000..df4ff301bc9ce --- /dev/null +++ b/src/test/ui/issue-47511.rs @@ -0,0 +1,35 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #47511: anonymous lifetimes can appear +// unconstrained in a return type, but only if they appear just once +// in the input, as the input to a projection. 
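// In `f`, the elided lifetime reaches the inputs only through the
// associated-type projection hidden behind the `X` alias, and, as the
// expected E0581 note puts it, lifetimes appearing in an associated type are
// not considered constrained; `g` hits the same restriction with the named
// lifetime `'a`.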
+ +fn f(_: X) -> X { + //~^ ERROR return type references an anonymous lifetime + unimplemented!() +} + +fn g<'a>(_: X<'a>) -> X<'a> { + //~^ ERROR return type references lifetime `'a`, which is not constrained + unimplemented!() +} + +type X<'a> = <&'a () as Trait>::Value; + +trait Trait { + type Value; +} + +impl<'a> Trait for &'a () { + type Value = (); +} + +fn main() {} diff --git a/src/test/ui/issue-47511.stderr b/src/test/ui/issue-47511.stderr new file mode 100644 index 0000000000000..fabd6b6c25396 --- /dev/null +++ b/src/test/ui/issue-47511.stderr @@ -0,0 +1,16 @@ +error[E0581]: return type references an anonymous lifetime which is not constrained by the fn input types + --> $DIR/issue-47511.rs:15:15 + | +15 | fn f(_: X) -> X { + | ^ + | + = note: lifetimes appearing in an associated type are not considered constrained + +error[E0581]: return type references lifetime `'a`, which is not constrained by the fn input types + --> $DIR/issue-47511.rs:20:23 + | +20 | fn g<'a>(_: X<'a>) -> X<'a> { + | ^^^^^ + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/issue-47706.rs b/src/test/ui/issue-47706.rs new file mode 100644 index 0000000000000..24a0f66f5b1c4 --- /dev/null +++ b/src/test/ui/issue-47706.rs @@ -0,0 +1,24 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct Foo { + foo: Option<i32>, +} + +impl Foo { + pub fn new(foo: Option<i32>, _: ()) -> Foo { + Foo { foo } + } + + pub fn map(self) -> Option<Foo> { + self.foo.map(Foo::new) + } + //~^^ ERROR function is expected to take 1 argument, but it takes 2 arguments [E0593] +} diff --git a/src/test/ui/issue-47706.stderr b/src/test/ui/issue-47706.stderr new file mode 100644 index 0000000000000..0916dc64292e3 --- /dev/null +++ b/src/test/ui/issue-47706.stderr @@ -0,0 +1,13 @@ +error[E0601]: main function not found + +error[E0593]: function is expected to take 1 argument, but it takes 2 arguments + --> $DIR/issue-47706.rs:21:18 + | +16 | pub fn new(foo: Option<i32>, _: ()) -> Foo { + | ------------------------------------------ takes 2 arguments +... 
+21 | self.foo.map(Foo::new) + | ^^^ expected function that takes 1 argument + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/variadic-ffi-3.rs b/src/test/ui/variadic-ffi-3.rs index 12beebc181baf..9807952c636e1 100644 --- a/src/test/ui/variadic-ffi-3.rs +++ b/src/test/ui/variadic-ffi-3.rs @@ -31,11 +31,11 @@ fn main() { //~| expected type `extern "C" fn(isize, u8, ...)` //~| found type `extern "C" fn(isize, u8) {bar}` - foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function, cast to `c_double` - foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function, cast to `c_int` - foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function, cast to `c_int` - foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function, cast to `c_uint` - foo(1, 2, 1i16); //~ ERROR can't pass `i16` to variadic function, cast to `c_int` - foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function, cast to `c_uint` + foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function + foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function + foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function + foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function + foo(1, 2, 1i16); //~ ERROR can't pass `i16` to variadic function + foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function } } diff --git a/src/test/ui/variadic-ffi-3.stderr b/src/test/ui/variadic-ffi-3.stderr index be158c1e39896..54275fbc4f29f 100644 --- a/src/test/ui/variadic-ffi-3.stderr +++ b/src/test/ui/variadic-ffi-3.stderr @@ -34,41 +34,41 @@ error[E0308]: mismatched types = note: expected type `extern "C" fn(isize, u8, ...)` found type `extern "C" fn(isize, u8) {bar}` -error[E0617]: can't pass `f32` to variadic function, cast to `c_double` +error[E0617]: can't pass `f32` to variadic function --> $DIR/variadic-ffi-3.rs:34:19 | -34 | foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function, cast to `c_double` - | ^^^^ +34 | foo(1, 2, 3f32); //~ ERROR can't pass `f32` to variadic function + | ^^^^ help: cast the value to `c_double`: `3f32 as c_double` -error[E0617]: can't pass `bool` to variadic function, cast to `c_int` +error[E0617]: can't pass `bool` to variadic function --> $DIR/variadic-ffi-3.rs:35:19 | -35 | foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function, cast to `c_int` - | ^^^^ +35 | foo(1, 2, true); //~ ERROR can't pass `bool` to variadic function + | ^^^^ help: cast the value to `c_int`: `true as c_int` -error[E0617]: can't pass `i8` to variadic function, cast to `c_int` +error[E0617]: can't pass `i8` to variadic function --> $DIR/variadic-ffi-3.rs:36:19 | -36 | foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function, cast to `c_int` - | ^^^ +36 | foo(1, 2, 1i8); //~ ERROR can't pass `i8` to variadic function + | ^^^ help: cast the value to `c_int`: `1i8 as c_int` -error[E0617]: can't pass `u8` to variadic function, cast to `c_uint` +error[E0617]: can't pass `u8` to variadic function --> $DIR/variadic-ffi-3.rs:37:19 | -37 | foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function, cast to `c_uint` - | ^^^ +37 | foo(1, 2, 1u8); //~ ERROR can't pass `u8` to variadic function + | ^^^ help: cast the value to `c_uint`: `1u8 as c_uint` -error[E0617]: can't pass `i16` to variadic function, cast to `c_int` +error[E0617]: can't pass `i16` to variadic function --> $DIR/variadic-ffi-3.rs:38:19 | -38 | foo(1, 2, 1i16); //~ ERROR can't pass `i16` to variadic function, cast to `c_int` - | ^^^^ +38 | foo(1, 2, 1i16); //~ ERROR can't pass `i16` to 
variadic function + | ^^^^ help: cast the value to `c_int`: `1i16 as c_int` -error[E0617]: can't pass `u16` to variadic function, cast to `c_uint` +error[E0617]: can't pass `u16` to variadic function --> $DIR/variadic-ffi-3.rs:39:19 | -39 | foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function, cast to `c_uint` - | ^^^^ +39 | foo(1, 2, 1u16); //~ ERROR can't pass `u16` to variadic function + | ^^^^ help: cast the value to `c_uint`: `1u16 as c_uint` error: aborting due to 10 previous errors
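For reference, the reworked E0617 output above replaces the old "cast to `c_double`" suffix with a structured suggestion that spells out the expression to write. Applied to the `printf` case, code that follows the suggestions and compiles cleanly would look roughly like this (assuming a target where `printf` comes from the linked C runtime):

use std::os::raw::{c_char, c_double, c_int};

extern "C" {
    fn printf(fmt: *const c_char, ...) -> c_int;
}

fn main() {
    let x = 3f32;
    let flag = true;
    unsafe {
        // Casting as the E0617 help suggests keeps the variadic call well-typed.
        printf(b"%f %d\n\0".as_ptr() as *const c_char, x as c_double, flag as c_int);
    }
}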