From 08b0aca05e3709766fd7e0e01ec56a8511e4c46b Mon Sep 17 00:00:00 2001 From: Jeremy Fitzhardinge Date: Tue, 9 Apr 2019 13:40:21 -0700 Subject: [PATCH 01/17] string: implement From<&String> for String Allow Strings to be created from borrowed Strings. This is mostly to make things like passing &String to an `impl Into` parameter frictionless. --- src/liballoc/string.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs index a3e2098695f70..ace2b3ebf5599 100644 --- a/src/liballoc/string.rs +++ b/src/liballoc/string.rs @@ -2179,6 +2179,14 @@ impl From<&str> for String { } } +#[stable(feature = "from_ref_string", since = "1.35.0")] +impl From<&String> for String { + #[inline] + fn from(s: &String) -> String { + s.clone() + } +} + // note: test pulls in libstd, which causes errors here #[cfg(not(test))] #[stable(feature = "string_from_box", since = "1.18.0")] From eba03d462e987a969d21aa74632e4d96dbe9751e Mon Sep 17 00:00:00 2001 From: Chris Gregory Date: Fri, 12 Apr 2019 01:23:23 -0500 Subject: [PATCH 02/17] Fix convert module's documentation links --- src/libcore/convert.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index e903bd936c484..dbe0d204aca8d 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -213,7 +213,7 @@ pub trait AsMut { /// /// # Generic Implementations /// -/// - [`From`]` for U` implies `Into for T` +/// - [`From`]` for U` implies `Into for T` /// - [`Into`]` is reflexive, which means that `Into for T` is implemented /// /// # Implementing `Into` for conversions to external types @@ -273,7 +273,7 @@ pub trait AsMut { /// [`Option`]: ../../std/option/enum.Option.html /// [`Result`]: ../../std/result/enum.Result.html /// [`String`]: ../../std/string/struct.String.html -/// [From]: trait.From.html +/// [`From`]: trait.From.html /// [`into`]: trait.Into.html#tymethod.into #[stable(feature = "rust1", since = "1.0.0")] pub trait Into: Sized { @@ -285,18 +285,18 @@ pub trait Into: Sized { /// Used to do value-to-value conversions while consuming the input value. It is the reciprocal of /// [`Into`]. /// -/// One should always prefer implementing [`From`] over [`Into`] -/// because implementing [`From`] automatically provides one with a implementation of [`Into`] +/// One should always prefer implementing `From` over [`Into`] +/// because implementing `From` automatically provides one with a implementation of [`Into`] /// thanks to the blanket implementation in the standard library. /// /// Only implement [`Into`] if a conversion to a type outside the current crate is required. -/// [`From`] cannot do these type of conversions because of Rust's orphaning rules. +/// `From` cannot do these type of conversions because of Rust's orphaning rules. /// See [`Into`] for more details. /// -/// Prefer using [`Into`] over using [`From`] when specifying trait bounds on a generic function. +/// Prefer using [`Into`] over using `From` when specifying trait bounds on a generic function. /// This way, types that directly implement [`Into`] can be used as arguments as well. /// -/// The [`From`] is also very useful when performing error handling. When constructing a function +/// The `From` is also very useful when performing error handling. When constructing a function /// that is capable of failing, the return type will generally be of the form `Result`. 
/// The `From` trait simplifies error handling by allowing a function to return a single error type /// that encapsulate multiple error types. See the "Examples" section and [the book][book] for more @@ -306,8 +306,8 @@ pub trait Into: Sized { /// /// # Generic Implementations /// -/// - [`From`]` for U` implies [`Into`]` for T` -/// - [`From`] is reflexive, which means that `From for T` is implemented +/// - `From` for U` implies [`Into`]` for T` +/// - `From` is reflexive, which means that `From for T` is implemented /// /// # Examples /// @@ -361,7 +361,7 @@ pub trait Into: Sized { /// [`Option`]: ../../std/option/enum.Option.html /// [`Result`]: ../../std/result/enum.Result.html /// [`String`]: ../../std/string/struct.String.html -/// [`Into`]: trait.Into.html +/// [`Into`]: trait.Into.html /// [`from`]: trait.From.html#tymethod.from /// [book]: ../../book/ch09-00-error-handling.html #[stable(feature = "rust1", since = "1.0.0")] @@ -422,7 +422,7 @@ pub trait TryInto: Sized { /// /// # Generic Implementations /// -/// - `TryFrom for U` implies [`TryInto`]` for T` +/// - `TryFrom for U` implies [`TryInto`]` for T` /// - [`try_from`] is reflexive, which means that `TryFrom for T` /// is implemented and cannot fail -- the associated `Error` type for /// calling `T::try_from()` on a value of type `T` is `Infallible`. From 6bf94cd3ff4af69c8128fb64d28a60f6c9385c19 Mon Sep 17 00:00:00 2001 From: Chris Gregory Date: Sat, 13 Apr 2019 10:34:42 -0500 Subject: [PATCH 03/17] Remove dangling ` in Into documentation --- src/libcore/convert.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index dbe0d204aca8d..e981001bd6415 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -214,7 +214,7 @@ pub trait AsMut { /// # Generic Implementations /// /// - [`From`]` for U` implies `Into for T` -/// - [`Into`]` is reflexive, which means that `Into for T` is implemented +/// - [`Into`] is reflexive, which means that `Into for T` is implemented /// /// # Implementing `Into` for conversions to external types /// From b701d32ca80e58d41cadc18755567794d5e65bd9 Mon Sep 17 00:00:00 2001 From: Chris Gregory Date: Sat, 13 Apr 2019 10:38:06 -0500 Subject: [PATCH 04/17] Remove broken links to self in Into documentation --- src/libcore/convert.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index e981001bd6415..2498159edc708 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -203,9 +203,9 @@ pub trait AsMut { /// A value-to-value conversion that consumes the input value. The /// opposite of [`From`]. /// -/// One should only implement [`Into`] if a conversion to a type outside the current crate is -/// required. Otherwise one should always prefer implementing [`From`] over [`Into`] because -/// implementing [`From`] automatically provides one with a implementation of [`Into`] thanks to +/// One should only implement `Into` if a conversion to a type outside the current crate is +/// required. Otherwise one should always prefer implementing [`From`] over `Into` because +/// implementing [`From`] automatically provides one with a implementation of `Into` thanks to /// the blanket implementation in the standard library. [`From`] cannot do these type of /// conversions because of Rust's orphaning rules. 
/// @@ -214,7 +214,7 @@ pub trait AsMut { /// # Generic Implementations /// /// - [`From`]` for U` implies `Into for T` -/// - [`Into`] is reflexive, which means that `Into for T` is implemented +/// - `Into` is reflexive, which means that `Into for T` is implemented /// /// # Implementing `Into` for conversions to external types /// From 4a33ece38216a42764a0c74ae99baf183b888817 Mon Sep 17 00:00:00 2001 From: Chris Gregory Date: Sat, 13 Apr 2019 11:57:02 -0500 Subject: [PATCH 05/17] Remove blank lines in AsRef documentation --- src/libcore/convert.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index 2498159edc708..e04c01e50c0bc 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -104,7 +104,6 @@ pub const fn identity(x: T) -> T { x } /// If you need to do a costly conversion it is better to implement [`From`] with type /// `&T` or write a custom function. /// -/// /// `AsRef` has the same signature as [`Borrow`], but `Borrow` is different in few aspects: /// /// - Unlike `AsRef`, `Borrow` has a blanket impl for any `T`, and can be used to accept either @@ -149,7 +148,6 @@ pub const fn identity(x: T) -> T { x } /// let s = "hello".to_string(); /// is_hello(s); /// ``` -/// #[stable(feature = "rust1", since = "1.0.0")] pub trait AsRef { /// Performs the conversion. From 27ff5360ab661a3c20ab7256529aeb61b4e2b215 Mon Sep 17 00:00:00 2001 From: Chris Gregory Date: Sat, 13 Apr 2019 15:25:13 -0500 Subject: [PATCH 06/17] Reorder blank lines in AsMut documentation --- src/libcore/convert.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index e04c01e50c0bc..86846d3842c08 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -180,6 +180,7 @@ pub trait AsRef { /// write a function `add_one`that takes all arguments that can be converted to `&mut u64`. /// Because [`Box`] implements `AsMut` `add_one` accepts arguments of type /// `&mut Box` as well: +/// /// ``` /// fn add_one>(num: &mut T) { /// *num.as_mut() += 1; @@ -189,8 +190,8 @@ pub trait AsRef { /// add_one(&mut boxed_num); /// assert_eq!(*boxed_num, 1); /// ``` -/// [`Box`]: ../../std/boxed/struct.Box.html /// +/// [`Box`]: ../../std/boxed/struct.Box.html #[stable(feature = "rust1", since = "1.0.0")] pub trait AsMut { /// Performs the conversion. From 1e48da6c81ba407b83189adff90abbd224dc1b62 Mon Sep 17 00:00:00 2001 From: Chris Gregory Date: Sat, 13 Apr 2019 18:14:44 -0500 Subject: [PATCH 07/17] Escape &str in convert docs --- src/libcore/convert.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index 86846d3842c08..df178e0ff0a61 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -132,7 +132,7 @@ pub const fn identity(x: T) -> T { x } /// converted a the specified type `T`. /// /// For example: By creating a generic function that takes an `AsRef` we express that we -/// want to accept all references that can be converted to &str as an argument. +/// want to accept all references that can be converted to `&str` as an argument. /// Since both [`String`] and `&str` implement `AsRef` we can accept both as input argument. 
/// /// [`String`]: ../../std/string/struct.String.html @@ -312,7 +312,8 @@ pub trait Into: Sized { /// /// [`String`] implements `From<&str>`: /// -/// An explicit conversion from a &str to a String is done as follows: +/// An explicit conversion from a `&str` to a String is done as follows: +/// /// ``` /// let string = "hello".to_string(); /// let other_string = String::from("hello"); From 1f5d510604be553906cdcbb2eddaf5277faf543d Mon Sep 17 00:00:00 2001 From: Chris Gregory Date: Sat, 13 Apr 2019 21:23:31 -0500 Subject: [PATCH 08/17] Fix stray ` in previous change --- src/libcore/convert.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index df178e0ff0a61..9da1c02dea065 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -305,7 +305,7 @@ pub trait Into: Sized { /// /// # Generic Implementations /// -/// - `From` for U` implies [`Into`]` for T` +/// - `From for U` implies [`Into`]` for T` /// - `From` is reflexive, which means that `From for T` is implemented /// /// # Examples From c921aaed3967ba03b55ce1a6e2e4b192d95599c5 Mon Sep 17 00:00:00 2001 From: topecongiro Date: Fri, 10 May 2019 14:44:43 +0900 Subject: [PATCH 09/17] Include expression to wait for to the span of Await --- src/libsyntax/parse/parser.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index d97d1e2f0f4ee..ddc145e1cded2 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2764,6 +2764,7 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Paren))?; let expr = self.parse_expr()?; self.expect(&token::CloseDelim(token::Paren))?; + hi = self.prev_span; ex = ExprKind::Await(ast::AwaitOrigin::MacroLike, expr); } else if self.token.is_path_start() { let path = self.parse_path(PathStyle::Expr)?; From 1ea7c5fb2039933c67af8ba8629694733c0cd36e Mon Sep 17 00:00:00 2001 From: topecongiro Date: Sat, 11 May 2019 00:22:18 +0900 Subject: [PATCH 10/17] Update ui test --- src/test/ui/feature-gate/await-macro.stderr | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/ui/feature-gate/await-macro.stderr b/src/test/ui/feature-gate/await-macro.stderr index b6833655f6d8a..699a7a8886e89 100644 --- a/src/test/ui/feature-gate/await-macro.stderr +++ b/src/test/ui/feature-gate/await-macro.stderr @@ -2,7 +2,7 @@ error[E0658]: `await!()` macro syntax is unstable, and will soon be remove --> $DIR/await-macro.rs:9:5 | LL | await!(bar()); - | ^^^^^ + | ^^^^^^^^^^^^^ | = note: for more information, see https://github.com/rust-lang/rust/issues/50547 = help: add #![feature(await_macro)] to the crate attributes to enable From e392db6cd424983fc7542fa9b35d30de89db439a Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 12 May 2019 14:16:50 -0700 Subject: [PATCH 11/17] Update rustc book CLI docs. --- src/doc/rustc/src/command-line-arguments.md | 167 ++++++++++++++++++-- src/librustc/session/config.rs | 3 +- 2 files changed, 157 insertions(+), 13 deletions(-) diff --git a/src/doc/rustc/src/command-line-arguments.md b/src/doc/rustc/src/command-line-arguments.md index dfb40284ef6cf..bd7f6630ea2ad 100644 --- a/src/doc/rustc/src/command-line-arguments.md +++ b/src/doc/rustc/src/command-line-arguments.md @@ -17,28 +17,147 @@ to `#[cfg(verbose)]` and `#[cfg(feature = "serde")]` respectively. ## `-L`: add a directory to the library search path -When looking for external crates, a directory passed to this flag will be searched. 
+When looking for external crates or libraries, a directory passed to this flag +will be searched. + +The kind of search path can optionally be specified with the form `-L +KIND=PATH` where `KIND` may be one of: + +- `dependency` — Only search for transitive dependencies in this directory. +- `crate` — Only search for this crate's direct dependencies in this + directory. +- `native` — Only search for native libraries in this directory. +- `framework` — Only search for macOS frameworks in this directory. +- `all` — Search for all library kinds in this directory. This is the default + if `KIND` is not specified. ## `-l`: link the generated crate to a native library This flag allows you to specify linking to a specific native library when building a crate. +The kind of library can optionally be specified with the form `-l KIND=lib` +where `KIND` may be one of: + +- `dylib` — A native dynamic library. +- `static` — A native static library (such as a `.a` archive). +- `framework` — A macOS framework. + +The kind of library can be specified in a [`#[link]` +attribute][link-attribute]. If the kind is not specified in the `link` +attribute or on the command-line, it will link a dynamic library if available, +otherwise it will use a static library. If the kind is specified on the +command-line, it will override the kind specified in a `link` attribute. + +The name used in a `link` attribute may be overridden using the form `-l +ATTR_NAME:LINK_NAME` where `ATTR_NAME` is the name in the `link` attribute, +and `LINK_NAME` is the name of the actual library that will be linked. + +[link-attribute]: ../reference/items/external-blocks.html#the-link-attribute + ## `--crate-type`: a list of types of crates for the compiler to emit -This instructs `rustc` on which crate type to build. +This instructs `rustc` on which crate type to build. This flag accepts a +comma-separated list of values, and may be specified multiple times. The valid +crate types are: + +- `lib` — Generates a library kind preferred by the compiler, currently + defaults to `rlib`. +- `rlib` — A Rust static library. +- `staticlib` — A native static library. +- `dylib` — A Rust dynamic library. +- `cdylib` — A native dynamic library. +- `bin` — A runnable executable program. +- `proc-macro` — Generates a format suitable for a procedural macro library + that may be loaded by the compiler. + +The crate type may be specified with the [`crate_type` attribute][crate_type]. +The `--crate-type` command-line value will override the `crate_type` +attribute. + +More details may be found in the [linkage chapter] of the reference. + +[linkage chapter]: ../reference/linkage.html +[crate_type]: ../reference/linkage.html ## `--crate-name`: specify the name of the crate being built This informs `rustc` of the name of your crate. -## `--emit`: emit output other than a crate - -Instead of producing a crate, this flag can print out things like the assembly or LLVM-IR. +## `--edition`: specify the edition to use + +This flag takes a value of `2015` or `2018`. The default is `2015`. More +information about editions may be found in the [edition guide]. + +[edition guide]: ../edition-guide/introduction.html + +## `--emit`: specifies the types of output files to generate + +This flag controls the types of output files generated by the compiler. It +accepts a comma-separated list of values, and may be specified multiple times. +The valid emit kinds are: + +- `asm` — Generates a file with the crate's assembly code. The default output + filename is `CRATE_NAME.s`. 
+- `dep-info` — Generates a file with Makefile syntax that indicates all the + source files that were loaded to generate the crate. The default output + filename is `CRATE_NAME.d`. +- `link` — Generates the crates specified by `--crate-type`. The default + output filenames depend on the crate type and platform. This is the default + if `--emit` is not specified. +- `llvm-bc` — Generates a binary file containing the [LLVM bitcode]. The + default output filename is `CRATE_NAME.bc`. +- `llvm-ir` — Generates a file containing [LLVM IR]. The default output + filename is `CRATE_NAME.ll`. +- `metadata` — Generates a file containing metadata about the crate. The + default output filename is `CRATE_NAME.rmeta`. +- `mir` — Generates a file containing rustc's mid-level intermediate + representation. The default output filename is `CRATE_NAME.mir`. +- `obj` — Generates a native object file. The default output filename is + `CRATE_NAME.o`. + +The output filename can be set with the `-o` flag. A suffix may be added to +the filename with the `-C extra-filename` flag. The files are written to the +current directory unless the `--out-dir` flag is used. Each emission type may +also specify the output filename with the form `KIND=PATH`, which takes +precedence over the `-o` flag. + +[LLVM bitcode]: https://llvm.org/docs/BitCodeFormat.html +[LLVM IR]: https://llvm.org/docs/LangRef.html ## `--print`: print compiler information -This flag prints out various information about the compiler. +This flag prints out various information about the compiler. This flag may be +specified multiple times, and the information is printed in the order the +flags are specified. Specifying a `--print` flag will usually disable the +`--emit` step and will only print the requested information. The valid types +of print values are: + +- `crate-name` — The name of the crate. +- `file-names` — The names of the files created by the `link` emit kind. +- `sysroot` — Path to the sysroot. +- `cfg` — List of cfg values. See [conditional compilation] for more + information about cfg values. +- `target-list` — List of known targets. The target may be selected with the + `--target` flag. +- `target-cpus` — List of available CPU values for the current target. The + target CPU may be selected with the `-C target-cpu=val` flag. +- `target-features` — List of available target features for the current + target. Target features may be enabled with the `-C target-feature=val` + flag. +- `relocation-models` — List of relocation models. Relocation models may be + selected with the `-C relocation-model=val` flag. +- `code-models` — List of code models. Code models may be selected with the + `-C code-model=val` flag. +- `tls-models` — List of Thread Local Storage models supported. The model may + be selected with the `-Z tls-model=val` flag. +- `native-static-libs` — This may be used when creating a `staticlib` crate + type. If this is the only flag, it will perform a full compilation and + include a diagnostic note that indicates the linker flags to use when + linking the resulting static library. The note starts with the text + `native-static-libs:` to make it easier to fetch the output. + +[conditional compilation]: ../reference/conditional-compilation.html ## `-g`: include debug information @@ -54,7 +173,8 @@ This flag controls the output filename. ## `--out-dir`: directory to write the output in -The outputted crate will be written to this directory. +The outputted crate will be written to this directory. 
This flag is ignored if +the `-o` flag is used. ## `--explain`: provide a detailed explanation of an error message @@ -111,8 +231,9 @@ This flag, when combined with other flags, makes them produce extra output. ## `--extern`: specify where an external library is located -This flag allows you to pass the name and location of an external crate that will -be linked into the crate you're buildling. +This flag allows you to pass the name and location of an external crate that +will be linked into the crate you are building. This flag may be specified +multiple times. The format of the value should be `CRATENAME=PATH`. ## `--sysroot`: Override the system root @@ -121,8 +242,32 @@ distribution; this flag allows that to be overridden. ## `--error-format`: control how errors are produced -This flag lets you control the format of errors. +This flag lets you control the format of messages. Messages are printed to +stderr. The valid options are: + +- `human` — Human-readable output. This is the default. +- `json` — Structured JSON output. +- `short` — Short, one-line messages. ## `--color`: configure coloring of output -This flag lets you control color settings of the output. +This flag lets you control color settings of the output. The valid options +are: + +- `auto` — Use colors if output goes to a tty. This is the default. +- `always` — Always use colors. +- `never` — Never colorize output. + +## `--remap-path-prefix`: remap source names in output + +Remap source path prefixes in all output, including compiler diagnostics, +debug information, macro expansions, etc. It takes a value of the form +`FROM=TO` where a path prefix equal to `FROM` is rewritten to the value `TO`. +The `FROM` may itself contain an `=` symbol, but the `TO` value may not. This +flag may be specified multiple times. + +This is useful for normalizing build products, for example by removing the +current directory out of pathnames emitted into the object files. The +replacement is purely textual, with no consideration of the current system's +pathname syntax. For example `--remap-path-prefix foo=bar` will match +`foo/lib.rs` but not `./foo/lib.rs`. 
diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 12427daa38381..b938ae504d842 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -1744,8 +1744,7 @@ pub fn rustc_short_optgroups() -> Vec { opt::multi_s( "", "print", - "Comma separated list of compiler information to \ - print on stdout", + "Compiler information to print on stdout", "[crate-name|file-names|sysroot|cfg|target-list|\ target-cpus|target-features|relocation-models|\ code-models|tls-models|target-spec-json|native-static-libs]", From d29f0d23c3624047a3f3671a8e352783e8796373 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Sun, 12 May 2019 19:55:16 +0300 Subject: [PATCH 12/17] Move token tree related lexer state to a separate struct We only used a bunch of fields when tokenizing into a token tree, so let's move them out of the base lexer --- src/libsyntax/parse/lexer/mod.rs | 14 +--- src/libsyntax/parse/lexer/tokentrees.rs | 96 +++++++++++++++++-------- src/libsyntax/parse/mod.rs | 9 +-- 3 files changed, 71 insertions(+), 48 deletions(-) diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 2882acb0e780c..60494a6a2bdc7 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -66,15 +66,7 @@ pub struct StringReader<'a> { span: Span, /// The raw source span which *does not* take `override_span` into account span_src_raw: Span, - /// Stack of open delimiters and their spans. Used for error message. - open_braces: Vec<(token::DelimToken, Span)>, - crate unmatched_braces: Vec, - /// The type and spans for all braces - /// - /// Used only for error recovery when arriving to EOF with mismatched braces. - matching_delim_spans: Vec<(token::DelimToken, Span, Span)>, - crate override_span: Option, - last_unclosed_found_span: Option, + override_span: Option, } impl<'a> StringReader<'a> { @@ -254,11 +246,7 @@ impl<'a> StringReader<'a> { token: token::Eof, span: syntax_pos::DUMMY_SP, span_src_raw: syntax_pos::DUMMY_SP, - open_braces: Vec::new(), - unmatched_braces: Vec::new(), - matching_delim_spans: Vec::new(), override_span, - last_unclosed_found_span: None, } } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 0db36c84cdfeb..a6e176c02a09b 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -1,14 +1,42 @@ +use syntax_pos::Span; + use crate::print::pprust::token_to_string; use crate::parse::lexer::{StringReader, UnmatchedBrace}; use crate::parse::{token, PResult}; use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint}; impl<'a> StringReader<'a> { + crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec) { + let mut tt_reader = TokenTreesReader { + string_reader: self, + open_braces: Vec::new(), + unmatched_braces: Vec::new(), + matching_delim_spans: Vec::new(), + last_unclosed_found_span: None, + }; + let res = tt_reader.parse_all_token_trees(); + (res, tt_reader.unmatched_braces) + } +} + +struct TokenTreesReader<'a> { + string_reader: StringReader<'a>, + /// Stack of open delimiters and their spans. Used for error message. + open_braces: Vec<(token::DelimToken, Span)>, + unmatched_braces: Vec, + /// The type and spans for all braces + /// + /// Used only for error recovery when arriving to EOF with mismatched braces. 
+ matching_delim_spans: Vec<(token::DelimToken, Span, Span)>, + last_unclosed_found_span: Option, +} + +impl<'a> TokenTreesReader<'a> { // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`. - crate fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { + fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { let mut tts = Vec::new(); - while self.token != token::Eof { + while self.string_reader.token != token::Eof { tts.push(self.parse_token_tree()?); } @@ -19,7 +47,7 @@ impl<'a> StringReader<'a> { fn parse_token_trees_until_close_delim(&mut self) -> TokenStream { let mut tts = vec![]; loop { - if let token::CloseDelim(..) = self.token { + if let token::CloseDelim(..) = self.string_reader.token { return TokenStream::new(tts); } @@ -34,11 +62,12 @@ impl<'a> StringReader<'a> { } fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> { - let sm = self.sess.source_map(); - match self.token { + let sm = self.string_reader.sess.source_map(); + match self.string_reader.token { token::Eof => { let msg = "this file contains an un-closed delimiter"; - let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg); + let mut err = self.string_reader.sess.span_diagnostic + .struct_span_err(self.span(), msg); for &(_, sp) in &self.open_braces { err.span_label(sp, "un-closed delimiter"); } @@ -46,13 +75,12 @@ impl<'a> StringReader<'a> { if let Some((delim, _)) = self.open_braces.last() { if let Some((_, open_sp, close_sp)) = self.matching_delim_spans.iter() .filter(|(d, open_sp, close_sp)| { - - if let Some(close_padding) = sm.span_to_margin(*close_sp) { - if let Some(open_padding) = sm.span_to_margin(*open_sp) { - return delim == d && close_padding != open_padding; + if let Some(close_padding) = sm.span_to_margin(*close_sp) { + if let Some(open_padding) = sm.span_to_margin(*open_sp) { + return delim == d && close_padding != open_padding; + } } - } - false + false }).next() // these are in reverse order as they get inserted on close, but { // we want the last open/first close err.span_label( @@ -69,11 +97,11 @@ impl<'a> StringReader<'a> { }, token::OpenDelim(delim) => { // The span for beginning of the delimited section - let pre_span = self.span; + let pre_span = self.span(); // Parse the open delimiter. - self.open_braces.push((delim, self.span)); - self.real_token(); + self.open_braces.push((delim, self.span())); + self.string_reader.real_token(); // Parse the token trees within the delimiters. // We stop at any delimiter so we can try to recover if the user @@ -81,9 +109,9 @@ impl<'a> StringReader<'a> { let tts = self.parse_token_trees_until_close_delim(); // Expand to cover the entire delimited token tree - let delim_span = DelimSpan::from_pair(pre_span, self.span); + let delim_span = DelimSpan::from_pair(pre_span, self.span()); - match self.token { + match self.string_reader.token { // Correct delimiter. token::CloseDelim(d) if d == delim => { let (open_brace, open_brace_span) = self.open_braces.pop().unwrap(); @@ -93,26 +121,26 @@ impl<'a> StringReader<'a> { self.matching_delim_spans.clear(); } else { self.matching_delim_spans.push( - (open_brace, open_brace_span, self.span), + (open_brace, open_brace_span, self.span()), ); } // Parse the close delimiter. - self.real_token(); + self.string_reader.real_token(); } // Incorrect delimiter. 
token::CloseDelim(other) => { let mut unclosed_delimiter = None; let mut candidate = None; - if self.last_unclosed_found_span != Some(self.span) { + if self.last_unclosed_found_span != Some(self.span()) { // do not complain about the same unclosed delimiter multiple times - self.last_unclosed_found_span = Some(self.span); + self.last_unclosed_found_span = Some(self.span()); // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The parser just hasn't gotten to them yet. if let Some(&(_, sp)) = self.open_braces.last() { unclosed_delimiter = Some(sp); }; - if let Some(current_padding) = sm.span_to_margin(self.span) { + if let Some(current_padding) = sm.span_to_margin(self.span()) { for (brace, brace_span) in &self.open_braces { if let Some(padding) = sm.span_to_margin(*brace_span) { // high likelihood of these two corresponding @@ -126,7 +154,7 @@ impl<'a> StringReader<'a> { self.unmatched_braces.push(UnmatchedBrace { expected_delim: tok, found_delim: other, - found_span: self.span, + found_span: self.span(), unclosed_span: unclosed_delimiter, candidate_span: candidate, }); @@ -142,7 +170,7 @@ impl<'a> StringReader<'a> { // bar(baz( // } // Incorrect delimiter but matches the earlier `{` if !self.open_braces.iter().any(|&(b, _)| b == other) { - self.real_token(); + self.string_reader.real_token(); } } token::Eof => { @@ -162,22 +190,28 @@ impl<'a> StringReader<'a> { token::CloseDelim(_) => { // An unexpected closing delimiter (i.e., there is no // matching opening delimiter). - let token_str = token_to_string(&self.token); + let token_str = token_to_string(&self.string_reader.token); let msg = format!("unexpected close delimiter: `{}`", token_str); - let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg); - err.span_label(self.span, "unexpected close delimiter"); + let mut err = self.string_reader.sess.span_diagnostic + .struct_span_err(self.span(), &msg); + err.span_label(self.span(), "unexpected close delimiter"); Err(err) }, _ => { - let tt = TokenTree::Token(self.span, self.token.clone()); + let tt = TokenTree::Token(self.span(), self.string_reader.token.clone()); // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. - let raw = self.span_src_raw; - self.real_token(); - let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token); + let raw = self.string_reader.span_src_raw; + self.string_reader.real_token(); + let is_joint = raw.hi() == self.string_reader.span_src_raw.lo() + && token::is_op(&self.string_reader.token); Ok((tt, if is_joint { Joint } else { NonJoint })) } } } + + fn span(&self) -> Span { + self.string_reader.span + } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index be44b964ba5a7..1ddafb969c4b3 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -295,7 +295,7 @@ pub fn source_file_to_stream( } /// Given a source file, produces a sequence of token trees. Returns any buffered errors from -/// parsing the token tream. +/// parsing the token stream. 
pub fn maybe_file_to_stream( sess: &ParseSess, source_file: Lrc, @@ -303,14 +303,15 @@ pub fn maybe_file_to_stream( ) -> Result<(TokenStream, Vec), Vec> { let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?; srdr.real_token(); + let (token_trees, unmatched_braces) = srdr.into_token_trees(); - match srdr.parse_all_token_trees() { - Ok(stream) => Ok((stream, srdr.unmatched_braces)), + match token_trees { + Ok(stream) => Ok((stream, unmatched_braces)), Err(err) => { let mut buffer = Vec::with_capacity(1); err.buffer(&mut buffer); // Not using `emit_unclosed_delims` to use `db.buffer` - for unmatched in srdr.unmatched_braces { + for unmatched in unmatched_braces { let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( "incorrect close delimiter: `{}`", token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), From b91e0a378690871fa744768f38d42bd90830bcd0 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Mon, 13 May 2019 12:06:37 +0300 Subject: [PATCH 13/17] move span and token to tt reader --- src/libsyntax/parse/lexer/mod.rs | 12 ----- src/libsyntax/parse/lexer/tokentrees.rs | 60 ++++++++++++++----------- src/libsyntax/parse/mod.rs | 3 +- 3 files changed, 35 insertions(+), 40 deletions(-) diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 60494a6a2bdc7..9caa9ea807c1d 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -62,10 +62,6 @@ pub struct StringReader<'a> { // cache a direct reference to the source text, so that we don't have to // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time. src: Lrc, - token: token::Token, - span: Span, - /// The raw source span which *does not* take `override_span` into account - span_src_raw: Span, override_span: Option, } @@ -113,8 +109,6 @@ impl<'a> StringReader<'a> { sp: self.peek_span, }; self.advance_token()?; - self.span_src_raw = self.peek_span_src_raw; - Ok(ret_val) } @@ -151,9 +145,6 @@ impl<'a> StringReader<'a> { } } - self.token = t.tok.clone(); - self.span = t.sp; - Ok(t) } @@ -243,9 +234,6 @@ impl<'a> StringReader<'a> { peek_span_src_raw: syntax_pos::DUMMY_SP, src, fatal_errs: Vec::new(), - token: token::Eof, - span: syntax_pos::DUMMY_SP, - span_src_raw: syntax_pos::DUMMY_SP, override_span, } } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index a6e176c02a09b..1070d6dcb1b34 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -9,6 +9,8 @@ impl<'a> StringReader<'a> { crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec) { let mut tt_reader = TokenTreesReader { string_reader: self, + token: token::Eof, + span: syntax_pos::DUMMY_SP, open_braces: Vec::new(), unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), @@ -21,6 +23,8 @@ impl<'a> StringReader<'a> { struct TokenTreesReader<'a> { string_reader: StringReader<'a>, + token: token::Token, + span: Span, /// Stack of open delimiters and their spans. Used for error message. 
open_braces: Vec<(token::DelimToken, Span)>, unmatched_braces: Vec, @@ -36,7 +40,8 @@ impl<'a> TokenTreesReader<'a> { fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { let mut tts = Vec::new(); - while self.string_reader.token != token::Eof { + self.real_token(); + while self.token != token::Eof { tts.push(self.parse_token_tree()?); } @@ -47,7 +52,7 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_trees_until_close_delim(&mut self) -> TokenStream { let mut tts = vec![]; loop { - if let token::CloseDelim(..) = self.string_reader.token { + if let token::CloseDelim(..) = self.token { return TokenStream::new(tts); } @@ -63,11 +68,11 @@ impl<'a> TokenTreesReader<'a> { fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> { let sm = self.string_reader.sess.source_map(); - match self.string_reader.token { + match self.token { token::Eof => { let msg = "this file contains an un-closed delimiter"; let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span(), msg); + .struct_span_err(self.span, msg); for &(_, sp) in &self.open_braces { err.span_label(sp, "un-closed delimiter"); } @@ -97,11 +102,11 @@ impl<'a> TokenTreesReader<'a> { }, token::OpenDelim(delim) => { // The span for beginning of the delimited section - let pre_span = self.span(); + let pre_span = self.span; // Parse the open delimiter. - self.open_braces.push((delim, self.span())); - self.string_reader.real_token(); + self.open_braces.push((delim, self.span)); + self.real_token(); // Parse the token trees within the delimiters. // We stop at any delimiter so we can try to recover if the user @@ -109,9 +114,9 @@ impl<'a> TokenTreesReader<'a> { let tts = self.parse_token_trees_until_close_delim(); // Expand to cover the entire delimited token tree - let delim_span = DelimSpan::from_pair(pre_span, self.span()); + let delim_span = DelimSpan::from_pair(pre_span, self.span); - match self.string_reader.token { + match self.token { // Correct delimiter. token::CloseDelim(d) if d == delim => { let (open_brace, open_brace_span) = self.open_braces.pop().unwrap(); @@ -121,26 +126,26 @@ impl<'a> TokenTreesReader<'a> { self.matching_delim_spans.clear(); } else { self.matching_delim_spans.push( - (open_brace, open_brace_span, self.span()), + (open_brace, open_brace_span, self.span), ); } // Parse the close delimiter. - self.string_reader.real_token(); + self.real_token(); } // Incorrect delimiter. token::CloseDelim(other) => { let mut unclosed_delimiter = None; let mut candidate = None; - if self.last_unclosed_found_span != Some(self.span()) { + if self.last_unclosed_found_span != Some(self.span) { // do not complain about the same unclosed delimiter multiple times - self.last_unclosed_found_span = Some(self.span()); + self.last_unclosed_found_span = Some(self.span); // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The parser just hasn't gotten to them yet. 
if let Some(&(_, sp)) = self.open_braces.last() { unclosed_delimiter = Some(sp); }; - if let Some(current_padding) = sm.span_to_margin(self.span()) { + if let Some(current_padding) = sm.span_to_margin(self.span) { for (brace, brace_span) in &self.open_braces { if let Some(padding) = sm.span_to_margin(*brace_span) { // high likelihood of these two corresponding @@ -154,7 +159,7 @@ impl<'a> TokenTreesReader<'a> { self.unmatched_braces.push(UnmatchedBrace { expected_delim: tok, found_delim: other, - found_span: self.span(), + found_span: self.span, unclosed_span: unclosed_delimiter, candidate_span: candidate, }); @@ -170,7 +175,7 @@ impl<'a> TokenTreesReader<'a> { // bar(baz( // } // Incorrect delimiter but matches the earlier `{` if !self.open_braces.iter().any(|&(b, _)| b == other) { - self.string_reader.real_token(); + self.real_token(); } } token::Eof => { @@ -190,28 +195,31 @@ impl<'a> TokenTreesReader<'a> { token::CloseDelim(_) => { // An unexpected closing delimiter (i.e., there is no // matching opening delimiter). - let token_str = token_to_string(&self.string_reader.token); + let token_str = token_to_string(&self.token); let msg = format!("unexpected close delimiter: `{}`", token_str); let mut err = self.string_reader.sess.span_diagnostic - .struct_span_err(self.span(), &msg); - err.span_label(self.span(), "unexpected close delimiter"); + .struct_span_err(self.span, &msg); + err.span_label(self.span, "unexpected close delimiter"); Err(err) }, _ => { - let tt = TokenTree::Token(self.span(), self.string_reader.token.clone()); + let tt = TokenTree::Token(self.span, self.token.clone()); // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. 
- let raw = self.string_reader.span_src_raw; - self.string_reader.real_token(); - let is_joint = raw.hi() == self.string_reader.span_src_raw.lo() - && token::is_op(&self.string_reader.token); + let raw = self.string_reader.peek_span_src_raw; + self.real_token(); + let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo() + && token::is_op(&self.token); Ok((tt, if is_joint { Joint } else { NonJoint })) } } } - fn span(&self) -> Span { - self.string_reader.span + fn real_token(&mut self) { + let t = self.string_reader.real_token(); + self.token = t.tok; + self.span = t.sp; } } + diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 1ddafb969c4b3..4a9a7aec6adde 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -301,8 +301,7 @@ pub fn maybe_file_to_stream( source_file: Lrc, override_span: Option, ) -> Result<(TokenStream, Vec), Vec> { - let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?; - srdr.real_token(); + let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?; let (token_trees, unmatched_braces) = srdr.into_token_trees(); match token_trees { From e249f2e526cca687b78a766769c481cfb638f02e Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Mon, 13 May 2019 14:30:18 +0300 Subject: [PATCH 14/17] move raw span to tt reader See https://github.com/rust-lang/rust/pull/50838/files#r283296243 for explanation how jointness checking works with *next* pair --- src/libsyntax/parse/lexer/tokentrees.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 1070d6dcb1b34..4bfc5bb16c0bb 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -207,6 +207,8 @@ impl<'a> TokenTreesReader<'a> { // Note that testing for joint-ness here is done via the raw // source span as the joint-ness is a property of the raw source // rather than wanting to take `override_span` into account. + // Additionally, we actually check if the *next* pair of tokens + // is joint, but this is equivalent to checking the current pair. let raw = self.string_reader.peek_span_src_raw; self.real_token(); let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo() @@ -222,4 +224,3 @@ impl<'a> TokenTreesReader<'a> { self.span = t.sp; } } - From ea93215576ff04cab3bdb78c0d16ea7253488f40 Mon Sep 17 00:00:00 2001 From: Wesley Wiser Date: Mon, 13 May 2019 22:15:55 -0400 Subject: [PATCH 15/17] Bump measureme dependency to 0.3 measureme@0.3 adds a version header to the binary file format which will help reduce tool breakage in the future. 
--- Cargo.lock | 6 +++--- src/librustc/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 982070a243ee5..4417c25abcb4e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1481,7 +1481,7 @@ dependencies = [ [[package]] name = "measureme" -version = "0.2.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2346,7 +2346,7 @@ dependencies = [ "jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", - "measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "measureme 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "polonius-engine 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -4174,7 +4174,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" "checksum mdbook 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "90b5a8d7e341ceee5db3882a06078d42661ddcfa2b3687319cc5da76ec4e782f" "checksum mdbook 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0ba0d44cb4089c741b9a91f3e5218298a40699c2f3a070a85014eed290c60819" -"checksum measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "36bb2b263a6795d352035024d6b30ce465bb79a5e5280d74c3b5f8464c657bcc" +"checksum measureme 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d09de7dafa3aa334bc806447c7e4de69419723312f4b88b80b561dea66601ce8" "checksum memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39" "checksum memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff" "checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3" diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index 2468de99d60a8..4d50e80d4cf67 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -36,7 +36,7 @@ byteorder = { version = "1.1", features = ["i128"]} chalk-engine = { version = "0.9.0", default-features=false } rustc_fs_util = { path = "../librustc_fs_util" } smallvec = { version = "0.6.7", features = ["union", "may_dangle"] } -measureme = "0.2.1" +measureme = "0.3" # Note that these dependencies are a lie, they're just here to get linkage to # work. From 7171bd1f69fb1c1a999b8e968fc693da325f3cc4 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Mon, 13 May 2019 21:54:47 -0700 Subject: [PATCH 16/17] README: Mention MSVC 2017+, not 2013(!) LLVM will soon require 2017+ [1] (and our in-tree version just rejected the version of 2015 I was using), so update the mention and provide a link. 
[1]: https://llvm.org/docs/GettingStarted.html#host-c-toolchain-both-compiler-and-standard-library --- README.md | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index f6f796911b8fa..75d7823490a20 100644 --- a/README.md +++ b/README.md @@ -128,9 +128,15 @@ build. #### MSVC [windows-msvc]: #windows-msvc -MSVC builds of Rust additionally require an installation of Visual Studio 2013 -(or later) so `rustc` can use its linker. Make sure to check the “C++ tools” -option. +MSVC builds of Rust additionally require an installation of Visual Studio 2017 +(or later) so `rustc` can use its linker. The simplest way is to get the +[Visual Studio Build Tools] and check the “C++ build tools” workload. + +[Visual Studio Build Tools]: https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019 + +At last check (cmake 3.14.3 and msvc 16.0.3) using the 2019 tools fails to +build the in-tree LLVM build with a CMake error, so use 2017 instead by +including the “MSVC v141 – VS 2017 C++ x64/x86 build tools (v14.16)” component. With these dependencies installed, you can build the compiler in a `cmd.exe` shell with: From 65d09ea4682f4def84e890c9e42b5f24e6cc8443 Mon Sep 17 00:00:00 2001 From: Pulkit Goyal <7895pulkit@gmail.com> Date: Wed, 15 May 2019 16:22:39 +0300 Subject: [PATCH 17/17] Move `box` from the stable keyword to unstable keywords list Fixes #60849 --- src/libsyntax_pos/symbol.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index ec0ce4253fa2d..c2a18c9df83ba 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -30,7 +30,6 @@ symbols! { // Keywords that are used in stable Rust. As: "as", - Box: "box", Break: "break", Const: "const", Continue: "continue", @@ -69,6 +68,7 @@ symbols! { // Keywords that are used in unstable Rust or reserved for future use. Abstract: "abstract", Become: "become", + Box: "box", Do: "do", Final: "final", Macro: "macro",
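
Appendix (editorial note, not part of the patch series): a minimal sketch of the call pattern that PATCH 01/17 (`impl From<&String> for String`) is meant to make frictionless, per its commit message about passing `&String` to an `impl Into` parameter. The `set_title` function and all other names below are hypothetical, added only for illustration.

```rust
// With `impl From<&String> for String` available, a `&String` satisfies
// `Into<String>` through the blanket `impl<T, U> Into<U> for T where U: From<T>`,
// so it can be handed to an `impl Into<String>` parameter directly.
fn set_title(title: impl Into<String>) -> String {
    title.into()
}

fn main() {
    let owned = String::from("hello");

    // Without the new impl, this call site needed `owned.clone()` or
    // `owned.as_str()`; with it, passing `&owned` compiles and clones internally.
    assert_eq!(set_title(&owned), "hello");

    // `&str` arguments continue to work through the existing `From<&str> for String`.
    assert_eq!(set_title("world"), "world");

    // The original `String` is still usable afterwards, since only a clone was taken.
    assert_eq!(owned, "hello");
}
```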