diff --git a/src/doc/book/deref-coercions.md b/src/doc/book/deref-coercions.md index beb65c4ce358a..cabe66f5b2282 100644 --- a/src/doc/book/deref-coercions.md +++ b/src/doc/book/deref-coercions.md @@ -69,7 +69,7 @@ foo(&counted); All we’ve done is wrap our `String` in an `Rc`. But we can now pass the `Rc` around anywhere we’d have a `String`. The signature of `foo` didn’t change, but works just as well with either type. This example has two -conversions: `Rc` to `String` and then `String` to `&str`. Rust will do +conversions: `&Rc` to `&String` and then `&String` to `&str`. Rust will do this as many times as possible until the types match. Another very common implementation provided by the standard library is: diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 52ddd8ab5dac0..fafdf161b2dc2 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -445,8 +445,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let mut err = struct_span_err!(self.tcx.sess, span, E0277, "the trait bound `{}` is not satisfied", trait_ref.to_predicate()); - err.span_label(span, &format!("trait `{}` not satisfied", - trait_ref.to_predicate())); + err.span_label(span, &format!("the trait `{}` is not implemented \ + for `{}`", + trait_ref, + trait_ref.self_ty())); // Try to report a help message diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index 5b0f43e3cf1f2..bb36fa1487eee 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -392,27 +392,30 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } -// When hashing a type this ends up affecting properties like symbol names. We -// want these symbol names to be calculated independent of other factors like -// what architecture you're compiling *from*. 
-// -// The hashing just uses the standard `Hash` trait, but the implementations of -// `Hash` for the `usize` and `isize` types are *not* architecture independent -// (e.g. they has 4 or 8 bytes). As a result we want to avoid `usize` and -// `isize` completely when hashing. To ensure that these don't leak in we use a -// custom hasher implementation here which inflates the size of these to a `u64` -// and `i64`. -struct WidenUsizeHasher { +/// When hashing a type this ends up affecting properties like symbol names. We +/// want these symbol names to be calculated independent of other factors like +/// what architecture you're compiling *from*. +/// +/// The hashing just uses the standard `Hash` trait, but the implementations of +/// `Hash` for the `usize` and `isize` types are *not* architecture independent +/// (e.g. they have 4 or 8 bytes). As a result we want to avoid `usize` and +/// `isize` completely when hashing. To ensure that these don't leak in we use a +/// custom hasher implementation here which inflates the size of these to a `u64` +/// and `i64`. +/// +/// The same goes for endianness: We always convert multi-byte integers to little +/// endian before hashing. 
+pub struct ArchIndependentHasher { inner: H, } -impl WidenUsizeHasher { - fn new(inner: H) -> WidenUsizeHasher { - WidenUsizeHasher { inner: inner } +impl ArchIndependentHasher { + pub fn new(inner: H) -> ArchIndependentHasher { + ArchIndependentHasher { inner: inner } } } -impl Hasher for WidenUsizeHasher { +impl Hasher for ArchIndependentHasher { fn write(&mut self, bytes: &[u8]) { self.inner.write(bytes) } @@ -425,44 +428,44 @@ impl Hasher for WidenUsizeHasher { self.inner.write_u8(i) } fn write_u16(&mut self, i: u16) { - self.inner.write_u16(i) + self.inner.write_u16(i.to_le()) } fn write_u32(&mut self, i: u32) { - self.inner.write_u32(i) + self.inner.write_u32(i.to_le()) } fn write_u64(&mut self, i: u64) { - self.inner.write_u64(i) + self.inner.write_u64(i.to_le()) } fn write_usize(&mut self, i: usize) { - self.inner.write_u64(i as u64) + self.inner.write_u64((i as u64).to_le()) } fn write_i8(&mut self, i: i8) { self.inner.write_i8(i) } fn write_i16(&mut self, i: i16) { - self.inner.write_i16(i) + self.inner.write_i16(i.to_le()) } fn write_i32(&mut self, i: i32) { - self.inner.write_i32(i) + self.inner.write_i32(i.to_le()) } fn write_i64(&mut self, i: i64) { - self.inner.write_i64(i) + self.inner.write_i64(i.to_le()) } fn write_isize(&mut self, i: isize) { - self.inner.write_i64(i as i64) + self.inner.write_i64((i as i64).to_le()) } } pub struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a, H> { tcx: TyCtxt<'a, 'gcx, 'tcx>, - state: WidenUsizeHasher, + state: ArchIndependentHasher, } impl<'a, 'gcx, 'tcx, H: Hasher> TypeIdHasher<'a, 'gcx, 'tcx, H> { pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, state: H) -> Self { TypeIdHasher { tcx: tcx, - state: WidenUsizeHasher::new(state), + state: ArchIndependentHasher::new(state), } } @@ -493,6 +496,10 @@ impl<'a, 'gcx, 'tcx, H: Hasher> TypeIdHasher<'a, 'gcx, 'tcx, H> { pub fn def_path(&mut self, def_path: &ast_map::DefPath) { def_path.deterministic_hash_to(self.tcx, &mut self.state); } + + pub fn into_inner(self) -> H { + 
self.state.inner + } } impl<'a, 'gcx, 'tcx, H: Hasher> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx, H> { diff --git a/src/librustc_metadata/astencode.rs b/src/librustc_metadata/astencode.rs index c9dbedacbc1a5..f001f85131978 100644 --- a/src/librustc_metadata/astencode.rs +++ b/src/librustc_metadata/astencode.rs @@ -30,7 +30,7 @@ use rustc_serialize::Encodable; pub struct Ast<'tcx> { id_range: IdRange, item: Lazy, - side_tables: LazySeq<(ast::NodeId, TableEntry<'tcx>)> + side_tables: LazySeq<(ast::NodeId, TableEntry<'tcx>)>, } #[derive(RustcEncodable, RustcDecodable)] @@ -39,7 +39,7 @@ enum TableEntry<'tcx> { NodeType(Ty<'tcx>), ItemSubsts(ty::ItemSubsts<'tcx>), Adjustment(ty::adjustment::AutoAdjustment<'tcx>), - ConstQualif(ConstQualif) + ConstQualif(ConstQualif), } impl<'a, 'tcx> EncodeContext<'a, 'tcx> { @@ -48,7 +48,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { match ii { InlinedItemRef::Item(_, i) => id_visitor.visit_item(i), InlinedItemRef::TraitItem(_, ti) => id_visitor.visit_trait_item(ti), - InlinedItemRef::ImplItem(_, ii) => id_visitor.visit_impl_item(ii) + InlinedItemRef::ImplItem(_, ii) => id_visitor.visit_impl_item(ii), } let ii_pos = self.position(); @@ -58,12 +58,12 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let tables_count = { let mut visitor = SideTableEncodingIdVisitor { ecx: self, - count: 0 + count: 0, }; match ii { InlinedItemRef::Item(_, i) => visitor.visit_item(i), InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti), - InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii) + InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii), } visitor.count }; @@ -71,14 +71,14 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.lazy(&Ast { id_range: id_visitor.result(), item: Lazy::with_position(ii_pos), - side_tables: LazySeq::with_position_and_length(tables_pos, tables_count) + side_tables: LazySeq::with_position_and_length(tables_pos, tables_count), }) } } -struct SideTableEncodingIdVisitor<'a, 'b:'a, 'tcx:'b> { 
+struct SideTableEncodingIdVisitor<'a, 'b: 'a, 'tcx: 'b> { ecx: &'a mut EncodeContext<'b, 'tcx>, - count: usize + count: usize, } impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx> { @@ -114,10 +114,11 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata, let cnt = ast.id_range.max.as_usize() - ast.id_range.min.as_usize(); let start = tcx.sess.reserve_node_ids(cnt); - let id_ranges = [ast.id_range, IdRange { - min: start, - max: ast::NodeId::new(start.as_usize() + cnt) - }]; + let id_ranges = [ast.id_range, + IdRange { + min: start, + max: ast::NodeId::new(start.as_usize() + cnt), + }]; let ii = ast.item.decode((cdata, tcx, id_ranges)); let ii = ast_map::map_decoded_item(&tcx.map, @@ -129,7 +130,7 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata, let item_node_id = match ii { &InlinedItem::Item(_, ref i) => i.id, &InlinedItem::TraitItem(_, ref ti) => ti.id, - &InlinedItem::ImplItem(_, ref ii) => ii.id + &InlinedItem::ImplItem(_, ref ii) => ii.id, }; let inlined_did = tcx.map.local_def_id(item_node_id); tcx.register_item_type(inlined_did, tcx.lookup_item_type(orig_did)); diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index a87e61c4c944c..f0952fd145d59 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -54,7 +54,7 @@ pub struct ImportedFileMap { /// The end of this FileMap within the codemap of its original crate pub original_end_pos: syntax_pos::BytePos, /// The imported FileMap's representation within the local codemap - pub translated_filemap: Rc + pub translated_filemap: Rc, } pub struct CrateMetadata { @@ -141,8 +141,8 @@ impl CStore { self.metas.borrow_mut().insert(cnum, data); } - pub fn iter_crate_data(&self, mut i: I) where - I: FnMut(CrateNum, &Rc), + pub fn iter_crate_data(&self, mut i: I) + where I: FnMut(CrateNum, &Rc) { for (&k, v) in self.metas.borrow().iter() { i(k, v); @@ -150,12 +150,14 @@ impl CStore { } /// Like `iter_crate_data`, 
but passes source paths (if available) as well. - pub fn iter_crate_data_origins(&self, mut i: I) where - I: FnMut(CrateNum, &CrateMetadata, Option), + pub fn iter_crate_data_origins(&self, mut i: I) + where I: FnMut(CrateNum, &CrateMetadata, Option) { for (&k, v) in self.metas.borrow().iter() { let origin = self.opt_used_crate_source(k); - origin.as_ref().map(|cs| { assert!(k == cs.cnum); }); + origin.as_ref().map(|cs| { + assert!(k == cs.cnum); + }); i(k, &v, origin); } } @@ -167,10 +169,12 @@ impl CStore { } } - pub fn opt_used_crate_source(&self, cnum: CrateNum) - -> Option { - self.used_crate_sources.borrow_mut() - .iter().find(|source| source.cnum == cnum).cloned() + pub fn opt_used_crate_source(&self, cnum: CrateNum) -> Option { + self.used_crate_sources + .borrow_mut() + .iter() + .find(|source| source.cnum == cnum) + .cloned() } pub fn reset(&self) { @@ -182,19 +186,17 @@ impl CStore { self.statically_included_foreign_items.borrow_mut().clear(); } - pub fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec - { + pub fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec { let mut ordering = Vec::new(); self.push_dependencies_in_postorder(&mut ordering, krate); ordering.reverse(); ordering } - pub fn push_dependencies_in_postorder(&self, - ordering: &mut Vec, - krate: CrateNum) - { - if ordering.contains(&krate) { return } + pub fn push_dependencies_in_postorder(&self, ordering: &mut Vec, krate: CrateNum) { + if ordering.contains(&krate) { + return; + } let data = self.get_crate_data(krate); for &dep in data.cnum_map.borrow().iter() { @@ -215,7 +217,8 @@ impl CStore { // In order to get this left-to-right dependency ordering, we perform a // topological sort of all crates putting the leaves at the right-most // positions. 
- pub fn do_get_used_crates(&self, prefer: LinkagePreference) + pub fn do_get_used_crates(&self, + prefer: LinkagePreference) -> Vec<(CrateNum, Option)> { let mut ordering = Vec::new(); for (&num, _) in self.metas.borrow().iter() { @@ -223,12 +226,16 @@ impl CStore { } info!("topological ordering: {:?}", ordering); ordering.reverse(); - let mut libs = self.used_crate_sources.borrow() + let mut libs = self.used_crate_sources + .borrow() .iter() - .map(|src| (src.cnum, match prefer { - LinkagePreference::RequireDynamic => src.dylib.clone().map(|p| p.0), - LinkagePreference::RequireStatic => src.rlib.clone().map(|p| p.0), - })) + .map(|src| { + (src.cnum, + match prefer { + LinkagePreference::RequireDynamic => src.dylib.clone().map(|p| p.0), + LinkagePreference::RequireStatic => src.rlib.clone().map(|p| p.0), + }) + }) .collect::>(); libs.sort_by(|&(a, _), &(b, _)| { let a = ordering.iter().position(|x| *x == a); @@ -243,9 +250,7 @@ impl CStore { self.used_libraries.borrow_mut().push((lib, kind)); } - pub fn get_used_libraries<'a>(&'a self) - -> &'a RefCell> { + pub fn get_used_libraries<'a>(&'a self) -> &'a RefCell> { &self.used_libraries } @@ -255,13 +260,11 @@ impl CStore { } } - pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell > { + pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell> { &self.used_link_args } - pub fn add_extern_mod_stmt_cnum(&self, - emod_id: ast::NodeId, - cnum: CrateNum) { + pub fn add_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId, cnum: CrateNum) { self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum); } @@ -273,8 +276,7 @@ impl CStore { self.statically_included_foreign_items.borrow().contains(&id) } - pub fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option - { + pub fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { self.extern_mod_crate_map.borrow().get(&emod_id).cloned() } @@ -288,14 +290,20 @@ impl CStore { } impl CrateMetadata { - pub fn name(&self) -> &str { &self.root.name } - pub fn 
hash(&self) -> Svh { self.root.hash } - pub fn disambiguator(&self) -> &str { &self.root.disambiguator } + pub fn name(&self) -> &str { + &self.root.name + } + pub fn hash(&self) -> Svh { + self.root.hash + } + pub fn disambiguator(&self) -> &str { + &self.root.disambiguator + } pub fn is_staged_api(&self) -> bool { - self.get_item_attrs(CRATE_DEF_INDEX).iter().any(|attr| { - attr.name() == "stable" || attr.name() == "unstable" - }) + self.get_item_attrs(CRATE_DEF_INDEX) + .iter() + .any(|attr| attr.name() == "stable" || attr.name() == "unstable") } pub fn is_allocator(&self) -> bool { diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index e18fd58144398..18b5c0fde1f4c 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -56,19 +56,23 @@ pub struct DecodeContext<'a, 'tcx: 'a> { // Cache the last used filemap for translating spans as an optimization. last_filemap_index: usize, - lazy_state: LazyState + lazy_state: LazyState, } /// Abstract over the various ways one can create metadata decoders. 
pub trait Metadata<'a, 'tcx>: Copy { fn raw_bytes(self) -> &'a [u8]; - fn cdata(self) -> Option<&'a CrateMetadata> { None } - fn tcx(self) -> Option> { None } + fn cdata(self) -> Option<&'a CrateMetadata> { + None + } + fn tcx(self) -> Option> { + None + } fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { let id_range = IdRange { min: NodeId::from_u32(u32::MIN), - max: NodeId::from_u32(u32::MAX) + max: NodeId::from_u32(u32::MAX), }; DecodeContext { opaque: opaque::Decoder::new(self.raw_bytes(), pos), @@ -77,7 +81,7 @@ pub trait Metadata<'a, 'tcx>: Copy { from_id_range: id_range, to_id_range: id_range, last_filemap_index: 0, - lazy_state: LazyState::NoNode + lazy_state: LazyState::NoNode, } } } @@ -92,21 +96,37 @@ impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob { } impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a CrateMetadata { - fn raw_bytes(self) -> &'a [u8] { self.blob.raw_bytes() } - fn cdata(self) -> Option<&'a CrateMetadata> { Some(self) } + fn raw_bytes(self) -> &'a [u8] { + self.blob.raw_bytes() + } + fn cdata(self) -> Option<&'a CrateMetadata> { + Some(self) + } } impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>) { - fn raw_bytes(self) -> &'a [u8] { self.0.raw_bytes() } - fn cdata(self) -> Option<&'a CrateMetadata> { Some(self.0) } - fn tcx(self) -> Option> { Some(self.1) } + fn raw_bytes(self) -> &'a [u8] { + self.0.raw_bytes() + } + fn cdata(self) -> Option<&'a CrateMetadata> { + Some(self.0) + } + fn tcx(self) -> Option> { + Some(self.1) + } } // HACK(eddyb) Only used by astencode to customize the from/to IdRange's. 
impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>, [IdRange; 2]) { - fn raw_bytes(self) -> &'a [u8] { self.0.raw_bytes() } - fn cdata(self) -> Option<&'a CrateMetadata> { Some(self.0) } - fn tcx(self) -> Option> { Some(self.1) } + fn raw_bytes(self) -> &'a [u8] { + self.0.raw_bytes() + } + fn cdata(self) -> Option<&'a CrateMetadata> { + Some(self.0) + } + fn tcx(self) -> Option> { + Some(self.1) + } fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { let mut dcx = (self.0, self.1).decoder(pos); @@ -125,12 +145,10 @@ impl<'a, 'tcx: 'a, T: Decodable> Lazy { } impl<'a, 'tcx: 'a, T: Decodable> LazySeq { - pub fn decode>(self, meta: M) -> impl Iterator + 'a { + pub fn decode>(self, meta: M) -> impl Iterator + 'a { let mut dcx = meta.decoder(self.position); dcx.lazy_state = LazyState::NodeStart(self.position); - (0..self.len).map(move |_| { - T::decode(&mut dcx).unwrap() - }) + (0..self.len).map(move |_| T::decode(&mut dcx).unwrap()) } } @@ -153,20 +171,15 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> { r } - fn read_lazy_distance(&mut self, min_size: usize) - -> Result::Error> { + fn read_lazy_distance(&mut self, min_size: usize) -> Result::Error> { let distance = self.read_usize()?; let position = match self.lazy_state { - LazyState::NoNode => { - bug!("read_lazy_distance: outside of a metadata node") - } + LazyState::NoNode => bug!("read_lazy_distance: outside of a metadata node"), LazyState::NodeStart(start) => { assert!(distance + min_size <= start); start - distance - min_size } - LazyState::Previous(last_min_end) => { - last_min_end + distance - } + LazyState::Previous(last_min_end) => last_min_end + distance, }; self.lazy_state = LazyState::Previous(position + min_size); Ok(position) @@ -239,13 +252,15 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { // meaningful result if !self.from_id_range.contains(NodeId::from_u32(id)) { bug!("NodeId::decode: {} out of DecodeContext range ({:?} -> {:?})", - id, 
self.from_id_range, self.to_id_range); + id, + self.from_id_range, + self.to_id_range); } // Use wrapping arithmetic because otherwise it introduces control flow. // Maybe we should just have the control flow? -- aatch Ok(NodeId::from_u32(id.wrapping_sub(self.from_id_range.min.as_u32()) - .wrapping_add(self.to_id_range.min.as_u32()))) + .wrapping_add(self.to_id_range.min.as_u32()))) } } @@ -290,10 +305,9 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { // originate from the same filemap. let last_filemap = &imported_filemaps[self.last_filemap_index]; - if lo >= last_filemap.original_start_pos && - lo <= last_filemap.original_end_pos && - hi >= last_filemap.original_start_pos && - hi <= last_filemap.original_end_pos { + if lo >= last_filemap.original_start_pos && lo <= last_filemap.original_end_pos && + hi >= last_filemap.original_start_pos && + hi <= last_filemap.original_end_pos { last_filemap } else { let mut a = 0; @@ -313,10 +327,8 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { } }; - let lo = (lo - filemap.original_start_pos) + - filemap.translated_filemap.start_pos; - let hi = (hi - filemap.original_start_pos) + - filemap.translated_filemap.start_pos; + let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos; + let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos; Ok(syntax_pos::mk_sp(lo, hi)) } @@ -336,7 +348,7 @@ impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { assert!(pos >= SHORTHAND_OFFSET); let key = ty::CReaderCacheKey { cnum: self.cdata().cnum, - pos: pos - SHORTHAND_OFFSET + pos: pos - SHORTHAND_OFFSET, }; if let Some(ty) = tcx.rcache.borrow().get(&key).cloned() { return Ok(ty); @@ -357,17 +369,18 @@ impl<'a, 'tcx> SpecializedDecoder> for DecodeContext Ok(ty::GenericPredicates { parent: Decodable::decode(self)?, predicates: (0..self.read_usize()?).map(|_| { - // Handle shorthands first, if we have an usize > 0x80. 
- if self.opaque.data[self.opaque.position()] & 0x80 != 0 { - let pos = self.read_usize()?; - assert!(pos >= SHORTHAND_OFFSET); - let pos = pos - SHORTHAND_OFFSET; - - self.with_position(pos, ty::Predicate::decode) - } else { - ty::Predicate::decode(self) - } - }).collect::, _>>()? + // Handle shorthands first, if we have an usize > 0x80. + if self.opaque.data[self.opaque.position()] & 0x80 != 0 { + let pos = self.read_usize()?; + assert!(pos >= SHORTHAND_OFFSET); + let pos = pos - SHORTHAND_OFFSET; + + self.with_position(pos, ty::Predicate::decode) + } else { + ty::Predicate::decode(self) + } + }) + .collect::, _>>()?, }) } } @@ -411,8 +424,7 @@ impl<'a, 'tcx> MetadataBlob { pub fn get_root(&self) -> CrateRoot { let slice = self.raw_bytes(); let offset = METADATA_HEADER.len(); - let pos = (((slice[offset + 0] as u32) << 24) | - ((slice[offset + 1] as u32) << 16) | + let pos = (((slice[offset + 0] as u32) << 24) | ((slice[offset + 1] as u32) << 16) | ((slice[offset + 2] as u32) << 8) | ((slice[offset + 3] as u32) << 0)) as usize; Lazy::with_position(pos).decode(self) @@ -421,9 +433,9 @@ impl<'a, 'tcx> MetadataBlob { /// Go through each item in the metadata and create a map from that /// item's def-key to the item's DefIndex. 
pub fn load_key_map(&self, index: LazySeq) -> FnvHashMap { - index.iter_enumerated(self.raw_bytes()).map(|(index, item)| { - (item.decode(self).def_key.decode(self), index) - }).collect() + index.iter_enumerated(self.raw_bytes()) + .map(|(index, item)| (item.decode(self).def_key.decode(self), index)) + .collect() } pub fn list_crate_metadata(&self, out: &mut io::Write) -> io::Result<()> { @@ -440,7 +452,7 @@ impl<'a, 'tcx> MetadataBlob { impl<'tcx> EntryKind<'tcx> { fn to_def(&self, did: DefId) -> Option { Some(match *self { - EntryKind::Const => Def::Const(did), + EntryKind::Const => Def::Const(did), EntryKind::AssociatedConst(_) => Def::AssociatedConst(did), EntryKind::ImmStatic | EntryKind::ForeignImmStatic => Def::Static(did, false), @@ -462,9 +474,7 @@ impl<'tcx> EntryKind<'tcx> { EntryKind::Impl(_) | EntryKind::DefaultImpl(_) | EntryKind::Field | - EntryKind::Closure (_) => { - return None - } + EntryKind::Closure(_) => return None, }) } } @@ -476,23 +486,29 @@ impl<'a, 'tcx> CrateMetadata { fn entry(&self, item_id: DefIndex) -> Entry<'tcx> { match self.maybe_entry(item_id) { - None => bug!("entry: id not found: {:?} in crate {:?} with number {}", - item_id, - self.name, - self.cnum), - Some(d) => d.decode(self) + None => { + bug!("entry: id not found: {:?} in crate {:?} with number {}", + item_id, + self.name, + self.cnum) + } + Some(d) => d.decode(self), } } fn local_def_id(&self, index: DefIndex) -> DefId { DefId { krate: self.cnum, - index: index + index: index, } } fn item_name(&self, item: &Entry<'tcx>) -> ast::Name { - item.def_key.decode(self).disambiguated_data.data.get_opt_name() + item.def_key + .decode(self) + .disambiguated_data + .data + .get_opt_name() .expect("no name in item_name") } @@ -502,55 +518,66 @@ impl<'a, 'tcx> CrateMetadata { pub fn get_trait_def(&self, item_id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::TraitDef<'tcx> { + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::TraitDef<'tcx> { let data = match self.entry(item_id).kind { 
EntryKind::Trait(data) => data.decode(self), - _ => bug!() + _ => bug!(), }; - ty::TraitDef::new(data.unsafety, data.paren_sugar, + ty::TraitDef::new(data.unsafety, + data.paren_sugar, tcx.lookup_generics(self.local_def_id(item_id)), data.trait_ref.decode((self, tcx)), self.def_path(item_id).unwrap().deterministic_hash(tcx)) } - fn get_variant(&self, item: &Entry<'tcx>, index: DefIndex) + fn get_variant(&self, + item: &Entry<'tcx>, + index: DefIndex) -> (ty::VariantDefData<'tcx, 'tcx>, Option) { let data = match item.kind { EntryKind::Variant(data) | EntryKind::Struct(data) | EntryKind::Union(data) => data.decode(self), - _ => bug!() + _ => bug!(), }; - let fields = item.children.decode(self).map(|index| { - let f = self.entry(index); - ty::FieldDefData::new(self.local_def_id(index), - self.item_name(&f), - f.visibility) - }).collect(); + let fields = item.children + .decode(self) + .map(|index| { + let f = self.entry(index); + ty::FieldDefData::new(self.local_def_id(index), self.item_name(&f), f.visibility) + }) + .collect(); (ty::VariantDefData { - did: self.local_def_id(data.struct_ctor.unwrap_or(index)), - name: self.item_name(item), - fields: fields, - disr_val: ConstInt::Infer(data.disr), - ctor_kind: data.ctor_kind, - }, data.struct_ctor) - } - - pub fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + did: self.local_def_id(data.struct_ctor.unwrap_or(index)), + name: self.item_name(item), + fields: fields, + disr_val: ConstInt::Infer(data.disr), + ctor_kind: data.ctor_kind, + }, + data.struct_ctor) + } + + pub fn get_adt_def(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::AdtDefMaster<'tcx> { let item = self.entry(item_id); let did = self.local_def_id(item_id); let mut ctor_index = None; let variants = if let EntryKind::Enum = item.kind { - item.children.decode(self).map(|index| { - let (variant, struct_ctor) = self.get_variant(&self.entry(index), index); - assert_eq!(struct_ctor, None); - variant - }).collect() - } 
else{ + item.children + .decode(self) + .map(|index| { + let (variant, struct_ctor) = self.get_variant(&self.entry(index), index); + assert_eq!(struct_ctor, None); + variant + }) + .collect() + } else { let (variant, struct_ctor) = self.get_variant(&item, item_id); ctor_index = struct_ctor; vec![variant] @@ -559,7 +586,7 @@ impl<'a, 'tcx> CrateMetadata { EntryKind::Enum => ty::AdtKind::Enum, EntryKind::Struct(_) => ty::AdtKind::Struct, EntryKind::Union(_) => ty::AdtKind::Union, - _ => bug!("get_adt_def called on a non-ADT {:?}", did) + _ => bug!("get_adt_def called on a non-ADT {:?}", did), }; let adt = tcx.intern_adt_def(did, kind, variants); @@ -572,33 +599,41 @@ impl<'a, 'tcx> CrateMetadata { // to support recursive structures for variant in &adt.variants { for field in &variant.fields { - debug!("evaluating the type of {:?}::{:?}", variant.name, field.name); + debug!("evaluating the type of {:?}::{:?}", + variant.name, + field.name); let ty = self.get_type(field.did.index, tcx); field.fulfill_ty(ty); debug!("evaluating the type of {:?}::{:?}: {:?}", - variant.name, field.name, ty); + variant.name, + field.name, + ty); } } adt } - pub fn get_predicates(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + pub fn get_predicates(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::GenericPredicates<'tcx> { self.entry(item_id).predicates.unwrap().decode((self, tcx)) } - pub fn get_super_predicates(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + pub fn get_super_predicates(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::GenericPredicates<'tcx> { match self.entry(item_id).kind { - EntryKind::Trait(data) => { - data.decode(self).super_predicates.decode((self, tcx)) - } - _ => bug!() + EntryKind::Trait(data) => data.decode(self).super_predicates.decode((self, tcx)), + _ => bug!(), } } - pub fn get_generics(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + pub fn get_generics(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 
'tcx, 'tcx>) -> ty::Generics<'tcx> { self.entry(item_id).generics.unwrap().decode((self, tcx)) } @@ -622,7 +657,7 @@ impl<'a, 'tcx> CrateMetadata { fn get_impl_data(&self, id: DefIndex) -> ImplData<'tcx> { match self.entry(id).kind { EntryKind::Impl(data) => data.decode(self), - _ => bug!() + _ => bug!(), } } @@ -634,7 +669,8 @@ impl<'a, 'tcx> CrateMetadata { self.get_impl_data(id).polarity } - pub fn get_custom_coerce_unsized_kind(&self, id: DefIndex) + pub fn get_custom_coerce_unsized_kind(&self, + id: DefIndex) -> Option { self.get_impl_data(id).coerce_unsized_kind } @@ -674,21 +710,25 @@ impl<'a, 'tcx> CrateMetadata { if let Some(def) = self.get_def(child_index) { callback(def::Export { def: def, - name: self.item_name(&self.entry(child_index)) + name: self.item_name(&self.entry(child_index)), }); } } continue; } - EntryKind::Impl(_) | EntryKind::DefaultImpl(_) => continue, + EntryKind::Impl(_) | + EntryKind::DefaultImpl(_) => continue, _ => {} } let def_key = child.def_key.decode(self); - if let (Some(def), Some(name)) = (self.get_def(child_index), - def_key.disambiguated_data.data.get_opt_name()) { - callback(def::Export { def: def, name: name }); + if let (Some(def), Some(name)) = + (self.get_def(child_index), def_key.disambiguated_data.data.get_opt_name()) { + callback(def::Export { + def: def, + name: name, + }); // For non-reexport structs and variants add their constructors to children. // Reexport lists automatically contain constructors when necessary. match def { @@ -696,7 +736,10 @@ impl<'a, 'tcx> CrateMetadata { if let Some(ctor_def_id) = self.get_struct_ctor_def_id(child_index) { let ctor_kind = self.get_ctor_kind(child_index); let ctor_def = Def::StructCtor(ctor_def_id, ctor_kind); - callback(def::Export { def: ctor_def, name: name }); + callback(def::Export { + def: ctor_def, + name: name, + }); } } Def::Variant(def_id) => { @@ -704,7 +747,10 @@ impl<'a, 'tcx> CrateMetadata { // value namespace, they are reserved for possible future use. 
let ctor_kind = self.get_ctor_kind(child_index); let ctor_def = Def::VariantCtor(def_id, ctor_kind); - callback(def::Export { def: ctor_def, name: name }); + callback(def::Export { + def: ctor_def, + name: name, + }); } _ => {} } @@ -719,7 +765,9 @@ impl<'a, 'tcx> CrateMetadata { } } - pub fn maybe_get_item_ast(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex) + pub fn maybe_get_item_ast(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: DefIndex) -> Option<&'tcx InlinedItem> { debug!("Looking up item: {:?}", id); let item_doc = self.entry(id); @@ -737,12 +785,16 @@ impl<'a, 'tcx> CrateMetadata { self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some() } - pub fn maybe_get_item_mir(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex) + pub fn maybe_get_item_mir(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: DefIndex) -> Option> { self.entry(id).mir.map(|mir| mir.decode((self, tcx))) } - pub fn get_impl_or_trait_item(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + pub fn get_impl_or_trait_item(&self, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option> { let item = self.entry(id); let parent_and_name = || { @@ -769,9 +821,11 @@ impl<'a, 'tcx> CrateMetadata { let ity = item.ty.unwrap().decode((self, tcx)); let fty = match ity.sty { ty::TyFnDef(.., fty) => fty, - _ => bug!( - "the type {:?} of the method {:?} is not a function?", - ity, name) + _ => { + bug!("the type {:?} of the method {:?} is not a function?", + ity, + name) + } }; let data = data.decode(self); @@ -799,7 +853,7 @@ impl<'a, 'tcx> CrateMetadata { container: container.with_def_id(parent), })) } - _ => return None + _ => return None, }) } @@ -821,7 +875,7 @@ impl<'a, 'tcx> CrateMetadata { EntryKind::Struct(data) => { data.decode(self).struct_ctor.map(|index| self.local_def_id(index)) } - _ => None + _ => None, } } @@ -838,17 +892,22 @@ impl<'a, 'tcx> CrateMetadata { } pub fn get_struct_field_names(&self, id: DefIndex) -> Vec { - self.entry(id).children.decode(self).map(|index| { - 
self.item_name(&self.entry(index)) - }).collect() + self.entry(id) + .children + .decode(self) + .map(|index| self.item_name(&self.entry(index))) + .collect() } fn get_attributes(&self, item: &Entry<'tcx>) -> Vec { - item.attributes.decode(self).map(|mut attr| { - // Need new unique IDs: old thread-local IDs won't map to new threads. - attr.node.id = attr::mk_attr_id(); - attr - }).collect() + item.attributes + .decode(self) + .map(|mut attr| { + // Need new unique IDs: old thread-local IDs won't map to new threads. + attr.node.id = attr::mk_attr_id(); + attr + }) + .collect() } // Translate a DefId from the current compilation environment to a DefId @@ -856,7 +915,10 @@ impl<'a, 'tcx> CrateMetadata { fn reverse_translate_def_id(&self, did: DefId) -> Option { for (local, &global) in self.cnum_map.borrow().iter_enumerated() { if global == did.krate { - return Some(DefId { krate: local, index: did.index }); + return Some(DefId { + krate: local, + index: did.index, + }); } } @@ -864,9 +926,11 @@ impl<'a, 'tcx> CrateMetadata { } pub fn get_inherent_implementations_for_type(&self, id: DefIndex) -> Vec { - self.entry(id).inherent_impls.decode(self).map(|index| { - self.local_def_id(index) - }).collect() + self.entry(id) + .inherent_impls + .decode(self) + .map(|index| self.local_def_id(index)) + .collect() } pub fn get_implementations_for_trait(&self, filter: Option, result: &mut Vec) { @@ -875,7 +939,7 @@ impl<'a, 'tcx> CrateMetadata { let filter = match filter.map(|def_id| self.reverse_translate_def_id(def_id)) { Some(Some(def_id)) => Some((def_id.krate.as_u32(), def_id.index)), Some(None) => return, - None => None + None => None, }; // FIXME(eddyb) Make this O(1) instead of O(n). 
@@ -884,9 +948,7 @@ impl<'a, 'tcx> CrateMetadata { continue; } - result.extend(trait_impls.impls.decode(self).map(|index| { - self.local_def_id(index) - })); + result.extend(trait_impls.impls.decode(self).map(|index| self.local_def_id(index))); if filter.is_some() { break; @@ -898,7 +960,7 @@ impl<'a, 'tcx> CrateMetadata { self.entry(id).def_key.decode(self).parent.and_then(|parent_index| { match self.entry(parent_index).kind { EntryKind::Trait(_) => Some(self.local_def_id(parent_index)), - _ => None + _ => None, } }) } @@ -909,10 +971,15 @@ impl<'a, 'tcx> CrateMetadata { } pub fn get_dylib_dependency_formats(&self) -> Vec<(CrateNum, LinkagePreference)> { - self.root.dylib_dependency_formats.decode(self).enumerate().flat_map(|(i, link)| { - let cnum = CrateNum::new(i + 1); - link.map(|link| (self.cnum_map.borrow()[cnum], link)) - }).collect() + self.root + .dylib_dependency_formats + .decode(self) + .enumerate() + .flat_map(|(i, link)| { + let cnum = CrateNum::new(i + 1); + link.map(|link| (self.cnum_map.borrow()[cnum], link)) + }) + .collect() } pub fn get_missing_lang_items(&self) -> Vec { @@ -924,7 +991,7 @@ impl<'a, 'tcx> CrateMetadata { EntryKind::Fn(data) | EntryKind::ForeignFn(data) => data.decode(self).arg_names, EntryKind::Method(data) => data.decode(self).fn_data.arg_names, - _ => LazySeq::empty() + _ => LazySeq::empty(), }; arg_names.decode(self).collect() } @@ -937,7 +1004,7 @@ impl<'a, 'tcx> CrateMetadata { let constness = match self.entry(id).kind { EntryKind::Method(data) => data.decode(self).fn_data.constness, EntryKind::Fn(data) => data.decode(self).constness, - _ => hir::Constness::NotConst + _ => hir::Constness::NotConst, }; constness == hir::Constness::Const } @@ -953,16 +1020,14 @@ impl<'a, 'tcx> CrateMetadata { EntryKind::ForeignImmStatic | EntryKind::ForeignMutStatic => true, - EntryKind::Fn(_) | EntryKind::ForeignFn(_) => { - self.get_generics(id, tcx).types.is_empty() - } + EntryKind::Fn(_) | + EntryKind::ForeignFn(_) => 
self.get_generics(id, tcx).types.is_empty(), _ => false, }; if applicable { - attr::contains_extern_indicator(tcx.sess.diagnostic(), - &self.get_attributes(&item)) + attr::contains_extern_indicator(tcx.sess.diagnostic(), &self.get_attributes(&item)) } else { false } @@ -973,36 +1038,38 @@ impl<'a, 'tcx> CrateMetadata { EntryKind::ForeignImmStatic | EntryKind::ForeignMutStatic | EntryKind::ForeignFn(_) => true, - _ => false + _ => false, } } pub fn is_defaulted_trait(&self, trait_id: DefIndex) -> bool { match self.entry(trait_id).kind { EntryKind::Trait(data) => data.decode(self).has_default_impl, - _ => bug!() + _ => bug!(), } } pub fn is_default_impl(&self, impl_id: DefIndex) -> bool { - match self.entry(impl_id).kind { + match self.entry(impl_id).kind { EntryKind::DefaultImpl(_) => true, - _ => false + _ => false, } } pub fn closure_kind(&self, closure_id: DefIndex) -> ty::ClosureKind { match self.entry(closure_id).kind { EntryKind::Closure(data) => data.decode(self).kind, - _ => bug!() + _ => bug!(), } } - pub fn closure_ty(&self, closure_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + pub fn closure_ty(&self, + closure_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::ClosureTy<'tcx> { match self.entry(closure_id).kind { EntryKind::Closure(data) => data.decode(self).ty.decode((self, tcx)), - _ => bug!() + _ => bug!(), } } @@ -1045,7 +1112,8 @@ impl<'a, 'tcx> CrateMetadata { /// file they represent, just information about length, line breaks, and /// multibyte characters. This information is enough to generate valid debuginfo /// for items inlined from other crates. 
- pub fn imported_filemaps(&'a self, local_codemap: &codemap::CodeMap) + pub fn imported_filemaps(&'a self, + local_codemap: &codemap::CodeMap) -> Ref<'a, Vec> { { let filemaps = self.codemap_import_info.borrow(); @@ -1057,67 +1125,66 @@ impl<'a, 'tcx> CrateMetadata { let external_codemap = self.root.codemap.decode(self); let imported_filemaps = external_codemap.map(|filemap_to_import| { - // Try to find an existing FileMap that can be reused for the filemap to - // be imported. A FileMap is reusable if it is exactly the same, just - // positioned at a different offset within the codemap. - let reusable_filemap = { - local_codemap.files - .borrow() - .iter() - .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import)) - .map(|rc| rc.clone()) - }; + // Try to find an existing FileMap that can be reused for the filemap to + // be imported. A FileMap is reusable if it is exactly the same, just + // positioned at a different offset within the codemap. + let reusable_filemap = { + local_codemap.files + .borrow() + .iter() + .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import)) + .map(|rc| rc.clone()) + }; - match reusable_filemap { - Some(fm) => { - cstore::ImportedFileMap { - original_start_pos: filemap_to_import.start_pos, - original_end_pos: filemap_to_import.end_pos, - translated_filemap: fm - } - } - None => { - // We can't reuse an existing FileMap, so allocate a new one - // containing the information we need. - let syntax_pos::FileMap { - name, - abs_path, - start_pos, - end_pos, - lines, - multibyte_chars, - .. - } = filemap_to_import; - - let source_length = (end_pos - start_pos).to_usize(); - - // Translate line-start positions and multibyte character - // position into frame of reference local to file. - // `CodeMap::new_imported_filemap()` will then translate those - // coordinates to their new global frame of reference when the - // offset of the FileMap is known. 
- let mut lines = lines.into_inner(); - for pos in &mut lines { - *pos = *pos - start_pos; - } - let mut multibyte_chars = multibyte_chars.into_inner(); - for mbc in &mut multibyte_chars { - mbc.pos = mbc.pos - start_pos; + match reusable_filemap { + Some(fm) => { + cstore::ImportedFileMap { + original_start_pos: filemap_to_import.start_pos, + original_end_pos: filemap_to_import.end_pos, + translated_filemap: fm, + } } + None => { + // We can't reuse an existing FileMap, so allocate a new one + // containing the information we need. + let syntax_pos::FileMap { name, + abs_path, + start_pos, + end_pos, + lines, + multibyte_chars, + .. } = filemap_to_import; + + let source_length = (end_pos - start_pos).to_usize(); + + // Translate line-start positions and multibyte character + // position into frame of reference local to file. + // `CodeMap::new_imported_filemap()` will then translate those + // coordinates to their new global frame of reference when the + // offset of the FileMap is known. + let mut lines = lines.into_inner(); + for pos in &mut lines { + *pos = *pos - start_pos; + } + let mut multibyte_chars = multibyte_chars.into_inner(); + for mbc in &mut multibyte_chars { + mbc.pos = mbc.pos - start_pos; + } - let local_version = local_codemap.new_imported_filemap(name, - abs_path, - source_length, - lines, - multibyte_chars); - cstore::ImportedFileMap { - original_start_pos: start_pos, - original_end_pos: end_pos, - translated_filemap: local_version + let local_version = local_codemap.new_imported_filemap(name, + abs_path, + source_length, + lines, + multibyte_chars); + cstore::ImportedFileMap { + original_start_pos: start_pos, + original_end_pos: end_pos, + translated_filemap: local_version, + } } } - } - }).collect(); + }) + .collect(); // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. 
*self.codemap_import_info.borrow_mut() = imported_filemaps; @@ -1151,8 +1218,7 @@ fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, fm2: &syntax_pos::FileMa } for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) { - if (mb1.bytes != mb2.bytes) || - ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) { + if (mb1.bytes != mb2.bytes) || ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) { return false; } } diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index 6ae5fc1657aa7..76b778fb61f25 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -799,9 +799,7 @@ pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> Va s.as_ptr() as *const c_char, s.len() as c_uint, !null_terminated as Bool); - - let gsym = token::gensym("str"); - let sym = format!("str{}", gsym.0); + let sym = cx.generate_local_symbol_name("str"); let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{ bug!("symbol `{}` is already defined", sym); }); diff --git a/src/librustc_trans/consts.rs b/src/librustc_trans/consts.rs index 15f7132e52d2f..0dc10aa7759ea 100644 --- a/src/librustc_trans/consts.rs +++ b/src/librustc_trans/consts.rs @@ -30,7 +30,6 @@ use rustc::hir; use std::ffi::{CStr, CString}; use syntax::ast; use syntax::attr; -use syntax::parse::token; pub fn ptrcast(val: ValueRef, ty: Type) -> ValueRef { unsafe { @@ -44,10 +43,7 @@ pub fn addr_of_mut(ccx: &CrateContext, kind: &str) -> ValueRef { unsafe { - // FIXME: this totally needs a better name generation scheme, perhaps a simple global - // counter? Also most other uses of gensym in trans. 
- let gsym = token::gensym("_"); - let name = format!("{}{}", kind, gsym.0); + let name = ccx.generate_local_symbol_name(kind); let gv = declare::define_global(ccx, &name[..], val_ty(cv)).unwrap_or_else(||{ bug!("symbol `{}` is already defined", name); }); diff --git a/src/librustc_trans/context.rs b/src/librustc_trans/context.rs index 1b67516a9e6d0..2a72d42296d19 100644 --- a/src/librustc_trans/context.rs +++ b/src/librustc_trans/context.rs @@ -166,6 +166,9 @@ pub struct LocalCrateContext<'tcx> { type_of_depth: Cell, symbol_map: Rc>, + + /// A counter that is used for generating local symbol names + local_gen_sym_counter: Cell, } // Implement DepTrackingMapConfig for `trait_cache` @@ -688,6 +691,7 @@ impl<'tcx> LocalCrateContext<'tcx> { n_llvm_insns: Cell::new(0), type_of_depth: Cell::new(0), symbol_map: symbol_map, + local_gen_sym_counter: Cell::new(0), }; let (int_type, opaque_vec_type, str_slice_ty, mut local_ccx) = { @@ -1021,6 +1025,16 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> { self.shared().empty_substs_for_def_id(item_def_id) } + + /// Generate a new symbol name with the given prefix. This symbol name must + /// only be used for definitions with `internal` or `private` linkage. + pub fn generate_local_symbol_name(&self, prefix: &str) -> String { + let idx = self.local().local_gen_sym_counter.get(); + self.local().local_gen_sym_counter.set(idx + 1); + // Include a '.' 
character, so there can be no accidental conflicts with + // user defined names + format!("{}.{}", prefix, idx) + } } pub struct TypeOfDepthLock<'a, 'tcx: 'a>(&'a LocalCrateContext<'tcx>); diff --git a/src/librustc_trans/debuginfo/metadata.rs b/src/librustc_trans/debuginfo/metadata.rs index 570b844f80adf..2804e3ffe37dd 100644 --- a/src/librustc_trans/debuginfo/metadata.rs +++ b/src/librustc_trans/debuginfo/metadata.rs @@ -16,7 +16,7 @@ use self::EnumDiscriminantInfo::*; use super::utils::{debug_context, DIB, span_start, bytes_to_bits, size_and_align_of, get_namespace_and_span_for_item, create_DIArray, is_node_local_to_unit}; use super::namespace::mangled_name_of_item; -use super::type_names::{compute_debuginfo_type_name, push_debuginfo_type_name}; +use super::type_names::compute_debuginfo_type_name; use super::{CrateDebugContext}; use context::SharedCrateContext; use session::Session; @@ -26,8 +26,11 @@ use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor, DICompositeType, DI use rustc::hir::def::CtorKind; use rustc::hir::def_id::DefId; +use rustc::ty::fold::TypeVisitor; use rustc::ty::subst::Substs; +use rustc::ty::util::TypeIdHasher; use rustc::hir; +use rustc_data_structures::blake2b; use {type_of, machine, monomorphize}; use common::CrateContext; use type_::Type; @@ -38,6 +41,7 @@ use util::common::path2cstr; use libc::{c_uint, c_longlong}; use std::ffi::CString; +use std::fmt::Write; use std::path::Path; use std::ptr; use std::rc::Rc; @@ -46,6 +50,7 @@ use syntax::ast; use syntax::parse::token; use syntax_pos::{self, Span}; + // From DWARF 5. // See http://www.dwarfstd.org/ShowIssue.php?issue=140129.1 const DW_LANG_RUST: c_uint = 0x1c; @@ -138,219 +143,58 @@ impl<'tcx> TypeMap<'tcx> { // ID will be generated and stored for later lookup. 
fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, type_: Ty<'tcx>) -> UniqueTypeId { - - // basic type -> {:name of the type:} - // tuple -> {tuple_(:param-uid:)*} - // struct -> {struct_:svh: / :node-id:_<(:param-uid:),*> } - // enum -> {enum_:svh: / :node-id:_<(:param-uid:),*> } - // enum variant -> {variant_:variant-name:_:enum-uid:} - // reference (&) -> {& :pointee-uid:} - // mut reference (&mut) -> {&mut :pointee-uid:} - // ptr (*) -> {* :pointee-uid:} - // mut ptr (*mut) -> {*mut :pointee-uid:} - // unique ptr (box) -> {box :pointee-uid:} - // @-ptr (@) -> {@ :pointee-uid:} - // sized vec ([T; x]) -> {[:size:] :element-uid:} - // unsized vec ([T]) -> {[] :element-uid:} - // trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> } - // closure -> { :store-sigil: |(:param-uid:),* <,_...>| -> \ - // :return-type-uid: : (:bounds:)*} - // function -> { fn( (:param-uid:)* <,_...> ) -> \ - // :return-type-uid:} - + // Let's see if we already have something in the cache match self.type_to_unique_id.get(&type_).cloned() { Some(unique_type_id) => return unique_type_id, None => { /* generate one */} }; - let mut unique_type_id = String::with_capacity(256); - unique_type_id.push('{'); - - match type_.sty { - ty::TyNever | - ty::TyBool | - ty::TyChar | - ty::TyStr | - ty::TyInt(_) | - ty::TyUint(_) | - ty::TyFloat(_) => { - push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); - }, - ty::TyAdt(def, substs) => { - unique_type_id.push_str(&(String::from(def.descr()) + " ")); - from_def_id_and_substs(self, cx, def.did, substs, &mut unique_type_id); - } - ty::TyTuple(component_types) if component_types.is_empty() => { - push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); - }, - ty::TyTuple(component_types) => { - unique_type_id.push_str("tuple "); - for &component_type in component_types { - let component_type_id = - self.get_unique_type_id_of_type(cx, component_type); - let component_type_id = - 
self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(&component_type_id[..]); - } - }, - ty::TyBox(inner_type) => { - unique_type_id.push_str("box "); - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => { - unique_type_id.push('*'); - if mutbl == hir::MutMutable { - unique_type_id.push_str("mut"); - } - - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => { - unique_type_id.push('&'); - if mutbl == hir::MutMutable { - unique_type_id.push_str("mut"); - } - - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyArray(inner_type, len) => { - unique_type_id.push_str(&format!("[{}]", len)); + let mut type_id_hasher = TypeIdHasher::new(cx.tcx(), + DebugInfoTypeIdHasher::new()); + type_id_hasher.visit_ty(type_); + let hash = type_id_hasher.into_inner().into_hash(); - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TySlice(inner_type) => { - unique_type_id.push_str("[]"); + let mut unique_type_id = String::with_capacity(TYPE_ID_HASH_LENGTH * 2); - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyTrait(ref trait_data) => { - unique_type_id.push_str("trait "); - - let principal = 
cx.tcx().erase_late_bound_regions_and_normalize( - &trait_data.principal); - - from_def_id_and_substs(self, - cx, - principal.def_id, - principal.substs, - &mut unique_type_id); - }, - ty::TyFnDef(.., &ty::BareFnTy{ unsafety, abi, ref sig } ) | - ty::TyFnPtr(&ty::BareFnTy{ unsafety, abi, ref sig } ) => { - if unsafety == hir::Unsafety::Unsafe { - unique_type_id.push_str("unsafe "); - } + for byte in hash.into_iter() { + write!(&mut unique_type_id, "{:x}", byte).unwrap(); + } - unique_type_id.push_str(abi.name()); + let key = self.unique_id_interner.intern(&unique_type_id); + self.type_to_unique_id.insert(type_, UniqueTypeId(key)); - unique_type_id.push_str(" fn("); + return UniqueTypeId(key); - let sig = cx.tcx().erase_late_bound_regions_and_normalize(sig); + // The hasher we are using to generate the UniqueTypeId. We want + // something that provides more than the 64 bits of the DefaultHasher. + const TYPE_ID_HASH_LENGTH: usize = 20; - for ¶meter_type in &sig.inputs { - let parameter_type_id = - self.get_unique_type_id_of_type(cx, parameter_type); - let parameter_type_id = - self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(¶meter_type_id[..]); - unique_type_id.push(','); - } - - if sig.variadic { - unique_type_id.push_str("..."); - } + struct DebugInfoTypeIdHasher { + state: blake2b::Blake2bCtx + } - unique_type_id.push_str(")->"); - let return_type_id = self.get_unique_type_id_of_type(cx, sig.output); - let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(&return_type_id[..]); - }, - ty::TyClosure(_, substs) if substs.upvar_tys.is_empty() => { - push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); - }, - ty::TyClosure(_, substs) => { - unique_type_id.push_str("closure "); - for upvar_type in substs.upvar_tys { - let upvar_type_id = - self.get_unique_type_id_of_type(cx, upvar_type); - let upvar_type_id = - self.get_unique_type_id_as_string(upvar_type_id); - 
unique_type_id.push_str(&upvar_type_id[..]); - } - }, - _ => { - bug!("get_unique_type_id_of_type() - unexpected type: {:?}", - type_) + impl ::std::hash::Hasher for DebugInfoTypeIdHasher { + fn finish(&self) -> u64 { + unimplemented!() } - }; - - unique_type_id.push('}'); - - // Trim to size before storing permanently - unique_type_id.shrink_to_fit(); - - let key = self.unique_id_interner.intern(&unique_type_id); - self.type_to_unique_id.insert(type_, UniqueTypeId(key)); - return UniqueTypeId(key); - - fn from_def_id_and_substs<'a, 'tcx>(type_map: &mut TypeMap<'tcx>, - cx: &CrateContext<'a, 'tcx>, - def_id: DefId, - substs: &Substs<'tcx>, - output: &mut String) { - // First, find out the 'real' def_id of the type. Items inlined from - // other crates have to be mapped back to their source. - let def_id = if let Some(node_id) = cx.tcx().map.as_local_node_id(def_id) { - if cx.tcx().map.is_inlined_node_id(node_id) { - // The given def_id identifies the inlined copy of a - // type definition, let's take the source of the copy. - cx.defid_for_inlined_node(node_id).unwrap() - } else { - def_id - } - } else { - def_id - }; + #[inline] + fn write(&mut self, bytes: &[u8]) { + blake2b::blake2b_update(&mut self.state, bytes); + } + } - // Get the crate name/disambiguator as first part of the identifier. 
- let crate_name = if def_id.is_local() { - cx.tcx().crate_name.clone() - } else { - cx.sess().cstore.original_crate_name(def_id.krate) - }; - let crate_disambiguator = cx.tcx().crate_disambiguator(def_id.krate); - - output.push_str(&crate_name[..]); - output.push_str("/"); - output.push_str(&crate_disambiguator[..]); - output.push_str("/"); - // Add the def-index as the second part - output.push_str(&format!("{:x}", def_id.index.as_usize())); - - if substs.types().next().is_some() { - output.push('<'); - - for type_parameter in substs.types() { - let param_type_id = - type_map.get_unique_type_id_of_type(cx, type_parameter); - let param_type_id = - type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(¶m_type_id[..]); - output.push(','); + impl DebugInfoTypeIdHasher { + fn new() -> DebugInfoTypeIdHasher { + DebugInfoTypeIdHasher { + state: blake2b::blake2b_new(TYPE_ID_HASH_LENGTH, &[]) } + } - output.push('>'); + fn into_hash(self) -> [u8; TYPE_ID_HASH_LENGTH] { + let mut hash = [0u8; TYPE_ID_HASH_LENGTH]; + blake2b::blake2b_final(self.state, &mut hash); + hash } } } @@ -1927,15 +1771,17 @@ pub fn create_global_var_metadata(cx: &CrateContext, return; } + let tcx = cx.tcx(); + // Don't create debuginfo for globals inlined from other crates. The other // crate should already contain debuginfo for it. More importantly, the // global might not even exist in un-inlined form anywhere which would lead // to a linker errors. 
- if cx.tcx().map.is_inlined_node_id(node_id) { + if tcx.map.is_inlined_node_id(node_id) { return; } - let node_def_id = cx.tcx().map.local_def_id(node_id); + let node_def_id = tcx.map.local_def_id(node_id); let (var_scope, span) = get_namespace_and_span_for_item(cx, node_def_id); let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP { @@ -1946,9 +1792,9 @@ pub fn create_global_var_metadata(cx: &CrateContext, }; let is_local_to_unit = is_node_local_to_unit(cx, node_id); - let variable_type = cx.tcx().node_id_to_type(node_id); + let variable_type = tcx.erase_regions(&tcx.node_id_to_type(node_id)); let type_metadata = type_metadata(cx, variable_type, span); - let var_name = cx.tcx().item_name(node_def_id).to_string(); + let var_name = tcx.item_name(node_def_id).to_string(); let linkage_name = mangled_name_of_item(cx, node_def_id, ""); let var_name = CString::new(var_name).unwrap(); diff --git a/src/libstd/collections/mod.rs b/src/libstd/collections/mod.rs index 504b3a76bd295..b9e92a01b2f8e 100644 --- a/src/libstd/collections/mod.rs +++ b/src/libstd/collections/mod.rs @@ -15,7 +15,7 @@ //! standard implementations, it should be possible for two libraries to //! communicate without significant data conversion. //! -//! To get this out of the way: you should probably just use `Vec` or `HashMap`. +//! To get this out of the way: you should probably just use [`Vec`] or [`HashMap`]. //! These two collections cover most use cases for generic data storage and //! processing. They are exceptionally good at doing what they do. All the other //! collections in the standard library have specific use cases where they are @@ -25,10 +25,10 @@ //! //! Rust's collections can be grouped into four major categories: //! -//! * Sequences: `Vec`, `VecDeque`, `LinkedList` -//! * Maps: `HashMap`, `BTreeMap` -//! * Sets: `HashSet`, `BTreeSet` -//! * Misc: `BinaryHeap` +//! * Sequences: [`Vec`], [`VecDeque`], [`LinkedList`] +//! * Maps: [`HashMap`], [`BTreeMap`] +//! 
* Sets: [`HashSet`], [`BTreeSet`] +//! * Misc: [`BinaryHeap`] //! //! # When Should You Use Which Collection? //! @@ -46,13 +46,13 @@ //! * You want a heap-allocated array. //! //! ### Use a `VecDeque` when: -//! * You want a `Vec` that supports efficient insertion at both ends of the +//! * You want a [`Vec`] that supports efficient insertion at both ends of the //! sequence. //! * You want a queue. //! * You want a double-ended queue (deque). //! //! ### Use a `LinkedList` when: -//! * You want a `Vec` or `VecDeque` of unknown size, and can't tolerate +//! * You want a [`Vec`] or [`VecDeque`] of unknown size, and can't tolerate //! amortization. //! * You want to efficiently split and append lists. //! * You are *absolutely* certain you *really*, *truly*, want a doubly linked @@ -92,38 +92,38 @@ //! Throughout the documentation, we will follow a few conventions. For all //! operations, the collection's size is denoted by n. If another collection is //! involved in the operation, it contains m elements. Operations which have an -//! *amortized* cost are suffixed with a `*`. Operations with an *expected* +//! *amortized* cost are suffixed with a `*`. Operations with an *expected* //! cost are suffixed with a `~`. //! //! All amortized costs are for the potential need to resize when capacity is -//! exhausted. If a resize occurs it will take O(n) time. Our collections never +//! exhausted. If a resize occurs it will take O(n) time. Our collections never //! automatically shrink, so removal operations aren't amortized. Over a //! sufficiently large series of operations, the average cost per operation will //! deterministically equal the given cost. //! -//! Only HashMap has expected costs, due to the probabilistic nature of hashing. -//! It is theoretically possible, though very unlikely, for HashMap to +//! Only [`HashMap`] has expected costs, due to the probabilistic nature of hashing. +//! It is theoretically possible, though very unlikely, for [`HashMap`] to //! 
experience worse performance. //! //! ## Sequences //! -//! | | get(i) | insert(i) | remove(i) | append | split_off(i) | -//! |--------------|----------------|-----------------|----------------|--------|----------------| -//! | Vec | O(1) | O(n-i)* | O(n-i) | O(m)* | O(n-i) | -//! | VecDeque | O(1) | O(min(i, n-i))* | O(min(i, n-i)) | O(m)* | O(min(i, n-i)) | -//! | LinkedList | O(min(i, n-i)) | O(min(i, n-i)) | O(min(i, n-i)) | O(1) | O(min(i, n-i)) | +//! | | get(i) | insert(i) | remove(i) | append | split_off(i) | +//! |----------------|----------------|-----------------|----------------|--------|----------------| +//! | [`Vec`] | O(1) | O(n-i)* | O(n-i) | O(m)* | O(n-i) | +//! | [`VecDeque`] | O(1) | O(min(i, n-i))* | O(min(i, n-i)) | O(m)* | O(min(i, n-i)) | +//! | [`LinkedList`] | O(min(i, n-i)) | O(min(i, n-i)) | O(min(i, n-i)) | O(1) | O(min(i, n-i)) | //! -//! Note that where ties occur, Vec is generally going to be faster than VecDeque, and VecDeque -//! is generally going to be faster than LinkedList. +//! Note that where ties occur, [`Vec`] is generally going to be faster than [`VecDeque`], and +//! [`VecDeque`] is generally going to be faster than [`LinkedList`]. //! //! ## Maps //! //! For Sets, all operations have the cost of the equivalent Map operation. //! -//! | | get | insert | remove | predecessor | append | -//! |----------|-----------|----------|----------|-------------|--------| -//! | HashMap | O(1)~ | O(1)~* | O(1)~ | N/A | N/A | -//! | BTreeMap | O(log n) | O(log n) | O(log n) | O(log n) | O(n+m) | +//! | | get | insert | remove | predecessor | append | +//! |--------------|-----------|----------|----------|-------------|--------| +//! | [`HashMap`] | O(1)~ | O(1)~* | O(1)~ | N/A | N/A | +//! | [`BTreeMap`] | O(log n) | O(log n) | O(log n) | O(log n) | O(n+m) | //! //! # Correct and Efficient Usage of Collections //! @@ -136,7 +136,7 @@ //! ## Capacity Management //! //! 
Many collections provide several constructors and methods that refer to -//! "capacity". These collections are generally built on top of an array. +//! "capacity". These collections are generally built on top of an array. //! Optimally, this array would be exactly the right size to fit only the //! elements stored in the collection, but for the collection to do this would //! be very inefficient. If the backing array was exactly the right size at all @@ -157,29 +157,29 @@ //! information to do this itself. Therefore, it is up to us programmers to give //! it hints. //! -//! Any `with_capacity` constructor will instruct the collection to allocate +//! Any `with_capacity()` constructor will instruct the collection to allocate //! enough space for the specified number of elements. Ideally this will be for //! exactly that many elements, but some implementation details may prevent -//! this. `Vec` and `VecDeque` can be relied on to allocate exactly the -//! requested amount, though. Use `with_capacity` when you know exactly how many +//! this. [`Vec`] and [`VecDeque`] can be relied on to allocate exactly the +//! requested amount, though. Use `with_capacity()` when you know exactly how many //! elements will be inserted, or at least have a reasonable upper-bound on that //! number. //! -//! When anticipating a large influx of elements, the `reserve` family of +//! When anticipating a large influx of elements, the `reserve()` family of //! methods can be used to hint to the collection how much room it should make -//! for the coming items. As with `with_capacity`, the precise behavior of +//! for the coming items. As with `with_capacity()`, the precise behavior of //! these methods will be specific to the collection of interest. //! //! For optimal performance, collections will generally avoid shrinking -//! themselves. If you believe that a collection will not soon contain any more -//! elements, or just really need the memory, the `shrink_to_fit` method prompts +//! 
themselves. If you believe that a collection will not soon contain any more +//! elements, or just really need the memory, the `shrink_to_fit()` method prompts //! the collection to shrink the backing array to the minimum size capable of //! holding its elements. //! //! Finally, if ever you're interested in what the actual capacity of the -//! collection is, most collections provide a `capacity` method to query this -//! information on demand. This can be useful for debugging purposes, or for -//! use with the `reserve` methods. +//! collection is, most collections provide a `capacity()` method to query this +//! information on demand. This can be useful for debugging purposes, or for +//! use with the `reserve()` methods. //! //! ## Iterators //! @@ -194,15 +194,15 @@ //! //! All of the standard collections provide several iterators for performing //! bulk manipulation of their contents. The three primary iterators almost -//! every collection should provide are `iter`, `iter_mut`, and `into_iter`. +//! every collection should provide are `iter()`, `iter_mut()`, and `into_iter()`. //! Some of these are not provided on collections where it would be unsound or //! unreasonable to provide them. //! -//! `iter` provides an iterator of immutable references to all the contents of a -//! collection in the most "natural" order. For sequence collections like `Vec`, +//! `iter()` provides an iterator of immutable references to all the contents of a +//! collection in the most "natural" order. For sequence collections like [`Vec`], //! this means the items will be yielded in increasing order of index starting -//! at 0. For ordered collections like `BTreeMap`, this means that the items -//! will be yielded in sorted order. For unordered collections like `HashMap`, +//! at 0. For ordered collections like [`BTreeMap`], this means that the items +//! will be yielded in sorted order. For unordered collections like [`HashMap`], //! 
the items will be yielded in whatever order the internal representation made //! most convenient. This is great for reading through all the contents of the //! collection. @@ -214,8 +214,8 @@ //! } //! ``` //! -//! `iter_mut` provides an iterator of *mutable* references in the same order as -//! `iter`. This is great for mutating all the contents of the collection. +//! `iter_mut()` provides an iterator of *mutable* references in the same order as +//! `iter()`. This is great for mutating all the contents of the collection. //! //! ``` //! let mut vec = vec![1, 2, 3, 4]; @@ -224,12 +224,12 @@ //! } //! ``` //! -//! `into_iter` transforms the actual collection into an iterator over its +//! `into_iter()` transforms the actual collection into an iterator over its //! contents by-value. This is great when the collection itself is no longer -//! needed, and the values are needed elsewhere. Using `extend` with `into_iter` +//! needed, and the values are needed elsewhere. Using `extend()` with `into_iter()` //! is the main way that contents of one collection are moved into another. -//! `extend` automatically calls `into_iter`, and takes any `T: IntoIterator`. -//! Calling `collect` on an iterator itself is also a great way to convert one +//! `extend()` automatically calls `into_iter()`, and takes any `T: `[`IntoIterator`]. +//! Calling `collect()` on an iterator itself is also a great way to convert one //! collection into another. Both of these methods should internally use the //! capacity management tools discussed in the previous section to do this as //! efficiently as possible. @@ -248,9 +248,9 @@ //! ``` //! //! Iterators also provide a series of *adapter* methods for performing common -//! threads to sequences. Among the adapters are functional favorites like `map`, -//! `fold`, `skip`, and `take`. Of particular interest to collections is the -//! `rev` adapter, that reverses any iterator that supports this operation. Most +//! threads to sequences. 
Among the adapters are functional favorites like `map()`, +//! `fold()`, `skip()` and `take()`. Of particular interest to collections is the +//! `rev()` adapter, that reverses any iterator that supports this operation. Most //! collections provide reversible iterators as the way to iterate over them in //! reverse order. //! @@ -263,27 +263,27 @@ //! //! Several other collection methods also return iterators to yield a sequence //! of results but avoid allocating an entire collection to store the result in. -//! This provides maximum flexibility as `collect` or `extend` can be called to +//! This provides maximum flexibility as `collect()` or `extend()` can be called to //! "pipe" the sequence into any collection if desired. Otherwise, the sequence //! can be looped over with a `for` loop. The iterator can also be discarded //! after partial use, preventing the computation of the unused items. //! //! ## Entries //! -//! The `entry` API is intended to provide an efficient mechanism for +//! The `entry()` API is intended to provide an efficient mechanism for //! manipulating the contents of a map conditionally on the presence of a key or //! not. The primary motivating use case for this is to provide efficient //! accumulator maps. For instance, if one wishes to maintain a count of the //! number of times each key has been seen, they will have to perform some //! conditional logic on whether this is the first time the key has been seen or -//! not. Normally, this would require a `find` followed by an `insert`, +//! not. Normally, this would require a `find()` followed by an `insert()`, //! effectively duplicating the search effort on each insertion. //! //! When a user calls `map.entry(&key)`, the map will search for the key and //! then yield a variant of the `Entry` enum. //! //! If a `Vacant(entry)` is yielded, then the key *was not* found. In this case -//! the only valid operation is to `insert` a value into the entry. When this is +//! 
the only valid operation is to `insert()` a value into the entry. When this is //! done, the vacant entry is consumed and converted into a mutable reference to //! the value that was inserted. This allows for further manipulation of the //! value beyond the lifetime of the search itself. This is useful if complex @@ -291,14 +291,14 @@ //! just inserted. //! //! If an `Occupied(entry)` is yielded, then the key *was* found. In this case, -//! the user has several options: they can `get`, `insert`, or `remove` the +//! the user has several options: they can `get()`, `insert()` or `remove()` the //! value of the occupied entry. Additionally, they can convert the occupied //! entry into a mutable reference to its value, providing symmetry to the -//! vacant `insert` case. +//! vacant `insert()` case. //! //! ### Examples //! -//! Here are the two primary ways in which `entry` is used. First, a simple +//! Here are the two primary ways in which `entry()` is used. First, a simple //! example where the logic performed on the values is trivial. //! //! #### Counting the number of times each character in a string occurs @@ -322,7 +322,7 @@ //! ``` //! //! When the logic to be performed on the value is more complex, we may simply -//! use the `entry` API to ensure that the value is initialized, and perform the +//! use the `entry()` API to ensure that the value is initialized and perform the //! logic afterwards. //! //! #### Tracking the inebriation of customers at a bar @@ -406,6 +406,16 @@ //! // ...but the key hasn't changed. b is still "baz", not "xyz". //! assert_eq!(map.keys().next().unwrap().b, "baz"); //! ``` +//! +//! [`Vec`]: ../../std/vec/struct.Vec.html +//! [`HashMap`]: ../../std/collections/struct.HashMap.html +//! [`VecDeque`]: ../../std/collections/struct.VecDeque.html +//! [`LinkedList`]: ../../std/collections/struct.LinkedList.html +//! [`BTreeMap`]: ../../std/collections/struct.BTreeMap.html +//! [`HashSet`]: ../../std/collections/struct.HashSet.html +//! 
[`BTreeSet`]: ../../std/collections/struct.BTreeSet.html +//! [`BinaryHeap`]: ../../std/collections/struct.BinaryHeap.html +//! [`IntoIterator`]: ../../std/iter/trait.IntoIterator.html #![stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libstd/path.rs b/src/libstd/path.rs index d6a5dfe551800..a55318d388396 100644 --- a/src/libstd/path.rs +++ b/src/libstd/path.rs @@ -1294,14 +1294,18 @@ impl Into for PathBuf { /// This type supports a number of operations for inspecting a path, including /// breaking the path into its components (separated by `/` or `\`, depending on /// the platform), extracting the file name, determining whether the path is -/// absolute, and so on. More details about the overall approach can be found in -/// the module documentation. +/// absolute, and so on. /// /// This is an *unsized* type, meaning that it must always be used behind a -/// pointer like `&` or [`Box`]. +/// pointer like `&` or [`Box`]. For an owned version of this type, +/// see [`PathBuf`]. /// /// [`str`]: ../primitive.str.html /// [`Box`]: ../boxed/struct.Box.html +/// [`PathBuf`]: struct.PathBuf.html +/// +/// More details about the overall approach can be found in +/// the module documentation. 
/// /// # Examples /// diff --git a/src/test/codegen/consts.rs b/src/test/codegen/consts.rs index 36a582ca73709..33b4221b73338 100644 --- a/src/test/codegen/consts.rs +++ b/src/test/codegen/consts.rs @@ -19,12 +19,12 @@ // CHECK: @STATIC = {{.*}}, align 4 // This checks the constants from inline_enum_const -// CHECK: @ref{{[0-9]+}} = {{.*}}, align 2 +// CHECK: @ref.{{[0-9]+}} = {{.*}}, align 2 // This checks the constants from {low,high}_align_const, they share the same // constant, but the alignment differs, so the higher one should be used -// CHECK: [[LOW_HIGH:@ref[0-9]+]] = {{.*}}, align 4 -// CHECK: [[LOW_HIGH_REF:@const[0-9]+]] = {{.*}} [[LOW_HIGH]] +// CHECK: [[LOW_HIGH:@ref.[0-9]+]] = {{.*}}, align 4 +// CHECK: [[LOW_HIGH_REF:@const.[0-9]+]] = {{.*}} [[LOW_HIGH]] #[derive(Copy, Clone)] diff --git a/src/test/compile-fail/E0277.rs b/src/test/compile-fail/E0277.rs index 12f9417f944cd..e4cb50cd3f253 100644 --- a/src/test/compile-fail/E0277.rs +++ b/src/test/compile-fail/E0277.rs @@ -19,6 +19,6 @@ fn some_func(foo: T) { fn main() { some_func(5i32); //~^ ERROR the trait bound `i32: Foo` is not satisfied - //~| NOTE trait `i32: Foo` not satisfied + //~| NOTE the trait `Foo` is not implemented for `i32` //~| NOTE required by `some_func` } diff --git a/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs b/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs index 084616964674f..5a19aecf667f0 100644 --- a/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs +++ b/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs @@ -32,5 +32,5 @@ fn ice(a: A) { let r = loop {}; r = r + a; //~^ ERROR the trait bound `(): Add` is not satisfied - //~| NOTE trait `(): Add` not satisfied + //~| NOTE the trait `Add` is not implemented for `()` } diff --git a/src/test/compile-fail/cast-rfc0401.rs b/src/test/compile-fail/cast-rfc0401.rs index ee622a17ab12c..0c373057c76e0 100644 --- 
a/src/test/compile-fail/cast-rfc0401.rs +++ b/src/test/compile-fail/cast-rfc0401.rs @@ -92,7 +92,7 @@ fn main() let _ = v as *const [u8]; //~ ERROR cannot cast let _ = fat_v as *const Foo; //~^ ERROR the trait bound `[u8]: std::marker::Sized` is not satisfied - //~| NOTE trait `[u8]: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `[u8]` //~| NOTE `[u8]` does not have a constant size known at compile-time //~| NOTE required for the cast to the object type `Foo` let _ = foo as *const str; //~ ERROR casting @@ -107,7 +107,7 @@ fn main() let a : *const str = "hello"; let _ = a as *const Foo; //~^ ERROR the trait bound `str: std::marker::Sized` is not satisfied - //~| NOTE trait `str: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `str` //~| NOTE `str` does not have a constant size known at compile-time //~| NOTE required for the cast to the object type `Foo` diff --git a/src/test/compile-fail/const-unsized.rs b/src/test/compile-fail/const-unsized.rs index a73164b957c83..226b567c546e5 100644 --- a/src/test/compile-fail/const-unsized.rs +++ b/src/test/compile-fail/const-unsized.rs @@ -12,25 +12,25 @@ use std::fmt::Debug; const CONST_0: Debug+Sync = *(&0 as &(Debug+Sync)); //~^ ERROR `std::fmt::Debug + Sync + 'static: std::marker::Sized` is not satisfied -//~| NOTE `std::fmt::Debug + Sync + 'static: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `std::fmt::Debug + Sync + 'static` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a statically known size const CONST_FOO: str = *"foo"; //~^ ERROR `str: std::marker::Sized` is not satisfied -//~| NOTE `str: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `str` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a 
statically known size static STATIC_1: Debug+Sync = *(&1 as &(Debug+Sync)); //~^ ERROR `std::fmt::Debug + Sync + 'static: std::marker::Sized` is not satisfied -//~| NOTE `std::fmt::Debug + Sync + 'static: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `std::fmt::Debug + Sync + 'static` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a statically known size static STATIC_BAR: str = *"bar"; //~^ ERROR `str: std::marker::Sized` is not satisfied -//~| NOTE `str: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `str` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a statically known size diff --git a/src/test/compile-fail/impl-trait/auto-trait-leak.rs b/src/test/compile-fail/impl-trait/auto-trait-leak.rs index 60ad266e7f7da..f055d20e1343b 100644 --- a/src/test/compile-fail/impl-trait/auto-trait-leak.rs +++ b/src/test/compile-fail/impl-trait/auto-trait-leak.rs @@ -26,7 +26,7 @@ fn send(_: T) {} fn main() { send(before()); //~^ ERROR the trait bound `std::rc::Rc>: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc>: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc>` //~| NOTE `std::rc::Rc>` cannot be sent between threads safely //~| NOTE required because it appears within the type `[closure //~| NOTE required because it appears within the type `impl std::ops::Fn<(i32,)>` @@ -34,7 +34,7 @@ fn main() { send(after()); //~^ ERROR the trait bound `std::rc::Rc>: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc>: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc>` //~| NOTE `std::rc::Rc>` cannot be sent between threads safely //~| NOTE required because it appears within the type `[closure //~| NOTE required because 
it appears within the type `impl std::ops::Fn<(i32,)>` @@ -54,7 +54,7 @@ fn after() -> impl Fn(i32) { fn cycle1() -> impl Clone { send(cycle2().clone()); //~^ ERROR the trait bound `std::rc::Rc: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc` //~| NOTE `std::rc::Rc` cannot be sent between threads safely //~| NOTE required because it appears within the type `impl std::clone::Clone` //~| NOTE required by `send` @@ -65,7 +65,7 @@ fn cycle1() -> impl Clone { fn cycle2() -> impl Clone { send(cycle1().clone()); //~^ ERROR the trait bound `std::rc::Rc>: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc>: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc>` //~| NOTE `std::rc::Rc>` cannot be sent between threads safely //~| NOTE required because it appears within the type `impl std::clone::Clone` //~| NOTE required by `send` diff --git a/src/test/compile-fail/on-unimplemented/multiple-impls.rs b/src/test/compile-fail/on-unimplemented/multiple-impls.rs index cc7c2f4f796d9..0ad9f21e0983f 100644 --- a/src/test/compile-fail/on-unimplemented/multiple-impls.rs +++ b/src/test/compile-fail/on-unimplemented/multiple-impls.rs @@ -42,17 +42,17 @@ impl Index> for [i32] { fn main() { Index::index(&[] as &[i32], 2u32); //~^ ERROR E0277 - //~| NOTE not satisfied + //~| NOTE the trait `Index` is not implemented for `[i32]` //~| NOTE trait message //~| NOTE required by Index::index(&[] as &[i32], Foo(2u32)); //~^ ERROR E0277 - //~| NOTE not satisfied + //~| NOTE the trait `Index>` is not implemented for `[i32]` //~| NOTE on impl for Foo //~| NOTE required by Index::index(&[] as &[i32], Bar(2u32)); //~^ ERROR E0277 - //~| NOTE not satisfied + //~| NOTE the trait `Index>` is not implemented for `[i32]` //~| NOTE on impl for Bar //~| NOTE required by } diff --git 
a/src/test/compile-fail/on-unimplemented/on-impl.rs b/src/test/compile-fail/on-unimplemented/on-impl.rs index c22e48bede4ef..a7c599330a070 100644 --- a/src/test/compile-fail/on-unimplemented/on-impl.rs +++ b/src/test/compile-fail/on-unimplemented/on-impl.rs @@ -29,8 +29,9 @@ impl Index for [i32] { #[rustc_error] fn main() { - Index::::index(&[1, 2, 3] as &[i32], 2u32); //~ ERROR E0277 - //~| NOTE not satisfied - //~| NOTE a usize is required - //~| NOTE required by + Index::::index(&[1, 2, 3] as &[i32], 2u32); + //~^ ERROR E0277 + //~| NOTE the trait `Index` is not implemented for `[i32]` + //~| NOTE a usize is required + //~| NOTE required by } diff --git a/src/test/compile-fail/on-unimplemented/on-trait.rs b/src/test/compile-fail/on-unimplemented/on-trait.rs index 9ea2809374cd8..ef7695af3e12e 100644 --- a/src/test/compile-fail/on-unimplemented/on-trait.rs +++ b/src/test/compile-fail/on-unimplemented/on-trait.rs @@ -35,9 +35,9 @@ pub fn main() { //~^ ERROR //~^^ NOTE a collection of type `std::option::Option>` cannot be built from an iterator over elements of type `&u8` //~^^^ NOTE required by `collect` - //~| NOTE trait `std::option::Option>: MyFromIterator<&u8>` not satisfied + //~| NOTE the trait `MyFromIterator<&u8>` is not implemented for `std::option::Option>` let x: String = foobar(); //~ ERROR //~^ NOTE test error `std::string::String` with `u8` `_` `u32` //~^^ NOTE required by `foobar` - //~| NOTE trait `std::string::String: Foo` not satisfied + //~| NOTE the trait `Foo` is not implemented for `std::string::String` } diff --git a/src/test/compile-fail/on-unimplemented/slice-index.rs b/src/test/compile-fail/on-unimplemented/slice-index.rs index 5c548b5d5bf20..d528d0e626a79 100644 --- a/src/test/compile-fail/on-unimplemented/slice-index.rs +++ b/src/test/compile-fail/on-unimplemented/slice-index.rs @@ -17,10 +17,12 @@ use std::ops::Index; #[rustc_error] fn main() { let x = &[1, 2, 3] as &[i32]; - x[1i32]; //~ ERROR E0277 - //~| NOTE trait `[i32]: 
std::ops::Index` not satisfied - //~| NOTE slice indices are of type `usize` - x[..1i32]; //~ ERROR E0277 - //~| NOTE trait `[i32]: std::ops::Index>` not satisfied - //~| NOTE slice indices are of type `usize` + x[1i32]; + //~^ ERROR E0277 + //~| NOTE the trait `std::ops::Index` is not implemented for `[i32]` + //~| NOTE slice indices are of type `usize` + x[..1i32]; + //~^ ERROR E0277 + //~| NOTE the trait `std::ops::Index>` is not implemented for `[i32]` + //~| NOTE slice indices are of type `usize` } diff --git a/src/test/compile-fail/trait-suggest-where-clause.rs b/src/test/compile-fail/trait-suggest-where-clause.rs index d15e3536d60ca..7530d8890b98c 100644 --- a/src/test/compile-fail/trait-suggest-where-clause.rs +++ b/src/test/compile-fail/trait-suggest-where-clause.rs @@ -16,13 +16,13 @@ fn check() { // suggest a where-clause, if needed mem::size_of::(); //~^ ERROR `U: std::marker::Sized` is not satisfied - //~| NOTE trait `U: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `U` //~| HELP consider adding a `where U: std::marker::Sized` bound //~| NOTE required by `std::mem::size_of` mem::size_of::>(); //~^ ERROR `U: std::marker::Sized` is not satisfied - //~| NOTE trait `U: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `U` //~| HELP consider adding a `where U: std::marker::Sized` bound //~| NOTE required because it appears within the type `Misc` //~| NOTE required by `std::mem::size_of` @@ -31,13 +31,13 @@ fn check() { >::from; //~^ ERROR `u64: std::convert::From` is not satisfied - //~| NOTE trait `u64: std::convert::From` not satisfied + //~| NOTE the trait `std::convert::From` is not implemented for `u64` //~| HELP consider adding a `where u64: std::convert::From` bound //~| NOTE required by `std::convert::From::from` ::Item>>::from; //~^ ERROR `u64: std::convert::From<::Item>` is not satisfied - //~| NOTE trait `u64: std::convert::From<::Item>` not 
satisfied + //~| NOTE the trait `std::convert::From<::Item>` is not implemented //~| HELP consider adding a `where u64: //~| NOTE required by `std::convert::From::from` @@ -45,20 +45,20 @@ fn check() { as From>::from; //~^ ERROR `Misc<_>: std::convert::From` is not satisfied - //~| NOTE trait `Misc<_>: std::convert::From` not satisfied + //~| NOTE the trait `std::convert::From` is not implemented for `Misc<_>` //~| NOTE required by `std::convert::From::from` // ... and also not if the error is not related to the type mem::size_of::<[T]>(); //~^ ERROR `[T]: std::marker::Sized` is not satisfied - //~| NOTE `[T]: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `[T]` //~| NOTE `[T]` does not have a constant size //~| NOTE required by `std::mem::size_of` mem::size_of::<[&U]>(); //~^ ERROR `[&U]: std::marker::Sized` is not satisfied - //~| NOTE `[&U]: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `[&U]` //~| NOTE `[&U]` does not have a constant size //~| NOTE required by `std::mem::size_of` } diff --git a/src/test/run-make/symbols-are-reasonable/Makefile b/src/test/run-make/symbols-are-reasonable/Makefile index c668ffc5832b6..1c117cf0c9e62 100644 --- a/src/test/run-make/symbols-are-reasonable/Makefile +++ b/src/test/run-make/symbols-are-reasonable/Makefile @@ -1,7 +1,7 @@ -include ../tools.mk # check that the compile generated symbols for strings, binaries, -# vtables, etc. have semisane names (e.g. `str1234`); it's relatively +# vtables, etc. have semisane names (e.g. `str.1234`); it's relatively # easy to accidentally modify the compiler internals to make them # become things like `str"str"(1234)`. @@ -10,6 +10,6 @@ OUT=$(TMPDIR)/lib.s all: $(RUSTC) lib.rs --emit=asm --crate-type=staticlib # just check for symbol declarations with the names we're expecting. 
- grep 'str[0-9][0-9]*:' $(OUT) - grep 'byte_str[0-9][0-9]*:' $(OUT) - grep 'vtable[0-9][0-9]*' $(OUT) + grep 'str\.[0-9][0-9]*:' $(OUT) + grep 'byte_str\.[0-9][0-9]*:' $(OUT) + grep 'vtable\.[0-9][0-9]*' $(OUT)