diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index 72ef23b3fd712..9c5e4b9f0e086 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -150,7 +150,7 @@ pub enum NamespaceResult { UnboundResult, /// Means that resolve has determined that the name is bound in the Module /// argument, and specified by the NameBindings argument. - BoundResult(@Module, @mut NameBindings) + BoundResult(@mut Module, @mut NameBindings) } pub impl NamespaceResult { @@ -196,7 +196,7 @@ pub enum ImportDirectiveSubclass { /// The context that we thread through while building the reduced graph. pub enum ReducedGraphParent { - ModuleReducedGraphParent(@Module) + ModuleReducedGraphParent(@mut Module) } pub enum ResolveResult { @@ -293,7 +293,7 @@ pub enum SearchThroughModulesFlag { pub enum ModulePrefixResult { NoPrefixFound, - PrefixFound(@Module, uint) + PrefixFound(@mut Module, uint) } #[deriving_eq] @@ -368,11 +368,13 @@ pub fn ImportDirective(privacy: Privacy, /// The item that an import resolves to. pub struct Target { - target_module: @Module, + target_module: @mut Module, bindings: @mut NameBindings, } -pub fn Target(target_module: @Module, bindings: @mut NameBindings) -> Target { +pub fn Target(target_module: @mut Module, + bindings: @mut NameBindings) + -> Target { Target { target_module: target_module, bindings: bindings @@ -435,8 +437,8 @@ pub fn ImportState() -> ImportState { /// The link from a module up to its nearest parent node. pub enum ParentLink { NoParentLink, - ModuleParentLink(@Module, ident), - BlockParentLink(@Module, node_id) + ModuleParentLink(@mut Module, ident), + BlockParentLink(@mut Module, node_id) } /// The type of module this is. @@ -450,11 +452,11 @@ pub enum ModuleKind { /// One node in the tree of modules. pub struct Module { parent_link: ParentLink, - mut def_id: Option, + def_id: Option, kind: ModuleKind, - children: HashMap, - imports: DVec<@ImportDirective>, + children: @HashMap, + imports: @DVec<@ImportDirective>, // The anonymous children of this node. Anonymous children are pseudo- // modules that are implicitly created around items contained within @@ -471,16 +473,16 @@ pub struct Module { // There will be an anonymous module created around `g` with the ID of the // entry block for `f`. - anonymous_children: HashMap, + anonymous_children: @HashMap, // The status of resolving each import in this module. - import_resolutions: HashMap, + import_resolutions: @HashMap, // The number of unresolved globs that this module exports. - mut glob_count: uint, + glob_count: uint, // The index of the import we're resolving. - mut resolved_import_count: uint, + resolved_import_count: uint, } pub fn Module(parent_link: ParentLink, @@ -491,10 +493,10 @@ pub fn Module(parent_link: ParentLink, parent_link: parent_link, def_id: def_id, kind: kind, - children: HashMap(), - imports: DVec(), - anonymous_children: HashMap(), - import_resolutions: HashMap(), + children: @HashMap(), + imports: @DVec(), + anonymous_children: @HashMap(), + import_resolutions: @HashMap(), glob_count: 0, resolved_import_count: 0 } @@ -519,7 +521,7 @@ pub fn unused_import_lint_level(session: Session) -> level { // Records a possibly-private type definition. pub struct TypeNsDef { privacy: Privacy, - module_def: Option<@Module>, + module_def: Option<@mut Module>, type_def: Option } @@ -550,7 +552,7 @@ pub impl NameBindings { kind: ModuleKind, sp: span) { // Merges the module with the existing type def or creates a new one. 
- let module_ = @Module(parent_link, def_id, kind); + let module_ = @mut Module(parent_link, def_id, kind); match self.type_def { None => { self.type_def = Some(TypeNsDef { @@ -599,7 +601,7 @@ pub impl NameBindings { } /// Returns the module node if applicable. - fn get_module_if_available() -> Option<@Module> { + fn get_module_if_available() -> Option<@mut Module> { match self.type_def { Some(ref type_def) => (*type_def).module_def, None => None @@ -610,7 +612,7 @@ pub impl NameBindings { * Returns the module node. Fails if this node does not have a module * definition. */ - fn get_module(@mut self) -> @Module { + fn get_module(@mut self) -> @mut Module { match self.get_module_if_available() { None => { fail!(~"get_module called on a node with no module \ @@ -759,7 +761,7 @@ pub fn Resolver(session: Session, let current_module = graph_root.get_module(); let self = Resolver { - session: session, + session: @session, lang_items: copy lang_items, crate: crate, @@ -770,8 +772,8 @@ pub fn Resolver(session: Session, unused_import_lint_level: unused_import_lint_level(session), - trait_info: HashMap(), - structs: HashMap(), + trait_info: @HashMap(), + structs: @HashMap(), unresolved_imports: 0, @@ -794,8 +796,8 @@ pub fn Resolver(session: Session, attr_main_fn: None, main_fns: ~[], - def_map: HashMap(), - export_map2: HashMap(), + def_map: @HashMap(), + export_map2: @HashMap(), trait_map: @HashMap(), intr: session.intr() @@ -806,7 +808,7 @@ pub fn Resolver(session: Session, /// The main resolver class. pub struct Resolver { - session: Session, + session: @Session, lang_items: LanguageItems, crate: @crate, @@ -816,14 +818,14 @@ pub struct Resolver { unused_import_lint_level: level, - trait_info: HashMap>, - structs: HashMap, + trait_info: @HashMap>, + structs: @HashMap, // The number of imports that are currently unresolved. - mut unresolved_imports: uint, + unresolved_imports: uint, // The module that represents the current item scope. - mut current_module: @Module, + current_module: @mut Module, // The current set of local scopes, for values. // FIXME #4948: Reuse ribs to avoid allocation. @@ -837,10 +839,10 @@ pub struct Resolver { // Whether the current context is an X-ray context. An X-ray context is // allowed to access private names of any module. - mut xray_context: XrayFlag, + xray_context: XrayFlag, // The trait that the current context can refer to. - mut current_trait_refs: Option<@DVec>, + current_trait_refs: Option<@DVec>, // The ident for the keyword "self". self_ident: ident, @@ -854,19 +856,19 @@ pub struct Resolver { namespaces: ~[Namespace], // The function that has attribute named 'main' - mut attr_main_fn: Option<(node_id, span)>, + attr_main_fn: Option<(node_id, span)>, // The functions named 'main' - mut main_fns: ~[Option<(node_id, span)>], + main_fns: ~[Option<(node_id, span)>], - def_map: DefMap, - export_map2: ExportMap2, + def_map: @DefMap, + export_map2: @ExportMap2, trait_map: TraitMap, } pub impl Resolver { /// The main name resolution procedure. - fn resolve(@self, this: @Resolver) { - self.build_reduced_graph(this); + fn resolve(@mut self) { + self.build_reduced_graph(); self.session.abort_if_errors(); self.resolve_imports(); @@ -890,25 +892,25 @@ pub impl Resolver { // /// Constructs the reduced graph for the entire crate. 
- fn build_reduced_graph(this: @Resolver) { + fn build_reduced_graph(@mut self) { let initial_parent = ModuleReducedGraphParent(self.graph_root.get_module()); visit_crate(*self.crate, initial_parent, mk_vt(@Visitor { visit_item: |item, context, visitor| - (*this).build_reduced_graph_for_item(item, context, visitor), + self.build_reduced_graph_for_item(item, context, visitor), visit_foreign_item: |foreign_item, context, visitor| - (*this).build_reduced_graph_for_foreign_item(foreign_item, + self.build_reduced_graph_for_foreign_item(foreign_item, context, visitor), visit_view_item: |view_item, context, visitor| - (*this).build_reduced_graph_for_view_item(view_item, + self.build_reduced_graph_for_view_item(view_item, context, visitor), visit_block: |block, context, visitor| - (*this).build_reduced_graph_for_block(block, + self.build_reduced_graph_for_block(block, context, visitor), @@ -917,8 +919,9 @@ pub impl Resolver { } /// Returns the current module tracked by the reduced graph parent. - fn get_module_from_parent(reduced_graph_parent: ReducedGraphParent) - -> @Module { + fn get_module_from_parent(@mut self, + reduced_graph_parent: ReducedGraphParent) + -> @mut Module { match reduced_graph_parent { ModuleReducedGraphParent(module_) => { return module_; @@ -936,7 +939,8 @@ pub impl Resolver { * If this node does not have a module definition and we are not inside * a block, fails. */ - fn add_child(name: ident, + fn add_child(@mut self, + name: ident, reduced_graph_parent: ReducedGraphParent, duplicate_checking_mode: DuplicateCheckingMode, // For printing errors @@ -1023,7 +1027,7 @@ pub impl Resolver { } } - fn block_needs_anonymous_module(block: blk) -> bool { + fn block_needs_anonymous_module(@mut self, block: blk) -> bool { // If the block has view items, we need an anonymous module. if block.node.view_items.len() > 0 { return true; @@ -1054,8 +1058,10 @@ pub impl Resolver { return false; } - fn get_parent_link(parent: ReducedGraphParent, - name: ident) -> ParentLink { + fn get_parent_link(@mut self, + parent: ReducedGraphParent, + name: ident) + -> ParentLink { match parent { ModuleReducedGraphParent(module_) => { return ModuleParentLink(module_, name); @@ -1064,7 +1070,8 @@ pub impl Resolver { } /// Constructs the reduced graph for one item. - fn build_reduced_graph_for_item(item: @item, + fn build_reduced_graph_for_item(@mut self, + item: @item, parent: ReducedGraphParent, &&visitor: vt) { let ident = item.ident; @@ -1339,12 +1346,12 @@ pub impl Resolver { // Constructs the reduced graph for one variant. Variants exist in the // type and/or value namespaces. - fn build_reduced_graph_for_variant(variant: variant, + fn build_reduced_graph_for_variant(@mut self, + variant: variant, item_id: def_id, +parent_privacy: Privacy, parent: ReducedGraphParent, &&visitor: vt) { - let ident = variant.node.name; let (child, _) = self.add_child(ident, parent, ForbidDuplicateValues, variant.span); @@ -1387,7 +1394,8 @@ pub impl Resolver { * Constructs the reduced graph for one 'view item'. View items consist * of imports and use directives. */ - fn build_reduced_graph_for_view_item(view_item: @view_item, + fn build_reduced_graph_for_view_item(@mut self, + view_item: @view_item, parent: ReducedGraphParent, &&_visitor: vt) { let privacy = visibility_to_privacy(view_item.vis); @@ -1495,11 +1503,11 @@ pub impl Resolver { } /// Constructs the reduced graph for one foreign item. 
- fn build_reduced_graph_for_foreign_item(foreign_item: @foreign_item, + fn build_reduced_graph_for_foreign_item(@mut self, + foreign_item: @foreign_item, parent: ReducedGraphParent, &&visitor: vt) { - let name = foreign_item.ident; let (name_bindings, new_parent) = self.add_child(name, parent, ForbidDuplicateValues, @@ -1525,10 +1533,10 @@ pub impl Resolver { } } - fn build_reduced_graph_for_block(block: blk, + fn build_reduced_graph_for_block(@mut self, + block: blk, parent: ReducedGraphParent, &&visitor: vt) { - let mut new_parent; if self.block_needs_anonymous_module(block) { let block_id = block.node.id; @@ -1538,9 +1546,10 @@ pub impl Resolver { block_id); let parent_module = self.get_module_from_parent(parent); - let new_module = @Module(BlockParentLink(parent_module, block_id), - None, - AnonymousModuleKind); + let new_module = @mut Module( + BlockParentLink(parent_module, block_id), + None, + AnonymousModuleKind); parent_module.anonymous_children.insert(block_id, new_module); new_parent = ModuleReducedGraphParent(new_module); } else { @@ -1550,8 +1559,9 @@ pub impl Resolver { visit_block(block, new_parent, visitor); } - fn handle_external_def(def: def, - modules: HashMap, + fn handle_external_def(@mut self, + def: def, + modules: HashMap, child_name_bindings: @mut NameBindings, final_ident: &str, ident: ident, @@ -1671,7 +1681,7 @@ pub impl Resolver { * Builds the reduced graph rooted at the 'use' directive for an external * crate. */ - fn build_reduced_graph_for_external_crate(root: @Module) { + fn build_reduced_graph_for_external_crate(@mut self, root: @mut Module) { let modules = HashMap(); // Create all the items reachable by paths. @@ -1842,8 +1852,9 @@ pub impl Resolver { } /// Creates and adds an import directive to the given module. - fn build_import_directive(privacy: Privacy, - module_: @Module, + fn build_import_directive(@mut self, + privacy: Privacy, + module_: @mut Module, module_path: @DVec, subclass: @ImportDirectiveSubclass, span: span, @@ -1908,7 +1919,7 @@ pub impl Resolver { * Resolves all imports for the crate. This method performs the fixed- * point iteration. */ - fn resolve_imports() { + fn resolve_imports(@mut self) { let mut i = 0; let mut prev_unresolved_imports = 0; loop { @@ -1938,7 +1949,7 @@ pub impl Resolver { * Attempts to resolve imports for the given module and all of its * submodules. */ - fn resolve_imports_for_module_subtree(module_: @Module) { + fn resolve_imports_for_module_subtree(@mut self, module_: @mut Module) { debug!("(resolving imports for module subtree) resolving %s", self.module_to_str(module_)); self.resolve_imports_for_module(module_); @@ -1960,7 +1971,7 @@ pub impl Resolver { } /// Attempts to resolve imports for the given module only. 
- fn resolve_imports_for_module(module_: @Module) { + fn resolve_imports_for_module(@mut self, module_: @mut Module) { if (*module_).all_imports_resolved() { debug!("(resolving imports for module) all imports resolved for \ %s", @@ -1994,23 +2005,26 @@ pub impl Resolver { } } - fn idents_to_str(idents: ~[ident]) -> ~str { + fn idents_to_str(@mut self, idents: ~[ident]) -> ~str { let ident_strs = do idents.map |ident| { /*bad*/ copy *self.session.str_of(*ident) }; str::connect(ident_strs, "::") } - fn import_directive_subclass_to_str(subclass: ImportDirectiveSubclass) - -> @~str { + fn import_directive_subclass_to_str(@mut self, + subclass: ImportDirectiveSubclass) + -> @~str { match subclass { SingleImport(_target, source, _ns) => self.session.str_of(source), GlobImport => @~"*" } } - fn import_path_to_str(idents: ~[ident], subclass: ImportDirectiveSubclass) - -> @~str { + fn import_path_to_str(@mut self, + idents: ~[ident], + subclass: ImportDirectiveSubclass) + -> @~str { if idents.is_empty() { self.import_directive_subclass_to_str(subclass) } else { @@ -2027,10 +2041,10 @@ pub impl Resolver { * currently-unresolved imports, or success if we know the name exists. * If successful, the resolved bindings are written into the module. */ - fn resolve_import_for_module(module_: @Module, + fn resolve_import_for_module(@mut self, + module_: @mut Module, import_directive: @ImportDirective) -> ResolveResult<()> { - let mut resolution_result; let module_path = import_directive.module_path; @@ -2122,12 +2136,12 @@ pub impl Resolver { return resolution_result; } - fn resolve_single_import(module_: @Module, - containing_module: @Module, + fn resolve_single_import(@mut self, + module_: @mut Module, + containing_module: @mut Module, target: ident, source: ident) -> ResolveResult<()> { - debug!("(resolving single import) resolving `%s` = `%s::%s` from \ `%s`", *self.session.str_of(target), @@ -2314,12 +2328,12 @@ pub impl Resolver { return Success(()); } - fn resolve_single_module_import(module_: @Module, - containing_module: @Module, + fn resolve_single_module_import(@mut self, + module_: @mut Module, + containing_module: @mut Module, target: ident, source: ident) -> ResolveResult<()> { - debug!("(resolving single module import) resolving `%s` = `%s::%s` \ from `%s`", *self.session.str_of(target), @@ -2443,9 +2457,10 @@ pub impl Resolver { * succeeds or bails out (as importing * from an empty module or a module * that exports nothing is valid). */ - fn resolve_glob_import(privacy: Privacy, - module_: @Module, - containing_module: @Module, + fn resolve_glob_import(@mut self, + privacy: Privacy, + module_: @mut Module, + containing_module: @mut Module, span: span) -> ResolveResult<()> { // This function works in a highly imperative manner; it eagerly adds @@ -2557,11 +2572,12 @@ pub impl Resolver { return Success(()); } - fn resolve_module_path_from_root(module_: @Module, + fn resolve_module_path_from_root(@mut self, + module_: @mut Module, module_path: @DVec, index: uint, span: span) - -> ResolveResult<@Module> { + -> ResolveResult<@mut Module> { let mut search_module = module_; let mut index = index; let module_path_len = (*module_path).len(); @@ -2629,12 +2645,12 @@ pub impl Resolver { * Attempts to resolve the module part of an import directive or path * rooted at the given module. 
*/ - fn resolve_module_path_for_import(module_: @Module, + fn resolve_module_path_for_import(@mut self, + module_: @mut Module, module_path: @DVec, use_lexical_scope: UseLexicalScopeFlag, span: span) - -> ResolveResult<@Module> { - + -> ResolveResult<@mut Module> { let module_path_len = (*module_path).len(); assert module_path_len > 0; @@ -2708,13 +2724,13 @@ pub impl Resolver { span); } - fn resolve_item_in_lexical_scope(module_: @Module, + fn resolve_item_in_lexical_scope(@mut self, + module_: @mut Module, name: ident, namespace: Namespace, search_through_modules: SearchThroughModulesFlag) -> ResolveResult { - debug!("(resolving item in lexical scope) resolving `%s` in \ namespace %? in `%s`", *self.session.str_of(name), @@ -2822,8 +2838,10 @@ pub impl Resolver { } /** Resolves a module name in the current lexical scope. */ - fn resolve_module_in_lexical_scope(module_: @Module, name: ident) - -> ResolveResult<@Module> { + fn resolve_module_in_lexical_scope(@mut self, + module_: @mut Module, + name: ident) + -> ResolveResult<@mut Module> { // If this module is an anonymous module, resolve the item in the // lexical scope. Otherwise, resolve the item from the crate root. let resolve_result = self.resolve_item_in_lexical_scope( @@ -2867,7 +2885,8 @@ pub impl Resolver { /** * Returns the nearest normal module parent of the given module. */ - fn get_nearest_normal_module_parent(module_: @Module) -> Option<@Module> { + fn get_nearest_normal_module_parent(@mut self, module_: @mut Module) + -> Option<@mut Module> { let mut module_ = module_; loop { match module_.parent_link { @@ -2889,7 +2908,9 @@ pub impl Resolver { * Returns the nearest normal module parent of the given module, or the * module itself if it is a normal module. */ - fn get_nearest_normal_module_parent_or_self(module_: @Module) -> @Module { + fn get_nearest_normal_module_parent_or_self(@mut self, + module_: @mut Module) + -> @mut Module { match module_.kind { NormalModuleKind => return module_, ExternModuleKind | TraitModuleKind | AnonymousModuleKind => { @@ -2905,7 +2926,8 @@ pub impl Resolver { * Resolves a "module prefix". A module prefix is one of (a) `self::`; * (b) some chain of `super::`. */ - fn resolve_module_prefix(module_: @Module, + fn resolve_module_prefix(@mut self, + module_: @mut Module, module_path: @DVec) -> ResolveResult { let interner = self.session.parse_sess.interner; @@ -2951,7 +2973,8 @@ pub impl Resolver { * given namespace. If successful, returns the target corresponding to * the name. */ - fn resolve_name_in_module(module_: @Module, + fn resolve_name_in_module(@mut self, + module_: @mut Module, name: ident, namespace: Namespace, allow_globs: bool) @@ -3020,10 +3043,10 @@ pub impl Resolver { * This needs special handling, as, unlike all of the other imports, it * needs to look in the scope chain for modules and non-modules alike. 
*/ - fn resolve_one_level_renaming_import(module_: @Module, + fn resolve_one_level_renaming_import(@mut self, + module_: @mut Module, import_directive: @ImportDirective) -> ResolveResult<()> { - let mut target_name; let mut source_name; let allowable_namespaces; @@ -3177,7 +3200,7 @@ pub impl Resolver { return Success(()); } - fn report_unresolved_imports(module_: @Module) { + fn report_unresolved_imports(@mut self, module_: @mut Module) { let index = module_.resolved_import_count; let import_count = module_.imports.len(); if index != import_count { @@ -3211,12 +3234,12 @@ pub impl Resolver { // Then this operation can simply be performed as part of item (or import) // processing. - fn record_exports() { + fn record_exports(@mut self) { let root_module = self.graph_root.get_module(); self.record_exports_for_module_subtree(root_module); } - fn record_exports_for_module_subtree(module_: @Module) { + fn record_exports_for_module_subtree(@mut self, module_: @mut Module) { // If this isn't a local crate, then bail out. We don't need to record // exports for nonlocal crates. @@ -3258,7 +3281,7 @@ pub impl Resolver { } } - fn record_exports_for_module(module_: @Module) { + fn record_exports_for_module(@mut self, module_: @mut Module) { let mut exports2 = ~[]; self.add_exports_for_module(&mut exports2, module_); @@ -3272,8 +3295,8 @@ pub impl Resolver { } } - - fn add_exports_of_namebindings(exports2: &mut ~[Export2], + fn add_exports_of_namebindings(@mut self, + exports2: &mut ~[Export2], ident: ident, namebindings: @mut NameBindings, ns: Namespace, @@ -3300,7 +3323,9 @@ pub impl Resolver { } } - fn add_exports_for_module(exports2: &mut ~[Export2], module_: @Module) { + fn add_exports_for_module(@mut self, + exports2: &mut ~[Export2], + module_: @mut Module) { for module_.children.each |ident, namebindings| { debug!("(computing exports) maybe export '%s'", *self.session.str_of(*ident)); @@ -3357,7 +3382,7 @@ pub impl Resolver { // generate a fake "implementation scope" containing all the // implementations thus found, for compatibility with old resolve pass. - fn with_scope(name: Option, f: fn()) { + fn with_scope(@mut self, name: Option, f: fn()) { let orig_module = self.current_module; // Move down in the graph. @@ -3397,10 +3422,13 @@ pub impl Resolver { // Wraps the given definition in the appropriate number of `def_upvar` // wrappers. - fn upvarify(ribs: @DVec<@Rib>, rib_index: uint, def_like: def_like, - span: span, allow_capturing_self: AllowCapturingSelfFlag) + fn upvarify(@mut self, + ribs: @DVec<@Rib>, + rib_index: uint, + def_like: def_like, + span: span, + allow_capturing_self: AllowCapturingSelfFlag) -> Option { - let mut def; let mut is_ty_param; @@ -3504,10 +3532,12 @@ pub impl Resolver { return Some(dl_def(def)); } - fn search_ribs(ribs: @DVec<@Rib>, name: ident, span: span, + fn search_ribs(@mut self, + ribs: @DVec<@Rib>, + name: ident, + span: span, allow_capturing_self: AllowCapturingSelfFlag) -> Option { - // FIXME #4950: This should not use a while loop. // FIXME #4950: Try caching? 
@@ -3529,7 +3559,7 @@ pub impl Resolver { return None; } - fn resolve_crate(@self) { + fn resolve_crate(@mut self) { debug!("(resolving crate) starting"); visit_crate(*self.crate, (), mk_vt(@Visitor { @@ -3549,7 +3579,7 @@ pub impl Resolver { })); } - fn resolve_item(item: @item, visitor: ResolveVisitor) { + fn resolve_item(@mut self, item: @item, visitor: ResolveVisitor) { debug!("(resolving item) resolving %s", *self.session.str_of(item.ident)); @@ -3777,7 +3807,9 @@ pub impl Resolver { self.xray_context = orig_xray_flag; } - fn with_type_parameter_rib(type_parameters: TypeParameters, f: fn()) { + fn with_type_parameter_rib(@mut self, + type_parameters: TypeParameters, + f: fn()) { match type_parameters { HasTypeParameters(type_parameters, node_id, initial_index, rib_kind) => { @@ -3818,19 +3850,20 @@ pub impl Resolver { } } - fn with_label_rib(f: fn()) { + fn with_label_rib(@mut self, f: fn()) { (*self.label_ribs).push(@Rib(NormalRibKind)); f(); (*self.label_ribs).pop(); } - fn with_constant_rib(f: fn()) { + + fn with_constant_rib(@mut self, f: fn()) { (*self.value_ribs).push(@Rib(ConstantItemRibKind)); f(); (*self.value_ribs).pop(); } - - fn resolve_function(rib_kind: RibKind, + fn resolve_function(@mut self, + rib_kind: RibKind, optional_declaration: Option<@fn_decl>, type_parameters: TypeParameters, block: blk, @@ -3906,7 +3939,8 @@ pub impl Resolver { (*self.value_ribs).pop(); } - fn resolve_type_parameters(type_parameters: ~[ty_param], + fn resolve_type_parameters(@mut self, + type_parameters: ~[ty_param], visitor: ResolveVisitor) { for type_parameters.each |type_parameter| { for type_parameter.bounds.each |&bound| { @@ -3918,11 +3952,12 @@ pub impl Resolver { } } - fn resolve_struct(id: node_id, - type_parameters: @~[ty_param], - fields: ~[@struct_field], - optional_destructor: Option, - visitor: ResolveVisitor) { + fn resolve_struct(@mut self, + id: node_id, + type_parameters: @~[ty_param], + fields: ~[@struct_field], + optional_destructor: Option, + visitor: ResolveVisitor) { // If applicable, create a rib for the type parameters. let borrowed_type_parameters: &~[ty_param] = &*type_parameters; do self.with_type_parameter_rib(HasTypeParameters @@ -3959,7 +3994,8 @@ pub impl Resolver { // Does this really need to take a RibKind or is it always going // to be NormalRibKind? - fn resolve_method(rib_kind: RibKind, + fn resolve_method(@mut self, + rib_kind: RibKind, method: @method, outer_type_parameter_count: uint, visitor: ResolveVisitor) { @@ -3984,7 +4020,8 @@ pub impl Resolver { visitor); } - fn resolve_implementation(id: node_id, + fn resolve_implementation(@mut self, + id: node_id, span: span, type_parameters: ~[ty_param], opt_trait_reference: Option<@trait_ref>, @@ -4060,15 +4097,18 @@ pub impl Resolver { } } - fn resolve_module(module_: _mod, span: span, _name: ident, id: node_id, + fn resolve_module(@mut self, + module_: _mod, + span: span, + _name: ident, + id: node_id, visitor: ResolveVisitor) { - // Write the implementations in scope into the module metadata. debug!("(resolving module) resolving module ID %d", id); visit_mod(module_, span, id, (), visitor); } - fn resolve_local(local: @local, visitor: ResolveVisitor) { + fn resolve_local(@mut self, local: @local, visitor: ResolveVisitor) { let mutability = if local.node.is_mutbl {Mutable} else {Immutable}; // Resolve the type. 
@@ -4089,9 +4129,9 @@ pub impl Resolver { None, visitor); } - fn binding_mode_map(pat: @pat) -> BindingMap { + fn binding_mode_map(@mut self, pat: @pat) -> BindingMap { let result = HashMap(); - do pat_bindings(self.def_map, pat) |binding_mode, _id, sp, path| { + do pat_bindings(*self.def_map, pat) |binding_mode, _id, sp, path| { let ident = path_to_ident(path); result.insert(ident, binding_info {span: sp, @@ -4100,7 +4140,7 @@ pub impl Resolver { return result; } - fn check_consistent_bindings(arm: arm) { + fn check_consistent_bindings(@mut self, arm: arm) { if arm.pats.len() == 0 { return; } let map_0 = self.binding_mode_map(arm.pats[0]); for arm.pats.eachi() |i, p| { @@ -4139,7 +4179,7 @@ pub impl Resolver { } } - fn resolve_arm(arm: arm, visitor: ResolveVisitor) { + fn resolve_arm(@mut self, arm: arm, visitor: ResolveVisitor) { (*self.value_ribs).push(@Rib(NormalRibKind)); let bindings_list = HashMap(); @@ -4158,7 +4198,7 @@ pub impl Resolver { (*self.value_ribs).pop(); } - fn resolve_block(block: blk, visitor: ResolveVisitor) { + fn resolve_block(@mut self, block: blk, visitor: ResolveVisitor) { debug!("(resolving block) entering block"); (*self.value_ribs).push(@Rib(NormalRibKind)); @@ -4183,7 +4223,7 @@ pub impl Resolver { debug!("(resolving block) leaving block"); } - fn resolve_type(ty: @Ty, visitor: ResolveVisitor) { + fn resolve_type(@mut self, ty: @Ty, visitor: ResolveVisitor) { match ty.node { // Like path expressions, the interpretation of path types depends // on whether the path has multiple elements in it or not. @@ -4256,14 +4296,14 @@ pub impl Resolver { } } - fn resolve_pattern(pattern: @pat, + fn resolve_pattern(@mut self, + pattern: @pat, mode: PatternBindingMode, mutability: Mutability, // Maps idents to the node ID for the (outermost) // pattern that binds them bindings_list: Option>, visitor: ResolveVisitor) { - let pat_id = pattern.id; do walk_pat(pattern) |pattern| { match pattern.node { @@ -4463,7 +4503,7 @@ pub impl Resolver { } } - fn resolve_bare_identifier_pattern(name: ident) + fn resolve_bare_identifier_pattern(@mut self, name: ident) -> BareIdentifierPatternResolution { match self.resolve_item_in_lexical_scope(self.current_module, name, @@ -4505,7 +4545,8 @@ pub impl Resolver { * If `check_ribs` is true, checks the local definitions first; i.e. * doesn't skip straight to the containing module. */ - fn resolve_path(path: @path, + fn resolve_path(@mut self, + path: @path, namespace: Namespace, check_ribs: bool, visitor: ResolveVisitor) @@ -4533,12 +4574,12 @@ pub impl Resolver { path.span); } - fn resolve_identifier(identifier: ident, + fn resolve_identifier(@mut self, + identifier: ident, namespace: Namespace, check_ribs: bool, span: span) -> Option { - if check_ribs { match self.resolve_identifier_in_local_ribs(identifier, namespace, @@ -4557,12 +4598,12 @@ pub impl Resolver { } // FIXME #4952: Merge me with resolve_name_in_module? - fn resolve_definition_of_name_in_module(containing_module: @Module, + fn resolve_definition_of_name_in_module(@mut self, + containing_module: @mut Module, name: ident, namespace: Namespace, xray: XrayFlag) -> NameDefinition { - // First, search children. 
match containing_module.children.find(&name) { Some(child_name_bindings) => { @@ -4619,7 +4660,7 @@ pub impl Resolver { } } - fn intern_module_part_of_path(path: @path) -> @DVec { + fn intern_module_part_of_path(@mut self, path: @path) -> @DVec { let module_path_idents = @DVec(); for path.idents.eachi |index, ident| { if index == path.idents.len() - 1 { @@ -4632,11 +4673,11 @@ pub impl Resolver { return module_path_idents; } - fn resolve_module_relative_path(path: @path, + fn resolve_module_relative_path(@mut self, + path: @path, +xray: XrayFlag, namespace: Namespace) -> Option { - let module_path_idents = self.intern_module_part_of_path(path); let mut containing_module; @@ -4676,11 +4717,11 @@ pub impl Resolver { } } - fn resolve_crate_relative_path(path: @path, + fn resolve_crate_relative_path(@mut self, + path: @path, +xray: XrayFlag, namespace: Namespace) -> Option { - let module_path_idents = self.intern_module_part_of_path(path); let root_module = self.graph_root.get_module(); @@ -4723,7 +4764,8 @@ pub impl Resolver { } } - fn resolve_identifier_in_local_ribs(ident: ident, + fn resolve_identifier_in_local_ribs(@mut self, + ident: ident, namespace: Namespace, span: span) -> Option { @@ -4754,7 +4796,8 @@ pub impl Resolver { } } - fn resolve_item_by_identifier_in_lexical_scope(ident: ident, + fn resolve_item_by_identifier_in_lexical_scope(@mut self, + ident: ident, namespace: Namespace) -> Option { // Check the items. @@ -4786,7 +4829,7 @@ pub impl Resolver { } } - fn name_exists_in_scope_struct(name: &str) -> bool { + fn name_exists_in_scope_struct(@mut self, name: &str) -> bool { let mut i = self.type_ribs.len(); while i != 0 { i -= 1; @@ -4819,7 +4862,7 @@ pub impl Resolver { return false; } - fn resolve_expr(expr: @expr, visitor: ResolveVisitor) { + fn resolve_expr(@mut self, expr: @expr, visitor: ResolveVisitor) { // First, record candidate traits for this expression if it could // result in the invocation of a method call. @@ -4928,7 +4971,7 @@ pub impl Resolver { } } - fn record_candidate_traits_for_expr_if_necessary(expr: @expr) { + fn record_candidate_traits_for_expr_if_necessary(@mut self, expr: @expr) { match expr.node { expr_field(_, ident, _) => { let traits = self.search_for_traits_containing_method(ident); @@ -5005,7 +5048,9 @@ pub impl Resolver { } } - fn search_for_traits_containing_method(name: ident) -> @DVec { + fn search_for_traits_containing_method(@mut self, + name: ident) + -> @DVec { debug!("(searching for traits containing method) looking for '%s'", *self.session.str_of(name)); @@ -5095,10 +5140,11 @@ pub impl Resolver { return found_traits; } - fn add_trait_info_if_containing_method(found_traits: @DVec, + fn add_trait_info_if_containing_method(@mut self, + found_traits: @DVec, trait_def_id: def_id, - name: ident) -> bool { - + name: ident) + -> bool { debug!("(adding trait info if containing method) trying trait %d:%d \ for method '%s'", trait_def_id.crate, @@ -5121,18 +5167,21 @@ pub impl Resolver { } } - fn add_fixed_trait_for_expr(expr_id: node_id, +trait_id: def_id) { + fn add_fixed_trait_for_expr(@mut self, + expr_id: node_id, + +trait_id: def_id) { let traits = @DVec(); traits.push(trait_id); self.trait_map.insert(expr_id, traits); } - fn record_def(node_id: node_id, def: def) { + fn record_def(@mut self, node_id: node_id, def: def) { debug!("(recording def) recording %? 
for %?", def, node_id); self.def_map.insert(node_id, def); } - fn enforce_default_binding_mode(pat: @pat, + fn enforce_default_binding_mode(@mut self, + pat: @pat, pat_binding_mode: binding_mode, descr: &str) { match pat_binding_mode { @@ -5157,7 +5206,7 @@ pub impl Resolver { // // be sure that there is only one main function // - fn check_duplicate_main() { + fn check_duplicate_main(@mut self) { if self.attr_main_fn.is_none() { if self.main_fns.len() >= 1u { let mut i = 1u; @@ -5183,7 +5232,7 @@ pub impl Resolver { // resolve data structures. // - fn check_for_unused_imports_if_necessary() { + fn check_for_unused_imports_if_necessary(@mut self) { if self.unused_import_lint_level == allow { return; } @@ -5192,7 +5241,8 @@ pub impl Resolver { self.check_for_unused_imports_in_module_subtree(root_module); } - fn check_for_unused_imports_in_module_subtree(module_: @Module) { + fn check_for_unused_imports_in_module_subtree(@mut self, + module_: @mut Module) { // If this isn't a local crate, then bail out. We don't need to check // for unused imports in external crates. @@ -5231,7 +5281,7 @@ pub impl Resolver { } } - fn check_for_unused_imports_in_module(module_: @Module) { + fn check_for_unused_imports_in_module(@mut self, module_: @mut Module) { for module_.import_resolutions.each_value |&import_resolution| { // Ignore dummy spans for things like automatically injected // imports for the prelude, and also don't warn about the same @@ -5268,7 +5318,7 @@ pub impl Resolver { // /// A somewhat inefficient routine to obtain the name of a module. - fn module_to_str(module_: @Module) -> ~str { + fn module_to_str(@mut self, module_: @mut Module) -> ~str { let idents = DVec(); let mut current_module = module_; loop { @@ -5293,7 +5343,7 @@ pub impl Resolver { return self.idents_to_str(vec::reversed(idents.get())); } - fn dump_module(module_: @Module) { + fn dump_module(@mut self, module_: @mut Module) { debug!("Dump of module `%s`:", self.module_to_str(module_)); debug!("Children:"); @@ -5338,11 +5388,11 @@ pub fn resolve_crate(session: Session, lang_items: LanguageItems, crate: @crate) -> CrateMap { - let resolver = @Resolver(session, lang_items, crate); - resolver.resolve(resolver); + let resolver = @mut Resolver(session, lang_items, crate); + resolver.resolve(); CrateMap { - def_map: resolver.def_map, - exp_map2: resolver.export_map2, + def_map: *resolver.def_map, + exp_map2: *resolver.export_map2, trait_map: resolver.trait_map } } diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs index 5e37406bdd752..df6073f9339b4 100644 --- a/src/librustc/middle/trans/_match.rs +++ b/src/librustc/middle/trans/_match.rs @@ -831,7 +831,7 @@ pub fn extract_variant_args(bcx: block, -> ExtractedBlock { let (enm, evar) = vdefs; let _icx = bcx.insn_ctxt("match::extract_variant_args"); - let ccx = bcx.fcx.ccx; + let ccx = *bcx.fcx.ccx; let enum_ty_substs = match ty::get(node_id_type(bcx, pat_id)).sty { ty::ty_enum(id, ref substs) => { assert id == enm; @@ -1272,7 +1272,7 @@ pub fn compile_submatch(bcx: block, let vals_left = vec::append(vec::slice(vals, 0u, col).to_vec(), vec::slice(vals, col + 1u, vals.len())); - let ccx = bcx.fcx.ccx; + let ccx = *bcx.fcx.ccx; let mut pat_id = 0; for vec::each(m) |br| { // Find a real id (we're adding placeholder wildcard patterns, but @@ -1710,7 +1710,7 @@ pub fn bind_irrefutable_pat(bcx: block, binding_mode: IrrefutablePatternBindingMode) -> block { let _icx = bcx.insn_ctxt("match::bind_irrefutable_pat"); - let ccx = bcx.fcx.ccx; + let ccx = 
*bcx.fcx.ccx; let mut bcx = bcx; // Necessary since bind_irrefutable_pat is called outside trans_match diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 4eed47ebafcc9..cadbe1208ad2b 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -866,8 +866,8 @@ pub fn need_invoke(bcx: block) -> bool { // Walk the scopes to look for cleanups let mut cur = bcx; loop { - match cur.kind { - block_scope(ref inf) => { + match *cur.kind { + block_scope(ref mut inf) => { for vec::each((*inf).cleanups) |cleanup| { match *cleanup { clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => { @@ -898,16 +898,21 @@ pub fn have_cached_lpad(bcx: block) -> bool { return res; } -pub fn in_lpad_scope_cx(bcx: block, f: fn(scope_info)) { +pub fn in_lpad_scope_cx(bcx: block, f: fn(&mut scope_info)) { let mut bcx = bcx; loop { - match bcx.kind { - block_scope(ref inf) => { - if (*inf).cleanups.len() > 0u || bcx.parent.is_none() { - f((*inf)); return; + { + // XXX: Borrow check bug workaround. + let kind: &mut block_kind = &mut *bcx.kind; + match *kind { + block_scope(ref mut inf) => { + if inf.cleanups.len() > 0u || bcx.parent.is_none() { + f(inf); + return; + } + } + _ => () } - } - _ => () } bcx = block_parent(bcx); } @@ -1157,7 +1162,7 @@ pub fn trans_stmt(cx: block, s: ast::stmt) -> block { } } } - ast::decl_item(i) => trans_item(cx.fcx.ccx, *i) + ast::decl_item(i) => trans_item(*cx.fcx.ccx, *i) } } ast::stmt_mac(*) => cx.tcx().sess.bug(~"unexpanded macro") @@ -1198,9 +1203,9 @@ pub fn simple_block_scope() -> block_kind { block_scope(scope_info { loop_break: None, loop_label: None, - mut cleanups: ~[], - mut cleanup_paths: ~[], - mut landing_pad: None + cleanups: ~[], + cleanup_paths: ~[], + landing_pad: None }) } @@ -1226,9 +1231,9 @@ pub fn loop_scope_block(bcx: block, return new_block(bcx.fcx, Some(bcx), block_scope(scope_info { loop_break: Some(loop_break), loop_label: loop_label, - mut cleanups: ~[], - mut cleanup_paths: ~[], - mut landing_pad: None + cleanups: ~[], + cleanup_paths: ~[], + landing_pad: None }), bcx.is_lpad, n, opt_node_info); } @@ -1301,23 +1306,30 @@ pub fn cleanup_and_leave(bcx: block, @fmt!("cleanup_and_leave(%s)", cur.to_str())); } - match cur.kind { - block_scope(ref inf) if !inf.cleanups.is_empty() => { - for vec::find((*inf).cleanup_paths, - |cp| cp.target == leave).each |cp| { - Br(bcx, cp.dest); - return; + { + // XXX: Borrow check bug workaround. 
+ let kind: &mut block_kind = &mut *cur.kind; + match *kind { + block_scope(ref mut inf) if !inf.cleanups.is_empty() => { + for vec::find((*inf).cleanup_paths, + |cp| cp.target == leave).each |cp| { + Br(bcx, cp.dest); + return; + } + let sub_cx = sub_block(bcx, ~"cleanup"); + Br(bcx, sub_cx.llbb); + inf.cleanup_paths.push(cleanup_path { + target: leave, + dest: sub_cx.llbb + }); + bcx = trans_block_cleanups_(sub_cx, + block_cleanups(cur), + is_lpad); + } + _ => () } - let sub_cx = sub_block(bcx, ~"cleanup"); - Br(bcx, sub_cx.llbb); - (*inf).cleanup_paths.push(cleanup_path { - target: leave, - dest: sub_cx.llbb - }); - bcx = trans_block_cleanups_(sub_cx, block_cleanups(cur), is_lpad); - } - _ => () } + match upto { Some(bb) => { if cur.llbb == bb { break; } } _ => () @@ -1572,25 +1584,25 @@ pub fn new_fn_ctxt_w_id(ccx: @CrateContext, param_substs: Option<@param_substs>, sp: Option) -> fn_ctxt { let llbbs = mk_standard_basic_blocks(llfndecl); - return @fn_ctxt_ { + return @mut fn_ctxt_ { llfn: llfndecl, llenv: unsafe { llvm::LLVMGetParam(llfndecl, 1u as c_uint) }, llretptr: unsafe { llvm::LLVMGetParam(llfndecl, 0u as c_uint) }, - mut llstaticallocas: llbbs.sa, - mut llloadenv: None, - mut llreturn: llbbs.rt, - mut llself: None, - mut personality: None, - mut loop_ret: None, - llargs: HashMap(), - lllocals: HashMap(), - llupvars: HashMap(), + llstaticallocas: llbbs.sa, + llloadenv: None, + llreturn: llbbs.rt, + llself: None, + personality: None, + loop_ret: None, + llargs: @HashMap(), + lllocals: @HashMap(), + llupvars: @HashMap(), id: id, impl_id: impl_id, param_substs: param_substs, span: sp, path: path, - ccx: ccx + ccx: @ccx }; } @@ -1780,7 +1792,7 @@ pub fn trans_closure(ccx: @CrateContext, llvm::LLVMSetGC(fcx.llfn, strategy); } } - ccx.uses_gc = true; + *ccx.uses_gc = true; } // Create the first basic block in the function and keep a handle on it to @@ -2803,7 +2815,7 @@ pub fn trap(bcx: block) { } pub fn decl_gc_metadata(ccx: @CrateContext, llmod_id: ~str) { - if !ccx.sess.opts.gc || !ccx.uses_gc { + if !ccx.sess.opts.gc || !*ccx.uses_gc { return; } @@ -3038,7 +3050,7 @@ pub fn trans_crate(sess: session::Session, discrims: HashMap(), discrim_symbols: HashMap(), tydescs: ty::new_ty_hash(), - mut finished_tydescs: false, + finished_tydescs: @mut false, external: HashMap(), monomorphized: HashMap(), monomorphizing: HashMap(), @@ -3080,9 +3092,9 @@ pub fn trans_crate(sess: session::Session, builder: BuilderRef_res(unsafe { llvm::LLVMCreateBuilder() }), shape_cx: mk_ctxt(llmod), crate_map: crate_map, - mut uses_gc: false, + uses_gc: @mut false, dbg_cx: dbg_cx, - mut do_not_commit_warning_issued: false + do_not_commit_warning_issued: @mut false }; { diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 9f39cc8575d01..8bd85be0f7082 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -175,7 +175,7 @@ pub struct CrateContext { tydescs: HashMap, // Set when running emit_tydescs to enforce that no more tydescs are // created. - mut finished_tydescs: bool, + finished_tydescs: @mut bool, // Track mapping of external ids to local items imported for inlining external: HashMap>, // Cache instances of monomorphized functions @@ -224,9 +224,9 @@ pub struct CrateContext { // Set when at least one function uses GC. Needed so that // decl_gc_metadata knows whether to link to the module metadata, which // is not emitted by LLVM's GC pass when no functions use GC. 
- mut uses_gc: bool, + uses_gc: @mut bool, dbg_cx: Option, - mut do_not_commit_warning_issued: bool + do_not_commit_warning_issued: @mut bool } // Types used for llself. @@ -273,34 +273,34 @@ pub struct fn_ctxt_ { // the function, due to LLVM's quirks. // A block for all the function's static allocas, so that LLVM // will coalesce them into a single alloca call. - mut llstaticallocas: BasicBlockRef, + llstaticallocas: BasicBlockRef, // A block containing code that copies incoming arguments to space // already allocated by code in one of the llallocas blocks. // (LLVM requires that arguments be copied to local allocas before // allowing most any operation to be performed on them.) - mut llloadenv: Option, - mut llreturn: BasicBlockRef, + llloadenv: Option, + llreturn: BasicBlockRef, // The 'self' value currently in use in this function, if there // is one. // // NB: This is the type of the self *variable*, not the self *type*. The // self type is set only for default methods, while the self variable is // set for all methods. - mut llself: Option, + llself: Option, // The a value alloca'd for calls to upcalls.rust_personality. Used when // outputting the resume instruction. - mut personality: Option, + personality: Option, // If this is a for-loop body that returns, this holds the pointers needed // for that (flagptr, retptr) - mut loop_ret: Option<(ValueRef, ValueRef)>, + loop_ret: Option<(ValueRef, ValueRef)>, // Maps arguments to allocas created for them in llallocas. - llargs: HashMap, + llargs: @HashMap, // Maps the def_ids for local variables to the allocas created for // them in llallocas. - lllocals: HashMap, + lllocals: @HashMap, // Same as above, but for closure upvars - llupvars: HashMap, + llupvars: @HashMap, // The node_id of the function, or -1 if it doesn't correspond to // a user-defined function. @@ -319,14 +319,14 @@ pub struct fn_ctxt_ { path: path, // This function's enclosing crate context. - ccx: @CrateContext + ccx: @@CrateContext } -pub type fn_ctxt = @fn_ctxt_; +pub type fn_ctxt = @mut fn_ctxt_; pub fn warn_not_to_commit(ccx: @CrateContext, msg: ~str) { - if !ccx.do_not_commit_warning_issued { - ccx.do_not_commit_warning_issued = true; + if !*ccx.do_not_commit_warning_issued { + *ccx.do_not_commit_warning_issued = true; ccx.sess.warn(msg + ~" -- do not commit like this!"); } } @@ -355,7 +355,7 @@ pub struct cleanup_path { dest: BasicBlockRef } -pub fn scope_clean_changed(scope_info: scope_info) { +pub fn scope_clean_changed(scope_info: &mut scope_info) { if scope_info.cleanup_paths.len() > 0u { scope_info.cleanup_paths = ~[]; } scope_info.landing_pad = None; } @@ -498,9 +498,9 @@ pub fn revoke_clean(cx: block, val: ValueRef) { } pub fn block_cleanups(bcx: block) -> ~[cleanup] { - match bcx.kind { + match *bcx.kind { block_non_scope => ~[], - block_scope(ref inf) => /*bad*/copy inf.cleanups + block_scope(ref mut inf) => /*bad*/copy inf.cleanups } } @@ -524,12 +524,12 @@ pub struct scope_info { // A list of functions that must be run at when leaving this // block, cleaning up any variables that were introduced in the // block. - mut cleanups: ~[cleanup], + cleanups: ~[cleanup], // Existing cleanup paths that may be reused, indexed by destination and // cleared when the set of cleanups changes. - mut cleanup_paths: ~[cleanup_path], + cleanup_paths: ~[cleanup_path], // Unwinding landing pad. Also cleared when cleanups change. 
- mut landing_pad: Option, + landing_pad: Option, } pub trait get_node_info { @@ -574,11 +574,11 @@ pub struct block_ { // instructions into that block by way of this block context. // The block pointing to this one in the function's digraph. llbb: BasicBlockRef, - mut terminated: bool, - mut unreachable: bool, + terminated: bool, + unreachable: bool, parent: Option, // The 'kind' of basic block this is. - kind: block_kind, + kind: @mut block_kind, // Is this block part of a landing pad? is_lpad: bool, // info about the AST node this block originated from, if any @@ -597,21 +597,19 @@ pub fn block_(llbb: BasicBlockRef, parent: Option, -kind: block_kind, terminated: false, unreachable: false, parent: parent, - kind: kind, + kind: @mut kind, is_lpad: is_lpad, node_info: node_info, fcx: fcx } } -/* This must be enum and not type, or trans goes into an infinite loop (#2572) - */ -pub enum block = @block_; +pub type block = @mut block_; pub fn mk_block(llbb: BasicBlockRef, parent: Option, -kind: block_kind, is_lpad: bool, node_info: Option, fcx: fn_ctxt) -> block { - block(@block_(llbb, parent, kind, is_lpad, node_info, fcx)) + @mut block_(llbb, parent, kind, is_lpad, node_info, fcx) } // First two args are retptr, env @@ -660,17 +658,21 @@ pub fn struct_elt(llstructty: TypeRef, n: uint) -> TypeRef { } } -pub fn in_scope_cx(cx: block, f: fn(scope_info)) { +pub fn in_scope_cx(cx: block, f: &fn(&mut scope_info)) { let mut cur = cx; loop { - match cur.kind { - block_scope(ref inf) => { - debug!("in_scope_cx: selected cur=%s (cx=%s)", - cur.to_str(), cx.to_str()); - f((*inf)); - return; - } - _ => () + { + // XXX: Borrow check bug workaround. + let kind: &mut block_kind = &mut *cur.kind; + match *kind { + block_scope(ref mut inf) => { + debug!("in_scope_cx: selected cur=%s (cx=%s)", + cur.to_str(), cx.to_str()); + f(inf); + return; + } + _ => () + } } cur = block_parent(cur); } @@ -687,7 +689,7 @@ pub fn block_parent(cx: block) -> block { // Accessors pub impl block { - pure fn ccx() -> @CrateContext { self.fcx.ccx } + pure fn ccx() -> @CrateContext { *self.fcx.ccx } pure fn tcx() -> ty::ctxt { self.fcx.ccx.tcx } pure fn sess() -> Session { self.fcx.ccx.sess } diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index b5aab1f3ac5e9..4cf12576a78ee 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -237,7 +237,7 @@ pub fn trans_break_cont(bcx: block, let mut unwind = bcx; let mut target; loop { - match unwind.kind { + match *unwind.kind { block_scope(scope_info { loop_break: Some(brk), loop_label: l, diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index 606f7ce725939..8a28769756e6e 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -778,7 +778,7 @@ pub fn create_local_var(bcx: block, local: @ast::local) pub fn create_arg(bcx: block, arg: ast::arg, sp: span) -> Option<@Metadata> { unsafe { - let fcx = bcx.fcx, cx = fcx.ccx; + let fcx = bcx.fcx, cx = *fcx.ccx; let cache = get_cache(cx); let tg = ArgVariableTag; match cached_metadata::<@Metadata>( @@ -845,7 +845,7 @@ pub fn update_source_pos(cx: block, s: span) { } pub fn create_function(fcx: fn_ctxt) -> @Metadata { - let cx = fcx.ccx; + let cx = *fcx.ccx; let dbg_cx = (/*bad*/copy cx.dbg_cx).get(); debug!("~~"); diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index 936c8cf1ce550..ad171a44859d1 100644 --- 
a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -944,10 +944,10 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum { } } ast::def_arg(nid, _, _) => { - take_local(bcx, bcx.fcx.llargs, nid) + take_local(bcx, *bcx.fcx.llargs, nid) } ast::def_local(nid, _) | ast::def_binding(nid, _) => { - take_local(bcx, bcx.fcx.lllocals, nid) + take_local(bcx, *bcx.fcx.lllocals, nid) } ast::def_self(nid, _) => { let self_info: ValSelfData = match bcx.fcx.llself { diff --git a/src/librustc/middle/trans/glue.rs b/src/librustc/middle/trans/glue.rs index 2a07e7a80e762..bbe80431c07fb 100644 --- a/src/librustc/middle/trans/glue.rs +++ b/src/librustc/middle/trans/glue.rs @@ -654,7 +654,7 @@ pub fn declare_tydesc(ccx: @CrateContext, t: ty::t) -> @mut tydesc_info { let _icx = ccx.insn_ctxt("declare_tydesc"); // If emit_tydescs already ran, then we shouldn't be creating any new // tydescs. - assert !ccx.finished_tydescs; + assert !*ccx.finished_tydescs; let llty = type_of(ccx, t); @@ -761,7 +761,7 @@ pub fn make_generic_glue(ccx: @CrateContext, t: ty::t, llfn: ValueRef, pub fn emit_tydescs(ccx: @CrateContext) { let _icx = ccx.insn_ctxt("emit_tydescs"); // As of this point, allow no more tydescs to be created. - ccx.finished_tydescs = true; + *ccx.finished_tydescs = true; for ccx.tydescs.each_value |&val| { let glue_fn_ty = T_ptr(T_generic_glue_fn(ccx)); let ti = val; diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 413ec9fcdf389..69c50d7d84ad6 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -230,7 +230,7 @@ pub type ctxt = @ctxt_; struct ctxt_ { diag: syntax::diagnostic::span_handler, interner: HashMap, - mut next_id: uint, + next_id: @mut uint, vecs_implicitly_copyable: bool, legacy_modes: bool, legacy_records: bool, @@ -261,7 +261,7 @@ struct ctxt_ { short_names_cache: HashMap, needs_drop_cache: HashMap, needs_unwind_cleanup_cache: HashMap, - mut tc_cache: LinearMap, + tc_cache: @mut LinearMap, ast_ty_to_ty_cache: HashMap, enum_var_cache: HashMap, trait_method_cache: HashMap, @@ -811,7 +811,7 @@ pub fn mk_ctxt(s: session::Session, @ctxt_ { diag: s.diagnostic(), interner: interner, - mut next_id: 0u, + next_id: @mut 0, vecs_implicitly_copyable: vecs_implicitly_copyable, legacy_modes: legacy_modes, legacy_records: legacy_records, @@ -831,7 +831,7 @@ pub fn mk_ctxt(s: session::Session, short_names_cache: new_ty_hash(), needs_drop_cache: new_ty_hash(), needs_unwind_cleanup_cache: new_ty_hash(), - tc_cache: LinearMap::new(), + tc_cache: @mut LinearMap::new(), ast_ty_to_ty_cache: HashMap(), enum_var_cache: HashMap(), trait_method_cache: HashMap(), @@ -920,7 +920,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option) -> t { let t = @t_box_ { sty: st, - id: cx.next_id, + id: *cx.next_id, flags: flags, o_def_id: o_def_id }; @@ -931,7 +931,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option) -> t { cx.interner.insert(key, t); - cx.next_id += 1u; + *cx.next_id += 1; unsafe { cast::reinterpret_cast(&t) } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 3a863fc7ac5dc..1ab55fe9035bc 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -242,7 +242,7 @@ pub struct FileMap { /// The start position of this source in the CodeMap start_pos: BytePos, /// Locations of lines beginnings in the source code - mut lines: ~[BytePos], + lines: @mut ~[BytePos], /// Locations of multi-byte characters in the source code multibyte_chars: DVec } @@ -312,7 +312,7 @@ pub impl CodeMap { let filemap = @FileMap 
{ name: filename, substr: substr, src: src, start_pos: BytePos(start_pos), - mut lines: ~[], + lines: @mut ~[], multibyte_chars: DVec() }; @@ -439,7 +439,7 @@ priv impl CodeMap { let idx = self.lookup_filemap_idx(pos); let f = self.files[idx]; let mut a = 0u; - let mut b = vec::len(f.lines); + let mut b = f.lines.len(); while b - a > 1u { let m = (a + b) / 2u; if f.lines[m] > pos { b = m; } else { a = m; } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index da0e58b3e1fce..77af7177f7acb 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -152,7 +152,7 @@ pub fn mk_handler(emitter: Option) -> @handler { } }; - @mut HandlerT { mut err_count: 0, emit: emit } as @handler + @mut HandlerT { err_count: 0, emit: emit } as @handler } #[deriving_eq] diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 38134d4321adb..f3a74302400c9 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -196,7 +196,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, struct CtxtRepr { parse_sess: @mut parse::ParseSess, cfg: ast::crate_cfg, - backtrace: Option<@ExpnInfo>, + backtrace: @mut Option<@ExpnInfo>, mod_path: ~[ast::ident], trace_mac: bool } @@ -205,33 +205,33 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, fn parse_sess(@mut self) -> @mut parse::ParseSess { self.parse_sess } fn cfg(@mut self) -> ast::crate_cfg { self.cfg } fn call_site(@mut self) -> span { - match self.backtrace { + match *self.backtrace { Some(@ExpandedFrom(CallInfo {call_site: cs, _})) => cs, None => self.bug(~"missing top span") } } fn print_backtrace(@mut self) { } - fn backtrace(@mut self) -> Option<@ExpnInfo> { self.backtrace } + fn backtrace(@mut self) -> Option<@ExpnInfo> { *self.backtrace } fn mod_push(@mut self, i: ast::ident) { self.mod_path.push(i); } fn mod_pop(@mut self) { self.mod_path.pop(); } fn mod_path(@mut self) -> ~[ast::ident] { return self.mod_path; } fn bt_push(@mut self, ei: codemap::ExpnInfo) { match ei { ExpandedFrom(CallInfo {call_site: cs, callee: ref callee}) => { - self.backtrace = + *self.backtrace = Some(@ExpandedFrom(CallInfo { call_site: span {lo: cs.lo, hi: cs.hi, - expn_info: self.backtrace}, + expn_info: *self.backtrace}, callee: (*callee)})); } } } fn bt_pop(@mut self) { - match self.backtrace { + match *self.backtrace { Some(@ExpandedFrom(CallInfo { call_site: span {expn_info: prev, _}, _ })) => { - self.backtrace = prev + *self.backtrace = prev } _ => self.bug(~"tried to pop without a push") } @@ -280,7 +280,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess, let imp: @mut CtxtRepr = @mut CtxtRepr { parse_sess: parse_sess, cfg: cfg, - backtrace: None, + backtrace: @mut None, mod_path: ~[], trace_mac: false }; @@ -339,7 +339,7 @@ pub fn get_exprs_from_tts(cx: ext_ctxt, tts: ~[ast::token_tree]) cx.cfg(), tts); let mut es = ~[]; - while p.token != token::EOF { + while *p.token != token::EOF { if es.len() != 0 { p.eat(token::COMMA); } diff --git a/src/libsyntax/ext/pipes/mod.rs b/src/libsyntax/ext/pipes/mod.rs index 6d117f5ad235c..8b8e48bd5229b 100644 --- a/src/libsyntax/ext/pipes/mod.rs +++ b/src/libsyntax/ext/pipes/mod.rs @@ -73,7 +73,7 @@ pub fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident, let rdr = tt_rdr as reader; let rust_parser = Parser(sess, cfg, rdr.dup()); - let proto = rust_parser.parse_proto(cx.str_of(id)); + let mut proto = rust_parser.parse_proto(cx.str_of(id)); // check for errors visit(proto, cx); diff --git a/src/libsyntax/ext/pipes/parse_proto.rs 
b/src/libsyntax/ext/pipes/parse_proto.rs index a2f881fc19f71..66feb7cc753cf 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -25,7 +25,7 @@ pub trait proto_parser { pub impl proto_parser for parser::Parser { fn parse_proto(&self, id: ~str) -> protocol { - let proto = protocol(id, self.span); + let proto = protocol(id, *self.span); self.parse_seq_to_before_end(token::EOF, SeqSep { sep: None, @@ -40,7 +40,7 @@ pub impl proto_parser for parser::Parser { let name = *self.interner.get(id); self.expect(token::COLON); - let dir = match copy self.token { + let dir = match *self.token { token::IDENT(n, _) => self.interner.get(n), _ => fail!() }; @@ -51,10 +51,11 @@ pub impl proto_parser for parser::Parser { _ => fail!() }; - let typarms = if self.token == token::LT { + let typarms = if *self.token == token::LT { self.parse_ty_params() - } - else { ~[] }; + } else { + ~[] + }; let state = proto.add_state_poly(name, id, dir, typarms); @@ -69,7 +70,7 @@ pub impl proto_parser for parser::Parser { fn parse_message(&self, state: state) { let mname = *self.interner.get(self.parse_ident()); - let args = if self.token == token::LPAREN { + let args = if *self.token == token::LPAREN { self.parse_unspanned_seq(token::LPAREN, token::RPAREN, SeqSep { sep: Some(token::COMMA), @@ -80,10 +81,10 @@ pub impl proto_parser for parser::Parser { self.expect(token::RARROW); - let next = match copy self.token { + let next = match *self.token { token::IDENT(_, _) => { let name = *self.interner.get(self.parse_ident()); - let ntys = if self.token == token::LT { + let ntys = if *self.token == token::LT { self.parse_unspanned_seq(token::LT, token::GT, SeqSep { sep: Some(token::COMMA), @@ -101,7 +102,7 @@ pub impl proto_parser for parser::Parser { _ => self.fatal(~"invalid next state") }; - state.add_message(mname, copy self.span, args, next); + state.add_message(mname, *self.span, args, next); } } diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 48bd8b0329742..5fdba837da41d 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -27,8 +27,8 @@ use core::to_str::ToStr; use core::vec; pub trait gen_send { - fn gen_send(&self, cx: ext_ctxt, try: bool) -> @ast::item; - fn to_ty(&self, cx: ext_ctxt) -> @ast::Ty; + fn gen_send(&mut self, cx: ext_ctxt, try: bool) -> @ast::item; + fn to_ty(&mut self, cx: ext_ctxt) -> @ast::Ty; } pub trait to_type_decls { @@ -47,8 +47,11 @@ pub trait gen_init { } pub impl gen_send for message { - fn gen_send(&self, cx: ext_ctxt, try: bool) -> @ast::item { + fn gen_send(&mut self, cx: ext_ctxt, try: bool) -> @ast::item { debug!("pipec: gen_send"); + let name = self.name(); + let params = self.get_params(); + match *self { message(ref _id, span, ref tys, this, Some(ref next_state)) => { debug!("pipec: next state exists"); @@ -67,7 +70,7 @@ pub impl gen_send for message { args_ast); let mut body = ~"{\n"; - body += fmt!("use super::%s;\n", self.name()); + body += fmt!("use super::%s;\n", name); if this.proto.is_bounded() { let (sp, rp) = match (this.dir, next.dir) { @@ -96,7 +99,7 @@ pub impl gen_send for message { body += fmt!("let %s = ::pipes::entangle();\n", pat); } body += fmt!("let message = %s(%s);\n", - self.name(), + name, str::connect(vec::append_one( arg_names.map(|x| cx.str_of(*x)), ~"s"), ~", ")); @@ -121,13 +124,12 @@ pub impl gen_send for message { rty = cx.ty_option(rty); } - let name = cx.ident_of(if try { ~"try_" + self.name() - } else { self.name() } ); + let name = 
cx.ident_of(if try { ~"try_" + name } else { name } ); cx.item_fn_poly(name, args_ast, rty, - self.get_params(), + params, cx.expr_block(body)) } @@ -156,10 +158,8 @@ pub impl gen_send for message { }; let mut body = ~"{ "; - body += fmt!("use super::%s;\n", self.name()); - body += fmt!("let message = %s%s;\n", - self.name(), - message_args); + body += fmt!("use super::%s;\n", name); + body += fmt!("let message = %s%s;\n", name, message_args); if !try { body += fmt!("::pipes::send(pipe, message);\n"); @@ -175,10 +175,7 @@ pub impl gen_send for message { let body = cx.parse_expr(body); - let name = if try { - ~"try_" + self.name() - } - else { self.name() }; + let name = if try { ~"try_" + name } else { name }; cx.item_fn_poly(cx.ident_of(name), args_ast, @@ -187,13 +184,13 @@ pub impl gen_send for message { } else { cx.ty_nil_ast_builder() }, - self.get_params(), + params, cx.expr_block(body)) } } } - fn to_ty(&self, cx: ext_ctxt) -> @ast::Ty { + fn to_ty(&mut self, cx: ext_ctxt) -> @ast::Ty { cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span()) .add_tys(cx.ty_vars_global(self.get_params()))) } @@ -259,10 +256,14 @@ pub impl to_type_decls for state { recv => (*self).dir.reverse() }; let mut items = ~[]; - for self.messages.each |m| { - if dir == send { - items.push(m.gen_send(cx, true)); - items.push(m.gen_send(cx, false)); + + { + let messages = &mut *self.messages; + for vec::each_mut(*messages) |m| { + if dir == send { + items.push(m.gen_send(cx, true)); + items.push(m.gen_send(cx, false)); + } } } @@ -393,7 +394,8 @@ pub impl gen_init for protocol { } cx.ty_path_ast_builder(path(~[cx.ident_of(~"super"), - cx.ident_of(~"__Buffer")], self.span) + cx.ident_of(~"__Buffer")], + copy self.span) .add_tys(cx.ty_vars_global(params))) } @@ -451,12 +453,12 @@ pub impl gen_init for protocol { } items.push(cx.item_mod(cx.ident_of(~"client"), - self.span, + copy self.span, client_states)); items.push(cx.item_mod(cx.ident_of(~"server"), - self.span, + copy self.span, server_states)); - cx.item_mod(cx.ident_of(self.name), self.span, items) + cx.item_mod(cx.ident_of(self.name), copy self.span, items) } } diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index da67e48dfa6e6..7c6dc1f937dca 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -16,7 +16,6 @@ use ext::base::ext_ctxt; use ext::pipes::ast_builder::{append_types, ext_ctxt_ast_builder, path}; use core::cmp; -use core::dvec::DVec; use core::to_str::ToStr; #[deriving_eq] @@ -45,26 +44,24 @@ pub struct next_state { tys: ~[@ast::Ty], } -pub enum message { - // name, span, data, current state, next state - message(~str, span, ~[@ast::Ty], state, Option) -} +// name, span, data, current state, next state +pub struct message(~str, span, ~[@ast::Ty], state, Option); pub impl message { - fn name(&self) -> ~str { + fn name(&mut self) -> ~str { match *self { message(ref id, _, _, _, _) => (*id) } } - fn span(&self) -> span { + fn span(&mut self) -> span { match *self { message(_, span, _, _, _) => span } } /// Return the type parameters actually used by this message - fn get_params(&self) -> ~[ast::ty_param] { + fn get_params(&mut self) -> ~[ast::ty_param] { match *self { message(_, _, _, this, _) => this.ty_params } @@ -80,7 +77,7 @@ pub struct state_ { span: span, dir: direction, ty_params: ~[ast::ty_param], - messages: DVec, + messages: @mut ~[message], proto: protocol } @@ -121,17 +118,17 @@ pub impl state_ { } } -pub type protocol = @protocol_; +pub type protocol = 
@mut protocol_; pub fn protocol(name: ~str, +span: span) -> protocol { - @protocol_(name, span) + @mut protocol_(name, span) } pub fn protocol_(name: ~str, span: span) -> protocol_ { protocol_ { name: name, span: span, - states: DVec(), + states: @mut ~[], bounded: None } } @@ -139,30 +136,30 @@ pub fn protocol_(name: ~str, span: span) -> protocol_ { pub struct protocol_ { name: ~str, span: span, - states: DVec, + states: @mut ~[state], - mut bounded: Option, + bounded: Option, } pub impl protocol_ { /// Get a state. - fn get_state(&self, name: ~str) -> state { + fn get_state(&mut self, name: ~str) -> state { self.states.find(|i| i.name == name).get() } - fn get_state_by_id(&self, id: uint) -> state { self.states[id] } + fn get_state_by_id(&mut self, id: uint) -> state { self.states[id] } - fn has_state(&self, name: ~str) -> bool { + fn has_state(&mut self, name: ~str) -> bool { self.states.find(|i| i.name == name).is_some() } - fn filename(&self) -> ~str { + fn filename(&mut self) -> ~str { ~"proto://" + self.name } - fn num_states(&self) -> uint { self.states.len() } + fn num_states(&mut self) -> uint { self.states.len() } - fn has_ty_params(&self) -> bool { + fn has_ty_params(&mut self) -> bool { for self.states.each |s| { if s.ty_params.len() > 0 { return true; @@ -170,7 +167,7 @@ pub impl protocol_ { } false } - fn is_bounded(&self) -> bool { + fn is_bounded(&mut self) -> bool { let bounded = self.bounded.get(); bounded } @@ -179,7 +176,7 @@ pub impl protocol_ { pub impl protocol { fn add_state_poly(&self, name: ~str, ident: ast::ident, dir: direction, +ty_params: ~[ast::ty_param]) -> state { - let messages = DVec(); + let messages = @mut ~[]; let state = @state_ { id: self.states.len(), diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index ffa6101d58fcc..d529ee0c01b01 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -515,7 +515,7 @@ fn expand_tts(cx: ext_ctxt, // try removing it when enough of them are gone. let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts); - p.quote_depth += 1u; + *p.quote_depth += 1u; let tts = p.parse_all_token_trees(); p.abort_if_errors(); diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index d51ddae6db2f3..890420edf6d68 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -114,8 +114,8 @@ pub fn is_some(&&mpu: matcher_pos_up) -> bool { pub struct MatcherPos { elts: ~[ast::matcher], // maybe should be /&? Need to understand regions. sep: Option, - mut idx: uint, - mut up: matcher_pos_up, // mutable for swapping only + idx: uint, + up: matcher_pos_up, // mutable for swapping only matches: ~[DVec<@named_match>], match_lo: uint, match_hi: uint, sp_lo: BytePos, @@ -155,8 +155,8 @@ pub fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: BytePos) ~MatcherPos { elts: ms, sep: sep, - mut idx: 0u, - mut up: matcher_pos_up(None), + idx: 0u, + up: matcher_pos_up(None), matches: copy vec::from_fn(count_names(ms), |_i| dvec::DVec()), match_lo: 0u, match_hi: match_idx_hi, @@ -267,7 +267,7 @@ pub fn parse(sess: @mut ParseSess, if idx == len { // pop from the matcher position - let new_pos = copy_up(ei.up); + let mut new_pos = copy_up(ei.up); // update matches (the MBE "parse tree") by appending // each tree as a subtree. 
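// A minimal sketch, not part of the patch: the hunks around this point move
// fields such as `token`, `quote_depth`, and the TtReader state from plain
// `mut` fields into shared mutable boxes (`@mut T` in this dialect), so every
// read or write becomes a deref like `*self.token` or `*p.quote_depth += 1u`.
// `@mut T` no longer exists; `Rc<RefCell<T>>` is a rough modern analogue, and
// the names below (MiniParser, Token, bump) are illustrative assumptions only,
// not identifiers taken from this patch.

use std::cell::RefCell;
use std::rc::Rc;

#[derive(Clone, Copy, Debug, PartialEq)]
enum Token { Ident, Eof }

// What used to be `mut token: Token` becomes a shared mutable cell, so other
// handles to the parser state can observe and advance it without `&mut self`.
struct MiniParser {
    token: Rc<RefCell<Token>>,
    tokens_consumed: Rc<RefCell<usize>>,
}

impl MiniParser {
    fn new() -> MiniParser {
        MiniParser {
            token: Rc::new(RefCell::new(Token::Ident)),
            tokens_consumed: Rc::new(RefCell::new(0)),
        }
    }

    // Analogue of `bump`: write through the cell instead of assigning to a
    // plain field, mirroring `*self.token = next.tok` in the hunks above.
    fn bump(&self) {
        *self.token.borrow_mut() = Token::Eof;
        *self.tokens_consumed.borrow_mut() += 1;
    }
}

fn main() {
    let p = MiniParser::new();
    assert_eq!(*p.token.borrow(), Token::Ident);
    p.bump();
    assert_eq!(*p.token.borrow(), Token::Eof);
    assert_eq!(*p.tokens_consumed.borrow(), 1);
}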
@@ -295,13 +295,13 @@ pub fn parse(sess: @mut ParseSess, match copy ei.sep { Some(ref t) if idx == len => { // we need a separator if tok == (*t) { //pass the separator - let ei_t = ei; + let mut ei_t = ei; ei_t.idx += 1; next_eis.push(ei_t); } } _ => { // we don't need a separator - let ei_t = ei; + let mut ei_t = ei; ei_t.idx = 0; cur_eis.push(ei_t); } @@ -315,7 +315,7 @@ pub fn parse(sess: @mut ParseSess, match_seq(ref matchers, ref sep, zero_ok, match_idx_lo, match_idx_hi) => { if zero_ok { - let new_ei = copy ei; + let mut new_ei = copy ei; new_ei.idx += 1u; //we specifically matched zero repeats. for uint::range(match_idx_lo, match_idx_hi) |idx| { @@ -331,8 +331,8 @@ pub fn parse(sess: @mut ParseSess, cur_eis.push(~MatcherPos { elts: (*matchers), sep: (*sep), - mut idx: 0u, - mut up: matcher_pos_up(Some(ei_t)), + idx: 0u, + up: matcher_pos_up(Some(ei_t)), matches: matches, match_lo: match_idx_lo, match_hi: match_idx_hi, sp_lo: sp.lo @@ -340,7 +340,7 @@ pub fn parse(sess: @mut ParseSess, } match_nonterminal(_,_,_) => { bb_eis.push(ei) } match_tok(ref t) => { - let ei_t = ei; + let mut ei_t = ei; if (*t) == tok { ei_t.idx += 1; next_eis.push(ei_t); @@ -388,7 +388,7 @@ pub fn parse(sess: @mut ParseSess, } else /* bb_eis.len() == 1 */ { let rust_parser = Parser(sess, cfg, rdr.dup()); - let ei = bb_eis.pop(); + let mut ei = bb_eis.pop(); match ei.elts[ei.idx].node { match_nonterminal(_, name, idx) => { ei.matches[idx].push(@matched_nonterminal( @@ -421,16 +421,16 @@ pub fn parse_nt(p: Parser, name: ~str) -> nonterminal { ~"expr" => token::nt_expr(p.parse_expr()), ~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)), // this could be handled like a token, since it is one - ~"ident" => match copy p.token { + ~"ident" => match *p.token { token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) } _ => p.fatal(~"expected ident, found " - + token::to_str(p.reader.interner(), copy p.token)) + + token::to_str(p.reader.interner(), *p.token)) }, ~"path" => token::nt_path(p.parse_path_with_tps(false)), ~"tt" => { - p.quote_depth += 1u; //but in theory, non-quoted tts might be useful + *p.quote_depth += 1u; //but in theory, non-quoted tts might be useful let res = token::nt_tt(@p.parse_token_tree()); - p.quote_depth -= 1u; + *p.quote_depth -= 1u; res } ~"matchers" => token::nt_matchers(p.parse_matchers()), diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index aa9036d295e53..a9502ff29020e 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -28,7 +28,7 @@ use std::oldmap::HashMap; `~` */ ///an unzipping of `token_tree`s struct TtFrame { - readme: ~[ast::token_tree], + readme: @mut ~[ast::token_tree], idx: uint, dotdotdoted: bool, sep: Option, @@ -59,8 +59,8 @@ pub fn new_tt_reader(sp_diag: span_handler, let r = @mut TtReader { sp_diag: sp_diag, interner: itr, - mut cur: @mut TtFrame { - readme: src, + cur: @mut TtFrame { + readme: @mut src, idx: 0u, dotdotdoted: false, sep: None, @@ -82,7 +82,7 @@ pub fn new_tt_reader(sp_diag: span_handler, pure fn dup_tt_frame(f: @mut TtFrame) -> @mut TtFrame { @mut TtFrame { - readme: f.readme, + readme: @mut (copy *f.readme), idx: f.idx, dotdotdoted: f.dotdotdoted, sep: f.sep, @@ -199,9 +199,9 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan { loop { /* because it's easiest, this handles `tt_delim` not starting with a `tt_tok`, even though it won't happen */ match r.cur.readme[r.cur.idx] { - tt_delim(copy tts) => { + tt_delim(tts) => { r.cur = @mut TtFrame { - 
readme: tts, + readme: @mut copy tts, idx: 0u, dotdotdoted: false, sep: None, @@ -242,7 +242,7 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan { r.repeat_len.push(len); r.repeat_idx.push(0u); r.cur = @mut TtFrame { - readme: tts, + readme: @mut copy tts, idx: 0u, dotdotdoted: true, sep: sep, diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 5803c607191ef..60f0cd4342af4 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -36,7 +36,7 @@ impl parser_attr for Parser { fn parse_outer_attributes() -> ~[ast::attribute] { let mut attrs: ~[ast::attribute] = ~[]; loop { - match copy self.token { + match *self.token { token::POUND => { if self.look_ahead(1u) != token::LBRACKET { break; @@ -86,14 +86,14 @@ impl parser_attr for Parser { let mut inner_attrs: ~[ast::attribute] = ~[]; let mut next_outer_attrs: ~[ast::attribute] = ~[]; loop { - match copy self.token { + match *self.token { token::POUND => { if self.look_ahead(1u) != token::LBRACKET { // This is an extension break; } let attr = self.parse_attribute(ast::attr_inner); - if self.token == token::SEMI { + if *self.token == token::SEMI { self.bump(); inner_attrs += ~[attr]; } else { @@ -127,7 +127,7 @@ impl parser_attr for Parser { fn parse_meta_item() -> @ast::meta_item { let lo = self.span.lo; let name = self.id_to_str(self.parse_ident()); - match self.token { + match *self.token { token::EQ => { self.bump(); let lit = self.parse_lit(); @@ -153,7 +153,7 @@ impl parser_attr for Parser { } fn parse_optional_meta() -> ~[@ast::meta_item] { - match self.token { + match *self.token { token::LPAREN => return self.parse_meta_seq(), _ => return ~[] } diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 22004be87adc6..57d62d628dc6f 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -53,25 +53,25 @@ pub fn token_to_str(reader: reader, ++token: token::Token) -> ~str { pub impl Parser { fn unexpected_last(t: token::Token) -> ! { self.span_fatal( - copy self.last_span, + *self.last_span, ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`"); } fn unexpected() -> ! { self.fatal(~"unexpected token: `" - + token_to_str(self.reader, self.token) + ~"`"); + + token_to_str(self.reader, *self.token) + ~"`"); } // expect and consume the token t. Signal an error if // the next token is not t. fn expect(t: token::Token) { - if self.token == t { + if *self.token == t { self.bump(); } else { let mut s: ~str = ~"expected `"; s += token_to_str(self.reader, t); s += ~"` but found `"; - s += token_to_str(self.reader, self.token); + s += token_to_str(self.reader, *self.token); self.fatal(s + ~"`"); } } @@ -79,12 +79,12 @@ pub impl Parser { fn parse_ident() -> ast::ident { self.check_strict_keywords(); self.check_reserved_keywords(); - match copy self.token { + match *self.token { token::IDENT(i, _) => { self.bump(); return i; } token::INTERPOLATED(token::nt_ident(*)) => { self.bug( ~"ident interpolation not converted to real token"); } _ => { self.fatal(~"expected ident, found `" - + token_to_str(self.reader, self.token) + + token_to_str(self.reader, *self.token) + ~"`"); } } } @@ -104,7 +104,7 @@ pub impl Parser { // consume token 'tok' if it exists. Returns true if the given // token was present, false otherwise. 
fn eat(tok: token::Token) -> bool { - return if self.token == tok { self.bump(); true } else { false }; + return if *self.token == tok { self.bump(); true } else { false }; } // Storing keywords as interned idents instead of strings would be nifty. @@ -129,7 +129,7 @@ pub impl Parser { } fn is_keyword(word: ~str) -> bool { - self.token_is_keyword(word, self.token) + self.token_is_keyword(word, *self.token) } fn is_any_keyword(tok: token::Token) -> bool { @@ -143,7 +143,7 @@ pub impl Parser { fn eat_keyword(word: ~str) -> bool { self.require_keyword(word); - let is_kw = match self.token { + let is_kw = match *self.token { token::IDENT(sid, false) => (word == *self.id_to_str(sid)), _ => false }; @@ -155,7 +155,7 @@ pub impl Parser { self.require_keyword(word); if !self.eat_keyword(word) { self.fatal(~"expected `" + word + ~"`, found `" + - token_to_str(self.reader, self.token) + + token_to_str(self.reader, *self.token) + ~"`"); } } @@ -165,9 +165,9 @@ pub impl Parser { } fn check_strict_keywords() { - match self.token { + match *self.token { token::IDENT(_, false) => { - let w = token_to_str(self.reader, self.token); + let w = token_to_str(self.reader, *self.token); self.check_strict_keywords_(w); } _ => () @@ -185,9 +185,9 @@ pub impl Parser { } fn check_reserved_keywords() { - match self.token { + match *self.token { token::IDENT(_, false) => { - let w = token_to_str(self.reader, self.token); + let w = token_to_str(self.reader, *self.token); self.check_reserved_keywords_(w); } _ => () @@ -203,9 +203,9 @@ pub impl Parser { // expect and consume a GT. if a >> is seen, replace it // with a single > and continue. fn expect_gt() { - if self.token == token::GT { + if *self.token == token::GT { self.bump(); - } else if self.token == token::BINOP(token::SHR) { + } else if *self.token == token::BINOP(token::SHR) { self.replace_token(token::GT, self.span.lo + BytePos(1u), self.span.hi); @@ -213,7 +213,7 @@ pub impl Parser { let mut s: ~str = ~"expected `"; s += token_to_str(self.reader, token::GT); s += ~"`, found `"; - s += token_to_str(self.reader, self.token); + s += token_to_str(self.reader, *self.token); s += ~"`"; self.fatal(s); } @@ -225,8 +225,8 @@ pub impl Parser { f: fn(Parser) -> T) -> ~[T] { let mut first = true; let mut v = ~[]; - while self.token != token::GT - && self.token != token::BINOP(token::SHR) { + while *self.token != token::GT + && *self.token != token::BINOP(token::SHR) { match sep { Some(ref t) => { if first { first = false; } @@ -276,7 +276,7 @@ pub impl Parser { f: fn(Parser) -> T) -> ~[T] { let mut first: bool = true; let mut v: ~[T] = ~[]; - while self.token != ket { + while *self.token != ket { match sep.sep { Some(ref t) => { if first { first = false; } @@ -284,7 +284,7 @@ pub impl Parser { } _ => () } - if sep.trailing_sep_allowed && self.token == ket { break; } + if sep.trailing_sep_allowed && *self.token == ket { break; } v.push(f(self)); } return v; @@ -293,8 +293,8 @@ pub impl Parser { // parse a sequence, including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. 
- fn parse_unspanned_seq(bra: token::Token, - ket: token::Token, + fn parse_unspanned_seq(+bra: token::Token, + +ket: token::Token, sep: SeqSep, f: fn(Parser) -> T) -> ~[T] { self.expect(bra); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 0ecd7917ac6dc..7f0e91e7cd848 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -134,7 +134,7 @@ pub fn parse_tts_from_source_str(name: ~str, sess: @mut ParseSess) -> ~[ast::token_tree] { let p = new_parser_from_source_str(sess, cfg, name, codemap::FssNone, source); - p.quote_depth += 1u; + *p.quote_depth += 1u; let r = p.parse_all_token_trees(); p.abort_if_errors(); return r; diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index f5ee5bd802907..1ae8786e09bb2 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -158,7 +158,7 @@ pub impl Parser { } fn is_obsolete_ident(ident: &str) -> bool { - self.token_is_obsolete_ident(ident, copy self.token) + self.token_is_obsolete_ident(ident, *self.token) } fn eat_obsolete_ident(ident: &str) -> bool { @@ -172,7 +172,7 @@ pub impl Parser { fn try_parse_obsolete_struct_ctor() -> bool { if self.eat_obsolete_ident("new") { - self.obsolete(copy self.last_span, ObsoleteStructCtor); + self.obsolete(*self.last_span, ObsoleteStructCtor); self.parse_fn_decl(|p| p.parse_arg()); self.parse_block(); true @@ -182,13 +182,13 @@ pub impl Parser { } fn try_parse_obsolete_with() -> bool { - if self.token == token::COMMA + if *self.token == token::COMMA && self.token_is_obsolete_ident("with", self.look_ahead(1u)) { self.bump(); } if self.eat_obsolete_ident("with") { - self.obsolete(copy self.last_span, ObsoleteWith); + self.obsolete(*self.last_span, ObsoleteWith); self.parse_expr(); true } else { @@ -198,10 +198,10 @@ pub impl Parser { fn try_parse_obsolete_priv_section() -> bool { if self.is_keyword(~"priv") && self.look_ahead(1) == token::LBRACE { - self.obsolete(copy self.span, ObsoletePrivSection); + self.obsolete(*self.span, ObsoletePrivSection); self.eat_keyword(~"priv"); self.bump(); - while self.token != token::RBRACE { + while *self.token != token::RBRACE { self.parse_single_class_item(ast::private); } self.bump(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index d0c857d363b5e..d2702a24170dc 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -126,7 +126,7 @@ enum view_item_parse_mode { The important thing is to make sure that lookahead doesn't balk at INTERPOLATED tokens */ macro_rules! maybe_whole_expr ( - ($p:expr) => ( match copy $p.token { + ($p:expr) => ( match *$p.token { INTERPOLATED(token::nt_expr(e)) => { $p.bump(); return e; @@ -141,28 +141,28 @@ macro_rules! maybe_whole_expr ( ) macro_rules! 
maybe_whole ( - ($p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(ref x)) => { $p.bump(); return (*x); } + ($p:expr, $constructor:ident) => ( match *$p.token { + INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; } _ => () }) ; - (deref $p:expr, $constructor:ident) => ( match copy $p.token { + (deref $p:expr, $constructor:ident) => ( match *$p.token { INTERPOLATED(token::$constructor(x)) => { $p.bump(); return *x; } _ => () }) ; - (Some $p:expr, $constructor:ident) => ( match copy $p.token { + (Some $p:expr, $constructor:ident) => ( match *$p.token { INTERPOLATED(token::$constructor(x)) => { $p.bump(); return Some(x); } _ => () }) ; - (iovi $p:expr, $constructor:ident) => ( match copy $p.token { + (iovi $p:expr, $constructor:ident) => ( match *$p.token { INTERPOLATED(token::$constructor(x)) => { $p.bump(); return iovi_item(x); } _ => () }) ; - (pair_empty $p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(ref x)) => { - $p.bump(); return (~[], (*x)); + (pair_empty $p:expr, $constructor:ident) => ( match *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); return (~[], x); } _ => () }) @@ -201,35 +201,35 @@ pub fn Parser(sess: @mut ParseSess, interner: interner, sess: sess, cfg: cfg, - token: tok0.tok, - span: span0, - last_span: span0, - mut buffer: [TokenAndSpan {tok: tok0.tok, sp: span0}, ..4], - buffer_start: 0, - buffer_end: 0, - tokens_consumed: 0u, - restriction: UNRESTRICTED, - quote_depth: 0u, + token: @mut tok0.tok, + span: @mut span0, + last_span: @mut span0, + buffer: @mut [TokenAndSpan {tok: tok0.tok, sp: span0}, ..4], + buffer_start: @mut 0, + buffer_end: @mut 0, + tokens_consumed: @mut 0u, + restriction: @mut UNRESTRICTED, + quote_depth: @mut 0u, keywords: token::keyword_table(), strict_keywords: token::strict_keyword_table(), reserved_keywords: token::reserved_keyword_table(), obsolete_set: HashMap(), - mod_path_stack: ~[], + mod_path_stack: @mut ~[], } } pub struct Parser { sess: @mut ParseSess, cfg: crate_cfg, - mut token: token::Token, - mut span: span, - mut last_span: span, - mut buffer: [TokenAndSpan * 4], - mut buffer_start: int, - mut buffer_end: int, - mut tokens_consumed: uint, - mut restriction: restriction, - mut quote_depth: uint, // not (yet) related to the quasiquoter + token: @mut token::Token, + span: @mut span, + last_span: @mut span, + buffer: @mut [TokenAndSpan * 4], + buffer_start: @mut int, + buffer_end: @mut int, + tokens_consumed: @mut uint, + restriction: @mut restriction, + quote_depth: @mut uint, // not (yet) related to the quasiquoter reader: reader, interner: @token::ident_interner, keywords: HashMap<~str, ()>, @@ -239,7 +239,7 @@ pub struct Parser { /// extra detail when the same error is seen twice obsolete_set: HashMap, /// Used to determine the path to externally loaded source files - mut mod_path_stack: ~[~str], + mod_path_stack: @mut ~[~str], drop {} /* do not copy the parser; its state is tied to outside state */ } @@ -247,39 +247,39 @@ pub struct Parser { pub impl Parser { // advance the parser by one token fn bump() { - self.last_span = self.span; - let next = if self.buffer_start == self.buffer_end { + *self.last_span = *self.span; + let next = if *self.buffer_start == *self.buffer_end { self.reader.next_token() } else { - let next = self.buffer[self.buffer_start]; - self.buffer_start = (self.buffer_start + 1) & 3; + let next = self.buffer[*self.buffer_start]; + *self.buffer_start = (*self.buffer_start + 1) & 3; next }; - 
self.token = next.tok; - self.span = next.sp; - self.tokens_consumed += 1u; + *self.token = next.tok; + *self.span = next.sp; + *self.tokens_consumed += 1u; } // EFFECT: replace the current token and span with the given one fn replace_token(next: token::Token, +lo: BytePos, +hi: BytePos) { - self.token = next; - self.span = mk_sp(lo, hi); + *self.token = next; + *self.span = mk_sp(lo, hi); } fn buffer_length() -> int { - if self.buffer_start <= self.buffer_end { - return self.buffer_end - self.buffer_start; + if *self.buffer_start <= *self.buffer_end { + return *self.buffer_end - *self.buffer_start; } - return (4 - self.buffer_start) + self.buffer_end; + return (4 - *self.buffer_start) + *self.buffer_end; } fn look_ahead(distance: uint) -> token::Token { let dist = distance as int; while self.buffer_length() < dist { - self.buffer[self.buffer_end] = self.reader.next_token(); - self.buffer_end = (self.buffer_end + 1) & 3; + self.buffer[*self.buffer_end] = self.reader.next_token(); + *self.buffer_end = (*self.buffer_end + 1) & 3; } - return copy self.buffer[(self.buffer_start + dist - 1) & 3].tok; + return copy self.buffer[(*self.buffer_start + dist - 1) & 3].tok; } fn fatal(m: ~str) -> ! { - self.sess.span_diagnostic.span_fatal(copy self.span, m) + self.sess.span_diagnostic.span_fatal(*copy self.span, m) } fn span_fatal(sp: span, m: ~str) -> ! { self.sess.span_diagnostic.span_fatal(sp, m) @@ -288,10 +288,10 @@ pub impl Parser { self.sess.span_diagnostic.span_note(sp, m) } fn bug(m: ~str) -> ! { - self.sess.span_diagnostic.span_bug(copy self.span, m) + self.sess.span_diagnostic.span_bug(*copy self.span, m) } fn warn(m: ~str) { - self.sess.span_diagnostic.span_warn(copy self.span, m) + self.sess.span_diagnostic.span_warn(*copy self.span, m) } fn span_err(sp: span, m: ~str) { self.sess.span_diagnostic.span_err(sp, m) @@ -450,8 +450,8 @@ pub impl Parser { let hi = p.last_span.hi; debug!("parse_trait_methods(): trait method signature ends in \ `%s`", - token_to_str(p.reader, p.token)); - match p.token { + token_to_str(p.reader, *p.token)); + match *p.token { token::SEMI => { p.bump(); debug!("parse_trait_methods(): parsing required method"); @@ -489,7 +489,7 @@ pub impl Parser { } _ => { p.fatal(~"expected `;` or `}` but found `" + - token_to_str(p.reader, p.token) + ~"`"); + token_to_str(p.reader, *p.token) + ~"`"); } } } @@ -561,7 +561,7 @@ pub impl Parser { fn parse_region() -> @region { self.expect(token::BINOP(token::AND)); - match copy self.token { + match *self.token { token::IDENT(sid, _) => { self.bump(); self.region_from_name(Some(sid)) @@ -577,9 +577,9 @@ pub impl Parser { let lo = self.span.lo; - let t = if self.token == token::LPAREN { + let t = if *self.token == token::LPAREN { self.bump(); - if self.token == token::RPAREN { + if *self.token == token::RPAREN { self.bump(); ty_nil } else { @@ -588,9 +588,9 @@ pub impl Parser { // of type t let mut ts = ~[self.parse_ty(false)]; let mut one_tuple = false; - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); - if self.token != token::RPAREN { + if *self.token != token::RPAREN { ts.push(self.parse_ty(false)); } else { @@ -602,16 +602,16 @@ pub impl Parser { self.expect(token::RPAREN); t } - } else if self.token == token::AT { + } else if *self.token == token::AT { self.bump(); self.parse_box_or_uniq_pointee(ManagedSigil, ty_box) - } else if self.token == token::TILDE { + } else if *self.token == token::TILDE { self.bump(); self.parse_box_or_uniq_pointee(OwnedSigil, ty_uniq) - } else if self.token == 
token::BINOP(token::STAR) { + } else if *self.token == token::BINOP(token::STAR) { self.bump(); ty_ptr(self.parse_mt()) - } else if self.token == token::LBRACE { + } else if *self.token == token::LBRACE { let elems = self.parse_unspanned_seq( token::LBRACE, token::RBRACE, seq_sep_trailing_allowed(token::COMMA), @@ -620,7 +620,7 @@ pub impl Parser { self.unexpected_last(token::RBRACE); } ty_rec(elems) - } else if self.token == token::LBRACKET { + } else if *self.token == token::LBRACKET { self.expect(token::LBRACKET); let mt = self.parse_mt(); @@ -631,15 +631,15 @@ pub impl Parser { }; self.expect(token::RBRACKET); t - } else if self.token == token::BINOP(token::AND) { + } else if *self.token == token::BINOP(token::AND) { self.bump(); self.parse_borrowed_pointee() } else if self.eat_keyword(~"extern") { self.parse_ty_bare_fn() - } else if self.token_is_closure_keyword(self.token) { + } else if self.token_is_closure_keyword(*self.token) { self.parse_ty_closure(None, None) - } else if self.token == token::MOD_SEP - || is_ident_or_path(self.token) { + } else if *self.token == token::MOD_SEP + || is_ident_or_path(*self.token) { let path = self.parse_path_with_tps(colons_before_params); ty_path(path, self.get_id()) } else { self.fatal(~"expected type"); }; @@ -653,7 +653,7 @@ pub impl Parser { ctor: &fn(+v: mt) -> ty_) -> ty_ { // @'foo fn() or @foo/fn() or @fn() are parsed directly as fn types: - match copy self.token { + match *self.token { token::LIFETIME(rname) => { self.bump(); return self.parse_ty_closure(Some(sigil), Some(rname)); @@ -661,11 +661,10 @@ pub impl Parser { token::IDENT(rname, _) => { if self.look_ahead(1u) == token::BINOP(token::SLASH) && - self.token_is_closure_keyword(self.look_ahead(2u)) - { + self.token_is_closure_keyword(self.look_ahead(2u)) { self.bump(); self.bump(); return self.parse_ty_closure(Some(sigil), Some(rname)); - } else if self.token_is_closure_keyword(self.token) { + } else if self.token_is_closure_keyword(*self.token) { return self.parse_ty_closure(Some(sigil), None); } } @@ -682,7 +681,7 @@ pub impl Parser { fn parse_borrowed_pointee() -> ty_ { // look for `&'lt` or `&foo/` and interpret `foo` as the region name: - let rname = match self.token { + let rname = match *self.token { token::LIFETIME(sid) => { self.bump(); Some(sid) @@ -700,7 +699,7 @@ pub impl Parser { _ => { None } }; - if self.token_is_closure_keyword(self.token) { + if self.token_is_closure_keyword(*self.token) { return self.parse_ty_closure(Some(BorrowedSigil), rname); } @@ -726,13 +725,13 @@ pub impl Parser { } fn is_named_argument() -> bool { - let offset = if self.token == token::BINOP(token::AND) { + let offset = if *self.token == token::BINOP(token::AND) { 1 - } else if self.token == token::BINOP(token::MINUS) { + } else if *self.token == token::BINOP(token::MINUS) { 1 - } else if self.token == token::ANDAND { + } else if *self.token == token::ANDAND { 1 - } else if self.token == token::BINOP(token::PLUS) { + } else if *self.token == token::BINOP(token::PLUS) { if self.look_ahead(1) == token::BINOP(token::PLUS) { 2 } else { @@ -740,7 +739,7 @@ pub impl Parser { } } else { 0 }; if offset == 0 { - is_plain_ident(self.token) + is_plain_ident(*self.token) && self.look_ahead(1) == token::COLON } else { is_plain_ident(self.look_ahead(offset)) @@ -774,7 +773,7 @@ pub impl Parser { } else { m = infer(self.get_id()); ast_util::ident_to_pat(self.get_id(), - copy self.last_span, + *self.last_span, special_idents::invalid) }; @@ -818,7 +817,7 @@ pub impl Parser { fn 
maybe_parse_fixed_vstore_with_star() -> Option { if self.eat(token::BINOP(token::STAR)) { - match copy self.token { + match *self.token { token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => { self.bump(); Some(i as uint) @@ -827,7 +826,7 @@ pub impl Parser { self.fatal( fmt!("expected integral vector length \ but found `%s`", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } } } else { @@ -856,7 +855,8 @@ pub impl Parser { } else if self.eat_keyword(~"false") { lit_bool(false) } else { - let tok = self.token; + // XXX: This is a really bad copy! + let tok = *self.token; self.bump(); self.lit_from_token(tok) }; @@ -919,9 +919,8 @@ pub impl Parser { // vstores is... um... the same. I guess that's my fault. This // is still not ideal as for &str we end up parsing more than we // ought to and have to sort it out later. - if self.token == token::BINOP(token::SLASH) + if *self.token == token::BINOP(token::SLASH) && self.look_ahead(1u) == token::BINOP(token::AND) { - self.expect(token::BINOP(token::SLASH)); Some(self.parse_region()) } else { @@ -957,7 +956,7 @@ pub impl Parser { * Parses 0 or 1 lifetime. */ - match self.token { + match *self.token { token::LIFETIME(_) => { Some(self.parse_lifetime()) } @@ -973,12 +972,12 @@ pub impl Parser { * Parses a single lifetime. */ - match self.token { + match *self.token { token::LIFETIME(i) => { self.bump(); return ast::Lifetime { id: self.get_id(), - span: self.span, + span: *self.span, ident: i }; } @@ -999,7 +998,7 @@ pub impl Parser { let mut res = ~[]; loop { - match self.token { + match *self.token { token::LIFETIME(_) => { res.push(self.parse_lifetime()); } @@ -1008,7 +1007,7 @@ pub impl Parser { } } - match self.token { + match *self.token { token::COMMA => { self.bump();} token::GT => { return res; } _ => { @@ -1057,14 +1056,16 @@ pub impl Parser { fn mk_lit_u32(i: u32) -> @expr { let span = self.span; - let lv_lit = @codemap::spanned { node: lit_uint(i as u64, ty_u32), - span: span }; + let lv_lit = @codemap::spanned { + node: lit_uint(i as u64, ty_u32), + span: *span + }; @expr { id: self.get_id(), callee_id: self.get_id(), node: expr_lit(lv_lit), - span: span, + span: *span, } } @@ -1075,21 +1076,21 @@ pub impl Parser { let mut ex: expr_; - if self.token == token::LPAREN { + if *self.token == token::LPAREN { self.bump(); // (e) is parenthesized e // (e,) is a tuple with only one field, e let mut one_tuple = false; - if self.token == token::RPAREN { + if *self.token == token::RPAREN { hi = self.span.hi; self.bump(); let lit = @spanned(lo, hi, lit_nil); return self.mk_expr(lo, hi, expr_lit(lit)); } let mut es = ~[self.parse_expr()]; - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); - if self.token != token::RPAREN { + if *self.token != token::RPAREN { es.push(self.parse_expr()); } else { @@ -1105,7 +1106,7 @@ pub impl Parser { else { self.mk_expr(lo, hi, expr_tup(es)) } - } else if self.token == token::LBRACE { + } else if *self.token == token::LBRACE { if self.looking_at_record_literal() { ex = self.parse_record_literal(); hi = self.span.hi; @@ -1115,7 +1116,7 @@ pub impl Parser { return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); } - } else if token::is_bar(self.token) { + } else if token::is_bar(*self.token) { return self.parse_lambda_expr(); } else if self.eat_keyword(~"if") { return self.parse_if_expr(); @@ -1142,17 +1143,17 @@ pub impl Parser { return self.parse_fn_expr(sigil); } else if self.eat_keyword(~"unsafe") { return self.parse_block_expr(lo, 
unsafe_blk); - } else if self.token == token::LBRACKET { + } else if *self.token == token::LBRACKET { self.bump(); let mutbl = self.parse_mutability(); - if self.token == token::RBRACKET { + if *self.token == token::RBRACKET { // Empty vector. self.bump(); ex = expr_vec(~[], mutbl); } else { // Nonempty vector. let first_expr = self.parse_expr(); - if self.token == token::COMMA && + if *self.token == token::COMMA && self.look_ahead(1) == token::DOTDOT { // Repeating vector syntax: [ 0, ..512 ] self.bump(); @@ -1160,7 +1161,7 @@ pub impl Parser { let count = self.parse_expr(); self.expect(token::RBRACKET); ex = expr_repeat(first_expr, count, mutbl); - } else if self.token == token::COMMA { + } else if *self.token == token::COMMA { // Vector with two or more elements. self.bump(); let remaining_exprs = @@ -1188,13 +1189,13 @@ pub impl Parser { ex = expr_assert(e); hi = e.span.hi; } else if self.eat_keyword(~"return") { - if can_begin_expr(self.token) { + if can_begin_expr(*self.token) { let e = self.parse_expr(); hi = e.span.hi; ex = expr_ret(Some(e)); } else { ex = expr_ret(None); } } else if self.eat_keyword(~"break") { - if is_ident(self.token) { + if is_ident(*self.token) { ex = expr_break(Some(self.parse_ident())); } else { ex = expr_break(None); @@ -1204,28 +1205,28 @@ pub impl Parser { let e = self.parse_expr(); ex = expr_copy(e); hi = e.span.hi; - } else if self.token == token::MOD_SEP || - is_ident(self.token) && !self.is_keyword(~"true") && - !self.is_keyword(~"false") { + } else if *self.token == token::MOD_SEP || + is_ident(*self.token) && !self.is_keyword(~"true") && + !self.is_keyword(~"false") { let pth = self.parse_path_with_tps(true); /* `!`, as an operator, is prefix, so we know this isn't that */ - if self.token == token::NOT { + if *self.token == token::NOT { self.bump(); - let tts = match self.token { - token::LPAREN | token::LBRACE => { - let ket = token::flip_delimiter(copy self.token); - self.parse_unspanned_seq(copy self.token, ket, - seq_sep_none(), - |p| p.parse_token_tree()) - } + match *self.token { + token::LPAREN | token::LBRACE => {} _ => self.fatal(~"expected open delimiter") }; + + let ket = token::flip_delimiter(*self.token); + let tts = self.parse_unspanned_seq(*self.token, + ket, + seq_sep_none(), + |p| p.parse_token_tree()); let hi = self.span.hi; - return self.mk_mac_expr( - lo, hi, mac_invoc_tt(pth, tts)); - } else if self.token == token::LBRACE { + return self.mk_mac_expr(lo, hi, mac_invoc_tt(pth, tts)); + } else if *self.token == token::LBRACE { // This might be a struct literal. if self.looking_at_record_literal() { // It's a struct literal. @@ -1233,8 +1234,7 @@ pub impl Parser { let mut fields = ~[]; let mut base = None; fields.push(self.parse_field(token::COLON)); - while self.token != token::RBRACE { - + while *self.token != token::RBRACE { if self.try_parse_obsolete_with() { break; } @@ -1246,7 +1246,7 @@ pub impl Parser { break; } - if self.token == token::RBRACE { + if *self.token == token::RBRACE { // Accept an optional trailing comma. 
break; } @@ -1283,7 +1283,7 @@ pub impl Parser { } fn permits_call() -> bool { - return self.restriction != RESTRICT_NO_CALL_EXPRS; + return *self.restriction != RESTRICT_NO_CALL_EXPRS; } fn parse_dot_or_call_expr_with(e0: @expr) -> @expr { @@ -1293,7 +1293,7 @@ pub impl Parser { loop { // expr.f if self.eat(token::DOT) { - match copy self.token { + match *self.token { token::IDENT(i, _) => { hi = self.span.hi; self.bump(); @@ -1306,7 +1306,7 @@ pub impl Parser { }; // expr.f() method call - match copy self.token { + match *self.token { token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( token::LPAREN, token::RPAREN, @@ -1327,7 +1327,7 @@ pub impl Parser { loop; } if self.expr_is_complete(e) { break; } - match copy self.token { + match *self.token { // expr(...) token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( @@ -1358,17 +1358,17 @@ pub impl Parser { // parse an optional separator followed by a kleene-style // repetition token (+ or *). fn parse_sep_and_zerok() -> (Option, bool) { - if self.token == token::BINOP(token::STAR) - || self.token == token::BINOP(token::PLUS) { - let zerok = self.token == token::BINOP(token::STAR); + if *self.token == token::BINOP(token::STAR) + || *self.token == token::BINOP(token::PLUS) { + let zerok = *self.token == token::BINOP(token::STAR); self.bump(); return (None, zerok); } else { - let sep = self.token; + let sep = *self.token; self.bump(); - if self.token == token::BINOP(token::STAR) - || self.token == token::BINOP(token::PLUS) { - let zerok = self.token == token::BINOP(token::STAR); + if *self.token == token::BINOP(token::STAR) + || *self.token == token::BINOP(token::PLUS) { + let zerok = *self.token == token::BINOP(token::STAR); self.bump(); return (Some(sep), zerok); } else { @@ -1383,18 +1383,18 @@ pub impl Parser { fn parse_non_delim_tt_tok(p: Parser) -> token_tree { maybe_whole!(deref p, nt_tt); - match p.token { + match *p.token { token::RPAREN | token::RBRACE | token::RBRACKET => { p.fatal(~"incorrect close delimiter: `" - + token_to_str(p.reader, p.token) + ~"`"); + + token_to_str(p.reader, *p.token) + ~"`"); } /* we ought to allow different depths of unquotation */ - token::DOLLAR if p.quote_depth > 0u => { + token::DOLLAR if *p.quote_depth > 0u => { p.bump(); - let sp = p.span; + let sp = *p.span; - if p.token == token::LPAREN { + if *p.token == token::LPAREN { let seq = p.parse_seq(token::LPAREN, token::RPAREN, seq_sep_none(), |p| p.parse_token_tree()); @@ -1412,18 +1412,18 @@ pub impl Parser { // turn the next token into a tt_tok: fn parse_any_tt_tok(p: Parser) -> token_tree{ - let res = tt_tok(p.span, p.token); + let res = tt_tok(*p.span, *p.token); p.bump(); res } - match self.token { + match *self.token { token::EOF => { self.fatal(~"file ended in the middle of a macro invocation"); } token::LPAREN | token::LBRACE | token::LBRACKET => { // tjc: ?????? 
- let ket = token::flip_delimiter(copy self.token); + let ket = token::flip_delimiter(*self.token); tt_delim(vec::append( // the open delimiter: ~[parse_any_tt_tok(self)], @@ -1440,7 +1440,7 @@ pub impl Parser { fn parse_all_token_trees() -> ~[token_tree] { let mut tts = ~[]; - while self.token != token::EOF { + while *self.token != token::EOF { tts.push(self.parse_token_tree()); } tts @@ -1451,11 +1451,11 @@ pub impl Parser { // the interpolation of matchers maybe_whole!(self, nt_matchers); let name_idx = @mut 0u; - return match self.token { + return match *self.token { token::LBRACE | token::LPAREN | token::LBRACKET => { - self.parse_matcher_subseq(name_idx, copy self.token, + self.parse_matcher_subseq(name_idx, *self.token, // tjc: not sure why we need a copy - token::flip_delimiter(copy self.token)) + token::flip_delimiter(*self.token)) } _ => self.fatal(~"expected open delimiter") } @@ -1472,9 +1472,9 @@ pub impl Parser { self.expect(bra); - while self.token != ket || lparens > 0u { - if self.token == token::LPAREN { lparens += 1u; } - if self.token == token::RPAREN { lparens -= 1u; } + while *self.token != ket || lparens > 0u { + if *self.token == token::LPAREN { lparens += 1u; } + if *self.token == token::RPAREN { lparens -= 1u; } ret_val.push(self.parse_matcher(name_idx)); } @@ -1486,11 +1486,12 @@ pub impl Parser { fn parse_matcher(name_idx: @mut uint) -> matcher { let lo = self.span.lo; - let m = if self.token == token::DOLLAR { + let m = if *self.token == token::DOLLAR { self.bump(); - if self.token == token::LPAREN { + if *self.token == token::LPAREN { let name_idx_lo = *name_idx; - let ms = self.parse_matcher_subseq(name_idx, token::LPAREN, + let ms = self.parse_matcher_subseq(name_idx, + token::LPAREN, token::RPAREN); if ms.len() == 0u { self.fatal(~"repetition body must be nonempty"); @@ -1506,7 +1507,7 @@ pub impl Parser { m } } else { - let m = match_tok(self.token); + let m = match_tok(*self.token); self.bump(); m }; @@ -1520,7 +1521,7 @@ pub impl Parser { let mut hi; let mut ex; - match copy self.token { + match *self.token { token::NOT => { self.bump(); let e = self.parse_prefix_expr(); @@ -1609,13 +1610,13 @@ pub impl Parser { fn parse_more_binops(lhs: @expr, min_prec: uint) -> @expr { if self.expr_is_complete(lhs) { return lhs; } - let peeked = self.token; + let peeked = *self.token; if peeked == token::BINOP(token::OR) && - (self.restriction == RESTRICT_NO_BAR_OP || - self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) { + (*self.restriction == RESTRICT_NO_BAR_OP || + *self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) { lhs } else if peeked == token::OROR && - self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP { + *self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP { lhs } else { let cur_opt = token_to_binop(peeked); @@ -1655,7 +1656,7 @@ pub impl Parser { fn parse_assign_expr() -> @expr { let lo = self.span.lo; let lhs = self.parse_binops(); - match copy self.token { + match *self.token { token::EQ => { self.bump(); let rhs = self.parse_expr(); @@ -1682,7 +1683,7 @@ pub impl Parser { expr_assign_op(aop, lhs, rhs)) } token::LARROW => { - self.obsolete(copy self.span, ObsoleteBinaryMove); + self.obsolete(*self.span, ObsoleteBinaryMove); // Bogus value (but it's an error) self.bump(); // <- self.bump(); // rhs @@ -1732,7 +1733,7 @@ pub impl Parser { fn parse_lambda_block_expr() -> @expr { self.parse_lambda_expr_( || { - match self.token { + match *self.token { token::BINOP(token::OR) | token::OROR => { self.parse_fn_block_decl() } @@ -1743,7 +1744,7 @@ pub impl 
Parser { output: @Ty { id: self.get_id(), node: ty_infer, - span: self.span + span: *self.span }, cf: return_val } @@ -1840,8 +1841,8 @@ pub impl Parser { // but they aren't represented by tests debug!("sugary call on %?", e.node); self.span_fatal( - lo, fmt!("`%s` must be followed by a block call", - keyword)); + *lo, + fmt!("`%s` must be followed by a block call", keyword)); } } } @@ -1857,13 +1858,13 @@ pub impl Parser { fn parse_loop_expr() -> @expr { // loop headers look like 'loop {' or 'loop unsafe {' let is_loop_header = - self.token == token::LBRACE - || (is_ident(copy self.token) + *self.token == token::LBRACE + || (is_ident(*self.token) && self.look_ahead(1) == token::LBRACE); // labeled loop headers look like 'loop foo: {' let is_labeled_loop_header = - is_ident(self.token) - && !self.is_any_keyword(copy self.token) + is_ident(*self.token) + && !self.is_any_keyword(*self.token) && self.look_ahead(1) == token::COLON; if is_loop_header || is_labeled_loop_header { @@ -1883,7 +1884,7 @@ pub impl Parser { } else { // This is a 'continue' expression let lo = self.span.lo; - let ex = if is_ident(self.token) { + let ex = if is_ident(*self.token) { expr_again(Some(self.parse_ident())) } else { expr_again(None) @@ -1896,7 +1897,7 @@ pub impl Parser { // For distingishing between record literals and blocks fn looking_at_record_literal() -> bool { let lookahead = self.look_ahead(1); - self.token == token::LBRACE && + *self.token == token::LBRACE && (self.token_is_keyword(~"mut", lookahead) || (is_plain_ident(lookahead) && self.look_ahead(2) == token::COLON)) @@ -1906,8 +1907,8 @@ pub impl Parser { self.expect(token::LBRACE); let mut fields = ~[self.parse_field(token::COLON)]; let mut base = None; - while self.token != token::RBRACE { - if self.token == token::COMMA + while *self.token != token::RBRACE { + if *self.token == token::COMMA && self.look_ahead(1) == token::DOTDOT { self.bump(); self.bump(); @@ -1919,7 +1920,7 @@ pub impl Parser { } self.expect(token::COMMA); - if self.token == token::RBRACE { + if *self.token == token::RBRACE { // record ends by an optional trailing comma break; } @@ -1935,7 +1936,7 @@ pub impl Parser { let discriminant = self.parse_expr(); self.expect(token::LBRACE); let mut arms: ~[arm] = ~[]; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = None; if self.eat_keyword(~"if") { guard = Some(self.parse_expr()); } @@ -1944,7 +1945,7 @@ pub impl Parser { let require_comma = !classify::expr_is_simple_block(expr) - && self.token != token::RBRACE; + && *self.token != token::RBRACE; if require_comma { self.expect(token::COMMA); @@ -1977,21 +1978,21 @@ pub impl Parser { // parse an expression, subject to the given restriction fn parse_expr_res(r: restriction) -> @expr { - let old = self.restriction; - self.restriction = r; + let old = *self.restriction; + *self.restriction = r; let e = self.parse_assign_expr(); - self.restriction = old; + *self.restriction = old; return e; } fn parse_initializer() -> Option<@expr> { - match self.token { + match *self.token { token::EQ => { self.bump(); return Some(self.parse_expr()); } token::LARROW => { - self.obsolete(copy self.span, ObsoleteMoveInit); + self.obsolete(*self.span, ObsoleteMoveInit); self.bump(); self.bump(); return None; @@ -2006,7 +2007,7 @@ pub impl Parser { let mut pats = ~[]; loop { pats.push(self.parse_pat(true)); - if self.token == token::BINOP(token::OR) { self.bump(); } + if *self.token == token::BINOP(token::OR) { self.bump(); } else { 
return pats; } }; } @@ -2016,12 +2017,12 @@ pub impl Parser { let mut tail = None; let mut first = true; - while self.token != token::RBRACKET { + while *self.token != token::RBRACKET { if first { first = false; } else { self.expect(token::COMMA); } let mut is_tail = false; - if self.token == token::DOTDOT { + if *self.token == token::DOTDOT { self.bump(); is_tail = true; } @@ -2048,15 +2049,15 @@ pub impl Parser { let mut fields = ~[]; let mut etc = false; let mut first = true; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { if first { first = false; } else { self.expect(token::COMMA); } - if self.token == token::UNDERSCORE { + if *self.token == token::UNDERSCORE { self.bump(); - if self.token != token::RBRACE { + if *self.token != token::RBRACE { self.fatal(~"expected `}`, found `" + - token_to_str(self.reader, self.token) + + token_to_str(self.reader, *self.token) + ~"`"); } etc = true; @@ -2073,14 +2074,14 @@ pub impl Parser { let fieldpath = ast_util::ident_to_path(mk_sp(lo1, hi1), fieldname); let mut subpat; - if self.token == token::COLON { + if *self.token == token::COLON { self.bump(); subpat = self.parse_pat(refutable); } else { subpat = @ast::pat { id: self.get_id(), node: pat_ident(bind_infer, fieldpath, None), - span: self.last_span + span: *self.last_span }; } fields.push(ast::field_pat { ident: fieldname, pat: subpat }); @@ -2094,7 +2095,7 @@ pub impl Parser { let lo = self.span.lo; let mut hi = self.span.hi; let mut pat; - match self.token { + match *self.token { token::UNDERSCORE => { self.bump(); pat = pat_wild; } token::AT => { self.bump(); @@ -2172,7 +2173,7 @@ pub impl Parser { } token::LPAREN => { self.bump(); - if self.token == token::RPAREN { + if *self.token == token::RPAREN { hi = self.span.hi; self.bump(); let lit = @codemap::spanned { @@ -2183,7 +2184,7 @@ pub impl Parser { } else { let mut fields = ~[self.parse_pat(refutable)]; if self.look_ahead(1) != token::RPAREN { - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); fields.push(self.parse_pat(refutable)); } @@ -2232,7 +2233,7 @@ pub impl Parser { cannot_be_enum_or_struct = true } - if is_plain_ident(self.token) && cannot_be_enum_or_struct { + if is_plain_ident(*self.token) && cannot_be_enum_or_struct { let name = self.parse_value_path(); let sub; if self.eat(token::AT) { @@ -2243,7 +2244,7 @@ pub impl Parser { pat = pat_ident(binding_mode, name, sub); } else { let enum_path = self.parse_path_with_tps(true); - match self.token { + match *self.token { token::LBRACE => { self.bump(); let (fields, etc) = @@ -2254,7 +2255,7 @@ pub impl Parser { _ => { let mut args: ~[@pat] = ~[]; let mut star_pat = false; - match self.token { + match *self.token { token::LPAREN => match self.look_ahead(1u) { token::BINOP(token::STAR) => { // This is a "top constructor only" pat @@ -2298,9 +2299,9 @@ pub impl Parser { fn parse_pat_ident(refutable: bool, binding_mode: ast::binding_mode) -> ast::pat_ { - if !is_plain_ident(self.token) { + if !is_plain_ident(*self.token) { self.span_fatal( - copy self.last_span, + *self.last_span, ~"expected identifier, found path"); } let name = self.parse_value_path(); @@ -2314,9 +2315,9 @@ pub impl Parser { // leads to a parse error. 
Note that if there is no explicit // binding mode then we do not end up here, because the lookahead // will direct us over to parse_enum_variant() - if self.token == token::LPAREN { + if *self.token == token::LPAREN { self.span_fatal( - copy self.last_span, + *self.last_span, ~"expected identifier, found enum pattern"); } @@ -2364,7 +2365,7 @@ pub impl Parser { if self.eat_keyword(~"mut") { is_mutbl = struct_mutable; } - if !is_plain_ident(self.token) { + if !is_plain_ident(*self.token) { self.fatal(~"expected ident"); } let name = self.parse_ident(); @@ -2393,8 +2394,8 @@ pub impl Parser { self.expect_keyword(~"let"); let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); - } else if is_ident(self.token) - && !self.is_any_keyword(copy self.token) + } else if is_ident(*self.token) + && !self.is_any_keyword(*self.token) && self.look_ahead(1) == token::NOT { check_expected_item(self, first_item_attrs); @@ -2404,7 +2405,7 @@ pub impl Parser { let pth = self.parse_value_path(); self.bump(); - let id = if self.token == token::LPAREN { + let id = if *self.token == token::LPAREN { token::special_idents::invalid // no special identifier } else { self.parse_ident() @@ -2459,7 +2460,7 @@ pub impl Parser { } fn expr_is_complete(e: @expr) -> bool { - return self.restriction == RESTRICT_STMT_EXPR && + return *self.restriction == RESTRICT_STMT_EXPR && !classify::expr_requires_semi_to_be_stmt(e); } @@ -2485,7 +2486,7 @@ pub impl Parser { let lo = self.span.lo; if self.eat_keyword(~"unsafe") { - self.obsolete(copy self.span, ObsoleteUnsafeBlock); + self.obsolete(*self.span, ObsoleteUnsafeBlock); } self.expect(token::LBRACE); let (inner, next) = @@ -2529,12 +2530,12 @@ pub impl Parser { let mut initial_attrs = attrs_remaining; - if self.token == token::RBRACE && !vec::is_empty(initial_attrs) { + if *self.token == token::RBRACE && !vec::is_empty(initial_attrs) { self.fatal(~"expected item"); } - while self.token != token::RBRACE { - match self.token { + while *self.token != token::RBRACE { + match *self.token { token::SEMI => { self.bump(); // empty } @@ -2544,7 +2545,7 @@ pub impl Parser { match stmt.node { stmt_expr(e, stmt_id) => { // Expression without semicolon - match self.token { + match *self.token { token::SEMI => { self.bump(); stmts.push(@codemap::spanned { @@ -2569,7 +2570,7 @@ pub impl Parser { stmt_mac(ref m, _) => { // Statement macro; might be an expr - match self.token { + match *self.token { token::SEMI => { self.bump(); stmts.push(@codemap::spanned { @@ -2615,9 +2616,9 @@ pub impl Parser { @Ty { id: self.get_id(), node: ty_path( - ident_to_path(copy self.last_span, i), + ident_to_path(*self.last_span, i), self.get_id()), - span: self.last_span, + span: *self.last_span, } } @@ -2643,12 +2644,12 @@ pub impl Parser { if self.eat_keyword(~"static") { bounds.push(RegionTyParamBound); } else { - self.span_err(copy self.span, + self.span_err(*self.span, ~"`&static` is the only permissible \ region bound here"); } - } else if is_ident(self.token) { - let maybe_bound = match self.token { + } else if is_ident(*self.token) { + let maybe_bound = match *self.token { token::IDENT(copy sid, _) => { match *self.id_to_str(sid) { @@ -2656,7 +2657,7 @@ pub impl Parser { | ~"copy" | ~"const" | ~"owned" => { - self.obsolete(copy self.span, + self.obsolete(*self.span, ObsoleteLowerCaseKindBounds); // Bogus value, but doesn't matter, since // is an error @@ -2687,8 +2688,8 @@ pub impl Parser { loop; } - if is_ident_or_path(self.token) { - self.obsolete(copy self.span, + 
if is_ident_or_path(*self.token) { + self.obsolete(*self.span, ObsoleteTraitBoundSeparator); } } @@ -2730,7 +2731,7 @@ pub impl Parser { } fn is_self_ident() -> bool { - match self.token { + match *self.token { token::IDENT(id, false) if id == special_idents::self_ => true, _ => false @@ -2740,7 +2741,7 @@ pub impl Parser { fn expect_self_ident() { if !self.is_self_ident() { self.fatal(fmt!("expected `self` but found `%s`", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } self.bump(); } @@ -2771,7 +2772,7 @@ pub impl Parser { // A bit of complexity and lookahead is needed here in order to to be // backwards compatible. let lo = self.span.lo; - let self_ty = match copy self.token { + let self_ty = match *self.token { token::BINOP(token::AND) => { maybe_parse_self_ty(sty_region, self) } @@ -2793,7 +2794,7 @@ pub impl Parser { // If we parsed a self type, expect a comma before the argument list. let args_or_capture_items; if self_ty != sty_by_ref { - match copy self.token { + match *self.token { token::COMMA => { self.bump(); let sep = seq_sep_trailing_disallowed(token::COMMA); @@ -2807,7 +2808,8 @@ pub impl Parser { } _ => { self.fatal(~"expected `,` or `)`, found `" + - token_to_str(self.reader, self.token) + ~"`"); + token_to_str(self.reader, *self.token) + + ~"`"); } } } else { @@ -2848,7 +2850,7 @@ pub impl Parser { let output = if self.eat(token::RARROW) { self.parse_ty(false) } else { - @Ty { id: self.get_id(), node: ty_infer, span: self.span } + @Ty { id: self.get_id(), node: ty_infer, span: *self.span } }; ast::fn_decl { @@ -2927,7 +2929,7 @@ pub impl Parser { // Parse traits, if necessary. let traits; - if self.token == token::COLON { + if *self.token == token::COLON { self.bump(); traits = self.parse_trait_ref_list(token::LBRACE); } else { @@ -2952,7 +2954,7 @@ pub impl Parser { // First, parse type parameters if necessary. let mut tps; - if self.token == token::LT { + if *self.token == token::LT { tps = self.parse_ty_params(); } else { tps = ~[]; @@ -2976,7 +2978,7 @@ pub impl Parser { }) } _ => { - self.span_err(copy self.span, ~"not a trait"); + self.span_err(*self.span, ~"not a trait"); None } }; @@ -2984,7 +2986,7 @@ pub impl Parser { ty = self.parse_ty(false); opt_trait_ref } else if self.eat(token::COLON) { - self.obsolete(copy self.span, ObsoleteImplSyntax); + self.obsolete(*self.span, ObsoleteImplSyntax); Some(self.parse_trait_ref()) } else { None @@ -3006,7 +3008,7 @@ pub impl Parser { // the return type of the ctor function. fn ident_to_path_tys(i: ident, typarams: ~[ty_param]) -> @path { - let s = self.last_span; + let s = *self.last_span; @ast::path { span: s, @@ -3024,7 +3026,7 @@ pub impl Parser { } fn ident_to_path(i: ident) -> @path { - @ast::path { span: self.last_span, + @ast::path { span: *self.last_span, global: false, idents: ~[i], rp: None, @@ -3049,7 +3051,7 @@ pub impl Parser { self.parse_region_param(); let ty_params = self.parse_ty_params(); if self.eat(token::COLON) { - self.obsolete(copy self.span, ObsoleteClassTraits); + self.obsolete(*self.span, ObsoleteClassTraits); let _ = self.parse_trait_ref_list(token::LBRACE); } @@ -3061,7 +3063,7 @@ pub impl Parser { // It's a record-like struct. 
is_tuple_like = false; fields = ~[]; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { match self.parse_class_item() { dtor_decl(ref blk, ref attrs, s) => { match the_dtor { @@ -3085,7 +3087,7 @@ pub impl Parser { } } self.bump(); - } else if self.token == token::LPAREN { + } else if *self.token == token::LPAREN { // It's a tuple-like struct. is_tuple_like = true; fields = do self.parse_unspanned_seq(token::LPAREN, token::RPAREN, @@ -3107,7 +3109,7 @@ pub impl Parser { } else { self.fatal(fmt!("expected `{`, `(`, or `;` after struct name \ but found `%s`", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } let actual_dtor = do the_dtor.map |dtor| { @@ -3137,13 +3139,13 @@ pub impl Parser { fn parse_single_class_item(vis: visibility) -> @struct_field { if self.eat_obsolete_ident("let") { - self.obsolete(copy self.last_span, ObsoleteLet); + self.obsolete(*self.last_span, ObsoleteLet); } let a_var = self.parse_instance_var(vis); - match self.token { + match *self.token { token::SEMI => { - self.obsolete(copy self.span, ObsoleteFieldTerminator); + self.obsolete(*self.span, ObsoleteFieldTerminator); self.bump(); } token::COMMA => { @@ -3151,11 +3153,11 @@ pub impl Parser { } token::RBRACE => {} _ => { - self.span_fatal(copy self.span, + self.span_fatal(*self.span, fmt!("expected `;`, `,`, or '}' but \ found `%s`", token_to_str(self.reader, - self.token))); + *self.token))); } } a_var @@ -3218,7 +3220,7 @@ pub impl Parser { let mut items: ~[@item] = starting_items; let mut first = true; - while self.token != term { + while *self.token != term { let mut attrs = self.parse_outer_attributes(); if first { attrs = vec::append(attrs_remaining, attrs); @@ -3235,7 +3237,7 @@ pub impl Parser { } _ => { self.fatal(~"expected item but found `" + - token_to_str(self.reader, self.token) + ~"`"); + token_to_str(self.reader, *self.token) + ~"`"); } } debug!("parse_mod_items: attrs=%?", attrs); @@ -3260,9 +3262,9 @@ pub impl Parser { } fn parse_item_mod(outer_attrs: ~[ast::attribute]) -> item_info { - let id_span = self.span; + let id_span = *self.span; let id = self.parse_ident(); - let info_ = if self.token == token::SEMI { + let info_ = if *self.token == token::SEMI { self.bump(); // This mod is in an external file. Let's go get it! 
let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span); @@ -3284,7 +3286,7 @@ pub impl Parser { match ::attr::first_attr_value_str_by_name(outer_attrs, ~"merge") { Some(path) => { let prefix = Path( - self.sess.cm.span_to_filename(copy self.span)); + self.sess.cm.span_to_filename(*self.span)); let prefix = prefix.dir_path(); let path = Path(copy *path); let (new_mod_item, new_attrs) = self.eval_src_mod_from_path( @@ -3329,9 +3331,9 @@ pub impl Parser { outer_attrs: ~[ast::attribute], id_sp: span) -> (ast::item_, ~[ast::attribute]) { - let prefix = Path(self.sess.cm.span_to_filename(copy self.span)); + let prefix = Path(self.sess.cm.span_to_filename(*self.span)); let prefix = prefix.dir_path(); - let mod_path = Path(".").push_many(self.mod_path_stack); + let mod_path = Path(".").push_many(*self.mod_path_stack); let default_path = self.sess.interner.get(id) + ~".rs"; let file_path = match ::attr::first_attr_value_str_by_name( outer_attrs, ~"path") { @@ -3448,7 +3450,7 @@ pub impl Parser { let mut items: ~[@foreign_item] = foreign_items; let mut initial_attrs = attrs_remaining; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { let attrs = vec::append(initial_attrs, self.parse_outer_attributes()); initial_attrs = ~[]; @@ -3470,7 +3472,7 @@ pub impl Parser { // Parse the ABI. let abi_opt; - match self.token { + match *self.token { token::LIT_STR(copy found_abi) => { self.bump(); abi_opt = Some(found_abi); @@ -3484,21 +3486,21 @@ pub impl Parser { if self.is_keyword(~"mod") { must_be_named_mod = true; self.expect_keyword(~"mod"); - } else if self.token != token::LBRACE { - self.span_fatal(copy self.span, + } else if *self.token != token::LBRACE { + self.span_fatal(*self.span, fmt!("expected `{` or `mod` but found %s", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } - let (sort, ident) = match self.token { + let (sort, ident) = match *self.token { token::IDENT(*) => (ast::named, self.parse_ident()), _ => { if must_be_named_mod { - self.span_fatal(copy self.span, + self.span_fatal(*self.span, fmt!("expected foreign module name but \ found %s", token_to_str(self.reader, - self.token))); + *self.token))); } (ast::anonymous, @@ -3526,7 +3528,7 @@ pub impl Parser { match abi_opt { None => {} // OK. Some(_) => { - self.span_err(copy self.span, ~"an ABI may not be specified \ + self.span_err(*self.span, ~"an ABI may not be specified \ here"); } } @@ -3567,7 +3569,7 @@ pub impl Parser { fn parse_struct_def() -> @struct_def { let mut the_dtor: Option<(blk, ~[attribute], codemap::span)> = None; let mut fields: ~[@struct_field] = ~[]; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { match self.parse_class_item() { dtor_decl(ref blk, ref attrs, s) => { match the_dtor { @@ -3613,7 +3615,7 @@ pub impl Parser { let mut all_nullary = true, have_disr = false; let mut common_fields = None; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { let variant_attrs = self.parse_outer_attributes(); let vlo = self.span.lo; @@ -3644,7 +3646,7 @@ pub impl Parser { // Parse a struct variant. 
all_nullary = false; kind = struct_variant_kind(self.parse_struct_def()); - } else if self.token == token::LPAREN { + } else if *self.token == token::LPAREN { all_nullary = false; let arg_tys = self.parse_unspanned_seq( token::LPAREN, token::RPAREN, @@ -3693,7 +3695,7 @@ pub impl Parser { self.parse_region_param(); let ty_params = self.parse_ty_params(); // Newtype syntax - if self.token == token::EQ { + if *self.token == token::EQ { self.bump(); let ty = self.parse_ty(false); self.expect(token::SEMI); @@ -3725,7 +3727,7 @@ pub impl Parser { } fn parse_fn_ty_sigil() -> Option { - match self.token { + match *self.token { token::AT => { self.bump(); Some(ManagedSigil) @@ -3854,7 +3856,7 @@ pub impl Parser { vis: visibility, span: mk_sp(lo, self.last_span.hi) }); - } else if macros_allowed && !self.is_any_keyword(copy self.token) + } else if macros_allowed && !self.is_any_keyword(*self.token) && self.look_ahead(1) == token::NOT && (is_plain_ident(self.look_ahead(2)) || self.look_ahead(2) == token::LPAREN @@ -3870,15 +3872,15 @@ pub impl Parser { // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax // and remove this. - let id = if is_plain_ident(self.token) { + let id = if is_plain_ident(*self.token) { self.parse_ident() } else { token::special_idents::invalid // no special identifier }; - let tts = match self.token { + let tts = match *self.token { token::LPAREN | token::LBRACE => { - let ket = token::flip_delimiter(copy self.token); - self.parse_unspanned_seq(copy self.token, ket, + let ket = token::flip_delimiter(*self.token); + self.parse_unspanned_seq(*self.token, ket, seq_sep_none(), |p| p.parse_token_tree()) } @@ -3896,7 +3898,7 @@ pub impl Parser { let mut s = ~"unmatched visibility `"; s += if visibility == public { ~"pub" } else { ~"priv" }; s += ~"`"; - self.span_fatal(copy self.last_span, s); + self.span_fatal(*self.last_span, s); } return iovi_none; }; @@ -3932,12 +3934,12 @@ pub impl Parser { let first_ident = self.parse_ident(); let mut path = ~[first_ident]; debug!("parsed view_path: %s", *self.id_to_str(first_ident)); - match self.token { + match *self.token { token::EQ => { // x = foo::bar self.bump(); path = ~[self.parse_ident()]; - while self.token == token::MOD_SEP { + while *self.token == token::MOD_SEP { self.bump(); let id = self.parse_ident(); path.push(id); @@ -3954,11 +3956,10 @@ pub impl Parser { token::MOD_SEP => { // foo::bar or foo::{a,b,c} or foo::* - while self.token == token::MOD_SEP { + while *self.token == token::MOD_SEP { self.bump(); - match copy self.token { - + match *self.token { token::IDENT(i, _) => { self.bump(); path.push(i); @@ -4009,7 +4010,7 @@ pub impl Parser { fn parse_view_paths() -> ~[@view_path] { let mut vp = ~[self.parse_view_path()]; - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); vp.push(self.parse_view_path()); } @@ -4019,7 +4020,7 @@ pub impl Parser { fn is_view_item() -> bool { let tok, next_tok; if !self.is_keyword(~"pub") && !self.is_keyword(~"priv") { - tok = self.token; + tok = *self.token; next_tok = self.look_ahead(1); } else { tok = self.look_ahead(1); @@ -4124,7 +4125,7 @@ pub impl Parser { } fn parse_str() -> @~str { - match copy self.token { + match *self.token { token::LIT_STR(s) => { self.bump(); self.id_to_str(s) } _ => self.fatal(~"expected string literal") } diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 4b13818974c30..41500d6a409a8 100644 --- a/src/libsyntax/util/interner.rs +++ 
b/src/libsyntax/util/interner.rs
@@ -18,7 +18,7 @@ use hashmap::linear::LinearMap;
 use dvec::DVec;
 pub struct Interner {
-    priv mut map: LinearMap,
+    priv map: @mut LinearMap,
     priv vect: DVec,
 }
@@ -26,7 +26,7 @@ pub struct Interner {
 pub impl Interner {
     static fn new() -> Interner {
         Interner {
-            map: LinearMap::new(),
+            map: @mut LinearMap::new(),
             vect: DVec(),
         }
     }
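
Note on the parser.rs hunks above: the mechanical change is that the parser's mutable state (token, span, last_span, restriction, mod_path_stack, and so on) now lives behind mutable boxes, so every read becomes an explicit dereference (*self.token, *self.span) instead of a bare field access, and the copy self.span forms disappear because the dereference already yields a value. The sketch below is a modern-Rust analogue of that pattern, not the 2013 code in this patch: Parser, Token, and Span are hypothetical stand-in types, and RefCell plays the role that the @mut boxes play here.

// Hedged sketch: stand-in types, modern Rust, illustrating the deref-read pattern only.
use std::cell::RefCell;

#[derive(Clone, Copy, PartialEq)]
enum Token { LParen, Semi }

#[derive(Clone, Copy, Debug)]
struct Span { lo: u32, hi: u32 }

struct Parser {
    // Mutable parser state behind shared cells, analogous to the boxed fields
    // this patch introduces; reads go through an explicit dereference.
    token: RefCell<Token>,
    span: RefCell<Span>,
}

impl Parser {
    fn at_semi(&self) -> bool {
        // Counterpart of the patched `*self.token == token::SEMI` comparisons.
        *self.token.borrow() == Token::Semi
    }

    fn current_span(&self) -> Span {
        // Span is Copy, so dereferencing the borrow yields a value; this is
        // the same reason the patch can drop the `copy self.span` forms.
        *self.span.borrow()
    }
}

fn main() {
    let p = Parser {
        token: RefCell::new(Token::LParen),
        span: RefCell::new(Span { lo: 0, hi: 4 }),
    };
    assert!(!p.at_semi());
    println!("current span: {:?}", p.current_span());
}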
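
And on the interner.rs change just above: the map field loses its inherited mut and instead holds a mutable box, so the interner can grow its table while being handed around as a shared, non-mut value. Below is a minimal modern-Rust sketch of that shape, using RefCell plus std's HashMap and Vec as hypothetical stand-ins for the old LinearMap and DVec; it is not the libsyntax code.

// Hedged sketch: interior mutability through &self, the shape the patch moves toward.
use std::cell::RefCell;
use std::collections::HashMap;

struct Interner {
    map: RefCell<HashMap<String, usize>>, // name -> index, mutable behind &self
    vect: RefCell<Vec<String>>,           // index -> name
}

impl Interner {
    fn new() -> Interner {
        Interner {
            map: RefCell::new(HashMap::new()),
            vect: RefCell::new(Vec::new()),
        }
    }

    // Interning mutates the boxed tables, not the Interner binding itself,
    // so callers only need a shared handle.
    fn intern(&self, name: &str) -> usize {
        if let Some(&idx) = self.map.borrow().get(name) {
            return idx;
        }
        let mut vect = self.vect.borrow_mut();
        let idx = vect.len();
        vect.push(name.to_string());
        self.map.borrow_mut().insert(name.to_string(), idx);
        idx
    }

    fn get(&self, idx: usize) -> String {
        self.vect.borrow()[idx].clone()
    }
}

fn main() {
    let interner = Interner::new();
    let a = interner.intern("foo");
    let b = interner.intern("foo");
    assert_eq!(a, b);
    assert_eq!(interner.get(a), "foo");
}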