From 5ef897a0a3b71804d23b371ec2cf51e9334ba071 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 12 Apr 2023 16:15:07 +0200 Subject: [PATCH 001/678] Add `super` and `ingot` keyword --- crates/hir/src/hir_def/mod.rs | 2 ++ crates/hir/src/hir_def/path.rs | 7 +++-- crates/hir/src/hir_def/use_tree.rs | 8 +++-- crates/hir/src/lower/path.rs | 20 ++++++------- crates/hir/src/lower/use_tree.rs | 4 ++- crates/parser2/src/ast/path.rs | 27 +++++++++++++++++ crates/parser2/src/ast/use_tree.rs | 18 +++++++++-- crates/parser2/src/parser/path.rs | 6 +++- crates/parser2/src/parser/use_tree.rs | 5 ++-- crates/parser2/src/syntax_kind.rs | 6 ++++ .../test_files/syntax_node/exprs/expr_path.fe | 3 ++ .../syntax_node/exprs/expr_path.snap | 30 +++++++++++++++++++ .../test_files/syntax_node/items/use.fe | 5 +++- .../test_files/syntax_node/items/use.snap | 27 +++++++++++++++-- crates/parser2/tests/syntax_node.rs | 1 + 15 files changed, 144 insertions(+), 25 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/exprs/expr_path.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/expr_path.snap diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index e2f3e0f4f6..af4f5f3939 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -35,6 +35,8 @@ pub struct IdentId { } impl IdentId { pub fn is_self(&self, db: &dyn HirDb) -> bool { + // TODO: Keyword should be prefilled in the database. + // ref: https://github.com/salsa-rs/salsa/pull/440 self.data(db) == "self" } } diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index 657f318903..718835ac0e 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -9,10 +9,13 @@ pub struct PathId { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum PathSegment { - /// `Normal Path` segment. - Ident(IdentId), + /// `ingot`. + Ingot, + /// `super`. + Super, /// `Self` segment. SelfTy, /// `self` segment. Self_, + Ident(IdentId), } diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index 42f468c14c..474a36a3e2 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -22,8 +22,12 @@ pub struct UseTreeId { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum UsePathSegment { Ident(IdentId), - /// `self`, - SelfPath, + /// `ingot`. + Ingot, + /// `super`. + Super, + /// `self`. + Self_, /// `*`. 
Glob, } diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 5428ee80a3..6d20ab9b76 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -8,17 +8,15 @@ impl PathId { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Path) -> Self { let mut segments = Vec::new(); for seg in ast.into_iter() { - let segment = if seg.is_self() { - Some(PathSegment::Self_) - } else if seg.is_self_ty() { - Some(PathSegment::SelfTy) - } else if let Some(ident) = seg.ident() { - Some(PathSegment::Ident(IdentId::new( - ctxt.db, - ident.text().to_string(), - ))) - } else { - None + let segment = match seg.kind() { + Some(ast::PathSegmentKind::Ingot(_)) => Some(PathSegment::Ingot), + Some(ast::PathSegmentKind::Super(_)) => Some(PathSegment::Super), + Some(ast::PathSegmentKind::SelfTy(_)) => Some(PathSegment::SelfTy), + Some(ast::PathSegmentKind::Self_(_)) => Some(PathSegment::Self_), + Some(ast::PathSegmentKind::Ident(ident)) => { + Some(PathSegment::Ident(IdentId::lower_token(ctxt, ident))) + } + None => None, } .into(); segments.push(segment); diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index fbba3b7cc4..4080154758 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -43,10 +43,12 @@ impl UsePathSegment { ) -> Partial { ast.kind() .map(|kind| match kind { + ast::UsePathSegmentKind::Ingot(_) => Self::Ingot, + ast::UsePathSegmentKind::Super(_) => Self::Super, ast::UsePathSegmentKind::Ident(ident) => { Self::Ident(IdentId::lower_token(ctxt, ident)) } - ast::UsePathSegmentKind::SelfPath(_) => Self::SelfPath, + ast::UsePathSegmentKind::Self_(_) => Self::Self_, ast::UsePathSegmentKind::Glob(_) => Self::Glob, }) .into() diff --git a/crates/parser2/src/ast/path.rs b/crates/parser2/src/ast/path.rs index f01c91d7fa..2b02a4723f 100644 --- a/crates/parser2/src/ast/path.rs +++ b/crates/parser2/src/ast/path.rs @@ -23,6 +23,19 @@ ast_node! { SK::PathSegment } impl PathSegment { + pub fn kind(&self) -> Option { + match self.syntax().first_child_or_token() { + Some(node) => match node.kind() { + SK::IngotKw => Some(PathSegmentKind::Ingot(node.into_token().unwrap())), + SK::SuperKw => Some(PathSegmentKind::Super(node.into_token().unwrap())), + SK::SelfTypeKw => Some(PathSegmentKind::SelfTy(node.into_token().unwrap())), + SK::SelfKw => Some(PathSegmentKind::Self_(node.into_token().unwrap())), + SK::Ident => Some(PathSegmentKind::Ident(node.into_token().unwrap())), + _ => None, + }, + _ => None, + } + } /// Returns the identifier of the segment. pub fn ident(&self) -> Option { support::token(self.syntax(), SK::Ident) @@ -39,6 +52,20 @@ impl PathSegment { } } +/// A path segment kind. 
+pub enum PathSegmentKind { + /// `ingot` + Ingot(SyntaxToken), + /// `super` + Super(SyntaxToken), + /// `Self` + SelfTy(SyntaxToken), + /// `self` + Self_(SyntaxToken), + /// `foo` + Ident(SyntaxToken), +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/parser2/src/ast/use_tree.rs b/crates/parser2/src/ast/use_tree.rs index bfb40c2cb0..975a0f9080 100644 --- a/crates/parser2/src/ast/use_tree.rs +++ b/crates/parser2/src/ast/use_tree.rs @@ -54,7 +54,9 @@ impl UsePathSegment { pub fn kind(&self) -> Option { match self.syntax().first_child_or_token() { Some(node) => match node.kind() { - SK::SelfKw => Some(UsePathSegmentKind::SelfPath(node.into_token().unwrap())), + SK::IngotKw => Some(UsePathSegmentKind::Ingot(node.into_token().unwrap())), + SK::SuperKw => Some(UsePathSegmentKind::Super(node.into_token().unwrap())), + SK::SelfKw => Some(UsePathSegmentKind::Self_(node.into_token().unwrap())), SK::Ident => Some(UsePathSegmentKind::Ident(node.into_token().unwrap())), SK::Star => Some(UsePathSegmentKind::Glob(node.into_token().unwrap())), _ => None, @@ -67,6 +69,14 @@ impl UsePathSegment { support::token(self.syntax(), SK::Ident) } + pub fn ingot_token(&self) -> Option { + support::token(self.syntax(), SK::IngotKw) + } + + pub fn super_token(&self) -> Option { + support::token(self.syntax(), SK::SuperKw) + } + pub fn self_token(&self) -> Option { support::token(self.syntax(), SK::SelfKw) } @@ -98,8 +108,12 @@ impl UseTreeAlias { /// A path segment in a use tree. pub enum UsePathSegmentKind { + /// `ingot` + Ingot(SyntaxToken), + /// `super` + Super(SyntaxToken), /// `self` - SelfPath(SyntaxToken), + Self_(SyntaxToken), /// `foo` Ident(SyntaxToken), /// `*` diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 278734966b..85c84edc5c 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -37,6 +37,10 @@ impl super::Parse for PathSegmentScope { pub(super) fn is_path_segment(kind: SyntaxKind) -> bool { matches!( kind, - SyntaxKind::SelfTypeKw | SyntaxKind::SelfKw | SyntaxKind::Ident + SyntaxKind::SelfTypeKw + | SyntaxKind::SelfKw + | SyntaxKind::IngotKw + | SyntaxKind::SuperKw + | SyntaxKind::Ident ) } diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs index b04f4f8c5d..82c4e1839f 100644 --- a/crates/parser2/src/parser/use_tree.rs +++ b/crates/parser2/src/parser/use_tree.rs @@ -1,6 +1,6 @@ use std::{cell::Cell, rc::Rc}; -use crate::SyntaxKind; +use crate::{parser::path::is_path_segment, SyntaxKind}; use super::{define_scope, token_stream::TokenStream, Parser}; @@ -144,6 +144,5 @@ impl super::Parse for UseTreeRenameScope { } fn is_use_path_segment(kind: SyntaxKind) -> bool { - use SyntaxKind::*; - matches!(kind, Ident | SelfKw | Star) + is_path_segment(kind) || matches!(kind, SyntaxKind::Star) } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 3605ab48a5..098a2f39fd 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -220,6 +220,12 @@ pub enum SyntaxKind { /// `unsafe` #[token("unsafe")] UnsafeKw, + /// `ingot` + #[token("ingot")] + IngotKw, + /// `super` + #[token("super")] + SuperKw, /// `<<` LShift, diff --git a/crates/parser2/test_files/syntax_node/exprs/expr_path.fe b/crates/parser2/test_files/syntax_node/exprs/expr_path.fe new file mode 100644 index 0000000000..acf6c7fa29 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/expr_path.fe @@ -0,0 +1,3 @@ +super::Foo +ingot::Bar +Self::Foo 
\ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/expr_path.snap b/crates/parser2/test_files/syntax_node/exprs/expr_path.snap new file mode 100644 index 0000000000..8b70fe1415 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/expr_path.snap @@ -0,0 +1,30 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/expr_path.fe +--- +Root@0..31 + PathExpr@0..10 + Path@0..10 + PathSegment@0..5 + SuperKw@0..5 "super" + Colon2@5..7 "::" + PathSegment@7..10 + Ident@7..10 "Foo" + Newline@10..11 "\n" + PathExpr@11..21 + Path@11..21 + PathSegment@11..16 + IngotKw@11..16 "ingot" + Colon2@16..18 "::" + PathSegment@18..21 + Ident@18..21 "Bar" + Newline@21..22 "\n" + PathExpr@22..31 + Path@22..31 + PathSegment@22..26 + SelfTypeKw@22..26 "Self" + Colon2@26..28 "::" + PathSegment@28..31 + Ident@28..31 "Foo" + diff --git a/crates/parser2/test_files/syntax_node/items/use.fe b/crates/parser2/test_files/syntax_node/items/use.fe index 5975bd9edd..bff0d3b26d 100644 --- a/crates/parser2/test_files/syntax_node/items/use.fe +++ b/crates/parser2/test_files/syntax_node/items/use.fe @@ -10,4 +10,7 @@ use Foo::{self, Bar as Bar1} use Foo::{self as self_, Bar::{Bar as _, Baz}, *} use {Foo::Bar as Bar1, Bar::Bar as Bar2, Baz::Bar as Bar3, Trait::T} -use * \ No newline at end of file +use * + +use super::* +use ingot::Foo diff --git a/crates/parser2/test_files/syntax_node/items/use.snap b/crates/parser2/test_files/syntax_node/items/use.snap index 4d5ab8d839..102ff6d924 100644 --- a/crates/parser2/test_files/syntax_node/items/use.snap +++ b/crates/parser2/test_files/syntax_node/items/use.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/use.fe --- -Root@0..278 - ItemList@0..278 +Root@0..308 + ItemList@0..308 Use@0..12 UseKw@0..3 "use" WhiteSpace@3..4 " " @@ -260,4 +260,27 @@ Root@0..278 UsePath@277..278 UsePathSegment@277..278 Star@277..278 "*" + Newline@278..280 "\n\n" + Use@280..292 + UseKw@280..283 "use" + WhiteSpace@283..284 " " + UseTree@284..292 + UsePath@284..292 + UsePathSegment@284..289 + SuperKw@284..289 "super" + Colon2@289..291 "::" + UsePathSegment@291..292 + Star@291..292 "*" + Newline@292..293 "\n" + Use@293..307 + UseKw@293..296 "use" + WhiteSpace@296..297 " " + UseTree@297..307 + UsePath@297..307 + UsePathSegment@297..302 + IngotKw@297..302 "ingot" + Colon2@302..304 "::" + UsePathSegment@304..307 + Ident@304..307 "Foo" + Newline@307..308 "\n" diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index c6068ef988..9d6a6ddbc3 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -38,6 +38,7 @@ fn test_stmt(fixture: Fixture<&str>) { #[dir_test( dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/exprs", glob: "*.fe" + postfix: "expr" )] fn test_expr(fixture: Fixture<&str>) { let runner = TestRunner::expr_list(true); From b7e039d05c0b13e7819546c3247fc0f28269e6c8 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Apr 2023 11:06:57 +0200 Subject: [PATCH 002/678] Initialize `hir-analysis` crate --- Cargo.lock | 9 +++++++++ crates/hir-analysis/Cargo.toml | 14 ++++++++++++++ crates/hir-analysis/src/lib.rs | 8 ++++++++ 3 files changed, 31 insertions(+) create mode 100644 crates/hir-analysis/Cargo.toml create mode 100644 crates/hir-analysis/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index ff870057ae..9fe5b28782 100644 --- 
a/Cargo.lock +++ b/Cargo.lock @@ -918,6 +918,15 @@ dependencies = [ "smallvec", ] +[[package]] +name = "fe-hir-analysis" +version = "0.20.0-alpha" +dependencies = [ + "fe-common", + "fe-hir", + "salsa-2022", +] + [[package]] name = "fe-library" version = "0.22.0" diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml new file mode 100644 index 0000000000..300396de7d --- /dev/null +++ b/crates/hir-analysis/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "fe-hir-analysis" +version = "0.20.0-alpha" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides HIR definition and lowering for Fe lang" + +[dependencies] +salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } + +hir = { path = "../hir", package = "fe-hir" } +common = { path = "../common", package = "fe-common" } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs new file mode 100644 index 0000000000..00a4d737fd --- /dev/null +++ b/crates/hir-analysis/src/lib.rs @@ -0,0 +1,8 @@ +use common::db::Upcast; +use hir::HirDb; + +#[salsa::jar(db = HirAnalysisDb)] +pub struct Jar(); + +pub trait HirAnalysisDb: salsa::DbWithJar + Upcast {} +impl HirAnalysisDb for DB where DB: ?Sized + salsa::DbWithJar + Upcast {} From 4cedcc0a1ade8ba3edd28b399c68cb0e3dc6c427 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 21 Apr 2022 14:55:12 +0200 Subject: [PATCH 003/678] Remove `ExternFunc` item from HIR --- crates/hir/src/hir_def/item.rs | 47 ++++++++++++--------------- crates/hir/src/hir_def/item_tree.rs | 2 +- crates/hir/src/hir_def/mod.rs | 14 ++++++++ crates/hir/src/hir_def/module_tree.rs | 15 +++++++++ crates/hir/src/hir_def/use_tree.rs | 4 +-- crates/hir/src/lib.rs | 1 - crates/hir/src/lower/item.rs | 47 ++++----------------------- crates/hir/src/lower/mod.rs | 1 + crates/hir/src/lower/use_tree.rs | 4 +-- crates/hir/src/span/item.rs | 19 ++--------- crates/hir/src/span/mod.rs | 8 ++--- crates/hir/src/span/transition.rs | 32 ++++++++---------- 12 files changed, 80 insertions(+), 114 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index b68750854c..651de6d09e 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -11,9 +11,9 @@ use crate::{ lower, span::{ item::{ - LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyExternFuncSpan, LazyFuncSpan, - LazyImplSpan, LazyImplTraitSpan, LazyModSpan, LazyStructSpan, LazyTopLevelModSpan, - LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, + LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFuncSpan, LazyImplSpan, + LazyImplTraitSpan, LazyModSpan, LazyStructSpan, LazyTopLevelModSpan, LazyTraitSpan, + LazyTypeAliasSpan, LazyUseSpan, }, HirOrigin, }, @@ -41,7 +41,6 @@ pub enum ItemKind { TopMod(TopLevelMod), Mod(Mod), Func(Func), - ExternFunc(ExternFunc), Struct(Struct), Contract(Contract), Enum(Enum), @@ -77,6 +76,20 @@ impl TopLevelMod { pub fn ingot_module_tree(self, db: &dyn HirDb) -> &ModuleTree { module_tree_impl(db, self.ingot(db)) } + + pub fn ingot_root(self, db: &dyn HirDb) -> TopLevelMod { + self.ingot_module_tree(db).root_data().top_mod + } + + pub fn parent(self, db: &dyn HirDb) -> Option { + let module_tree = self.ingot_module_tree(db); + module_tree.parent(self) + } + + pub fn children(self, db: &dyn HirDb) -> impl Iterator + '_ { + let module_tree = self.ingot_module_tree(db); + module_tree.children(self) + } } #[salsa::tracked] @@ -112,6 +125,7 @@ pub struct 
Func { pub ret_ty: Option, pub modifier: ItemModifier, pub body: Option, + pub is_extern: bool, pub top_mod: TopLevelMod, #[return_ref] @@ -123,27 +137,6 @@ impl Func { } } -#[salsa::tracked] -pub struct ExternFunc { - #[id] - id: TrackedItemId, - - pub name: Partial, - pub attributes: AttrListId, - pub params: Partial, - pub ret_ty: Option, - pub modifier: ItemModifier, - pub top_mod: TopLevelMod, - - #[return_ref] - pub(crate) origin: HirOrigin, -} -impl ExternFunc { - pub fn lazy_span(self) -> LazyExternFuncSpan { - LazyExternFuncSpan::new(self) - } -} - #[salsa::tracked] pub struct Struct { #[id] @@ -348,7 +341,7 @@ impl ItemModifier { #[salsa::interned] pub struct RecordFieldListId { #[return_ref] - pub fields: Vec, + pub data: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -361,7 +354,7 @@ pub struct RecordField { #[salsa::interned] pub struct EnumVariantListId { #[return_ref] - pub variants: Vec, + pub data: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs index 2b72cf5193..2751284c2e 100644 --- a/crates/hir/src/hir_def/item_tree.rs +++ b/crates/hir/src/hir_def/item_tree.rs @@ -76,7 +76,7 @@ mod tests { let foo_children: Vec<_> = item_tree.children(inner_items[0]).collect(); assert!(matches!(foo_children[0], ItemKind::Func(_))); - assert!(matches!(foo_children[1], ItemKind::ExternFunc(_))); + assert!(matches!(foo_children[1], ItemKind::Func(_))); let baz_children: Vec<_> = item_tree.children(inner_items[1]).collect(); assert!(matches!(baz_children[0], ItemKind::Struct(_))); diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index af4f5f3939..295250b6f8 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -5,6 +5,7 @@ pub mod item; pub mod params; pub mod pat; pub mod path; +pub mod scope_graph; pub mod stmt; pub mod types; pub mod use_tree; @@ -85,6 +86,10 @@ impl Partial { Self::Absent => panic!("unwrap called on absent value"), } } + + pub fn to_opt(self) -> Option { + self.into() + } } impl Default for Partial { @@ -102,3 +107,12 @@ impl From> for Partial { } } } + +impl Into> for Partial { + fn into(self) -> Option { + match self { + Self::Present(value) => Some(value), + Self::Absent => None, + } + } +} diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 821aba482d..f8c0f8d917 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -92,6 +92,21 @@ impl ModuleTree { pub fn all_modules(&self) -> impl Iterator + '_ { self.mod_map.keys().copied() } + + pub fn parent(&self, top_mod: TopLevelMod) -> Option { + let node = self.tree_node_data(top_mod); + node.parent.map(|id| self.module_tree[id].top_mod) + } + + pub fn children(&self, top_mod: TopLevelMod) -> impl Iterator + '_ { + self.tree_node_data(top_mod) + .children + .iter() + .map(move |&id| { + let node = &self.module_tree[id]; + node.top_mod + }) + } } /// Returns a module tree of the given ingot. The resulted tree only includes diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index 474a36a3e2..b025d47d88 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -16,7 +16,7 @@ pub struct UseTreeId { //// The alias of this use tree. 
/// `Bar` in `Foo as Bar;` - pub alias: Option>, + pub alias: Option>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -33,7 +33,7 @@ pub enum UsePathSegment { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum UseTreeAlias { +pub enum UseAlias { Ident(IdentId), Underscore, } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 9e87255b19..1d9b5c653c 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -18,7 +18,6 @@ pub struct Jar( hir_def::TopLevelMod, hir_def::Mod, hir_def::Func, - hir_def::ExternFunc, hir_def::Struct, hir_def::Contract, hir_def::Enum, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index ea0cecb45b..ddc80e5f55 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -21,7 +21,7 @@ pub(crate) fn lower_module_items( Mod::lower_ast(ctxt, id.clone(), mod_); } ast::ItemKind::Fn(fn_) => { - Func::lower_ast(ctxt, id.clone(), fn_); + Func::lower_ast(ctxt, id.clone(), fn_, false); } ast::ItemKind::Struct(struct_) => { Struct::lower_ast(ctxt, id.clone(), struct_); @@ -53,7 +53,7 @@ pub(crate) fn lower_module_items( ast::ItemKind::Extern(extern_) => { if let Some(extern_block) = extern_.extern_block() { for fn_ in extern_block { - ExternFunc::lower_ast(ctxt, id.clone(), fn_); + Func::lower_ast(ctxt, id.clone(), fn_, true); } } } @@ -88,6 +88,7 @@ impl Func { ctxt: &mut FileLowerCtxt<'_>, parent_id: TrackedItemId, ast: ast::Fn, + is_extern: bool, ) -> Self { ctxt.enter_scope(); @@ -123,6 +124,7 @@ impl Func { ret_ty, modifier, body, + is_extern, ctxt.top_mod, origin, ); @@ -280,7 +282,7 @@ impl Impl { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Func::lower_ast(ctxt, id.clone(), impl_item); + Func::lower_ast(ctxt, id.clone(), impl_item, false); } } @@ -317,7 +319,7 @@ impl Trait { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Func::lower_ast(ctxt, id.clone(), impl_item); + Func::lower_ast(ctxt, id.clone(), impl_item, false); } } @@ -356,7 +358,7 @@ impl ImplTrait { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Func::lower_ast(ctxt, id.clone(), impl_item); + Func::lower_ast(ctxt, id.clone(), impl_item, false); } } @@ -413,41 +415,6 @@ impl Use { } } -impl ExternFunc { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent: TrackedItemId, - ast: ast::Fn, - ) -> Self { - ctxt.enter_scope(); - - let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Extern.join(parent); - - let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); - let params = ast - .params() - .map(|params| FnParamListId::lower_ast(ctxt, params)) - .into(); - let ret_ty = ast.ret_ty().map(|ty| TypeId::lower_ast(ctxt, ty)); - let modifier = ItemModifier::lower_ast(ast.modifier()); - let origin = HirOrigin::raw(&ast); - - let extern_func = Self::new( - ctxt.db, - id, - name, - attributes, - params, - ret_ty, - modifier, - ctxt.top_mod, - origin, - ); - ctxt.leave_scope(extern_func) - } -} - impl ItemModifier { fn lower_ast(ast: Option) -> Self { let Some(ast) = ast else { diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 58b030434a..ac37b2bb5b 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -30,6 +30,7 @@ mod item; mod params; mod pat; mod path; +mod scope_builder; mod stmt; mod types; mod use_tree; diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index 4080154758..98f4b7a1f1 100644 --- 
a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -23,7 +23,7 @@ impl UseTreeId { }; let alias = ast .alias() - .map(|ast| UseTreeAlias::lower_ast_partial(ctxt, ast)); + .map(|ast| UseAlias::lower_ast_partial(ctxt, ast)); Self::new(ctxt.db, path, subtree, alias) } @@ -55,7 +55,7 @@ impl UsePathSegment { } } -impl UseTreeAlias { +impl UseAlias { pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, ast: ast::UseTreeAlias, diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 3e9acf9dd4..15636dcc65 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -1,8 +1,8 @@ use parser::ast; use crate::hir_def::{ - Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, - Trait, TypeAlias, Use, + Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, TypeAlias, + Use, }; use super::{ @@ -47,21 +47,6 @@ define_lazy_span_node!( } ); -define_lazy_span_node!( - LazyExternFuncSpan, - ast::Fn, - new(ExternFunc), - @token { - (name, name), - } - @node { - (attributes, attr_list, LazyAttrListSpan), - (modifier, modifier, LazyItemModifierSpan), - (params, params, LazyFnParamListSpan), - (ret_ty, ret_ty, LazyTypeSpan), - } -); - define_lazy_span_node!( LazyStructSpan, ast::Struct, diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index ee2eb55052..053b96c932 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -7,8 +7,8 @@ use common::diagnostics::Span; use crate::{ hir_def::{ - Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, - Trait, TypeAlias, Use, + Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, + TypeAlias, Use, }, lower::top_mod_ast, SpannedHirDb, @@ -57,10 +57,6 @@ pub fn func_ast(db: &dyn SpannedHirDb, item: Func) -> &HirOrigin { item.origin(db.upcast()) } -pub fn extern_func_ast(db: &dyn SpannedHirDb, item: ExternFunc) -> &HirOrigin { - item.origin(db.upcast()) -} - pub fn struct_ast(db: &dyn SpannedHirDb, item: Struct) -> &HirOrigin { item.origin(db.upcast()) } diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 2763449f3f..13902f52f5 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -8,17 +8,17 @@ use parser::{ use crate::{ hir_def::{ - Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, - Trait, TypeAlias, Use, + Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, + TypeAlias, Use, }, lower::{map_file_to_mod_impl, top_mod_ast}, SpannedHirDb, }; use super::{ - body_ast, const_ast, contract_ast, enum_ast, expr::ExprRoot, extern_func_ast, func_ast, - impl_ast, impl_trait_ast, mod_ast, pat::PatRoot, stmt::StmtRoot, struct_ast, trait_ast, - type_alias_ast, use_ast, AugAssignDesugared, DesugaredOrigin, HirOrigin, LazySpan, + body_ast, const_ast, contract_ast, enum_ast, expr::ExprRoot, func_ast, impl_ast, + impl_trait_ast, mod_ast, pat::PatRoot, stmt::StmtRoot, struct_ast, trait_ast, type_alias_ast, + use_ast, AugAssignDesugared, DesugaredOrigin, HirOrigin, LazySpan, }; /// This type represents function from the hir origin to another hir origin to @@ -51,7 +51,6 @@ pub(crate) enum ChainRoot { TopMod(TopLevelMod), Mod(Mod), Func(Func), - ExternFunc(ExternFunc), Struct(Struct), Contract(Contract), Enum(Enum), @@ -71,6 +70,7 @@ pub(crate) struct ResolvedOrigin { pub(crate) file: InputFile, pub(crate) 
kind: ResolvedOriginKind, } + impl ResolvedOrigin { pub(crate) fn new(file: InputFile, kind: ResolvedOriginKind) -> Self { Self { file, kind } @@ -94,17 +94,7 @@ impl ResolvedOrigin { ResolvedOrigin::new(top_mod.file(db.upcast()), kind) } -} - -pub(crate) enum ResolvedOriginKind { - Node(SyntaxNode), - Token(SyntaxToken), - Expanded(SyntaxNode), - Desugared(DesugaredOrigin), - None, -} -impl ResolvedOrigin { pub(crate) fn map(self, f: F) -> Self where F: FnOnce(SyntaxNode) -> Option, @@ -125,13 +115,20 @@ impl ResolvedOrigin { } } +pub(crate) enum ResolvedOriginKind { + Node(SyntaxNode), + Token(SyntaxToken), + Expanded(SyntaxNode), + Desugared(DesugaredOrigin), + None, +} + impl ChainInitiator for ChainRoot { fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { match self { Self::TopMod(top_mod) => top_mod.init(db), Self::Mod(mod_) => mod_.init(db), Self::Func(func) => func.init(db), - Self::ExternFunc(extern_func) => extern_func.init(db), Self::Struct(struct_) => struct_.init(db), Self::Contract(contract) => contract.init(db), Self::Enum(enum_) => enum_.init(db), @@ -222,7 +219,6 @@ macro_rules! impl_chain_root { impl_chain_root! { (Mod, mod_ast), (Func, func_ast), - (ExternFunc, extern_func_ast), (Struct, struct_ast), (Contract, contract_ast), (Enum, enum_ast), From d3c4cc047d429176f0cc4ad9bf146122f5465b94 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 21 Apr 2022 16:44:50 +0200 Subject: [PATCH 004/678] Define `ScopeGraph` --- crates/hir/src/hir_def/params.rs | 14 +- crates/hir/src/hir_def/scope_graph.rs | 180 ++++++++++++++++++++++++++ 2 files changed, 191 insertions(+), 3 deletions(-) create mode 100644 crates/hir/src/hir_def/scope_graph.rs diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 91ee3f36a0..b8a6736efa 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -1,4 +1,4 @@ -use crate::hir_def::TypeId; +use crate::{hir_def::TypeId, HirDb}; use super::{Body, IdentId, Partial, PathId}; @@ -11,13 +11,13 @@ pub struct GenericArgListId { #[salsa::interned] pub struct GenericParamListId { #[return_ref] - pub params: Vec, + pub data: Vec, } #[salsa::interned] pub struct FnParamListId { #[return_ref] - args: Vec, + pub data: Vec, } #[salsa::interned] @@ -31,6 +31,14 @@ pub enum GenericParam { Type(TypeGenericParam), Const(ConstGenericParam), } +impl GenericParam { + pub fn name(&self) -> Partial { + match self { + Self::Type(ty) => ty.name, + Self::Const(c) => c.name, + } + } +} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeGenericParam { diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs new file mode 100644 index 0000000000..6cebf18142 --- /dev/null +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -0,0 +1,180 @@ +use cranelift_entity::{entity_impl, PrimaryMap}; +use either::Either; + +use super::{IdentId, ItemKind, PathId, TopLevelMod, TraitRef, TypeId, UseAlias}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ScopeGraph { + pub top_mod: TopLevelMod, + pub scopes: PrimaryMap, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LocalScope { + pub kind: ScopeKind, + pub edges: Vec, +} + +impl LocalScope { + pub fn new(kind: ScopeKind) -> Self { + Self { + kind, + edges: vec![], + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ScopeKind { + Item(ItemKind), + GenericParam(usize), + FnParam(usize), + Field(usize), + Variant(usize), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct 
ScopeEdge { + pub dest: Either, + pub kind: EdgeKind, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +pub enum EdgeKind { + Lex(LexEdge), + Mod(ModEdge), + GlobUse(GlobUseEdge), + Use(UseEdge), + Type(TypeEdge), + Trait(TraitEdge), + GenericParam(GenericParamEdge), + Value(ValueEdge), + Field(FieldEdge), + Variant(VariantEdge), + Super(SuperEdge), + Ingot(IngotEdge), + Self_(SelfEdge), + SelfTy(SelfTyEdge), + Anon(AnonEdge), +} + +impl EdgeKind { + pub fn lex() -> Self { + EdgeKind::Lex(LexEdge()) + } + + pub fn mod_(ident: IdentId) -> Self { + EdgeKind::Mod(ident.into()) + } + + pub fn glob_use(path: PathId, alias: UseAlias) -> Self { + EdgeKind::GlobUse(GlobUseEdge { path, alias }) + } + + pub fn use_(path: PathId, alias: UseAlias) -> Self { + EdgeKind::Use(UseEdge { path, alias }) + } + + pub fn type_(ident: IdentId) -> Self { + EdgeKind::Type(ident.into()) + } + + pub fn trait_(ident: IdentId) -> Self { + EdgeKind::Trait(ident.into()) + } + + pub fn generic_param(ident: IdentId) -> Self { + EdgeKind::GenericParam(ident.into()) + } + + pub fn value(ident: IdentId) -> Self { + EdgeKind::Value(ident.into()) + } + + pub fn field(ident: IdentId) -> Self { + EdgeKind::Field(ident.into()) + } + + pub fn variant(ident: IdentId) -> Self { + EdgeKind::Variant(ident.into()) + } + + pub fn super_() -> Self { + EdgeKind::Super(SuperEdge()) + } + + pub fn ingot() -> Self { + EdgeKind::Ingot(IngotEdge()) + } + + pub fn self_ty(ty: Either) -> Self { + EdgeKind::SelfTy(ty.into()) + } + + pub fn self_() -> Self { + EdgeKind::Self_(SelfEdge()) + } + + pub fn anon() -> Self { + EdgeKind::Anon(AnonEdge()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct LexEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct ModEdge(IdentId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct GlobUseEdge { + /// `UsePathSegment` are lowered to a normal `Path`. + path: PathId, + alias: UseAlias, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct UseEdge { + /// `UsePathSegment` are lowered to a normal `Path`. 
+ path: PathId, + alias: UseAlias, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct TypeEdge(IdentId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct TraitEdge(IdentId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct ValueEdge(IdentId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct GenericParamEdge(IdentId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct FieldEdge(IdentId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct VariantEdge(IdentId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct SuperEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct IngotEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct SelfTyEdge { + ty: Either, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub struct SelfEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct AnonEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct LocalScopeId(u32); +entity_impl!(LocalScopeId); From e4556416c8c24800bc2c1dc3053fdf46b860cbc0 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 28 Dec 2022 23:15:00 +0100 Subject: [PATCH 005/678] Define `SyntaxKind` --- Cargo.lock | 223 +++++++++++++++++++++++++ crates/parser2/Cargo.toml | 13 ++ crates/parser2/src/lib.rs | 1 + crates/parser2/src/syntax_kind.rs | 261 ++++++++++++++++++++++++++++++ 4 files changed, 498 insertions(+) create mode 100644 crates/parser2/Cargo.toml create mode 100644 crates/parser2/src/lib.rs create mode 100644 crates/parser2/src/syntax_kind.rs diff --git a/Cargo.lock b/Cargo.lock index 4fba960cdc..d934ba3c35 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -102,9 +102,15 @@ dependencies = [ [[package]] name = "bumpalo" +<<<<<<< HEAD version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +======= +version = "3.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" +>>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "byte-slice-cast" @@ -120,6 +126,7 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" +<<<<<<< HEAD version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" @@ -129,6 +136,17 @@ name = "camino" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055" +======= +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" + +[[package]] +name = "camino" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e" +>>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "cast" @@ -138,9 +156,15 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" +<<<<<<< HEAD version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +======= +version = "1.0.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +>>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "cfg-if" @@ -243,9 +267,21 @@ dependencies = [ ] [[package]] +<<<<<<< HEAD name = "cpufeatures" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" +======= +name = "countme" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" + +[[package]] +name = "cpufeatures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +>>>>>>> b04a24a3 (Define `SyntaxKind`) checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", @@ -317,7 +353,11 @@ dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", +<<<<<<< HEAD "memoffset", +======= + "memoffset 0.7.1", +>>>>>>> b04a24a3 (Define `SyntaxKind`) "scopeguard", ] @@ -419,6 +459,7 @@ checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" [[package]] name = "either" +<<<<<<< HEAD version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" @@ -428,6 +469,17 @@ name = "ethabi" version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" +======= +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" + +[[package]] +name = "ethabi" +version = "17.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4966fba78396ff92db3b817ee71143eccd98acf0f876b8d600e585a670c5d1b" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "ethereum-types", "hex", @@ -734,6 +786,13 @@ dependencies = [ "wasm-bindgen-test", ] +[[package]] +name = "fe-parser2" +version = "0.20.0-alpha" +dependencies = [ + "rowan", +] + [[package]] name = "fe-test-files" version = "0.20.0-alpha" @@ -983,9 +1042,15 @@ dependencies = [ [[package]] name = "insta" +<<<<<<< HEAD version = "1.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff" +======= +version = "1.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb5686bd8e9239eabe90bb30a0c341bffd6fdc177fb556708f2cb792bf00352d" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "lazy_static", "linked-hash-map", @@ -1025,9 +1090,15 @@ checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" [[package]] name = "js-sys" +<<<<<<< HEAD version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +======= +version = "0.3.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "wasm-bindgen", ] @@ -1117,12 +1188,21 @@ dependencies = [ ] [[package]] +<<<<<<< HEAD name = "nom8" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" dependencies = [ "memchr", +======= +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg", +>>>>>>> b04a24a3 (Define `SyntaxKind`) ] [[package]] @@ -1194,9 +1274,15 @@ dependencies = [ [[package]] name = "parity-scale-codec" +<<<<<<< HEAD version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3840933452adf7b3b9145e27086a5a3376c619dca1a21b1e5a5af0d54979bed" +======= +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "366e44391a8af4cfd6002ef6ba072bae071a96aafca98d7d448a34c5dca38b6a" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "arrayvec", "bitvec", @@ -1208,9 +1294,15 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" +<<<<<<< HEAD version = "3.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" +======= +version = "3.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9299338969a3d2f491d65f140b00ddec470858402f888af98e8642fb5e8965cd" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "proc-macro-crate", "proc-macro2", @@ -1316,12 +1408,22 @@ dependencies = [ [[package]] name = "proc-macro-crate" +<<<<<<< HEAD version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" dependencies = [ "once_cell", "toml_edit", +======= +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" +dependencies = [ + "once_cell", + "thiserror", + "toml", +>>>>>>> b04a24a3 (Define `SyntaxKind`) ] [[package]] @@ -1350,9 +1452,15 @@ dependencies = [ [[package]] name = "proc-macro2" +<<<<<<< HEAD version = "1.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" +======= +version = "1.0.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "unicode-ident", ] @@ -1487,9 +1595,15 @@ dependencies = [ [[package]] name = "rayon-core" +<<<<<<< HEAD version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" +======= +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -1505,9 +1619,15 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "regex" +<<<<<<< HEAD version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +======= +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "aho-corasick", 
"memchr", @@ -1559,6 +1679,19 @@ dependencies = [ "serde", ] +[[package]] +name = "rowan" +version = "0.15.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5811547e7ba31e903fe48c8ceab10d40d70a101f3d15523c847cce91aa71f332" +dependencies = [ + "countme", + "hashbrown", + "memoffset 0.6.5", + "rustc-hash", + "text-size", +] + [[package]] name = "rstest" version = "0.6.4" @@ -1732,9 +1865,15 @@ dependencies = [ [[package]] name = "serde_json" +<<<<<<< HEAD version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7434af0dc1cbd59268aa98b4c22c131c0584d2232f6fb166efb993e2832e896a" +======= +version = "1.0.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "itoa 1.0.5", "ryu", @@ -1774,9 +1913,15 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "smol_str" +<<<<<<< HEAD version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" +======= +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "serde", ] @@ -1843,13 +1988,25 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "termcolor" +<<<<<<< HEAD version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +======= +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "winapi-util", ] +[[package]] +name = "text-size" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" + [[package]] name = "textwrap" version = "0.11.0" @@ -1906,9 +2063,15 @@ dependencies = [ [[package]] name = "toml" +<<<<<<< HEAD version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +======= +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "serde", ] @@ -1972,9 +2135,15 @@ checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" [[package]] name = "unicode-segmentation" +<<<<<<< HEAD version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +======= +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +>>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "unicode-width" @@ -2031,9 +2200,15 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" +<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +======= +version = "0.2.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -2041,9 +2216,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" +<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +======= +version = "0.2.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "bumpalo", "log", @@ -2056,9 +2237,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" +<<<<<<< HEAD version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +======= +version = "0.4.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23639446165ca5a5de86ae1d8896b737ae80319560fbaa4c2887b7da6e7ebd7d" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -2068,9 +2255,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" +<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +======= +version = "0.2.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2078,9 +2271,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" +<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +======= +version = "0.2.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "proc-macro2", "quote", @@ -2091,6 +2290,7 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" +<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" @@ -2100,6 +2300,17 @@ name = "wasm-bindgen-test" version = "0.3.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db36fc0f9fb209e88fb3642590ae0205bb5a56216dabd963ba15879fe53a30b" +======= +version = "0.2.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" + +[[package]] +name = "wasm-bindgen-test" +version = "0.3.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d2fff962180c3fadf677438054b1db62bee4aa32af26a45388af07d1287e1d" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "console_error_panic_hook", "js-sys", @@ -2111,9 +2322,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" +<<<<<<< HEAD version = "0.3.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0734759ae6b3b1717d661fe4f016efcfb9828f5edb4520c18eaee05af3b43be9" +======= +version = "0.3.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4683da3dfc016f704c9f82cf401520c4f1cb3ee440f7f52b3d6ac29506a49ca7" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "proc-macro2", "quote", @@ -2121,9 +2338,15 @@ dependencies = [ [[package]] name = "web-sys" +<<<<<<< HEAD version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +======= +version = "0.3.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" +>>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "js-sys", "wasm-bindgen", diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml new file mode 100644 index 0000000000..79174c1cd2 --- /dev/null +++ b/crates/parser2/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "fe-parser2" +version = "0.20.0-alpha" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Parser lib for Fe." + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +rowan = "0.15.10" \ No newline at end of file diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs new file mode 100644 index 0000000000..c4e67bc5cb --- /dev/null +++ b/crates/parser2/src/lib.rs @@ -0,0 +1 @@ +pub mod syntax_kind; diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs new file mode 100644 index 0000000000..90e3bad2ab --- /dev/null +++ b/crates/parser2/src/syntax_kind.rs @@ -0,0 +1,261 @@ +//! This module contains the definition of the `SyntaxKind`. + +/// The definition of the `SyntaxKind'. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[repr(u16)] +pub enum SyntaxKind { + // Atom kinds. These are leaf nodes. + Error = 0, + Newline, + WhiteSpace, + EOF, + /// `foo` + Ident, + /// `1` + Int, + /// "MyString" + String, + + /// `(` + LParen, + /// `)` + RParen, + /// `{` + LBrace, + /// `}` + RBrace, + /// `[` + LBracket, + /// `]` + RBracket, + /// `:` + Colon, + /// `::` + Colon2, + /// `;` + SemiColon, + /// `.` + Dot, + /// `..` + Dot2, + /// `,` + Comma, + /// `->` + Arrow, + /// `=>` + FatArrow, + /// `_` + UnderScore, + /// `#` + Pound, + /// `// Comment` + Comment, + + /// `+` + Plus, + /// `-` + Minus, + /// `*` + Star, + /// `/` + Slash, + /// `%` + Percent, + /// `&` + Amp, + /// `&&` + Amp2, + /// `|` + Pipe, + /// `||` + Pipe2, + /// `<` + Lt, + /// `<<` + Lt2, + /// `<=` + LtEq, + /// `<<=` + Lt2Eq, + /// `<` + Gt, + /// `<<` + Gt2, + /// `<=` + GtEq, + /// `<<=` + Gt2Eq, + /// `=` + Eq, + /// `==` + Eq2, + /// `!=` + NonEq, + + /// `true' + TrueKw, + /// `false` + FalseKw, + /// `assert` + AssertKw, + /// `break` + BreakKw, + /// `continue` + ContinueKw, + /// `contract` + ContractKw, + /// `fn` + FnKw, + /// `const` + ConstKw, + /// `if` + IfKw, + /// `else` + ElseKw, + /// `match` + MatchKw, + /// `for` + ForKw, + /// `while` + WhileKw, + /// `pub` + PubKw, + /// `return` + ReturnKw, + /// `revert` + RevertKw, + /// `self` + SelfKw, + /// `struct` + StructKw, + /// `enum` + EnumKw, + /// `trait` + TraitKw, + /// `impl` + ImplKw, + /// `type` + TypeKw, + /// `let` + LetKw, + /// `mut` + MutKw, + /// `use` + UseKw, + /// `extern` + ExternKw, + + // Expressions. 
These are non-leaf nodes. + /// `x + 1` + BinExpr, + /// `!x` + UnExpr, + /// `foo(x, y)` + CallExpr, + /// `foo.bar(x, y)` + MethodCallExpr, + /// `foo.bar` + FieldExpr, + /// `foo[1]` + IndexExpr, + /// `(x ,y)` + TupleExpr, + /// `[x; 1]` + ArrayExpr, + /// `1` + LiteralExpr, + /// `if x { 1 } else { 2 }` + IfExpr, + /// `match x { pat => { .. } }` + MatchExpr, + + // Statements. These are non-leaf nodes. + /// `let x = 1` + LetStmt, + /// `for x in y {..}` + ForStmt, + /// `assert x == 2` + AssertStmt, + /// `return 1` + ReturnStmt, + /// `1` + ExprStmt, + + // Patterns. These are non-leaf nodes. + /// `_` + WildCardPat, + /// `..` + RestPat, + /// `x` + LiteralPat, + /// `(x, y)` + TuplePat, + /// `Enum::Variant` + PathPat, + /// `Enum::Variant(x, y)` + PathTuplePat, + + // MatchArms. + // `pat => { stmtlist }` + MatchArm, + MatchArmList, + + // Items. These are non-leaf nodes. + /// `fn foo(x: i32) -> i32 { .. }` + Fn, + + /// `struct Foo { .. }` + Struct, + /// `x: i32` + FieldDef, + FieldDefList, + + /// `contract Foo { .. }` + ContractDef, + + /// `(i32, u32)` + Tuple, + + /// `enum Foo { .. }` + Enum, + VariantDef, + VariantDefList, + + /// `type Foo = i32` + TypeAlias, + + /// `trait Foo { .. }` + Trait, + + /// `impl Trait for Foo { .. }` + TraitImpl, + + /// `const FOO: i32 = 1` + Const, + + /// `use foo::bar` + Use, + + /// `extern { .. }` + Extern, + + // Paths. These are non-leaf nodes. + /// `Segment1::Segment2` + Path, + /// `Segment1` + PathSegment, + + /// `#attr` + Attr, + AttrList, + ItemList, + StmtList, + + /// `(x, y)` + CallArgList, + /// `` + GenericParamList, + + /// `pub` + Visibility, +} From 1dc14c7cf82617b755958b6aa3b547f2d8cb1061 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 31 Dec 2022 15:16:24 +0100 Subject: [PATCH 006/678] Implement `BackTrackableTokenStream` --- crates/parser2/src/lib.rs | 3 + crates/parser2/src/parser/mod.rs | 106 ++++++++++++++++++++++++++++++ crates/parser2/src/syntax_kind.rs | 32 +++++---- 3 files changed, 124 insertions(+), 17 deletions(-) create mode 100644 crates/parser2/src/parser/mod.rs diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index c4e67bc5cb..237d6929b6 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -1 +1,4 @@ +pub mod parser; pub mod syntax_kind; + +pub use syntax_kind::SyntaxKind; diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs new file mode 100644 index 0000000000..0b03ec3ad2 --- /dev/null +++ b/crates/parser2/src/parser/mod.rs @@ -0,0 +1,106 @@ +use crate::SyntaxKind; + +/// This trait works as an abstraction layer to encapsulate the differences +/// between input sources. There are mainly two types of input sources, +/// 1. text in source file and +/// 2. tokens stream produced by procedural macros. +pub trait TokenStream { + type Token: SyntaxToken; + + /// Returns the next token in the stream. + fn next(&mut self) -> Option; + + /// Returns the next token in the stream without consuming it. + fn peek(&mut self) -> Option<&Self::Token>; +} + +/// This trait represents a single token in the token stream. +pub trait SyntaxToken: Clone { + /// Returns `SyntaxKind` of the token. + fn syntax_kind(&self) -> SyntaxKind; + + /// Returns raw text of the token. + fn text(&self) -> &str; +} + +/// This struct is a thin wrapper around `TokenStream` which allows the parser +/// to backtrack. +pub struct BackTrackableTokenStream { + stream: T, + /// Backtrack buffer which stores tokens that have been already consumed. 
+ bt_buffer: Vec, + bt_points: Vec, + /// Points to the current position of the backtrack buffer. + bt_cursor: Option, +} + +impl BackTrackableTokenStream { + /// Creates a new `BackTrackableTokenStream` from the given `TokenStream`. + pub fn new(stream: T) -> Self { + Self { + stream, + bt_buffer: Vec::new(), + bt_points: Vec::new(), + bt_cursor: None, + } + } + + /// Returns the next token in the stream. + pub fn next(&mut self) -> Option { + if let Some(cursor) = self.bt_cursor { + if cursor < self.bt_buffer.len() { + let token = self.bt_buffer.get(cursor).cloned(); + self.bt_cursor = Some(cursor + 1); + return token; + } + } + + let token = self.stream.next()?; + if self.has_bt_point() { + self.bt_buffer.push(token.clone()); + } + if let Some(cursor) = self.bt_cursor { + self.bt_cursor = Some(cursor + 1); + } + Some(token) + } + + /// Returns the next token in the stream without consuming it. + pub fn peek(&mut self) -> Option<&T::Token> { + if let Some(cursor) = self.bt_cursor { + if cursor < self.bt_buffer.len() { + return self.bt_buffer.get(cursor); + } + } + + self.stream.peek() + } + + /// Set a backtrack point which allows the parser to backtrack to this + /// point. + pub fn set_bt_point(&mut self) { + self.bt_points.push(self.bt_buffer.len()); + } + + /// Remove the last resume points. + pub fn complete(&mut self) { + self.bt_cursor = None; + if !self.has_bt_point() { + self.bt_buffer.clear(); + } + } + + /// Backtracks the stream by one token. + /// + /// # Panics + /// Panics if the `set_bt_point` method has not been called before. + pub fn backtrack(&mut self) { + debug_assert!(self.has_bt_point(), "backtrack without `bt_point`"); + self.bt_cursor = Some(self.bt_points.pop().unwrap()); + } + + /// Returns `true` if the stream has a backtrack point. + pub fn has_bt_point(&mut self) -> bool { + !self.bt_points.is_empty() + } +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 90e3bad2ab..ea4113d1af 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -152,6 +152,12 @@ pub enum SyntaxKind { UnExpr, /// `foo(x, y)` CallExpr, + /// `(x, y)` + CallArgList, + /// `` + CallTypeArgList, + /// `FOO::Bar` + PathExpr, /// `foo.bar(x, y)` MethodCallExpr, /// `foo.bar` @@ -180,6 +186,7 @@ pub enum SyntaxKind { ReturnStmt, /// `1` ExprStmt, + StmtList, // Patterns. These are non-leaf nodes. /// `_` @@ -203,41 +210,39 @@ pub enum SyntaxKind { // Items. These are non-leaf nodes. /// `fn foo(x: i32) -> i32 { .. }` Fn, - /// `struct Foo { .. }` Struct, /// `x: i32` FieldDef, FieldDefList, - /// `contract Foo { .. }` ContractDef, - /// `(i32, u32)` Tuple, - /// `enum Foo { .. }` Enum, VariantDef, VariantDefList, - /// `type Foo = i32` TypeAlias, - + /// `impl Foo { .. }` + Impl, /// `trait Foo { .. }` Trait, - /// `impl Trait for Foo { .. }` TraitImpl, - + /// `T` + /// `T: Trait` + TypeBound, + /// `` + GenericParamList, /// `const FOO: i32 = 1` Const, - /// `use foo::bar` Use, - /// `extern { .. }` Extern, + ItemList, // Paths. These are non-leaf nodes. 
/// `Segment1::Segment2` @@ -248,13 +253,6 @@ pub enum SyntaxKind { /// `#attr` Attr, AttrList, - ItemList, - StmtList, - - /// `(x, y)` - CallArgList, - /// `` - GenericParamList, /// `pub` Visibility, From 33844504b287fe1e1aa74ebcd228c70196a21e81 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 31 Dec 2022 16:20:35 +0100 Subject: [PATCH 007/678] Add `Lexer` --- Cargo.lock | 1 + crates/parser2/Cargo.toml | 3 +- crates/parser2/src/lexer.rs | 48 +++++++++++++++++ crates/parser2/src/lib.rs | 1 + crates/parser2/src/parser/mod.rs | 4 +- crates/parser2/src/syntax_kind.rs | 90 ++++++++++++++++++++++++++----- 6 files changed, 132 insertions(+), 15 deletions(-) create mode 100644 crates/parser2/src/lexer.rs diff --git a/Cargo.lock b/Cargo.lock index d934ba3c35..548fe4d907 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -790,6 +790,7 @@ dependencies = [ name = "fe-parser2" version = "0.20.0-alpha" dependencies = [ + "logos", "rowan", ] diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index 79174c1cd2..18d5ec9fa5 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -10,4 +10,5 @@ description = "Parser lib for Fe." # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -rowan = "0.15.10" \ No newline at end of file +rowan = "0.15.10" +logos = "0.12.1" \ No newline at end of file diff --git a/crates/parser2/src/lexer.rs b/crates/parser2/src/lexer.rs new file mode 100644 index 0000000000..d9cda71042 --- /dev/null +++ b/crates/parser2/src/lexer.rs @@ -0,0 +1,48 @@ +use crate::{ + parser::{SyntaxToken, TokenStream}, + SyntaxKind, +}; + +pub struct Lexer<'s> { + peek: Option>, + inner: logos::Lexer<'s, SyntaxKind>, +} + +impl<'s> TokenStream for Lexer<'s> { + type Token = Token<'s>; + + fn next(&mut self) -> Option { + if let Some(token) = self.peek.take() { + return Some(token); + } + + let syntax_kind = self.inner.next()?; + Some(Token { + syntax_kind, + text: self.inner.slice(), + }) + } + + fn peek(&mut self) -> Option<&Self::Token> { + if !self.peek.is_some() { + self.peek = self.next(); + } + self.peek.as_ref() + } +} + +#[derive(Clone)] +pub struct Token<'s> { + syntax_kind: SyntaxKind, + text: &'s str, +} + +impl<'s> SyntaxToken for Token<'s> { + fn syntax_kind(&self) -> SyntaxKind { + self.syntax_kind + } + + fn text(&self) -> &str { + self.text + } +} diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index 237d6929b6..85bf6a84ea 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -1,3 +1,4 @@ +pub mod lexer; pub mod parser; pub mod syntax_kind; diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 0b03ec3ad2..ed1daf687c 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -2,7 +2,7 @@ use crate::SyntaxKind; /// This trait works as an abstraction layer to encapsulate the differences /// between input sources. There are mainly two types of input sources, -/// 1. text in source file and +/// 1. text in source file /// 2. tokens stream produced by procedural macros. pub trait TokenStream { type Token: SyntaxToken; @@ -90,7 +90,7 @@ impl BackTrackableTokenStream { } } - /// Backtracks the stream by one token. + /// Backtracks to the last backtrack point. /// /// # Panics /// Panics if the `set_bt_point` method has not been called before. 
diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index ea4113d1af..d3d75930dc 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -1,148 +1,214 @@ //! This module contains the definition of the `SyntaxKind`. +use logos::Logos; + /// The definition of the `SyntaxKind'. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Logos)] #[repr(u16)] pub enum SyntaxKind { // Atom kinds. These are leaf nodes. + #[error] Error = 0, + #[regex(r"\n[ \t]*")] Newline, + #[regex(r"[ \s\t]")] WhiteSpace, - EOF, /// `foo` + #[regex("[a-zA-Z_][a-zA-Z0-9_]*")] Ident, - /// `1` + /// `1` or `0b1010` or `0o77` or `0xff` + #[regex("[0-9]+(?:_[0-9]+)*")] + #[regex("0[bB][0-1]+")] + #[regex("0[oO][0-7]+")] + #[regex("0[xX][0-9a-fA-F]+")] Int, /// "MyString" + #[regex(r#""([^"\\]|\\.)*""#)] String, - /// `(` + #[token("(")] LParen, /// `)` + #[token(")")] RParen, /// `{` + #[token("{")] LBrace, /// `}` + #[token("}")] RBrace, /// `[` + #[token("[")] LBracket, /// `]` + #[token("]")] RBracket, /// `:` + #[token(":")] Colon, /// `::` + #[token("::")] Colon2, /// `;` + #[token(";")] SemiColon, /// `.` + #[token(".")] Dot, /// `..` + #[token("..")] Dot2, /// `,` + #[token(",")] Comma, /// `->` + #[token("->")] Arrow, /// `=>` + #[token("=>")] FatArrow, /// `_` + #[token("_")] UnderScore, /// `#` + #[token("#")] Pound, /// `// Comment` + #[regex(r"//[^\n]*")] Comment, /// `+` + #[token("+")] Plus, /// `-` + #[token("-")] Minus, /// `*` + #[token("*")] Star, /// `/` + #[token("/")] Slash, /// `%` + #[token("%")] Percent, /// `&` + #[token("&")] Amp, /// `&&` + #[token("&&")] Amp2, /// `|` + #[token("|")] Pipe, /// `||` + #[token("||")] Pipe2, /// `<` + #[token("<")] Lt, /// `<<` + #[token("<<")] Lt2, /// `<=` + #[token("<=")] LtEq, /// `<<=` + #[token("<<=")] Lt2Eq, - /// `<` + /// `>` + #[token(">")] Gt, - /// `<<` + /// `>>` + #[token(">>")] Gt2, - /// `<=` + /// `>=` + #[token(">=")] GtEq, - /// `<<=` + /// `>>=` + #[token(">>=")] Gt2Eq, /// `=` + #[token("=")] Eq, /// `==` + #[token("==")] Eq2, /// `!=` + #[token("!=")] NonEq, /// `true' + #[token("true")] TrueKw, /// `false` + #[token("false")] FalseKw, - /// `assert` - AssertKw, /// `break` + #[token("break")] BreakKw, /// `continue` + #[token("continue")] ContinueKw, /// `contract` + #[token("contract")] ContractKw, /// `fn` + #[token("fn")] FnKw, /// `const` + #[token("const")] ConstKw, /// `if` + #[token("if")] IfKw, /// `else` + #[token("else")] ElseKw, /// `match` + #[token("match")] MatchKw, /// `for` + #[token("for")] ForKw, /// `while` + #[token("while")] WhileKw, /// `pub` + #[token("pub")] PubKw, /// `return` + #[token("return")] ReturnKw, - /// `revert` - RevertKw, /// `self` + #[token("self")] SelfKw, /// `struct` + #[token("struct")] StructKw, /// `enum` + #[token("enum")] EnumKw, /// `trait` + #[token("trait")] TraitKw, /// `impl` + #[token("impl")] ImplKw, /// `type` + #[token("type")] TypeKw, /// `let` + #[token("let")] LetKw, /// `mut` + #[token("mut")] MutKw, /// `use` + #[token("use")] UseKw, /// `extern` + #[token("extern")] ExternKw, // Expressions. These are non-leaf nodes. 
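The two patches above give the parser front end its input pipeline: a `logos`-driven `Lexer` that implements `TokenStream`, and `BackTrackableTokenStream`, which buffers consumed tokens once a backtrack point is set so a speculative parse can be rewound. The following is a minimal usage sketch, assuming a `Lexer::new(source)` constructor that these patches do not themselves add; that constructor is hypothetical and only stands in for however a lexer is built from source text.

    use fe_parser2::lexer::Lexer;
    use fe_parser2::parser::{BackTrackableTokenStream, SyntaxToken};

    fn main() {
        // Hypothetical constructor: the patches above define `Lexer` but not
        // how one is created from a source string.
        let lexer = Lexer::new("fn foo() {}");
        let mut stream = BackTrackableTokenStream::new(lexer);

        // Mark a backtrack point, consume one token, then rewind to the mark.
        stream.set_bt_point();
        let first = stream.next().map(|tok| tok.syntax_kind());
        stream.backtrack();

        // After backtracking, the stream yields the same token again.
        assert_eq!(first, stream.next().map(|tok| tok.syntax_kind()));
    }

Each `set_bt_point` call records the current buffer position, and `backtrack` rewinds the cursor to the most recently recorded position and discards it, so nested speculative parses can be unwound one level at a time.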
From 10316faa742fdcab23744238a778d1f49ec14c60 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 31 Dec 2022 22:11:22 +0100 Subject: [PATCH 008/678] Define Parser --- Cargo.lock | 219 +++----------------- crates/parser2/Cargo.toml | 3 +- crates/parser2/src/lexer.rs | 4 +- crates/parser2/src/lib.rs | 13 ++ crates/parser2/src/parser/mod.rs | 239 +++++++++++++++------- crates/parser2/src/parser/token_stream.rs | 107 ++++++++++ crates/parser2/src/syntax_kind.rs | 52 +++-- crates/parser2/src/syntax_node.rs | 18 ++ 8 files changed, 370 insertions(+), 285 deletions(-) create mode 100644 crates/parser2/src/parser/token_stream.rs create mode 100644 crates/parser2/src/syntax_node.rs diff --git a/Cargo.lock b/Cargo.lock index 548fe4d907..dc1b0446a2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -102,15 +102,9 @@ dependencies = [ [[package]] name = "bumpalo" -<<<<<<< HEAD version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" -======= -version = "3.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" ->>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "byte-slice-cast" @@ -126,7 +120,6 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -<<<<<<< HEAD version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" @@ -136,17 +129,6 @@ name = "camino" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055" -======= -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" - -[[package]] -name = "camino" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e" ->>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "cast" @@ -156,15 +138,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -<<<<<<< HEAD version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" -======= -version = "1.0.78" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" ->>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "cfg-if" @@ -267,11 +243,6 @@ dependencies = [ ] [[package]] -<<<<<<< HEAD -name = "cpufeatures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -======= name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -281,7 +252,6 @@ checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" name = "cpufeatures" version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" ->>>>>>> b04a24a3 (Define `SyntaxKind`) checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", @@ -353,11 +323,7 @@ dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", -<<<<<<< HEAD - "memoffset", -======= "memoffset 0.7.1", ->>>>>>> b04a24a3 
(Define `SyntaxKind`) "scopeguard", ] @@ -459,7 +425,6 @@ checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" [[package]] name = "either" -<<<<<<< HEAD version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" @@ -469,17 +434,6 @@ name = "ethabi" version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -======= -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" - -[[package]] -name = "ethabi" -version = "17.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4966fba78396ff92db3b817ee71143eccd98acf0f876b8d600e585a670c5d1b" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "ethereum-types", "hex", @@ -790,6 +744,7 @@ dependencies = [ name = "fe-parser2" version = "0.20.0-alpha" dependencies = [ + "fxhash", "logos", "rowan", ] @@ -1043,15 +998,9 @@ dependencies = [ [[package]] name = "insta" -<<<<<<< HEAD version = "1.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff" -======= -version = "1.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5686bd8e9239eabe90bb30a0c341bffd6fdc177fb556708f2cb792bf00352d" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "lazy_static", "linked-hash-map", @@ -1091,15 +1040,9 @@ checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" [[package]] name = "js-sys" -<<<<<<< HEAD version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" -======= -version = "0.3.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "wasm-bindgen", ] @@ -1125,6 +1068,12 @@ version = "0.2.139" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" +[[package]] +name = "libm" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" + [[package]] name = "linked-hash-map" version = "0.5.6" @@ -1179,6 +1128,15 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + [[package]] name = "memoffset" version = "0.7.1" @@ -1189,21 +1147,12 @@ dependencies = [ ] [[package]] -<<<<<<< HEAD name = "nom8" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" dependencies = [ "memchr", -======= -name = "memoffset" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" 
-dependencies = [ - "autocfg", ->>>>>>> b04a24a3 (Define `SyntaxKind`) ] [[package]] @@ -1234,6 +1183,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -1275,15 +1225,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -<<<<<<< HEAD version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3840933452adf7b3b9145e27086a5a3376c619dca1a21b1e5a5af0d54979bed" -======= -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "366e44391a8af4cfd6002ef6ba072bae071a96aafca98d7d448a34c5dca38b6a" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "arrayvec", "bitvec", @@ -1295,15 +1239,9 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -<<<<<<< HEAD version = "3.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" -======= -version = "3.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9299338969a3d2f491d65f140b00ddec470858402f888af98e8642fb5e8965cd" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "proc-macro-crate", "proc-macro2", @@ -1409,22 +1347,12 @@ dependencies = [ [[package]] name = "proc-macro-crate" -<<<<<<< HEAD version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" dependencies = [ "once_cell", "toml_edit", -======= -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" -dependencies = [ - "once_cell", - "thiserror", - "toml", ->>>>>>> b04a24a3 (Define `SyntaxKind`) ] [[package]] @@ -1453,24 +1381,18 @@ dependencies = [ [[package]] name = "proc-macro2" -<<<<<<< HEAD version = "1.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" -======= -version = "1.0.49" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "unicode-ident", ] [[package]] name = "proptest" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5" +checksum = "29f1b898011ce9595050a68e60f90bad083ff2987a695a42357134c8381fba70" dependencies = [ "bitflags", "byteorder", @@ -1481,6 +1403,7 @@ dependencies = [ "rand_chacha 0.3.1", "rand_xorshift", "regex-syntax", + "unarray", ] [[package]] @@ -1596,15 +1519,9 @@ dependencies = [ [[package]] name = "rayon-core" -<<<<<<< HEAD version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" -======= -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -1620,15 +1537,9 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "regex" -<<<<<<< HEAD version = "1.7.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" -======= -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "aho-corasick", "memchr", @@ -1866,15 +1777,9 @@ dependencies = [ [[package]] name = "serde_json" -<<<<<<< HEAD version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7434af0dc1cbd59268aa98b4c22c131c0584d2232f6fb166efb993e2832e896a" -======= -version = "1.0.91" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "itoa 1.0.5", "ryu", @@ -1914,15 +1819,9 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "smol_str" -<<<<<<< HEAD version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" -======= -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "serde", ] @@ -1989,15 +1888,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "termcolor" -<<<<<<< HEAD version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" -======= -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "winapi-util", ] @@ -2064,15 +1957,9 @@ dependencies = [ [[package]] name = "toml" -<<<<<<< HEAD version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -======= -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "serde", ] @@ -2122,6 +2009,12 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unescape" version = "0.1.0" @@ -2136,15 +2029,9 @@ checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" [[package]] name = "unicode-segmentation" -<<<<<<< HEAD version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" -======= -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" ->>>>>>> b04a24a3 (Define `SyntaxKind`) [[package]] name = "unicode-width" @@ -2201,15 +2088,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -<<<<<<< HEAD version = "0.2.84" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" -======= -version = "0.2.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -2217,15 +2098,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" -======= -version = "0.2.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "bumpalo", "log", @@ -2238,15 +2113,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -<<<<<<< HEAD version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" -======= -version = "0.4.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23639446165ca5a5de86ae1d8896b737ae80319560fbaa4c2887b7da6e7ebd7d" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -2256,15 +2125,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" -======= -version = "0.2.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2272,15 +2135,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" -======= -version = "0.2.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "proc-macro2", "quote", @@ -2291,7 +2148,6 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -<<<<<<< HEAD version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" @@ -2301,17 +2157,6 @@ name = "wasm-bindgen-test" version = "0.3.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db36fc0f9fb209e88fb3642590ae0205bb5a56216dabd963ba15879fe53a30b" -======= -version = "0.2.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" - -[[package]] -name = "wasm-bindgen-test" -version = "0.3.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d2fff962180c3fadf677438054b1db62bee4aa32af26a45388af07d1287e1d" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "console_error_panic_hook", "js-sys", @@ -2323,15 +2168,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -<<<<<<< HEAD version = "0.3.34" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0734759ae6b3b1717d661fe4f016efcfb9828f5edb4520c18eaee05af3b43be9" -======= -version = "0.3.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4683da3dfc016f704c9f82cf401520c4f1cb3ee440f7f52b3d6ac29506a49ca7" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "proc-macro2", "quote", @@ -2339,15 +2178,9 @@ dependencies = [ [[package]] name = "web-sys" -<<<<<<< HEAD version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" -======= -version = "0.3.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" ->>>>>>> b04a24a3 (Define `SyntaxKind`) dependencies = [ "js-sys", "wasm-bindgen", diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index 18d5ec9fa5..646a924060 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -11,4 +11,5 @@ description = "Parser lib for Fe." [dependencies] rowan = "0.15.10" -logos = "0.12.1" \ No newline at end of file +logos = "0.12.1" +fxhash = "0.2.1" \ No newline at end of file diff --git a/crates/parser2/src/lexer.rs b/crates/parser2/src/lexer.rs index d9cda71042..815303a448 100644 --- a/crates/parser2/src/lexer.rs +++ b/crates/parser2/src/lexer.rs @@ -1,5 +1,5 @@ use crate::{ - parser::{SyntaxToken, TokenStream}, + parser::token_stream::{SyntaxToken, TokenStream}, SyntaxKind, }; @@ -24,7 +24,7 @@ impl<'s> TokenStream for Lexer<'s> { } fn peek(&mut self) -> Option<&Self::Token> { - if !self.peek.is_some() { + if self.peek.is_none() { self.peek = self.next(); } self.peek.as_ref() diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index 85bf6a84ea..40108c6df3 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -1,5 +1,18 @@ pub mod lexer; pub mod parser; pub mod syntax_kind; +pub mod syntax_node; pub use syntax_kind::SyntaxKind; + +pub type TextRange = rowan::TextRange; + +/// An parse error which is accumulated in the [`parser::Parser`] while parsing. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ParseError { + /// An error message. + pub msg: String, + + /// A range of the error. + pub range: TextRange, +} diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index ed1daf687c..9a2689a3c8 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -1,106 +1,193 @@ -use crate::SyntaxKind; +use fxhash::FxHashSet; -/// This trait works as an abstraction layer to encapsulate the differences -/// between input sources. There are mainly two types of input sources, -/// 1. text in source file -/// 2. tokens stream produced by procedural macros. -pub trait TokenStream { - type Token: SyntaxToken; +use crate::{ParseError, SyntaxKind, TextRange}; - /// Returns the next token in the stream. - fn next(&mut self) -> Option; +use self::token_stream::{BackTrackableTokenStream, SyntaxToken, TokenStream}; - /// Returns the next token in the stream without consuming it. - fn peek(&mut self) -> Option<&Self::Token>; -} +pub mod token_stream; -/// This trait represents a single token in the token stream. -pub trait SyntaxToken: Clone { - /// Returns `SyntaxKind` of the token. - fn syntax_kind(&self) -> SyntaxKind; +/// Parser to build a rowan syntax tree. +pub struct Parser { + /// Token stream to parse. 
+ stream: BackTrackableTokenStream, - /// Returns raw text of the token. - fn text(&self) -> &str; -} + builder: rowan::GreenNodeBuilder<'static>, + scopes: Vec>, + errors: Vec, -/// This struct is a thin wrapper around `TokenStream` which allows the parser -/// to backtrack. -pub struct BackTrackableTokenStream { - stream: T, - /// Backtrack buffer which stores tokens that have been already consumed. - bt_buffer: Vec, - bt_points: Vec, - /// Points to the current position of the backtrack buffer. - bt_cursor: Option, + /// The checkpoint where the scope/branch is wrapped up later by another + /// scope/branch when the `wrap_scope_with` method. + check_point: Option<(rowan::Checkpoint, usize)>, + + current_pos: rowan::TextSize, } -impl BackTrackableTokenStream { - /// Creates a new `BackTrackableTokenStream` from the given `TokenStream`. - pub fn new(stream: T) -> Self { - Self { - stream, - bt_buffer: Vec::new(), - bt_points: Vec::new(), - bt_cursor: None, +impl Parser { + /// Returns the current token of the parser. + pub fn current_token(&mut self) -> Option<&S::Token> { + self.stream.peek() + } + + /// Enters the scope and set the `scope` to the current scope. + /// If `is_checkpoint` is true, the current scope/branch is wrapped up by + /// another scope/branch later when the [`wrap_scope_with`] method is + /// called. + pub fn enter(&mut self, scope: Box, is_checkpoint: bool) { + if is_checkpoint { + self.check_point = Some((self.builder.checkpoint(), self.scopes.len())); } + + self.builder.start_node(scope.syntax_kind().into()); + self.scopes.push(scope); + } + + /// Enters the errors scope and add the `msg` to the error list. + pub fn enter_with_error(&mut self, msg: &str) { + let start = self.current_pos; + let end = if let Some(current_token) = self.current_token() { + start + current_token.text_size() + } else { + start + }; + let range = TextRange::new(start, end); + + self.errors.push(ParseError { + range, + msg: msg.to_string(), + }); + self.scopes.push(Box::new(ErrorScope())); } - /// Returns the next token in the stream. - pub fn next(&mut self) -> Option { - if let Some(cursor) = self.bt_cursor { - if cursor < self.bt_buffer.len() { - let token = self.bt_buffer.get(cursor).cloned(); - self.bt_cursor = Some(cursor + 1); - return token; + /// Leaves the current scope/branch. + pub fn leave(&mut self) { + self.scopes.pop(); + self.builder.finish_node(); + } + + /// Wrap up the marked scope/branch with another scope/branch and set the + /// `scope` to the current scope. + pub fn wrap_scope_with(&mut self, scope: Box) { + debug_assert!(self.check_point.is_some(), "No checkpoint"); + let check_point = self.check_point.take().unwrap(); + let syntax_kind = scope.syntax_kind(); + + self.scopes.truncate(check_point.1); + self.scopes.push(scope); + + self.builder + .start_node_at(check_point.0, syntax_kind.into()); + } + + /// Bumps the current token and adds it to the current branch. + pub fn bump(&mut self) { + let tok = self.stream.next().unwrap(); + self.current_pos += rowan::TextSize::of(tok.text()); + self.builder.token(tok.syntax_kind().into(), tok.text()); + } + + /// Bumps consecutive trivia tokens. 
+ pub fn bump_trivias(&mut self) { + while let Some(tok) = self.current_token() { + let kind = tok.syntax_kind(); + if kind.is_trivia() { + self.bump(); + } else { + break; } } + } - let token = self.stream.next()?; - if self.has_bt_point() { - self.bt_buffer.push(token.clone()); - } - if let Some(cursor) = self.bt_cursor { - self.bt_cursor = Some(cursor + 1); + /// Bump consecutive newlines. + pub fn bump_newlines(&mut self) { + while let Some(tok) = self.current_token() { + if tok.syntax_kind() == SyntaxKind::Newline { + self.bump(); + } else { + break; + } } - Some(token) } - /// Returns the next token in the stream without consuming it. - pub fn peek(&mut self) -> Option<&T::Token> { - if let Some(cursor) = self.bt_cursor { - if cursor < self.bt_buffer.len() { - return self.bt_buffer.get(cursor); + /// Proceeds the parser to the recovery token of the current scope. Then + /// leave the current branch/scope. + pub fn recovery(&mut self) { + let mut scope_index = self.scopes.len() - 1; + // Finds the nearest scope that has its own recovery set. + loop { + if self.scopes[scope_index].recovery_method() != &RecoveryMethod::Inheritance + || scope_index == 0 + { + break; + } else { + scope_index -= 1; } } - self.stream.peek() + while let Some(tok) = self.stream.peek() { + let syntax_kind = tok.syntax_kind(); + if self.scopes[scope_index] + .recovery_method() + .contains(syntax_kind) + { + break; + } else { + self.bump(); + } + } + + self.leave(); + } +} + +/// The current scope of parsing. +pub trait ParsingScope { + /// Returns the recovery method of the current scope. + fn recovery_method(&self) -> &RecoveryMethod; + + fn syntax_kind(&self) -> SyntaxKind; +} + +pub struct ErrorScope(); + +impl ParsingScope for ErrorScope { + fn recovery_method(&self) -> &RecoveryMethod { + &RecoveryMethod::Inheritance } - /// Set a backtrack point which allows the parser to backtrack to this - /// point. - pub fn set_bt_point(&mut self) { - self.bt_points.push(self.bt_buffer.len()); + fn syntax_kind(&self) -> SyntaxKind { + SyntaxKind::Error } +} + +/// Represents the recovery method of the current scope. +#[derive(PartialEq, Eq)] +pub enum RecoveryMethod { + /// Uses the recovery method of the parent scope. + Inheritance, - /// Remove the last resume points. - pub fn complete(&mut self) { - self.bt_cursor = None; - if !self.has_bt_point() { - self.bt_buffer.clear(); + /// The scope has its own recovery set. + RecoverySet(FxHashSet), +} + +impl RecoveryMethod { + /// Returns `true` if the recovery set contains the given syntax kind. + fn contains(&self, syntax_kind: SyntaxKind) -> bool { + match self { + RecoveryMethod::Inheritance => false, + RecoveryMethod::RecoverySet(set) => set.contains(&syntax_kind), } } +} - /// Backtracks to the last backtrack point. - /// - /// # Panics - /// Panics if the `set_bt_point` method has not been called before. - pub fn backtrack(&mut self) { - debug_assert!(self.has_bt_point(), "backtrack without `bt_point`"); - self.bt_cursor = Some(self.bt_points.pop().unwrap()); - } +trait TextSize { + fn text_size(&self) -> rowan::TextSize; +} - /// Returns `true` if the stream has a backtrack point. 
- pub fn has_bt_point(&mut self) -> bool { - !self.bt_points.is_empty() +impl TextSize for T +where + T: SyntaxToken, +{ + fn text_size(&self) -> rowan::TextSize { + rowan::TextSize::of(self.text()) } } diff --git a/crates/parser2/src/parser/token_stream.rs b/crates/parser2/src/parser/token_stream.rs new file mode 100644 index 0000000000..b053143155 --- /dev/null +++ b/crates/parser2/src/parser/token_stream.rs @@ -0,0 +1,107 @@ +use crate::SyntaxKind; + +/// This trait works as an abstraction layer to encapsulate the differences +/// between input sources. There are mainly two types of input sources, +/// 1. text in source file +/// 2. tokens stream produced by procedural macros. +pub trait TokenStream { + type Token: SyntaxToken; + + /// Returns the next token in the stream. + fn next(&mut self) -> Option; + + /// Returns the next token in the stream without consuming it. + fn peek(&mut self) -> Option<&Self::Token>; +} + +/// This trait represents a single token in the token stream. +pub trait SyntaxToken: Clone { + /// Returns `SyntaxKind` of the token. + fn syntax_kind(&self) -> SyntaxKind; + + /// Returns raw text of the token. + fn text(&self) -> &str; +} + +/// This struct is a thin wrapper around `TokenStream` which allows the parser +/// to backtrack. +pub struct BackTrackableTokenStream { + stream: T, + /// Backtrack buffer which stores tokens that have been already consumed. + bt_buffer: Vec, + bt_points: Vec, + /// Points to the current position of the backtrack buffer. + bt_cursor: Option, +} + +impl BackTrackableTokenStream { + /// Creates a new `BackTrackableTokenStream` from the given `TokenStream`. + pub fn new(stream: T) -> Self { + Self { + stream, + bt_buffer: Vec::new(), + bt_points: Vec::new(), + bt_cursor: None, + } + } + + /// Returns the next token in the stream. + #[allow(clippy::should_implement_trait)] + pub fn next(&mut self) -> Option { + if let Some(cursor) = self.bt_cursor { + if cursor < self.bt_buffer.len() { + let token = self.bt_buffer.get(cursor).cloned(); + self.bt_cursor = Some(cursor + 1); + return token; + } + } + + let token = self.stream.next()?; + if self.has_bt_point() { + self.bt_buffer.push(token.clone()); + } + if let Some(cursor) = self.bt_cursor { + self.bt_cursor = Some(cursor + 1); + } + Some(token) + } + + /// Returns the next token in the stream without consuming it. + pub fn peek(&mut self) -> Option<&T::Token> { + if let Some(cursor) = self.bt_cursor { + if cursor < self.bt_buffer.len() { + return self.bt_buffer.get(cursor); + } + } + + self.stream.peek() + } + + /// Set a backtrack point which allows the parser to backtrack to this + /// point. + pub fn set_bt_point(&mut self) { + self.bt_points.push(self.bt_buffer.len()); + } + + /// Remove the last resume points. + pub fn complete(&mut self) { + self.bt_cursor = None; + if !self.has_bt_point() { + self.bt_buffer.clear(); + } + } + + /// Backtracks to the last backtrack point. + /// + /// # Panics + /// Panics if the `set_bt_point` method has not been called before. + pub fn backtrack(&mut self) { + debug_assert!(self.has_bt_point(), "backtrack without `bt_point`"); + self.bt_cursor = Some(self.bt_points.pop().unwrap()); + } + + /// Returns `true` if the stream has a backtrack point. 
+ pub fn has_bt_point(&mut self) -> bool { + !self.bt_points.is_empty() + } +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index d3d75930dc..936b54d0c6 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -8,7 +8,7 @@ use logos::Logos; pub enum SyntaxKind { // Atom kinds. These are leaf nodes. #[error] - Error = 0, + InvalidToken = 0, #[regex(r"\n[ \t]*")] Newline, #[regex(r"[ \s\t]")] @@ -76,6 +76,9 @@ pub enum SyntaxKind { /// `// Comment` #[regex(r"//[^\n]*")] Comment, + /// `/// DocComment` + #[regex(r"///[^\n]*")] + DocComment, /// `+` #[token("+")] @@ -278,17 +281,10 @@ pub enum SyntaxKind { Fn, /// `struct Foo { .. }` Struct, - /// `x: i32` - FieldDef, - FieldDefList, /// `contract Foo { .. }` ContractDef, - /// `(i32, u32)` - Tuple, /// `enum Foo { .. }` Enum, - VariantDef, - VariantDefList, /// `type Foo = i32` TypeAlias, /// `impl Foo { .. }` @@ -297,11 +293,6 @@ pub enum SyntaxKind { Trait, /// `impl Trait for Foo { .. }` TraitImpl, - /// `T` - /// `T: Trait` - TypeBound, - /// `` - GenericParamList, /// `const FOO: i32 = 1` Const, /// `use foo::bar` @@ -322,4 +313,39 @@ pub enum SyntaxKind { /// `pub` Visibility, + + /// `x: i32` + FieldDef, + FieldDefList, + + /// `(i32, u32)` + Tuple, + + VariantDef, + VariantDefList, + + /// `T` + /// `T: Trait` + TypeBound, + /// `` + GenericParamList, + + /// Root node of the input source. + Root, + + /// Represents an error branch. + Error, +} + +impl SyntaxKind { + /// Returns `true` if this is a trivia token. + pub fn is_trivia(self) -> bool { + matches!(self, SyntaxKind::WhiteSpace | SyntaxKind::Comment) + } +} + +impl From for rowan::SyntaxKind { + fn from(kind: SyntaxKind) -> Self { + Self(kind as u16) + } } diff --git a/crates/parser2/src/syntax_node.rs b/crates/parser2/src/syntax_node.rs new file mode 100644 index 0000000000..a9c9a8d3c7 --- /dev/null +++ b/crates/parser2/src/syntax_node.rs @@ -0,0 +1,18 @@ +use crate::SyntaxKind; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum FeLang {} + +impl rowan::Language for FeLang { + type Kind = SyntaxKind; + + fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { + unsafe { std::mem::transmute::(raw.0) } + } + + fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { + kind.into() + } +} + +pub type SyntaxNode = rowan::SyntaxNode; From ea7c40daec60b2dc90f82d4448e206cf4859435a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 5 Jan 2023 14:26:25 +0100 Subject: [PATCH 009/678] Define macros to help parsing scope definition --- Cargo.lock | 1 + crates/parser2/Cargo.toml | 3 +- crates/parser2/src/parser/mod.rs | 55 +++++++++++++++++++++++++++++++ crates/parser2/src/syntax_kind.rs | 3 ++ 4 files changed, 61 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index dc1b0446a2..fd847604e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -745,6 +745,7 @@ name = "fe-parser2" version = "0.20.0-alpha" dependencies = [ "fxhash", + "lazy_static", "logos", "rowan", ] diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index 646a924060..25613ea814 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -12,4 +12,5 @@ description = "Parser lib for Fe." 
[dependencies] rowan = "0.15.10" logos = "0.12.1" -fxhash = "0.2.1" \ No newline at end of file +fxhash = "0.2.1" +lazy_static = "1.4.0" \ No newline at end of file diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 9a2689a3c8..e8a1fb1263 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -6,6 +6,8 @@ use self::token_stream::{BackTrackableTokenStream, SyntaxToken, TokenStream}; pub mod token_stream; +mod item; + /// Parser to build a rowan syntax tree. pub struct Parser { /// Token stream to parse. @@ -191,3 +193,56 @@ where rowan::TextSize::of(self.text()) } } + +macro_rules! define_scope { + ($scope_name: ident, $kind: path ,Inheritance) => { + #[derive(Default)] + pub(crate) struct $scope_name {} + + impl crate::parser::ParsingScope for $scope_name { + fn recovery_method(&self) -> &crate::parser::RecoveryMethod { + lazy_static::lazy_static! { + pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { + crate::parser::RecoveryMethod::Inheritance + }; + } + + &RECOVERY_METHOD + } + + fn syntax_kind(&self) -> crate::SyntaxKind { + use crate::SyntaxKind::*; + $kind + } + } + }; + + ($scope_name: ident, $kind: path, RecoverySet($($recoveries: path), *)) => { + #[derive(Default)] + pub(crate) struct $scope_name {} + + impl crate::parser::ParsingScope for $scope_name { + fn recovery_method(&self) -> &crate::parser::RecoveryMethod { + lazy_static::lazy_static! { + pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { + use crate::SyntaxKind::*; + let set: fxhash::FxHashSet = vec![ + $($recoveries), * + ].into_iter().map(|kind| kind.into()).collect(); + + crate::parser::RecoveryMethod::RecoverySet(set) + }; + } + + &RECOVERY_METHOD + } + + fn syntax_kind(&self) -> crate::SyntaxKind { + use crate::SyntaxKind::*; + $kind + } + } + }; +} + +use define_scope; diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 936b54d0c6..078ef057cf 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -330,6 +330,9 @@ pub enum SyntaxKind { /// `` GenericParamList, + /// Modules inside a file. + Module, + /// Root node of the input source. Root, From 382dfd9c5b4de45ea3e026887750639a5932fafa Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 5 Jan 2023 14:37:50 +0100 Subject: [PATCH 010/678] Implement parser for `ItemList` --- crates/parser2/src/parser/attr.rs | 15 +++ crates/parser2/src/parser/func.rs | 13 ++ crates/parser2/src/parser/item.rs | 210 ++++++++++++++++++++++++++++++ crates/parser2/src/parser/mod.rs | 138 +++++++++++--------- crates/parser2/src/syntax_kind.rs | 6 +- 5 files changed, 315 insertions(+), 67 deletions(-) create mode 100644 crates/parser2/src/parser/attr.rs create mode 100644 crates/parser2/src/parser/func.rs create mode 100644 crates/parser2/src/parser/item.rs diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs new file mode 100644 index 0000000000..f6bed7300d --- /dev/null +++ b/crates/parser2/src/parser/attr.rs @@ -0,0 +1,15 @@ +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! 
{ + AttrListScope, + AttrList, + RecoverySet( + Newline + ) +} + +impl super::Parse for AttrListScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs new file mode 100644 index 0000000000..24c7433722 --- /dev/null +++ b/crates/parser2/src/parser/func.rs @@ -0,0 +1,13 @@ +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! { + FnScope, + Fn, + Inheritance +} + +impl super::Parse for FnScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs new file mode 100644 index 0000000000..c45d84be31 --- /dev/null +++ b/crates/parser2/src/parser/item.rs @@ -0,0 +1,210 @@ +use std::cell::RefCell; + +use crate::SyntaxKind; + +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! { + ItemListScope, + ItemList, + RecoverySet( + FnKw, + StructKw, + EnumKw, + TraitKw, + ImplKw, + UseKw, + ConstKw, + ExternKw, + TypeKw, + PubKw, + UnsafeKw, + DocComment, + Pound + ) +} + +impl super::Parse for ItemListScope { + fn parse(&mut self, parser: &mut Parser) { + use crate::SyntaxKind::*; + + while parser.current_kind().is_some() { + let mut checkpoint = None; + + parser.bump_trivias(true); + if let Some(DocComment) | Some(Pound) = parser.current_kind() { + checkpoint.get_or_insert_with(|| parser.checkpoint()); + parser.parse(super::attr::AttrListScope::default(), None); + } + + parser.bump_trivias(true); + let modifier = match parser.current_kind() { + Some(PubKw) => { + checkpoint.get_or_insert_with(|| parser.checkpoint()); + parser.bump(); + + if parser.current_kind() == Some(UnsafeKw) { + parser.bump_trivias(true); + Modifier::PubAndUnsafe + } else { + Modifier::Pub + } + } + + Some(UnsafeKw) => { + checkpoint.get_or_insert_with(|| parser.checkpoint()); + parser.bump(); + Modifier::Unsafe + } + + Some(_) => Modifier::None, + + None => { + parser.error_and_recover("expected item", checkpoint); + continue; + } + }; + + if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { + parser.error("expected `fn` after `unsafe` keyword"); + } else if modifier.is_pub() && parser.current_kind() == Some(ExternKw) { + parser.error("`pub` can't be used for `extern` block"); + } + + match parser.current_kind() { + Some(FnKw) => parser.parse(super::func::FnScope::default(), checkpoint), + Some(StructKw) => parser.parse(StructScope::default(), checkpoint), + Some(EnumKw) => parser.parse(EnumScope::default(), checkpoint), + Some(TraitKw) => parser.parse(TraitScope::default(), checkpoint), + Some(ImplKw) => parser.parse(ImplScope::default(), checkpoint), + Some(UseKw) => parser.parse(UseScope::default(), checkpoint), + Some(ConstKw) => parser.parse(ConstScope::default(), checkpoint), + Some(ExternKw) => parser.parse(ExternScope::default(), checkpoint), + Some(TypeKw) => parser.parse(TypeAliasScope::default(), checkpoint), + _ => parser.error_and_recover("expected item", checkpoint), + } + } + } +} + +enum Modifier { + None, + Pub, + Unsafe, + PubAndUnsafe, +} + +impl Modifier { + fn is_pub(&self) -> bool { + matches!(self, Modifier::Pub | Modifier::PubAndUnsafe) + } + + fn is_unsafe(&self) -> bool { + matches!(self, Modifier::Unsafe | Modifier::PubAndUnsafe) + } +} + +define_scope! { + StructScope, + Struct, + Inheritance +} +impl super::Parse for StructScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} + +define_scope! 
{ + EnumScope, + Enum, + Inheritance +} +impl super::Parse for EnumScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} + +define_scope! { + TraitScope, + Trait, + Inheritance +} +impl super::Parse for TraitScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} + +// We can't use `define_scope` here since the `syntax_kind` of the scope can be +// determined after parsing. +#[derive(Debug, Clone)] +struct ImplScope { + syntax_kind: RefCell, +} +impl Default for ImplScope { + fn default() -> Self { + Self { + syntax_kind: SyntaxKind::Impl.into(), + } + } +} +impl super::ParsingScope for ImplScope { + fn recovery_method(&self) -> &super::RecoveryMethod { + &super::RecoveryMethod::Inheritance + } + + fn syntax_kind(&self) -> SyntaxKind { + *self.syntax_kind.borrow() + } +} +impl super::Parse for ImplScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} + +define_scope! { + UseScope, + Use, + Inheritance +} +impl super::Parse for UseScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} + +define_scope! { + ConstScope, + Const, + Inheritance +} +impl super::Parse for ConstScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} + +define_scope! { + ExternScope, + Extern, + Inheritance +} +impl super::Parse for ExternScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} + +define_scope! { + TypeAliasScope, + TypeAlias, + Inheritance +} +impl super::Parse for TypeAliasScope { + fn parse(&mut self, parser: &mut Parser) { + todo!() + } +} diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index e8a1fb1263..5f979487cd 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -6,6 +6,8 @@ use self::token_stream::{BackTrackableTokenStream, SyntaxToken, TokenStream}; pub mod token_stream; +mod attr; +mod func; mod item; /// Parser to build a rowan syntax tree. @@ -17,10 +19,6 @@ pub struct Parser { scopes: Vec>, errors: Vec, - /// The checkpoint where the scope/branch is wrapped up later by another - /// scope/branch when the `wrap_scope_with` method. - check_point: Option<(rowan::Checkpoint, usize)>, - current_pos: rowan::TextSize, } @@ -30,68 +28,54 @@ impl Parser { self.stream.peek() } - /// Enters the scope and set the `scope` to the current scope. - /// If `is_checkpoint` is true, the current scope/branch is wrapped up by - /// another scope/branch later when the [`wrap_scope_with`] method is - /// called. - pub fn enter(&mut self, scope: Box, is_checkpoint: bool) { - if is_checkpoint { - self.check_point = Some((self.builder.checkpoint(), self.scopes.len())); - } - - self.builder.start_node(scope.syntax_kind().into()); - self.scopes.push(scope); + /// Returns the current token kind of the parser. + pub fn current_kind(&mut self) -> Option { + self.current_token().map(|token| token.syntax_kind()) } - /// Enters the errors scope and add the `msg` to the error list. 
- pub fn enter_with_error(&mut self, msg: &str) { - let start = self.current_pos; - let end = if let Some(current_token) = self.current_token() { - start + current_token.text_size() - } else { - start - }; - let range = TextRange::new(start, end); - - self.errors.push(ParseError { - range, - msg: msg.to_string(), - }); - self.scopes.push(Box::new(ErrorScope())); + pub fn parse(&mut self, mut scope: T, checkpoint: Option) + where + T: Parse + 'static, + { + let checkpoint = self.enter(scope.clone(), checkpoint); + scope.parse(self); + self.leave(checkpoint); } - /// Leaves the current scope/branch. - pub fn leave(&mut self) { - self.scopes.pop(); - self.builder.finish_node(); + /// Mark the current branch as a checkpoint. + /// The checked branch is wrapped up later when [`parse]` is + /// called with the `checkpoint`. + pub fn checkpoint(&mut self) -> rowan::Checkpoint { + self.builder.checkpoint() } - /// Wrap up the marked scope/branch with another scope/branch and set the - /// `scope` to the current scope. - pub fn wrap_scope_with(&mut self, scope: Box) { - debug_assert!(self.check_point.is_some(), "No checkpoint"); - let check_point = self.check_point.take().unwrap(); - let syntax_kind = scope.syntax_kind(); - - self.scopes.truncate(check_point.1); - self.scopes.push(scope); - - self.builder - .start_node_at(check_point.0, syntax_kind.into()); + pub fn error_and_recover(&mut self, msg: &str, checkpoint: Option) { + let err_scope = self.error(msg); + let checkpoint = self.enter(err_scope, checkpoint); + self.recover(); + self.leave(checkpoint); } - /// Bumps the current token and adds it to the current branch. + /// Bumps the current token and trailing trivias and adds them to the + /// current branch. pub fn bump(&mut self) { + self.bump_raw(); + self.bump_trivias(false); + } + + /// Bumps the current token adds it to the current branch. + pub fn bump_raw(&mut self) { let tok = self.stream.next().unwrap(); self.current_pos += rowan::TextSize::of(tok.text()); self.builder.token(tok.syntax_kind().into(), tok.text()); } /// Bumps consecutive trivia tokens. - pub fn bump_trivias(&mut self) { + /// If `bump_newlines` is true, newlines are also bumped. + pub fn bump_trivias(&mut self, skip_newlines: bool) { while let Some(tok) = self.current_token() { let kind = tok.syntax_kind(); - if kind.is_trivia() { + if kind.is_trivia() || (skip_newlines && kind == SyntaxKind::Newline) { self.bump(); } else { break; @@ -110,9 +94,8 @@ impl Parser { } } - /// Proceeds the parser to the recovery token of the current scope. Then - /// leave the current branch/scope. - pub fn recovery(&mut self) { + /// Proceeds the parser to the recovery token of the current scope. + pub fn recover(&mut self) { let mut scope_index = self.scopes.len() - 1; // Finds the nearest scope that has its own recovery set. loop { @@ -136,8 +119,37 @@ impl Parser { self.bump(); } } + } - self.leave(); + /// Add the `msg` to the error list. 
+ fn error(&mut self, msg: &str) -> ErrorScope { + let start = self.current_pos; + let end = if let Some(current_token) = self.current_token() { + start + current_token.text_size() + } else { + start + }; + let range = TextRange::new(start, end); + + self.errors.push(ParseError { + range, + msg: msg.to_string(), + }); + ErrorScope::default() + } + + fn enter(&mut self, scope: T, checkpoint: Option) -> rowan::Checkpoint + where + T: ParsingScope + 'static, + { + self.scopes.push(Box::new(scope)); + checkpoint.unwrap_or_else(|| self.checkpoint()) + } + + fn leave(&mut self, checkpoint: rowan::Checkpoint) { + let scope = self.scopes.pop().unwrap(); + self.builder + .start_node_at(checkpoint, scope.syntax_kind().into()); } } @@ -149,16 +161,14 @@ pub trait ParsingScope { fn syntax_kind(&self) -> SyntaxKind; } -pub struct ErrorScope(); - -impl ParsingScope for ErrorScope { - fn recovery_method(&self) -> &RecoveryMethod { - &RecoveryMethod::Inheritance - } +pub trait Parse: ParsingScope + Clone { + fn parse(&mut self, parser: &mut Parser); +} - fn syntax_kind(&self) -> SyntaxKind { - SyntaxKind::Error - } +define_scope! { + ErrorScope, + Error, + Inheritance } /// Represents the recovery method of the current scope. @@ -196,7 +206,7 @@ where macro_rules! define_scope { ($scope_name: ident, $kind: path ,Inheritance) => { - #[derive(Default)] + #[derive(Default,Debug, Clone, Copy)] pub(crate) struct $scope_name {} impl crate::parser::ParsingScope for $scope_name { @@ -218,7 +228,7 @@ macro_rules! define_scope { }; ($scope_name: ident, $kind: path, RecoverySet($($recoveries: path), *)) => { - #[derive(Default)] + #[derive(Default, Debug, Clone, Copy)] pub(crate) struct $scope_name {} impl crate::parser::ParsingScope for $scope_name { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 078ef057cf..173ee66aa8 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -213,6 +213,9 @@ pub enum SyntaxKind { /// `extern` #[token("extern")] ExternKw, + /// `unsafe` + #[token("unsafe")] + UnsafeKw, // Expressions. These are non-leaf nodes. /// `x + 1` @@ -330,9 +333,6 @@ pub enum SyntaxKind { /// `` GenericParamList, - /// Modules inside a file. - Module, - /// Root node of the input source. Root, From b6005dc5e8096a99ad8a7a3541b4f3408887a6bc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 15 Jan 2023 00:31:30 +0100 Subject: [PATCH 011/678] Implement parser for `Attribute` --- crates/parser2/src/parser/attr.rs | 107 +++++++++++++++++++++++++++++- crates/parser2/src/parser/mod.rs | 59 +++++++++++++++- crates/parser2/src/syntax_kind.rs | 14 ++-- 3 files changed, 172 insertions(+), 8 deletions(-) diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index f6bed7300d..5c4f766184 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -1,5 +1,7 @@ use super::{define_scope, token_stream::TokenStream, Parser}; +use crate::SyntaxKind; + define_scope! { AttrListScope, AttrList, @@ -10,6 +12,109 @@ define_scope! { impl super::Parse for AttrListScope { fn parse(&mut self, parser: &mut Parser) { - todo!() + use SyntaxKind::*; + + loop { + parser.bump_trivias(true); + match parser.current_kind() { + Some(Pound) => parser.parse(AttrScope::default(), None), + Some(DocComment) => parser.parse(DocCommentAttrScope::default(), None), + _ => break, + } + } + } +} + +define_scope! 
{ + AttrScope, + Attr, + Inheritance +} +impl super::Parse for AttrScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Pound); + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected attribute name", None); + return; + } + + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(AttrParamListScope::default(), None); + } + parser.bump_if(SyntaxKind::Newline); + } +} + +define_scope! { + AttrParamListScope, + AttrParamList, + RecoverySet( + RParen + ) +} + +impl super::Parse for AttrParamListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LParen); + parser.bump_trivias(true); + if parser.bump_if(SyntaxKind::RParen) { + return; + } + + parser.parse(AttrParam::default(), None); + parser.bump_trivias(true); + while parser.bump_if(SyntaxKind::Comma) { + parser.parse(AttrParam::default(), None); + parser.bump_trivias(true); + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::RParen) { + parser.error_and_recover("expected `)`", None); + } + } +} + +define_scope! { + AttrParam, + AttrParam, + RecoverySet( + Comma, + RParen + ) +} +impl super::Parse for AttrParam { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected `key: value`", None); + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::Colon) { + parser.error_and_recover("expected `key: value`", None); + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected `ident`", None) + } + + match parser.peek_non_trivia(true) { + Some(SyntaxKind::Comma) | Some(SyntaxKind::RParen) | None => {} + + _ => parser.error_and_recover("unexpected token", None), + } + } +} + +define_scope! { + DocCommentAttrScope, + DocCommentAttr, + Inheritance +} +impl super::Parse for DocCommentAttrScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::DocComment); + parser.bump_if(SyntaxKind::Newline); } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 5f979487cd..f581b1093f 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -33,6 +33,13 @@ impl Parser { self.current_token().map(|token| token.syntax_kind()) } + /// Invoke the scope to parse. The scope is wrapped up by the node specified + /// by the scope. + /// + /// * If the checkpoint is `Some`, the marked branch is wrapped up by the + /// node. + /// * If the checkpoint is `None`, the current branch is wrapped up by the + /// node. pub fn parse(&mut self, mut scope: T, checkpoint: Option) where T: Parse + 'static, @@ -42,13 +49,20 @@ impl Parser { self.leave(checkpoint); } - /// Mark the current branch as a checkpoint. + /// Marks the current branch as a checkpoint. /// The checked branch is wrapped up later when [`parse]` is /// called with the `checkpoint`. pub fn checkpoint(&mut self) -> rowan::Checkpoint { self.builder.checkpoint() } + /// Add `msg` as an error to the error list, then bumps consecutive tokens + /// until a token in the recovery set is found. + /// + /// * If checkpoint is `Some`, the marked branch is wrapped up by an error + /// node. + /// * If checkpoint is `None`, the current branch is wrapped up by an error + /// node. 
pub fn error_and_recover(&mut self, msg: &str, checkpoint: Option) { let err_scope = self.error(msg); let checkpoint = self.enter(err_scope, checkpoint); @@ -63,6 +77,45 @@ impl Parser { self.bump_trivias(false); } + /// Peek the next non-trivia token. + /// If `skip_newlines` is `true`, newlines are also treated as trivia. + pub fn peek_non_trivia(&mut self, skip_newlines: bool) -> Option { + self.stream.set_bt_point(); + + while let Some(next) = self.stream.next() { + let kind = next.syntax_kind(); + if kind.is_trivia() || (skip_newlines && kind == SyntaxKind::Newline) { + continue; + } else { + self.stream.backtrack(); + return Some(kind); + } + } + + self.stream.backtrack(); + None + } + + /// Bumps the current token if the current token is the `expected` kind. + /// + /// # Panics + /// Panics If the current token is not the `expected` kind. + pub fn bump_expected(&mut self, expected: SyntaxKind) { + assert_eq!(self.current_kind(), Some(expected)); + self.bump(); + } + + /// Bumps the current token if the current token is the `expected` kind. + /// Return `true` if the current token is the `expected` kind. + pub fn bump_if(&mut self, expected: SyntaxKind) -> bool { + if self.current_kind() == Some(expected) { + self.bump(); + true + } else { + false + } + } + /// Bumps the current token adds it to the current branch. pub fn bump_raw(&mut self) { let tok = self.stream.next().unwrap(); @@ -72,10 +125,10 @@ impl Parser { /// Bumps consecutive trivia tokens. /// If `bump_newlines` is true, newlines are also bumped. - pub fn bump_trivias(&mut self, skip_newlines: bool) { + pub fn bump_trivias(&mut self, bump_newlines: bool) { while let Some(tok) = self.current_token() { let kind = tok.syntax_kind(); - if kind.is_trivia() || (skip_newlines && kind == SyntaxKind::Newline) { + if kind.is_trivia() || (bump_newlines && kind == SyntaxKind::Newline) { self.bump(); } else { break; diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 173ee66aa8..ff54a28a09 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -9,9 +9,9 @@ pub enum SyntaxKind { // Atom kinds. These are leaf nodes. 
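    // A minimal sketch of how these leaf kinds come out of the lexer, assuming
    // the `logos` derive implied by the `#[token]`/`#[regex]` attributes below:
    //
    //     use logos::Logos;
    //
    //     let mut lex = SyntaxKind::lexer("pub struct Foo");
    //     assert_eq!(lex.next(), Some(SyntaxKind::PubKw));
    //     assert_eq!(lex.next(), Some(SyntaxKind::WhiteSpace));
    //     assert_eq!(lex.next(), Some(SyntaxKind::StructKw));
    //     assert_eq!(lex.next(), Some(SyntaxKind::WhiteSpace));
    //     assert_eq!(lex.next(), Some(SyntaxKind::Ident));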
#[error] InvalidToken = 0, - #[regex(r"\n[ \t]*")] + #[regex(r"\n|\r\n|\r")] Newline, - #[regex(r"[ \s\t]")] + #[regex(r"\s")] WhiteSpace, /// `foo` #[regex("[a-zA-Z_][a-zA-Z0-9_]*")] @@ -74,10 +74,10 @@ pub enum SyntaxKind { #[token("#")] Pound, /// `// Comment` - #[regex(r"//[^\n]*")] + #[regex(r"//[^\n\r]*")] Comment, /// `/// DocComment` - #[regex(r"///[^\n]*")] + #[regex(r"///[^\n\r]*")] DocComment, /// `+` @@ -312,6 +312,12 @@ pub enum SyntaxKind { /// `#attr` Attr, + /// `(key1: value1, key2: value2)` + AttrParamList, + /// `key: value` + AttrParam, + /// `/// Comment` + DocCommentAttr, AttrList, /// `pub` From e3fb97565f55344497a2e0b848482dca85680c52 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 15 Jan 2023 16:13:30 +0100 Subject: [PATCH 012/678] Implement parser for `Path` --- crates/parser2/src/parser/func.rs | 2 +- crates/parser2/src/parser/item.rs | 16 ++++++++-------- crates/parser2/src/parser/mod.rs | 25 ++++++++++++++---------- crates/parser2/src/parser/path.rs | 32 +++++++++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 19 deletions(-) create mode 100644 crates/parser2/src/parser/path.rs diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 24c7433722..deb926e81e 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -7,7 +7,7 @@ define_scope! { } impl super::Parse for FnScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index c45d84be31..ca6acb0a72 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -110,7 +110,7 @@ define_scope! { Inheritance } impl super::Parse for StructScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } @@ -121,7 +121,7 @@ define_scope! { Inheritance } impl super::Parse for EnumScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } @@ -132,7 +132,7 @@ define_scope! { Inheritance } impl super::Parse for TraitScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } @@ -160,7 +160,7 @@ impl super::ParsingScope for ImplScope { } } impl super::Parse for ImplScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } @@ -171,7 +171,7 @@ define_scope! { Inheritance } impl super::Parse for UseScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } @@ -182,7 +182,7 @@ define_scope! { Inheritance } impl super::Parse for ConstScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } @@ -193,7 +193,7 @@ define_scope! { Inheritance } impl super::Parse for ExternScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } @@ -204,7 +204,7 @@ define_scope! { Inheritance } impl super::Parse for TypeAliasScope { - fn parse(&mut self, parser: &mut Parser) { + fn parse(&mut self, _parser: &mut Parser) { todo!() } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index f581b1093f..a70687b95c 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -9,6 +9,7 @@ pub mod token_stream; mod attr; mod func; mod item; +mod path; /// Parser to build a rowan syntax tree. 
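// A minimal sketch of the checkpoint protocol the scope parsers rely on,
// assuming the scope types defined in `attr.rs` and `item.rs`: the caller marks
// the branch before consuming leading tokens, then hands the checkpoint to
// `parse` so the resulting node wraps the whole span (compare
// `ItemListScope::parse`):
//
//     let checkpoint = parser.checkpoint();
//     parser.parse(attr::AttrListScope::default(), None); // attributes
//     parser.bump_trivias(true);
//     parser.bump_if(SyntaxKind::PubKw); // optional `pub`
//     parser.bump_trivias(true);
//     // attributes, `pub` and the struct body all end up under one `Struct` node
//     parser.parse(item::StructScope::default(), Some(checkpoint));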
pub struct Parser { @@ -70,11 +71,22 @@ impl Parser { self.leave(checkpoint); } - /// Bumps the current token and trailing trivias and adds them to the + /// Add the `msg` to the error list and bumps n token in the error branch. + pub fn error_and_bump(&mut self, msg: &str, bump_n: usize) { + let error = self.error(msg); + let checkpoint = self.enter(error, None); + for _ in 0..bump_n { + self.bump(); + } + self.leave(checkpoint); + } + + /// Bumps the current token and /// current branch. pub fn bump(&mut self) { - self.bump_raw(); - self.bump_trivias(false); + let tok = self.stream.next().unwrap(); + self.current_pos += rowan::TextSize::of(tok.text()); + self.builder.token(tok.syntax_kind().into(), tok.text()); } /// Peek the next non-trivia token. @@ -116,13 +128,6 @@ impl Parser { } } - /// Bumps the current token adds it to the current branch. - pub fn bump_raw(&mut self) { - let tok = self.stream.next().unwrap(); - self.current_pos += rowan::TextSize::of(tok.text()); - self.builder.token(tok.syntax_kind().into(), tok.text()); - } - /// Bumps consecutive trivia tokens. /// If `bump_newlines` is true, newlines are also bumped. pub fn bump_trivias(&mut self, bump_newlines: bool) { diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs new file mode 100644 index 0000000000..e0b036fc27 --- /dev/null +++ b/crates/parser2/src/parser/path.rs @@ -0,0 +1,32 @@ +use crate::SyntaxKind; + +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! { + PathScope, + Path, + Inheritance +} +impl super::Parse for PathScope { + fn parse(&mut self, parser: &mut Parser) { + parser.parse(PathSegmentScope::default(), None); + while parser.peek_non_trivia(false) == Some(SyntaxKind::Colon2) { + parser.bump_trivias(false); + parser.bump_expected(SyntaxKind::Colon2); + parser.parse(PathSegmentScope::default(), None); + } + } +} + +define_scope! { + PathSegmentScope, + Path, + Inheritance +} +impl super::Parse for PathSegmentScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_bump("expected path segment", 1); + } + } +} From e6957cf58938ba93e3bab8fc5c1dba9ac0b42ffb Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 15 Jan 2023 22:46:14 +0100 Subject: [PATCH 013/678] Implement parser for `struct` --- crates/parser2/src/parser/attr.rs | 2 - crates/parser2/src/parser/item.rs | 13 +--- crates/parser2/src/parser/mod.rs | 1 + crates/parser2/src/parser/struct_.rs | 88 ++++++++++++++++++++++++++++ crates/parser2/src/syntax_kind.rs | 4 +- 5 files changed, 92 insertions(+), 16 deletions(-) create mode 100644 crates/parser2/src/parser/struct_.rs diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index 5c4f766184..fc90f67c55 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -9,7 +9,6 @@ define_scope! { Newline ) } - impl super::Parse for AttrListScope { fn parse(&mut self, parser: &mut Parser) { use SyntaxKind::*; @@ -52,7 +51,6 @@ define_scope! 
{ RParen ) } - impl super::Parse for AttrParamListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index ca6acb0a72..ac5d0a047b 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -73,7 +73,7 @@ impl super::Parse for ItemListScope { match parser.current_kind() { Some(FnKw) => parser.parse(super::func::FnScope::default(), checkpoint), - Some(StructKw) => parser.parse(StructScope::default(), checkpoint), + Some(StructKw) => parser.parse(super::struct_::StructScope::default(), checkpoint), Some(EnumKw) => parser.parse(EnumScope::default(), checkpoint), Some(TraitKw) => parser.parse(TraitScope::default(), checkpoint), Some(ImplKw) => parser.parse(ImplScope::default(), checkpoint), @@ -104,17 +104,6 @@ impl Modifier { } } -define_scope! { - StructScope, - Struct, - Inheritance -} -impl super::Parse for StructScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() - } -} - define_scope! { EnumScope, Enum, diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index a70687b95c..0e79e046cc 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -10,6 +10,7 @@ mod attr; mod func; mod item; mod path; +mod struct_; /// Parser to build a rowan syntax tree. pub struct Parser { diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs new file mode 100644 index 0000000000..3f816695c9 --- /dev/null +++ b/crates/parser2/src/parser/struct_.rs @@ -0,0 +1,88 @@ +use crate::SyntaxKind; + +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! { + StructScope, + Struct, + Inheritance +} +impl super::Parse for StructScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Struct); + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the struct name", None) + } + + parser.bump_trivias(true); + if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(StructFieldDefListScope::default(), None); + } else { + parser.error_and_recover("expected the struct field definition", None); + } + } +} + +define_scope! { + StructFieldDefListScope, + StructFieldDefList, + RecoverySet( + RBrace + ) +} +impl super::Parse for StructFieldDefListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + parser.bump_trivias(true); + while !matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { + parser.parse(StructFieldDefScope::default(), None); + parser.bump_trivias(true); + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_recover( + "expected the closing brace of the struct field definition", + None, + ); + } + } +} + +define_scope! 
{ + StructFieldDefScope, + StructFieldDef, + RecoverySet( + Newline + ) +} +impl super::Parse for StructFieldDefScope { + fn parse(&mut self, parser: &mut Parser) { + if matches!( + parser.current_kind(), + Some(SyntaxKind::Pound | SyntaxKind::DocComment) + ) { + parser.parse(super::attr::AttrListScope::default(), None); + } + parser.bump_trivias(true); + + parser.bump_if(SyntaxKind::PubKw); + parser.bump_trivias(false); + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the field name", None); + } + parser.bump_trivias(false); + if !parser.bump_if(SyntaxKind::Colon) { + parser.error_and_recover("expected `name: type` for the field definition", None); + } + parser.bump_trivias(false); + parser.parse(super::path::PathScope::default(), None); + if !matches!( + parser.peek_non_trivia(false), + Some(SyntaxKind::Newline) | Some(SyntaxKind::RBrace) + ) { + parser.error_and_recover("expected newline after the field definition", None); + } + } +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index ff54a28a09..ef47c6fdca 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -324,8 +324,8 @@ pub enum SyntaxKind { Visibility, /// `x: i32` - FieldDef, - FieldDefList, + StructFieldDef, + StructFieldDefList, /// `(i32, u32)` Tuple, From e54f5d88a0dfc3ce52ada77d1d386638cf4e4039 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 17 Jan 2023 00:59:21 +0100 Subject: [PATCH 014/678] Add proc-macro to generate insta tests from a fixture directory --- crates/test-utils/Cargo.toml | 2 + crates/test-utils/macros/Cargo.toml | 12 ++ crates/test-utils/macros/src/lib.rs | 176 ++++++++++++++++++++++++++++ crates/test-utils/src/lib.rs | 20 ++++ 4 files changed, 210 insertions(+) create mode 100644 crates/test-utils/macros/Cargo.toml create mode 100644 crates/test-utils/macros/src/lib.rs diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index beceb82903..1dc56a0a0a 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -14,6 +14,7 @@ fe-common = {path = "../common", version = "^0.20.0-alpha"} fe-driver = {path = "../driver", version = "^0.20.0-alpha"} fe-yulc = {path = "../yulc", version = "^0.20.0-alpha", optional = true, features = ["solc-backend"]} fe-analyzer = {path = "../analyzer", version = "^0.20.0-alpha"} +fe-test-utils-macros = { path = "macros", version = "0.20.0-alpha" } test-files = {path = "../test-files", package = "fe-test-files" } hex = "0.4" primitive-types = {version = "0.12", default-features = false, features = ["rlp"]} @@ -26,5 +27,6 @@ insta = { default-features = false, version = "1.26" } # used by ethabi, we need to force the js feature for wasm support getrandom = { version = "0.2.3", features = ["js"] } + [features] solc-backend = ["fe-yulc", "solc", "fe-driver/solc-backend"] diff --git a/crates/test-utils/macros/Cargo.toml b/crates/test-utils/macros/Cargo.toml new file mode 100644 index 0000000000..691c864b6b --- /dev/null +++ b/crates/test-utils/macros/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "fe-test-utils-macros" +version = "0.20.0-alpha" +edition = "2021" + +[lib] +proc_macro = true + +[dependencies] +syn = { version = "1.0", features = ["full"] } +proc-macro2 = "1.0" +quote = "1.0" \ No newline at end of file diff --git a/crates/test-utils/macros/src/lib.rs b/crates/test-utils/macros/src/lib.rs new file mode 100644 index 0000000000..bb5894725a --- /dev/null +++ b/crates/test-utils/macros/src/lib.rs @@ -0,0 +1,176 
@@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use quote::quote; + +type Error = syn::Error; +type Result = syn::Result; + +#[proc_macro] +pub fn build_snap_tests(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + match expand(input) { + Ok(ts) => ts, + Err(err) => err.to_compile_error().into(), + } +} + +fn expand(input: proc_macro::TokenStream) -> Result { + let args: Args = syn::parse(input)?; + + let builder = SnapTestBuilder::from_args(args)?; + builder.build().map(|ts| ts.into()) +} + +struct SnapTestBuilder { + fixture_dir: PathBuf, + snapshot_dir: PathBuf, + target_fn: syn::Path, + insta_assert_macro: syn::Path, +} + +impl SnapTestBuilder { + fn from_args(args: Args) -> Result { + let workspace_root = cargo_workspace_dir(); + + let fixture_dir: PathBuf = workspace_root.join(args.fixture_dir.value()); + let snapshot_dir: PathBuf = workspace_root.join(args.snapshot_dir.value()); + + if !fixture_dir.is_dir() | !fixture_dir.exists() { + return Err(Error::new_spanned( + args.fixture_dir, + format! {"invalid path for `fixture_dir`: `{}` is invalid path", + fixture_dir.display()}, + )); + } else if !snapshot_dir.is_dir() { + return Err(Error::new_spanned( + args.snapshot_dir, + format! {"invalid path for `snapshot_dir`: `{}` is invalid path", + snapshot_dir.display()}, + )); + } + + Ok(Self { + fixture_dir, + snapshot_dir, + target_fn: args.target_fn, + insta_assert_macro: args.insta_assert_macro, + }) + } + + fn build(&self) -> Result { + let mut tests = Vec::new(); + + let dir = fs::read_dir(&self.fixture_dir).unwrap(); + for fixture in dir.flatten() { + let fixture_path = fixture.path(); + if fixture_path.is_file() + && fixture_path.extension().and_then(|ext| ext.to_str()) == Some("fe") + { + tests.push(self.build_test(&fixture_path)); + } + } + + Ok(quote! { + #(#tests)* + }) + } + + fn build_test(&self, fixture_file: &Path) -> proc_macro2::TokenStream { + let file_name = fixture_file.file_name().unwrap().to_str().unwrap(); + let file_stem_name = fixture_file.file_stem().unwrap().to_str().unwrap(); + let test_fn_ident = syn::Ident::new(file_stem_name, proc_macro2::Span::call_site()); + let fixture_file = fixture_file.to_str().unwrap(); + + let snapshot_dir = self.snapshot_dir.to_str().unwrap(); + let target_fn = &self.target_fn; + let insta_assert_macro = &self.insta_assert_macro; + + quote! { + #[test] + fn #test_fn_ident() { + let input = std::fs::read_to_string(#fixture_file).unwrap(); + let snapshot = #target_fn(&input); + fe_compiler_test_utils::_insta::with_settings! ( + { + snapshot_path => #snapshot_dir, + input_file => Some(#file_name.into()), + prepend_module_to_snapshot => false, + }, + { + #insta_assert_macro!(snapshot); + }) + } + + } + } +} + +// FIXME: This is quite hacky and should be removed when `span::source_file` is +// stabilized. +// See [`Tracking issue for proc_macro::Span inspection APIs #54725`](https://github.com/rust-lang/rust/issues/54725) for more information. +fn cargo_workspace_dir() -> PathBuf { + let mut cargo_workspace_dir: PathBuf = env!["CARGO_MANIFEST_DIR"].into(); + + for _ in 0..2 { + cargo_workspace_dir.pop(); + } + + cargo_workspace_dir +} +struct Args { + fixture_dir: syn::LitStr, + snapshot_dir: syn::LitStr, + target_fn: syn::Path, + insta_assert_macro: syn::Path, +} + +impl syn::parse::Parse for Args { + fn parse(input: syn::parse::ParseStream) -> Result { + let error_msg = "expected `build_snap_tests! 
{ + fixture_dir: .., + snapshot_dir: .., + target_fn: .., + insta_assert_macro: .., + }`"; + + let ident = input.parse::()?; + if ident != "fixture_dir" { + return Err(Error::new_spanned(ident, error_msg)); + } + input.parse::()?; + let fixture_dir = input.parse::()?; + input.parse::()?; + + let ident = input.parse::()?; + if ident != "snapshot_dir" { + return Err(Error::new_spanned(ident, error_msg)); + } + input.parse::()?; + let snapshot_dir = input.parse::()?; + input.parse::()?; + + let ident = input.parse::()?; + if ident != "target_fn" { + return Err(Error::new_spanned(ident, error_msg)); + } + input.parse::()?; + let target_fn = input.parse::()?; + input.parse::()?; + + let ident = input.parse::()?; + if ident != "insta_assert_macro" { + return Err(Error::new_spanned(ident, error_msg)); + } + input.parse::()?; + let insta_assert_macro = input.parse::()?; + + Ok(Self { + fixture_dir, + snapshot_dir, + target_fn, + insta_assert_macro, + }) + } +} diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index df4112c5cb..9aabaa975a 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -1,7 +1,13 @@ +#[doc(hidden)] +pub use fe_test_utils_macros::build_snap_tests as _build_snap_tests; +#[doc(hidden)] +pub use insta as _insta; + use evm_runtime::{ExitReason, Handler}; use fe_common::diagnostics::print_diagnostics; use fe_common::utils::keccak; use fe_driver as driver; + use primitive_types::{H160, U256}; use std::cell::RefCell; use std::collections::BTreeMap; @@ -23,6 +29,20 @@ macro_rules! assert_harness_gas_report { } } +/// Build a set of snapshot tests from a directory of fixtures. +/// `fixture_dir` and `snapshot_dir` should be relative to the workspace root. +#[macro_export] +macro_rules! build_snap_tests { + ($fixture_dir: literal, $snapshot_dir: literal, $target_fn: path, $insta_assert_macro: path) => { + fe_compiler_test_utils::_build_snap_tests! 
{ + fixture_dir: $fixture_dir, + snapshot_dir: $snapshot_dir, + target_fn: $target_fn, + insta_assert_macro: $insta_assert_macro + } + }; +} + #[derive(Default, Debug)] pub struct GasReporter { records: RefCell>, From e676cb44b2db7ea8de02ccd14760d46329101a00 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 17 Jan 2023 13:23:00 +0100 Subject: [PATCH 015/678] Make dependencies to be aggregated into --- Cargo.lock | 10 ++++ crates/test-utils/macros/src/lib.rs | 49 ++++++++++------- crates/test-utils/src/_macro_support.rs | 71 +++++++++++++++++++++++++ crates/test-utils/src/lib.rs | 19 ++----- 4 files changed, 113 insertions(+), 36 deletions(-) create mode 100644 crates/test-utils/src/_macro_support.rs diff --git a/Cargo.lock b/Cargo.lock index fd847604e9..28f92e70c3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -638,6 +638,7 @@ dependencies = [ "fe-common", "fe-driver", "fe-test-files", + "fe-test-utils-macros", "fe-yulc", "getrandom 0.2.8", "hex", @@ -758,6 +759,15 @@ dependencies = [ "include_dir", ] +[[package]] +name = "fe-test-utils-macros" +version = "0.20.0-alpha" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "fe-yulc" version = "0.20.0-alpha" diff --git a/crates/test-utils/macros/src/lib.rs b/crates/test-utils/macros/src/lib.rs index bb5894725a..f67a33aadd 100644 --- a/crates/test-utils/macros/src/lib.rs +++ b/crates/test-utils/macros/src/lib.rs @@ -27,7 +27,7 @@ struct SnapTestBuilder { fixture_dir: PathBuf, snapshot_dir: PathBuf, target_fn: syn::Path, - insta_assert_macro: syn::Path, + debug_snap: bool, } impl SnapTestBuilder { @@ -55,7 +55,7 @@ impl SnapTestBuilder { fixture_dir, snapshot_dir, target_fn: args.target_fn, - insta_assert_macro: args.insta_assert_macro, + debug_snap: args.debug_snap.value(), }) } @@ -85,24 +85,33 @@ impl SnapTestBuilder { let snapshot_dir = self.snapshot_dir.to_str().unwrap(); let target_fn = &self.target_fn; - let insta_assert_macro = &self.insta_assert_macro; + let snapshot = syn::Ident::new("snapshot", proc_macro2::Span::call_site()); + let get_snapshot = if self.debug_snap { + quote! { + let #snapshot = format!("{:#?}", #target_fn(&input)); + } + } else { + quote! { + let #snapshot = #target_fn(&input); + } + }; quote! { #[test] fn #test_fn_ident() { - let input = std::fs::read_to_string(#fixture_file).unwrap(); - let snapshot = #target_fn(&input); - fe_compiler_test_utils::_insta::with_settings! ( - { - snapshot_path => #snapshot_dir, - input_file => Some(#file_name.into()), - prepend_module_to_snapshot => false, - }, - { - #insta_assert_macro!(snapshot); - }) + let input = ::std::fs::read_to_string(#fixture_file).unwrap(); + #get_snapshot + let mut settings = ::fe_compiler_test_utils::_macro_support::_insta::Settings::new(); + settings.set_snapshot_path(#snapshot_dir); + settings.set_input_file(#file_name); + settings.set_prepend_module_to_snapshot(false); + + + ::fe_compiler_test_utils::_insta_assert_snapshot!{ + #snapshot, + settings + } } - } } } @@ -123,7 +132,7 @@ struct Args { fixture_dir: syn::LitStr, snapshot_dir: syn::LitStr, target_fn: syn::Path, - insta_assert_macro: syn::Path, + debug_snap: syn::LitBool, } impl syn::parse::Parse for Args { @@ -132,7 +141,7 @@ impl syn::parse::Parse for Args { fixture_dir: .., snapshot_dir: .., target_fn: .., - insta_assert_macro: .., + debug_snap: .. 
}`"; let ident = input.parse::()?; @@ -160,17 +169,17 @@ impl syn::parse::Parse for Args { input.parse::()?; let ident = input.parse::()?; - if ident != "insta_assert_macro" { + if ident != "debug_snap" { return Err(Error::new_spanned(ident, error_msg)); } input.parse::()?; - let insta_assert_macro = input.parse::()?; + let debug_snap = input.parse::()?; Ok(Self { fixture_dir, snapshot_dir, target_fn, - insta_assert_macro, + debug_snap, }) } } diff --git a/crates/test-utils/src/_macro_support.rs b/crates/test-utils/src/_macro_support.rs new file mode 100644 index 0000000000..b46e00043f --- /dev/null +++ b/crates/test-utils/src/_macro_support.rs @@ -0,0 +1,71 @@ +#[doc(hidden)] +pub use insta as _insta; + +// NOTE: Borrowed from `insta` implementation from +// [here](https://docs.rs/insta/1.26/src/insta/macros.rs.html#2-16) +/// Utility macro to return the name of the current function. +#[doc(hidden)] +#[macro_export] +macro_rules! _function_name { + () => {{ + fn f() {} + fn type_name_of_val(_: T) -> &'static str { + std::any::type_name::() + } + let mut name = type_name_of_val(f).strip_suffix("::f").unwrap_or(""); + while let Some(rest) = name.strip_suffix("::{{closure}}") { + name = rest; + } + name + }}; +} + +#[doc(hidden)] +#[macro_export] +macro_rules! _insta_assert_snapshot { + ($value: expr, $setting: expr) => { + $setting.bind(|| { + $crate::_macro_support::_insta::_macro_support::assert_snapshot( + $crate::_macro_support::_insta::_macro_support::AutoName.into(), + &$value, + env!("CARGO_MANIFEST_DIR"), + $crate::_function_name!(), + module_path!(), + file!(), + line!(), + stringify!($value), + ) + .unwrap() + }) + }; +} + +/// Build a set of snapshot tests from a directory of fixtures. +/// `fixture_dir` and `snapshot_dir` should be relative to the workspace root. +/// `target_fn` should take `&str` and return ``. +#[macro_export] +macro_rules! build_snap_tests { + ($fixture_dir: literal, $snapshot_dir: literal, $target_fn: path) => { + fe_compiler_test_utils::_build_snap_tests! { + fixture_dir: $fixture_dir, + snapshot_dir: $snapshot_dir, + target_fn: $target_fn, + debug_snap: false + } + }; +} + +/// Build a set of snapshot tests from a directory of fixtures. +/// `fixture_dir` and `snapshot_dir` should be relative to the workspace root. +/// `target_fn` should take `&str` and return ``. +#[macro_export] +macro_rules! build_debug_snap_tests { + ($fixture_dir: literal, $snapshot_dir: literal, $target_fn: path) => { + fe_compiler_test_utils::_build_snap_tests! { + fixture_dir: $fixture_dir, + snapshot_dir: $snapshot_dir, + target_fn: $target_fn, + debug_snap: true + } + }; +} diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 9aabaa975a..b8f385f655 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -1,7 +1,8 @@ #[doc(hidden)] -pub use fe_test_utils_macros::build_snap_tests as _build_snap_tests; +pub mod _macro_support; + #[doc(hidden)] -pub use insta as _insta; +pub use fe_test_utils_macros::build_snap_tests as _build_snap_tests; use evm_runtime::{ExitReason, Handler}; use fe_common::diagnostics::print_diagnostics; @@ -29,20 +30,6 @@ macro_rules! assert_harness_gas_report { } } -/// Build a set of snapshot tests from a directory of fixtures. -/// `fixture_dir` and `snapshot_dir` should be relative to the workspace root. -#[macro_export] -macro_rules! build_snap_tests { - ($fixture_dir: literal, $snapshot_dir: literal, $target_fn: path, $insta_assert_macro: path) => { - fe_compiler_test_utils::_build_snap_tests! 
{ - fixture_dir: $fixture_dir, - snapshot_dir: $snapshot_dir, - target_fn: $target_fn, - insta_assert_macro: $insta_assert_macro - } - }; -} - #[derive(Default, Debug)] pub struct GasReporter { records: RefCell>, From c9b49ec4d493c6b3ec3a05e186f20642e3a5ae07 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 17 Jan 2023 13:35:57 +0100 Subject: [PATCH 016/678] Bump up `insta` to `1.26.0` --- crates/analyzer/Cargo.toml | 2 +- crates/parser/Cargo.toml | 2 +- crates/parser2/Cargo.toml | 5 ++++- .../fe_compiler_tests__features__aug_assign-10.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-11.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-12.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-13.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-2.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-3.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-4.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-5.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-6.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-7.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-8.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign-9.snap | 6 ++++++ .../fe_compiler_tests__features__aug_assign.snap | 6 ++++++ .../fe_compiler_tests__features__execution_tests-2.snap | 6 ++++++ .../fe_compiler_tests__features__execution_tests-3.snap | 6 ++++++ .../fe_compiler_tests__features__execution_tests-4.snap | 6 ++++++ .../fe_compiler_tests__features__execution_tests-5.snap | 6 ++++++ .../fe_compiler_tests__features__execution_tests.snap | 6 ++++++ .../snapshots/fe_compiler_tests__features__map-2.snap | 9 +++++++++ .../snapshots/fe_compiler_tests__features__map-3.snap | 9 +++++++++ .../snapshots/fe_compiler_tests__features__map-4.snap | 9 +++++++++ .../snapshots/fe_compiler_tests__features__map-5.snap | 9 +++++++++ .../snapshots/fe_compiler_tests__features__map-6.snap | 9 +++++++++ .../src/snapshots/fe_compiler_tests__features__map.snap | 9 +++++++++ ...e_compiler_tests__features__signext_int_array1-2.snap | 6 ++++++ .../fe_compiler_tests__features__signext_int_array1.snap | 6 ++++++ ...e_compiler_tests__features__signext_int_array2-2.snap | 6 ++++++ .../fe_compiler_tests__features__signext_int_array2.snap | 6 ++++++ 31 files changed, 192 insertions(+), 3 deletions(-) create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap create mode 100644 
crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap create mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap diff --git a/crates/analyzer/Cargo.toml b/crates/analyzer/Cargo.toml index a2c50aac1c..fb33129caf 100644 --- a/crates/analyzer/Cargo.toml +++ b/crates/analyzer/Cargo.toml @@ -25,7 +25,7 @@ petgraph = "0.6.0" smol_str = "0.1.21" [dev-dependencies] -insta = { default-features = false, version = "1.7.1" } +insta = { default-features = false, version = "1.26.0" } rstest = "0.6.4" test-files = {path = "../test-files", package = "fe-test-files" } fe-library = {path = "../library"} diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml index 520e0f7b24..9e020cadb9 100644 --- a/crates/parser/Cargo.toml +++ b/crates/parser/Cargo.toml @@ -26,7 +26,7 @@ wasm-bindgen = "0.2" [dev-dependencies] fe-test-files = {path = "../test-files", version = "^0.20.0-alpha"} -insta = { default-features = false, version = "1.7.1" } +insta = { default-features = false, version = "1.26.0" } wasm-bindgen-test = "0.3" pretty_assertions = "1.0.0" criterion = "0.3.5" diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index 25613ea814..42c872ba2d 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -13,4 +13,7 @@ description = "Parser lib for Fe." 
rowan = "0.15.10" logos = "0.12.1" fxhash = "0.2.1" -lazy_static = "1.4.0" \ No newline at end of file +lazy_static = "1.4.0" + +[dev-dependencies] +fe-compiler-test-utils = { path = "../test-utils" } \ No newline at end of file diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap new file mode 100644 index 0000000000..d4a3f5deae --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bit_xor([Uint(26), Uint(42)]) used 496 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap new file mode 100644 index 0000000000..1de8ef91c5 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bit_and([Uint(26), Uint(42)]) used 512 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap new file mode 100644 index 0000000000..8951446b54 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +add_from_sto([Uint(2), Uint(5)]) used 22618 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap new file mode 100644 index 0000000000..2995cf5704 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +add_from_mem([Uint(2), Uint(5)]) used 807 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap new file mode 100644 index 0000000000..6f7c291973 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +sub([Uint(42), Uint(26)]) used 288 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap new file mode 100644 index 0000000000..03cc0eab67 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +mul([Uint(10), Uint(42)]) used 335 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap new file mode 100644 index 0000000000..909c2eaced --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +div([Uint(43), Uint(5)]) used 331 gas + diff --git 
a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap new file mode 100644 index 0000000000..2a113b358b --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +mod([Uint(43), Uint(5)]) used 353 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap new file mode 100644 index 0000000000..077c7542cd --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +pow([Uint(3), Uint(5)]) used 618 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap new file mode 100644 index 0000000000..75e0b47ec1 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +lshift([Uint(1), Uint(7)]) used 433 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap new file mode 100644 index 0000000000..9299651af8 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +rshift([Uint(128), Uint(7)]) used 455 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap new file mode 100644 index 0000000000..32352fa398 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bit_or([Uint(26), Uint(42)]) used 477 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap new file mode 100644 index 0000000000..b29f4fc6b9 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +add([Uint(2), Uint(5)]) used 269 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap new file mode 100644 index 0000000000..2ae9708e79 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +run_test([]) used 32 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap new file mode 100644 index 0000000000..2ae9708e79 --- /dev/null +++ 
b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +run_test([]) used 32 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap new file mode 100644 index 0000000000..2ae9708e79 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +run_test([]) used 32 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap new file mode 100644 index 0000000000..47753d7dd7 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +run_test([]) used 35 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap new file mode 100644 index 0000000000..7bee56fc1a --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +run_test([]) used 1361 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap new file mode 100644 index 0000000000..cd73de23d2 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap @@ -0,0 +1,9 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +write_bar([Uint(4), Uint(42)]) used 22494 gas +write_bar([Uint(26), Uint(12)]) used 22494 gas +read_bar([Uint(4)]) used 468 gas +read_bar([Uint(26)]) used 468 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap new file mode 100644 index 0000000000..cd73de23d2 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap @@ -0,0 +1,9 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +write_bar([Uint(4), Uint(42)]) used 22494 gas +write_bar([Uint(26), Uint(12)]) used 22494 gas +read_bar([Uint(4)]) used 468 gas +read_bar([Uint(26)]) used 468 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap new file mode 100644 index 0000000000..845b4aff1c --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap @@ -0,0 +1,9 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +write_bar([Uint(4), Uint(42)]) used 22482 gas +write_bar([Uint(26), Uint(12)]) used 22482 gas +read_bar([Uint(4)]) used 438 gas +read_bar([Uint(26)]) used 438 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap new file mode 100644 index 0000000000..845b4aff1c --- /dev/null +++ 
b/crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap @@ -0,0 +1,9 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +write_bar([Uint(4), Uint(42)]) used 22482 gas +write_bar([Uint(26), Uint(12)]) used 22482 gas +read_bar([Uint(4)]) used 438 gas +read_bar([Uint(26)]) used 438 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap new file mode 100644 index 0000000000..845b4aff1c --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap @@ -0,0 +1,9 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +write_bar([Uint(4), Uint(42)]) used 22482 gas +write_bar([Uint(26), Uint(12)]) used 22482 gas +read_bar([Uint(4)]) used 438 gas +read_bar([Uint(26)]) used 438 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map.snap new file mode 100644 index 0000000000..38a4c5cec1 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__map.snap @@ -0,0 +1,9 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +write_bar([Uint(4), Uint(42)]) used 22401 gas +write_bar([Uint(26), Uint(12)]) used 22401 gas +read_bar([Uint(4)]) used 383 gas +read_bar([Uint(26)]) used 383 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap new file mode 100644 index 0000000000..a5f9a97382 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([Int(100)]) used 22635 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap new file mode 100644 index 0000000000..5b6c0372db --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639926)]) used 22635 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap new file mode 100644 index 0000000000..97511dc7aa --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +i32_array([FixedArray([Int(115792089237316195423570985008687907853269984665640564039457584007913129639926), Int(100), Int(115792089237316195423570985008687907853269984665640564039457584007910982156288), Int(2147483647)])]) used 1380 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap new file mode 100644 index 0000000000..c48b5606a5 --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap @@ -0,0 +1,6 @@ +--- 
+source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +i8_array([FixedArray([Int(115792089237316195423570985008687907853269984665640564039457584007913129639926), Int(100), Int(115792089237316195423570985008687907853269984665640564039457584007913129639808), Int(127)])]) used 1312 gas + From 903868c5f56b051f65dda71cd89e01316045ca8a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 17 Jan 2023 13:36:06 +0100 Subject: [PATCH 017/678] Add tests for struct definition parsing --- crates/parser2/build.rs | 4 + crates/parser2/src/lexer.rs | 9 ++ crates/parser2/src/lib.rs | 9 ++ crates/parser2/src/parser/item.rs | 26 ++++- crates/parser2/src/parser/mod.rs | 28 ++++- crates/parser2/src/parser/path.rs | 2 +- crates/parser2/src/parser/struct_.rs | 2 +- crates/parser2/src/syntax_kind.rs | 4 +- .../test_files/syntax_node/struct_def.fe | 21 ++++ .../test_files/syntax_node/struct_def.snap | 103 ++++++++++++++++++ crates/parser2/tests/main.rs | 1 + crates/parser2/tests/syntax_node.rs | 17 +++ crates/test-utils/macros/src/lib.rs | 2 +- 13 files changed, 216 insertions(+), 12 deletions(-) create mode 100644 crates/parser2/build.rs create mode 100644 crates/parser2/test_files/syntax_node/struct_def.fe create mode 100644 crates/parser2/test_files/syntax_node/struct_def.snap create mode 100644 crates/parser2/tests/main.rs create mode 100644 crates/parser2/tests/syntax_node.rs diff --git a/crates/parser2/build.rs b/crates/parser2/build.rs new file mode 100644 index 0000000000..e041896d56 --- /dev/null +++ b/crates/parser2/build.rs @@ -0,0 +1,4 @@ +fn main() { + #[cfg(test)] + println!("cargo:rerun-if-changed=./std"); +} diff --git a/crates/parser2/src/lexer.rs b/crates/parser2/src/lexer.rs index 815303a448..8150d2843d 100644 --- a/crates/parser2/src/lexer.rs +++ b/crates/parser2/src/lexer.rs @@ -8,6 +8,15 @@ pub struct Lexer<'s> { inner: logos::Lexer<'s, SyntaxKind>, } +impl<'s> Lexer<'s> { + pub fn new(text: &'s str) -> Self { + Self { + peek: None, + inner: logos::Lexer::new(text), + } + } +} + impl<'s> TokenStream for Lexer<'s> { type Token = Token<'s>; diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index 40108c6df3..d2d41970e8 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -4,9 +4,18 @@ pub mod syntax_kind; pub mod syntax_node; pub use syntax_kind::SyntaxKind; +use syntax_node::SyntaxNode; pub type TextRange = rowan::TextRange; +pub fn parse_source_file(text: &str) -> (SyntaxNode, Vec) { + let lexer = lexer::Lexer::new(text); + let mut parser = parser::Parser::new(lexer); + + parser.parse(parser::RootScope::default(), None); + parser.finish() +} + /// An parse error which is accumulated in the [`parser::Parser`] while parsing. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ParseError { diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index ac5d0a047b..04d7bce46f 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -4,6 +4,17 @@ use crate::SyntaxKind; use super::{define_scope, token_stream::TokenStream, Parser}; +define_scope! { + RootScope, + Root, + RecoverySet() +} +impl super::Parse for RootScope { + fn parse(&mut self, parser: &mut Parser) { + parser.parse(ItemListScope::default(), None); + } +} + define_scope! 
{ ItemListScope, ItemList, @@ -28,9 +39,13 @@ impl super::Parse for ItemListScope { fn parse(&mut self, parser: &mut Parser) { use crate::SyntaxKind::*; - while parser.current_kind().is_some() { - let mut checkpoint = None; + loop { + parser.bump_trivias(true); + if parser.current_kind().is_none() { + break; + } + let mut checkpoint = None; parser.bump_trivias(true); if let Some(DocComment) | Some(Pound) = parser.current_kind() { checkpoint.get_or_insert_with(|| parser.checkpoint()); @@ -42,9 +57,10 @@ impl super::Parse for ItemListScope { Some(PubKw) => { checkpoint.get_or_insert_with(|| parser.checkpoint()); parser.bump(); + parser.bump_trivias(true); if parser.current_kind() == Some(UnsafeKw) { - parser.bump_trivias(true); + parser.bump(); Modifier::PubAndUnsafe } else { Modifier::Pub @@ -64,6 +80,7 @@ impl super::Parse for ItemListScope { continue; } }; + parser.bump_trivias(true); if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { parser.error("expected `fn` after `unsafe` keyword"); @@ -81,7 +98,8 @@ impl super::Parse for ItemListScope { Some(ConstKw) => parser.parse(ConstScope::default(), checkpoint), Some(ExternKw) => parser.parse(ExternScope::default(), checkpoint), Some(TypeKw) => parser.parse(TypeAliasScope::default(), checkpoint), - _ => parser.error_and_recover("expected item", checkpoint), + tok => parser + .error_and_recover(&format! {"expected item: but got {:?}", tok}, checkpoint), } } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 0e79e046cc..3c18c49255 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -1,6 +1,8 @@ +pub(crate) use item::RootScope; + use fxhash::FxHashSet; -use crate::{ParseError, SyntaxKind, TextRange}; +use crate::{syntax_node::SyntaxNode, ParseError, SyntaxKind, TextRange}; use self::token_stream::{BackTrackableTokenStream, SyntaxToken, TokenStream}; @@ -25,6 +27,17 @@ pub struct Parser { } impl Parser { + /// Create a parser with the given token stream. + pub fn new(stream: S) -> Self { + Self { + stream: BackTrackableTokenStream::new(stream), + builder: rowan::GreenNodeBuilder::new(), + scopes: Vec::new(), + errors: Vec::new(), + current_pos: rowan::TextSize::from(0), + } + } + /// Returns the current token of the parser. pub fn current_token(&mut self) -> Option<&S::Token> { self.stream.peek() @@ -35,6 +48,13 @@ impl Parser { self.current_token().map(|token| token.syntax_kind()) } + /// Finish the parsing and return the syntax tree. + pub fn finish(self) -> (SyntaxNode, Vec) { + debug_assert!(self.scopes.is_empty()); + + (SyntaxNode::new_root(self.builder.finish()), self.errors) + } + /// Invoke the scope to parse. The scope is wrapped up by the node specified /// by the scope. /// @@ -209,6 +229,7 @@ impl Parser { let scope = self.scopes.pop().unwrap(); self.builder .start_node_at(checkpoint, scope.syntax_kind().into()); + self.builder.finish_node(); } } @@ -265,7 +286,7 @@ where macro_rules! define_scope { ($scope_name: ident, $kind: path ,Inheritance) => { - #[derive(Default,Debug, Clone, Copy)] + #[derive(Default, Debug, Clone, Copy)] pub(crate) struct $scope_name {} impl crate::parser::ParsingScope for $scope_name { @@ -294,10 +315,11 @@ macro_rules! define_scope { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { lazy_static::lazy_static! 
{ pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { + #[allow(unused)] use crate::SyntaxKind::*; let set: fxhash::FxHashSet = vec![ $($recoveries), * - ].into_iter().map(|kind| kind.into()).collect(); + ].into_iter().map(|kind: SyntaxKind| kind.into()).collect(); crate::parser::RecoveryMethod::RecoverySet(set) }; diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index e0b036fc27..30c88c8772 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -20,7 +20,7 @@ impl super::Parse for PathScope { define_scope! { PathSegmentScope, - Path, + PathSegment, Inheritance } impl super::Parse for PathSegmentScope { diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 3f816695c9..2419e33c05 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -9,7 +9,7 @@ define_scope! { } impl super::Parse for StructScope { fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::Struct); + parser.bump_expected(SyntaxKind::StructKw); parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::Ident) { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index ef47c6fdca..edbf0b1962 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -9,9 +9,9 @@ pub enum SyntaxKind { // Atom kinds. These are leaf nodes. #[error] InvalidToken = 0, - #[regex(r"\n|\r\n|\r")] + #[regex(r"[\n|\r\n|\r]+")] Newline, - #[regex(r"\s")] + #[regex(r"[ ]+")] WhiteSpace, /// `foo` #[regex("[a-zA-Z_][a-zA-Z0-9_]*")] diff --git a/crates/parser2/test_files/syntax_node/struct_def.fe b/crates/parser2/test_files/syntax_node/struct_def.fe new file mode 100644 index 0000000000..8b590b529c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/struct_def.fe @@ -0,0 +1,21 @@ +pub struct EmptyStruct { + +} + +/// DocComment1 +#attr +// normal comment +/// DocComment2 +pub struct StructAttr { + x: i32 + y: i32 +} + +#Event +pub struct StructFieldAttr { + /// `x` is a topic + #topic + x: i32 + + y: u32 +} diff --git a/crates/parser2/test_files/syntax_node/struct_def.snap b/crates/parser2/test_files/syntax_node/struct_def.snap new file mode 100644 index 0000000000..7b07437ef1 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/struct_def.snap @@ -0,0 +1,103 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..225 + ItemList@0..225 + Struct@0..27 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + WhiteSpace@10..11 " " + Ident@11..22 "EmptyStruct" + WhiteSpace@22..23 " " + StructFieldDefList@23..27 + LBrace@23..24 "{" + Newline@24..26 "\n\n" + RBrace@26..27 "}" + Newline@27..29 "\n\n" + Struct@29..130 + AttrList@29..85 + DocCommentAttr@29..45 + DocComment@29..44 "/// DocComment1" + Newline@44..45 "\n" + Attr@45..51 + Pound@45..46 "#" + Ident@46..50 "attr" + Newline@50..51 "\n" + Comment@51..68 "// normal comment" + Newline@68..69 "\n" + DocCommentAttr@69..85 + DocComment@69..84 "/// DocComment2" + Newline@84..85 "\n" + PubKw@85..88 "pub" + WhiteSpace@88..89 " " + StructKw@89..95 "struct" + WhiteSpace@95..96 " " + Ident@96..106 "StructAttr" + WhiteSpace@106..107 " " + StructFieldDefList@107..130 + LBrace@107..108 "{" + Newline@108..109 "\n" + WhiteSpace@109..113 " " + StructFieldDef@113..119 + Ident@113..114 "x" + Colon@114..115 ":" + WhiteSpace@115..116 " " + Path@116..119 + PathSegment@116..119 + Ident@116..119 "i32" + WhiteSpace@119..123 " " 
+ StructFieldDef@123..129 + Ident@123..124 "y" + Colon@124..125 ":" + WhiteSpace@125..126 " " + Path@126..129 + PathSegment@126..129 + Ident@126..129 "i32" + RBrace@129..130 "}" + Newline@130..132 "\n\n" + Struct@132..224 + AttrList@132..139 + Attr@132..139 + Pound@132..133 "#" + Ident@133..138 "Event" + Newline@138..139 "\n" + PubKw@139..142 "pub" + WhiteSpace@142..143 " " + StructKw@143..149 "struct" + WhiteSpace@149..150 " " + Ident@150..165 "StructFieldAttr" + WhiteSpace@165..166 " " + StructFieldDefList@166..224 + LBrace@166..167 "{" + Newline@167..168 "\n" + WhiteSpace@168..172 " " + StructFieldDef@172..213 + AttrList@172..207 + DocCommentAttr@172..192 + DocComment@172..191 "/// `x` is a topic " + Newline@191..192 "\n" + WhiteSpace@192..196 " " + Attr@196..203 + Pound@196..197 "#" + Ident@197..202 "topic" + Newline@202..203 "\n" + WhiteSpace@203..207 " " + Ident@207..208 "x" + Colon@208..209 ":" + WhiteSpace@209..210 " " + Path@210..213 + PathSegment@210..213 + Ident@210..213 "i32" + WhiteSpace@213..217 " " + StructFieldDef@217..223 + Ident@217..218 "y" + Colon@218..219 ":" + WhiteSpace@219..220 " " + Path@220..223 + PathSegment@220..223 + Ident@220..223 "u32" + RBrace@223..224 "}" + Newline@224..225 "\n" + diff --git a/crates/parser2/tests/main.rs b/crates/parser2/tests/main.rs new file mode 100644 index 0000000000..f893f25815 --- /dev/null +++ b/crates/parser2/tests/main.rs @@ -0,0 +1 @@ +mod syntax_node; diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs new file mode 100644 index 0000000000..04478bedd2 --- /dev/null +++ b/crates/parser2/tests/syntax_node.rs @@ -0,0 +1,17 @@ +use fe_parser2::syntax_node::SyntaxNode; + +#[allow(unused)] +fn build_cst(input: &str) -> SyntaxNode { + let (cst, errors) = fe_parser2::parse_source_file(input); + for error in &errors { + println!("{}", error.msg); + } + assert! {errors.is_empty()} + cst +} + +fe_compiler_test_utils::build_debug_snap_tests! { + "parser2/test_files/syntax_node", + "parser2/test_files/syntax_node", + build_cst +} diff --git a/crates/test-utils/macros/src/lib.rs b/crates/test-utils/macros/src/lib.rs index f67a33aadd..78a677e332 100644 --- a/crates/test-utils/macros/src/lib.rs +++ b/crates/test-utils/macros/src/lib.rs @@ -92,7 +92,7 @@ impl SnapTestBuilder { } } else { quote! 
{ - let #snapshot = #target_fn(&input); + let #snapshot = format!("{}", #target_fn(&input)); } }; From 913d10c531968ebf72c119ca8a2036d3fb3e432e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 18 Jan 2023 00:16:59 +0100 Subject: [PATCH 018/678] Add parser for tuple def --- crates/parser2/src/parser/attr.rs | 2 +- crates/parser2/src/parser/mod.rs | 1 + crates/parser2/src/parser/path.rs | 2 +- crates/parser2/src/parser/struct_.rs | 8 +- crates/parser2/src/parser/tuple.rs | 35 +++++++++ crates/parser2/src/syntax_kind.rs | 2 +- .../test_files/syntax_node/struct_def.fe | 11 +++ .../test_files/syntax_node/struct_def.snap | 74 ++++++++++++++++++- 8 files changed, 127 insertions(+), 8 deletions(-) create mode 100644 crates/parser2/src/parser/tuple.rs diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index fc90f67c55..fdd39b7f8f 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -62,8 +62,8 @@ impl super::Parse for AttrParamListScope { parser.parse(AttrParam::default(), None); parser.bump_trivias(true); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(AttrParam::default(), None); parser.bump_trivias(true); + parser.parse(AttrParam::default(), None); } parser.bump_trivias(true); diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 3c18c49255..3915c63b43 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -13,6 +13,7 @@ mod func; mod item; mod path; mod struct_; +mod tuple; /// Parser to build a rowan syntax tree. pub struct Parser { diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 30c88c8772..8b990ecdd8 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -26,7 +26,7 @@ define_scope! { impl super::Parse for PathSegmentScope { fn parse(&mut self, parser: &mut Parser) { if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_bump("expected path segment", 1); + parser.error_and_recover("expected path segment", None); } } } diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 2419e33c05..26d1ba751a 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -1,6 +1,6 @@ use crate::SyntaxKind; -use super::{define_scope, token_stream::TokenStream, Parser}; +use super::{define_scope, token_stream::TokenStream, tuple::TupleDefScope, Parser}; define_scope! { StructScope, @@ -77,7 +77,11 @@ impl super::Parse for StructFieldDefScope { parser.error_and_recover("expected `name: type` for the field definition", None); } parser.bump_trivias(false); - parser.parse(super::path::PathScope::default(), None); + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(TupleDefScope::default(), None); + } else { + parser.parse(super::path::PathScope::default(), None); + } if !matches!( parser.peek_non_trivia(false), Some(SyntaxKind::Newline) | Some(SyntaxKind::RBrace) diff --git a/crates/parser2/src/parser/tuple.rs b/crates/parser2/src/parser/tuple.rs new file mode 100644 index 0000000000..2f6240c556 --- /dev/null +++ b/crates/parser2/src/parser/tuple.rs @@ -0,0 +1,35 @@ +use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser}; + +use crate::SyntaxKind; + +define_scope! 
{ + TupleDefScope, + TupleDef, + RecoverySet( + RParen, + Comma + ) +} +impl super::Parse for TupleDefScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LParen); + parser.bump_trivias(true); + if parser.bump_if(SyntaxKind::RParen) { + return; + } + + parser.parse(PathScope::default(), None); + parser.bump_trivias(true); + while parser.bump_if(SyntaxKind::Comma) { + parser.bump_trivias(true); + parser.parse(PathScope::default(), None); + parser.bump_trivias(true); + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::RParen) { + parser.error_and_recover("expected `)`", None); + parser.bump_if(SyntaxKind::RParen); + } + } +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index edbf0b1962..8241a1ec7c 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -328,7 +328,7 @@ pub enum SyntaxKind { StructFieldDefList, /// `(i32, u32)` - Tuple, + TupleDef, VariantDef, VariantDefList, diff --git a/crates/parser2/test_files/syntax_node/struct_def.fe b/crates/parser2/test_files/syntax_node/struct_def.fe index 8b590b529c..a1b7d35422 100644 --- a/crates/parser2/test_files/syntax_node/struct_def.fe +++ b/crates/parser2/test_files/syntax_node/struct_def.fe @@ -19,3 +19,14 @@ pub struct StructFieldAttr { y: u32 } + +pub struct StructWithTupleField { + x: (i32, u32) + y: ( + i32, + foo::Bar, + u32 + ) + z: () + +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/struct_def.snap b/crates/parser2/test_files/syntax_node/struct_def.snap index 7b07437ef1..e0c8a2cd4d 100644 --- a/crates/parser2/test_files/syntax_node/struct_def.snap +++ b/crates/parser2/test_files/syntax_node/struct_def.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..225 - ItemList@0..225 +Root@0..348 + ItemList@0..348 Struct@0..27 PubKw@0..3 "pub" WhiteSpace@3..4 " " @@ -99,5 +99,73 @@ Root@0..225 PathSegment@220..223 Ident@220..223 "u32" RBrace@223..224 "}" - Newline@224..225 "\n" + Newline@224..226 "\n\n" + Struct@226..348 + PubKw@226..229 "pub" + WhiteSpace@229..230 " " + StructKw@230..236 "struct" + WhiteSpace@236..237 " " + Ident@237..257 "StructWithTupleField" + WhiteSpace@257..258 " " + StructFieldDefList@258..348 + LBrace@258..259 "{" + Newline@259..260 "\n" + WhiteSpace@260..264 " " + StructFieldDef@264..277 + Ident@264..265 "x" + Colon@265..266 ":" + WhiteSpace@266..267 " " + TupleDef@267..277 + LParen@267..268 "(" + Path@268..271 + PathSegment@268..271 + Ident@268..271 "i32" + Comma@271..272 "," + WhiteSpace@272..273 " " + Path@273..276 + PathSegment@273..276 + Ident@273..276 "u32" + RParen@276..277 ")" + Newline@277..278 "\n" + WhiteSpace@278..282 " " + StructFieldDef@282..335 + Ident@282..283 "y" + Colon@283..284 ":" + WhiteSpace@284..285 " " + TupleDef@285..335 + LParen@285..286 "(" + Newline@286..287 "\n" + WhiteSpace@287..295 " " + Path@295..298 + PathSegment@295..298 + Ident@295..298 "i32" + Comma@298..299 "," + Newline@299..300 "\n" + WhiteSpace@300..308 " " + Path@308..316 + PathSegment@308..311 + Ident@308..311 "foo" + Colon2@311..313 "::" + PathSegment@313..316 + Ident@313..316 "Bar" + Comma@316..317 "," + Newline@317..318 "\n" + WhiteSpace@318..326 " " + Path@326..329 + PathSegment@326..329 + Ident@326..329 "u32" + Newline@329..330 "\n" + WhiteSpace@330..334 " " + RParen@334..335 ")" + Newline@335..336 "\n" + WhiteSpace@336..340 " " + StructFieldDef@340..345 + Ident@340..341 "z" + Colon@341..342 ":" + WhiteSpace@342..343 " " 
+ TupleDef@343..345 + LParen@343..344 "(" + RParen@344..345 ")" + Newline@345..347 "\n\n" + RBrace@347..348 "}" From 1dc19a46e386c50e14cf3f0ee56cafb0a1816bc5 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 18 Jan 2023 15:48:24 +0100 Subject: [PATCH 019/678] Add parser for generic parameter --- crates/parser2/src/parser/attr.rs | 7 +- crates/parser2/src/parser/item.rs | 8 +- crates/parser2/src/parser/mod.rs | 109 ++-- crates/parser2/src/parser/param.rs | 79 +++ crates/parser2/src/parser/struct_.rs | 20 +- crates/parser2/src/parser/token_stream.rs | 3 +- crates/parser2/src/parser/tuple.rs | 2 +- crates/parser2/src/syntax_kind.rs | 18 +- .../test_files/syntax_node/struct_def.fe | 29 +- .../test_files/syntax_node/struct_def.snap | 471 +++++++++++++----- crates/parser2/tests/syntax_node.rs | 1 + 11 files changed, 568 insertions(+), 179 deletions(-) create mode 100644 crates/parser2/src/parser/param.rs diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index fdd39b7f8f..c9554c4a05 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -5,7 +5,7 @@ use crate::SyntaxKind; define_scope! { AttrListScope, AttrList, - RecoverySet( + Override( Newline ) } @@ -40,14 +40,13 @@ impl super::Parse for AttrScope { if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(AttrParamListScope::default(), None); } - parser.bump_if(SyntaxKind::Newline); } } define_scope! { AttrParamListScope, AttrParamList, - RecoverySet( + Override( RParen ) } @@ -76,7 +75,7 @@ impl super::Parse for AttrParamListScope { define_scope! { AttrParam, AttrParam, - RecoverySet( + Override( Comma, RParen ) diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 04d7bce46f..c272f5724f 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -7,7 +7,7 @@ use super::{define_scope, token_stream::TokenStream, Parser}; define_scope! { RootScope, Root, - RecoverySet() + Override() } impl super::Parse for RootScope { fn parse(&mut self, parser: &mut Parser) { @@ -18,7 +18,7 @@ impl super::Parse for RootScope { define_scope! { ItemListScope, ItemList, - RecoverySet( + Override( FnKw, StructKw, EnumKw, @@ -149,17 +149,19 @@ impl super::Parse for TraitScope { #[derive(Debug, Clone)] struct ImplScope { syntax_kind: RefCell, + recovery_method: super::RecoveryMethod, } impl Default for ImplScope { fn default() -> Self { Self { syntax_kind: SyntaxKind::Impl.into(), + recovery_method: super::RecoveryMethod::inheritance_empty(), } } } impl super::ParsingScope for ImplScope { fn recovery_method(&self) -> &super::RecoveryMethod { - &super::RecoveryMethod::Inheritance + &self.recovery_method } fn syntax_kind(&self) -> SyntaxKind { diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 3915c63b43..34fc084f23 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -1,3 +1,5 @@ +use std::collections::VecDeque; + pub(crate) use item::RootScope; use fxhash::FxHashSet; @@ -11,6 +13,7 @@ pub mod token_stream; mod attr; mod func; mod item; +mod param; mod path; mod struct_; mod tuple; @@ -25,6 +28,7 @@ pub struct Parser { errors: Vec, current_pos: rowan::TextSize, + next_trivias: VecDeque, } impl Parser { @@ -36,12 +40,17 @@ impl Parser { scopes: Vec::new(), errors: Vec::new(), current_pos: rowan::TextSize::from(0), + next_trivias: VecDeque::new(), } } /// Returns the current token of the parser. 
pub fn current_token(&mut self) -> Option<&S::Token> { - self.stream.peek() + if !self.next_trivias.is_empty() { + Some(&self.next_trivias[0]) + } else { + self.stream.peek() + } } /// Returns the current token kind of the parser. @@ -106,7 +115,11 @@ impl Parser { /// Bumps the current token and /// current branch. pub fn bump(&mut self) { - let tok = self.stream.next().unwrap(); + let tok = match self.next_trivias.pop_front() { + Some(tok) => tok, + None => self.stream.next().unwrap(), + }; + self.current_pos += rowan::TextSize::of(tok.text()); self.builder.token(tok.syntax_kind().into(), tok.text()); } @@ -114,19 +127,24 @@ impl Parser { /// Peek the next non-trivia token. /// If `skip_newlines` is `true`, newlines are also treated as trivia. pub fn peek_non_trivia(&mut self, skip_newlines: bool) -> Option { - self.stream.set_bt_point(); + if !skip_newlines { + for tok in &self.next_trivias { + if tok.syntax_kind() == SyntaxKind::Newline { + return Some(SyntaxKind::Newline); + } + } + } - while let Some(next) = self.stream.next() { + while let Some(next) = self.stream.peek() { let kind = next.syntax_kind(); if kind.is_trivia() || (skip_newlines && kind == SyntaxKind::Newline) { + self.next_trivias.push_back(self.stream.next().unwrap()); continue; } else { - self.stream.backtrack(); return Some(kind); } } - self.stream.backtrack(); None } @@ -176,24 +194,30 @@ impl Parser { /// Proceeds the parser to the recovery token of the current scope. pub fn recover(&mut self) { + let mut recovery_set: FxHashSet = fxhash::FxHashSet::default(); let mut scope_index = self.scopes.len() - 1; - // Finds the nearest scope that has its own recovery set. loop { - if self.scopes[scope_index].recovery_method() != &RecoveryMethod::Inheritance - || scope_index == 0 + match self + .scopes + .get(scope_index) + .map(|scope| scope.recovery_method()) { - break; - } else { - scope_index -= 1; + Some(RecoveryMethod::Inheritance(set)) => { + recovery_set.extend(set.iter()); + scope_index -= 1; + } + Some(RecoveryMethod::Override(set)) => { + recovery_set.extend(set.iter()); + break; + } + + None => break, } } while let Some(tok) = self.stream.peek() { let syntax_kind = tok.syntax_kind(); - if self.scopes[scope_index] - .recovery_method() - .contains(syntax_kind) - { + if recovery_set.contains(&syntax_kind) { break; } else { self.bump(); @@ -253,22 +277,19 @@ define_scope! { } /// Represents the recovery method of the current scope. -#[derive(PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub enum RecoveryMethod { - /// Uses the recovery method of the parent scope. - Inheritance, + /// Uses the recovery method of the parent scope and its own recovery set. + Inheritance(FxHashSet), - /// The scope has its own recovery set. - RecoverySet(FxHashSet), + /// The scope has its own recovery set and don't use parent scope's recovery + /// set. + Override(FxHashSet), } impl RecoveryMethod { - /// Returns `true` if the recovery set contains the given syntax kind. - fn contains(&self, syntax_kind: SyntaxKind) -> bool { - match self { - RecoveryMethod::Inheritance => false, - RecoveryMethod::RecoverySet(set) => set.contains(&syntax_kind), - } + fn inheritance_empty() -> Self { + RecoveryMethod::Inheritance(fxhash::FxHashSet::default()) } } @@ -294,7 +315,35 @@ macro_rules! define_scope { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { lazy_static::lazy_static! 
{ pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { - crate::parser::RecoveryMethod::Inheritance + crate::parser::RecoveryMethod::Inheritance(fxhash::FxHashSet::default()) + }; + } + + &RECOVERY_METHOD + } + + fn syntax_kind(&self) -> crate::SyntaxKind { + use crate::SyntaxKind::*; + $kind + } + } + }; + + ($scope_name: ident, $kind: path, Inheritance($($recoveries: path), *)) => { + #[derive(Default, Debug, Clone, Copy)] + pub(crate) struct $scope_name {} + + impl crate::parser::ParsingScope for $scope_name { + fn recovery_method(&self) -> &crate::parser::RecoveryMethod { + lazy_static::lazy_static! { + pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { + #[allow(unused)] + use crate::SyntaxKind::*; + let set: fxhash::FxHashSet = vec![ + $($recoveries), * + ].into_iter().map(|kind: SyntaxKind| kind.into()).collect(); + + crate::parser::RecoveryMethod::Inheritance(set) }; } @@ -308,7 +357,7 @@ macro_rules! define_scope { } }; - ($scope_name: ident, $kind: path, RecoverySet($($recoveries: path), *)) => { + ($scope_name: ident, $kind: path, Override($($recoveries: path), *)) => { #[derive(Default, Debug, Clone, Copy)] pub(crate) struct $scope_name {} @@ -322,7 +371,7 @@ macro_rules! define_scope { $($recoveries), * ].into_iter().map(|kind: SyntaxKind| kind.into()).collect(); - crate::parser::RecoveryMethod::RecoverySet(set) + crate::parser::RecoveryMethod::Override(set) }; } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs new file mode 100644 index 0000000000..f11744b0a4 --- /dev/null +++ b/crates/parser2/src/parser/param.rs @@ -0,0 +1,79 @@ +use crate::SyntaxKind; + +use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser}; + +define_scope! { + GenericParamListScope, + GenericParamList, + Override(Gt) +} +impl super::Parse for GenericParamListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Lt); + parser.bump_trivias(true); + if parser.bump_if(SyntaxKind::Gt) { + return; + } + + parser.parse(GenericParamScope::default(), None); + parser.bump_trivias(true); + while parser.bump_if(SyntaxKind::Comma) { + parser.bump_trivias(true); + parser.parse(GenericParamScope::default(), None); + parser.bump_trivias(true); + } + + if !parser.bump_if(SyntaxKind::Gt) { + parser.error_and_recover("expected closing `>`", None); + parser.bump_if(SyntaxKind::Gt); + } + } +} + +define_scope! { + GenericParamScope, + GenericParam, + Inheritance(Comma) +} +impl super::Parse for GenericParamScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected type parameter", None); + } + + if parser.peek_non_trivia(true) == Some(SyntaxKind::Colon) { + parser.bump_trivias(true); + parser.bump_expected(SyntaxKind::Colon); + parser.bump_trivias(true); + parser.parse(TraitBoundListScope::default(), None); + } + } +} + +define_scope! { + TraitBoundListScope, + TraitBoundList, + Inheritance(Plus) +} +impl super::Parse for TraitBoundListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.parse(TraitBoundScope::default(), None); + while parser.peek_non_trivia(true) == Some(SyntaxKind::Plus) { + parser.bump_trivias(true); + parser.bump_expected(SyntaxKind::Plus); + parser.bump_trivias(true); + parser.parse(TraitBoundScope::default(), None); + } + } +} + +define_scope! 
{ + TraitBoundScope, + TraitBound, + Inheritance +} +impl super::Parse for TraitBoundScope { + fn parse(&mut self, parser: &mut Parser) { + parser.parse(PathScope::default(), None); + } +} diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 26d1ba751a..7e062d9c86 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -1,6 +1,9 @@ use crate::SyntaxKind; -use super::{define_scope, token_stream::TokenStream, tuple::TupleDefScope, Parser}; +use super::{ + define_scope, param::GenericParamListScope, token_stream::TokenStream, tuple::TupleDefScope, + Parser, +}; define_scope! { StructScope, @@ -16,6 +19,11 @@ impl super::Parse for StructScope { parser.error_and_recover("expected ident for the struct name", None) } + parser.bump_trivias(true); + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + parser.bump_trivias(true); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(StructFieldDefListScope::default(), None); @@ -28,8 +36,9 @@ impl super::Parse for StructScope { define_scope! { StructFieldDefListScope, StructFieldDefList, - RecoverySet( - RBrace + Override( + RBrace, + Newline ) } impl super::Parse for StructFieldDefListScope { @@ -53,9 +62,7 @@ impl super::Parse for StructFieldDefListScope { define_scope! { StructFieldDefScope, StructFieldDef, - RecoverySet( - Newline - ) + Inheritance } impl super::Parse for StructFieldDefScope { fn parse(&mut self, parser: &mut Parser) { @@ -86,6 +93,7 @@ impl super::Parse for StructFieldDefScope { parser.peek_non_trivia(false), Some(SyntaxKind::Newline) | Some(SyntaxKind::RBrace) ) { + println!("{:?}", parser.peek_non_trivia(false)); parser.error_and_recover("expected newline after the field definition", None); } } diff --git a/crates/parser2/src/parser/token_stream.rs b/crates/parser2/src/parser/token_stream.rs index b053143155..10a758fec2 100644 --- a/crates/parser2/src/parser/token_stream.rs +++ b/crates/parser2/src/parser/token_stream.rs @@ -80,10 +80,11 @@ impl BackTrackableTokenStream { /// Set a backtrack point which allows the parser to backtrack to this /// point. pub fn set_bt_point(&mut self) { + println!("{}", self.bt_buffer.len()); self.bt_points.push(self.bt_buffer.len()); } - /// Remove the last resume points. + /// Remove the last backtrack point. pub fn complete(&mut self) { self.bt_cursor = None; if !self.has_bt_point() { diff --git a/crates/parser2/src/parser/tuple.rs b/crates/parser2/src/parser/tuple.rs index 2f6240c556..2e7a0ef35c 100644 --- a/crates/parser2/src/parser/tuple.rs +++ b/crates/parser2/src/parser/tuple.rs @@ -5,7 +5,7 @@ use crate::SyntaxKind; define_scope! { TupleDefScope, TupleDef, - RecoverySet( + Override( RParen, Comma ) diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 8241a1ec7c..065659b12d 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -224,10 +224,14 @@ pub enum SyntaxKind { UnExpr, /// `foo(x, y)` CallExpr, - /// `(x, y)` + /// `(arg: 1, y)` CallArgList, + /// `arg: 1`, `y` + CallArg, /// `` - CallTypeArgList, + GenericArgList, + /// `T` + GenericArg, /// `FOO::Bar` PathExpr, /// `foo.bar(x, y)` @@ -335,10 +339,18 @@ pub enum SyntaxKind { /// `T` /// `T: Trait` - TypeBound, + GenericParam, /// `` GenericParamList, + /// `foo::Trait1 + Trait2` + TraitBoundList, + /// `Trait1` + TraitBound, + + /// `1`, `"foo"` + Literal, + /// Root node of the input source. 
Root, diff --git a/crates/parser2/test_files/syntax_node/struct_def.fe b/crates/parser2/test_files/syntax_node/struct_def.fe index a1b7d35422..d8423d952d 100644 --- a/crates/parser2/test_files/syntax_node/struct_def.fe +++ b/crates/parser2/test_files/syntax_node/struct_def.fe @@ -7,7 +7,7 @@ pub struct EmptyStruct { // normal comment /// DocComment2 pub struct StructAttr { - x: i32 + x: foo::Bar y: i32 } @@ -20,7 +20,7 @@ pub struct StructFieldAttr { y: u32 } -pub struct StructWithTupleField { +struct StructWithTupleField { x: (i32, u32) y: ( i32, @@ -28,5 +28,30 @@ pub struct StructWithTupleField { u32 ) z: () +} + +pub struct StructWithGenericParam { + x: S + y: T + z: U +} + +pub struct StructWithGenericParam2< + S, + T: foo::Trait, + U +> { + x: S + y: T + z: U +} +pub struct StructWithGenericParam3< + S: foo::Trait + bar::Trait, + T, + U: bar::Trait +> { + x: S + y: T + z: U } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/struct_def.snap b/crates/parser2/test_files/syntax_node/struct_def.snap index e0c8a2cd4d..216e4d5ca4 100644 --- a/crates/parser2/test_files/syntax_node/struct_def.snap +++ b/crates/parser2/test_files/syntax_node/struct_def.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..348 - ItemList@0..348 +Root@0..658 + ItemList@0..658 Struct@0..27 PubKw@0..3 "pub" WhiteSpace@3..4 " " @@ -16,15 +16,15 @@ Root@0..348 Newline@24..26 "\n\n" RBrace@26..27 "}" Newline@27..29 "\n\n" - Struct@29..130 + Struct@29..137 AttrList@29..85 DocCommentAttr@29..45 DocComment@29..44 "/// DocComment1" Newline@44..45 "\n" - Attr@45..51 + Attr@45..50 Pound@45..46 "#" Ident@46..50 "attr" - Newline@50..51 "\n" + Newline@50..51 "\n" Comment@51..68 "// normal comment" Newline@68..69 "\n" DocCommentAttr@69..85 @@ -36,136 +36,349 @@ Root@0..348 WhiteSpace@95..96 " " Ident@96..106 "StructAttr" WhiteSpace@106..107 " " - StructFieldDefList@107..130 + StructFieldDefList@107..137 LBrace@107..108 "{" Newline@108..109 "\n" WhiteSpace@109..113 " " - StructFieldDef@113..119 + StructFieldDef@113..124 Ident@113..114 "x" Colon@114..115 ":" WhiteSpace@115..116 " " - Path@116..119 + Path@116..124 PathSegment@116..119 - Ident@116..119 "i32" - WhiteSpace@119..123 " " - StructFieldDef@123..129 - Ident@123..124 "y" - Colon@124..125 ":" - WhiteSpace@125..126 " " - Path@126..129 - PathSegment@126..129 - Ident@126..129 "i32" - RBrace@129..130 "}" - Newline@130..132 "\n\n" - Struct@132..224 - AttrList@132..139 - Attr@132..139 - Pound@132..133 "#" - Ident@133..138 "Event" - Newline@138..139 "\n" - PubKw@139..142 "pub" - WhiteSpace@142..143 " " - StructKw@143..149 "struct" + Ident@116..119 "foo" + Colon2@119..121 "::" + PathSegment@121..124 + Ident@121..124 "Bar" + Newline@124..125 "\n" + WhiteSpace@125..129 " " + StructFieldDef@129..135 + Ident@129..130 "y" + Colon@130..131 ":" + WhiteSpace@131..132 " " + Path@132..135 + PathSegment@132..135 + Ident@132..135 "i32" + Newline@135..136 "\n" + RBrace@136..137 "}" + Newline@137..139 "\n\n" + Struct@139..234 + AttrList@139..146 + Attr@139..145 + Pound@139..140 "#" + Ident@140..145 "Event" + Newline@145..146 "\n" + PubKw@146..149 "pub" WhiteSpace@149..150 " " - Ident@150..165 "StructFieldAttr" - WhiteSpace@165..166 " " - StructFieldDefList@166..224 - LBrace@166..167 "{" - Newline@167..168 "\n" - WhiteSpace@168..172 " " - StructFieldDef@172..213 - AttrList@172..207 - DocCommentAttr@172..192 - DocComment@172..191 "/// `x` is a topic " - Newline@191..192 "\n" - WhiteSpace@192..196 " " - Attr@196..203 - 
Pound@196..197 "#" - Ident@197..202 "topic" - Newline@202..203 "\n" - WhiteSpace@203..207 " " - Ident@207..208 "x" - Colon@208..209 ":" - WhiteSpace@209..210 " " - Path@210..213 - PathSegment@210..213 - Ident@210..213 "i32" - WhiteSpace@213..217 " " - StructFieldDef@217..223 - Ident@217..218 "y" - Colon@218..219 ":" - WhiteSpace@219..220 " " - Path@220..223 - PathSegment@220..223 - Ident@220..223 "u32" - RBrace@223..224 "}" - Newline@224..226 "\n\n" - Struct@226..348 - PubKw@226..229 "pub" - WhiteSpace@229..230 " " - StructKw@230..236 "struct" - WhiteSpace@236..237 " " - Ident@237..257 "StructWithTupleField" - WhiteSpace@257..258 " " - StructFieldDefList@258..348 - LBrace@258..259 "{" - Newline@259..260 "\n" - WhiteSpace@260..264 " " - StructFieldDef@264..277 - Ident@264..265 "x" - Colon@265..266 ":" - WhiteSpace@266..267 " " - TupleDef@267..277 - LParen@267..268 "(" - Path@268..271 - PathSegment@268..271 - Ident@268..271 "i32" - Comma@271..272 "," - WhiteSpace@272..273 " " - Path@273..276 - PathSegment@273..276 - Ident@273..276 "u32" - RParen@276..277 ")" - Newline@277..278 "\n" - WhiteSpace@278..282 " " - StructFieldDef@282..335 - Ident@282..283 "y" - Colon@283..284 ":" - WhiteSpace@284..285 " " - TupleDef@285..335 - LParen@285..286 "(" - Newline@286..287 "\n" - WhiteSpace@287..295 " " - Path@295..298 - PathSegment@295..298 - Ident@295..298 "i32" - Comma@298..299 "," - Newline@299..300 "\n" - WhiteSpace@300..308 " " - Path@308..316 - PathSegment@308..311 - Ident@308..311 "foo" - Colon2@311..313 "::" - PathSegment@313..316 - Ident@313..316 "Bar" - Comma@316..317 "," - Newline@317..318 "\n" - WhiteSpace@318..326 " " - Path@326..329 - PathSegment@326..329 - Ident@326..329 "u32" - Newline@329..330 "\n" - WhiteSpace@330..334 " " - RParen@334..335 ")" - Newline@335..336 "\n" - WhiteSpace@336..340 " " - StructFieldDef@340..345 - Ident@340..341 "z" - Colon@341..342 ":" - WhiteSpace@342..343 " " - TupleDef@343..345 - LParen@343..344 "(" - RParen@344..345 ")" - Newline@345..347 "\n\n" - RBrace@347..348 "}" + StructKw@150..156 "struct" + WhiteSpace@156..157 " " + Ident@157..172 "StructFieldAttr" + WhiteSpace@172..173 " " + StructFieldDefList@173..234 + LBrace@173..174 "{" + Newline@174..175 "\n" + WhiteSpace@175..179 " " + StructFieldDef@179..220 + AttrList@179..214 + DocCommentAttr@179..199 + DocComment@179..198 "/// `x` is a topic " + Newline@198..199 "\n" + WhiteSpace@199..203 " " + Attr@203..209 + Pound@203..204 "#" + Ident@204..209 "topic" + Newline@209..210 "\n" + WhiteSpace@210..214 " " + Ident@214..215 "x" + Colon@215..216 ":" + WhiteSpace@216..217 " " + Path@217..220 + PathSegment@217..220 + Ident@217..220 "i32" + Newline@220..222 "\n\n" + WhiteSpace@222..226 " " + StructFieldDef@226..232 + Ident@226..227 "y" + Colon@227..228 ":" + WhiteSpace@228..229 " " + Path@229..232 + PathSegment@229..232 + Ident@229..232 "u32" + Newline@232..233 "\n" + RBrace@233..234 "}" + Newline@234..236 "\n\n" + Struct@236..353 + StructKw@236..242 "struct" + WhiteSpace@242..243 " " + Ident@243..263 "StructWithTupleField" + WhiteSpace@263..264 " " + StructFieldDefList@264..353 + LBrace@264..265 "{" + Newline@265..266 "\n" + WhiteSpace@266..270 " " + StructFieldDef@270..283 + Ident@270..271 "x" + Colon@271..272 ":" + WhiteSpace@272..273 " " + TupleDef@273..283 + LParen@273..274 "(" + Path@274..277 + PathSegment@274..277 + Ident@274..277 "i32" + Comma@277..278 "," + WhiteSpace@278..279 " " + Path@279..282 + PathSegment@279..282 + Ident@279..282 "u32" + RParen@282..283 ")" + Newline@283..284 "\n" + 
WhiteSpace@284..288 " " + StructFieldDef@288..341 + Ident@288..289 "y" + Colon@289..290 ":" + WhiteSpace@290..291 " " + TupleDef@291..341 + LParen@291..292 "(" + Newline@292..293 "\n" + WhiteSpace@293..301 " " + Path@301..304 + PathSegment@301..304 + Ident@301..304 "i32" + Comma@304..305 "," + Newline@305..306 "\n" + WhiteSpace@306..314 " " + Path@314..322 + PathSegment@314..317 + Ident@314..317 "foo" + Colon2@317..319 "::" + PathSegment@319..322 + Ident@319..322 "Bar" + Comma@322..323 "," + Newline@323..324 "\n" + WhiteSpace@324..332 " " + Path@332..335 + PathSegment@332..335 + Ident@332..335 "u32" + Newline@335..336 "\n" + WhiteSpace@336..340 " " + RParen@340..341 ")" + Newline@341..342 "\n" + WhiteSpace@342..346 " " + StructFieldDef@346..351 + Ident@346..347 "z" + Colon@347..348 ":" + WhiteSpace@348..349 " " + TupleDef@349..351 + LParen@349..350 "(" + RParen@350..351 ")" + Newline@351..352 "\n" + RBrace@352..353 "}" + Newline@353..355 "\n\n" + Struct@355..428 + PubKw@355..358 "pub" + WhiteSpace@358..359 " " + StructKw@359..365 "struct" + WhiteSpace@365..366 " " + Ident@366..388 "StructWithGenericParam" + GenericParamList@388..397 + Lt@388..389 "<" + GenericParam@389..390 + Ident@389..390 "S" + Comma@390..391 "," + WhiteSpace@391..392 " " + GenericParam@392..393 + Ident@392..393 "T" + Comma@393..394 "," + WhiteSpace@394..395 " " + GenericParam@395..396 + Ident@395..396 "U" + Gt@396..397 ">" + WhiteSpace@397..398 " " + StructFieldDefList@398..428 + LBrace@398..399 "{" + Newline@399..400 "\n" + WhiteSpace@400..404 " " + StructFieldDef@404..408 + Ident@404..405 "x" + Colon@405..406 ":" + WhiteSpace@406..407 " " + Path@407..408 + PathSegment@407..408 + Ident@407..408 "S" + Newline@408..409 "\n" + WhiteSpace@409..413 " " + StructFieldDef@413..417 + Ident@413..414 "y" + Colon@414..415 ":" + WhiteSpace@415..416 " " + Path@416..417 + PathSegment@416..417 + Ident@416..417 "T" + Newline@417..418 "\n" + WhiteSpace@418..422 " " + StructFieldDef@422..426 + Ident@422..423 "z" + Colon@423..424 ":" + WhiteSpace@424..425 " " + Path@425..426 + PathSegment@425..426 + Ident@425..426 "U" + Newline@426..427 "\n" + RBrace@427..428 "}" + Newline@428..429 "\n" + WhiteSpace@429..430 " " + Newline@430..431 "\n" + Struct@431..531 + PubKw@431..434 "pub" + WhiteSpace@434..435 " " + StructKw@435..441 "struct" + WhiteSpace@441..442 " " + Ident@442..465 "StructWithGenericParam2" + GenericParamList@465..500 + Lt@465..466 "<" + Newline@466..467 "\n" + WhiteSpace@467..471 " " + GenericParam@471..472 + Ident@471..472 "S" + Comma@472..473 "," + Newline@473..474 "\n" + WhiteSpace@474..478 " " + GenericParam@478..491 + Ident@478..479 "T" + Colon@479..480 ":" + WhiteSpace@480..481 " " + TraitBoundList@481..491 + TraitBound@481..491 + Path@481..491 + PathSegment@481..484 + Ident@481..484 "foo" + Colon2@484..486 "::" + PathSegment@486..491 + Ident@486..491 "Trait" + Comma@491..492 "," + Newline@492..493 "\n" + WhiteSpace@493..497 " " + GenericParam@497..498 + Ident@497..498 "U" + Newline@498..499 "\n" + Gt@499..500 ">" + WhiteSpace@500..501 " " + StructFieldDefList@501..531 + LBrace@501..502 "{" + Newline@502..503 "\n" + WhiteSpace@503..507 " " + StructFieldDef@507..511 + Ident@507..508 "x" + Colon@508..509 ":" + WhiteSpace@509..510 " " + Path@510..511 + PathSegment@510..511 + Ident@510..511 "S" + Newline@511..512 "\n" + WhiteSpace@512..516 " " + StructFieldDef@516..520 + Ident@516..517 "y" + Colon@517..518 ":" + WhiteSpace@518..519 " " + Path@519..520 + PathSegment@519..520 + Ident@519..520 "T" + Newline@520..521 "\n" + 
WhiteSpace@521..525 " " + StructFieldDef@525..529 + Ident@525..526 "z" + Colon@526..527 ":" + WhiteSpace@527..528 " " + Path@528..529 + PathSegment@528..529 + Ident@528..529 "U" + Newline@529..530 "\n" + RBrace@530..531 "}" + Newline@531..533 "\n\n" + Struct@533..658 + PubKw@533..536 "pub" + WhiteSpace@536..537 " " + StructKw@537..543 "struct" + WhiteSpace@543..544 " " + Ident@544..567 "StructWithGenericParam3" + GenericParamList@567..627 + Lt@567..568 "<" + Newline@568..569 "\n" + WhiteSpace@569..573 " " + GenericParam@573..599 + Ident@573..574 "S" + Colon@574..575 ":" + WhiteSpace@575..576 " " + TraitBoundList@576..599 + TraitBound@576..586 + Path@576..586 + PathSegment@576..579 + Ident@576..579 "foo" + Colon2@579..581 "::" + PathSegment@581..586 + Ident@581..586 "Trait" + WhiteSpace@586..587 " " + Plus@587..588 "+" + WhiteSpace@588..589 " " + TraitBound@589..599 + Path@589..599 + PathSegment@589..592 + Ident@589..592 "bar" + Colon2@592..594 "::" + PathSegment@594..599 + Ident@594..599 "Trait" + Comma@599..600 "," + Newline@600..601 "\n" + WhiteSpace@601..605 " " + GenericParam@605..606 + Ident@605..606 "T" + Comma@606..607 "," + Newline@607..608 "\n" + WhiteSpace@608..612 " " + GenericParam@612..625 + Ident@612..613 "U" + Colon@613..614 ":" + WhiteSpace@614..615 " " + TraitBoundList@615..625 + TraitBound@615..625 + Path@615..625 + PathSegment@615..618 + Ident@615..618 "bar" + Colon2@618..620 "::" + PathSegment@620..625 + Ident@620..625 "Trait" + Newline@625..626 "\n" + Gt@626..627 ">" + WhiteSpace@627..628 " " + StructFieldDefList@628..658 + LBrace@628..629 "{" + Newline@629..630 "\n" + WhiteSpace@630..634 " " + StructFieldDef@634..638 + Ident@634..635 "x" + Colon@635..636 ":" + WhiteSpace@636..637 " " + Path@637..638 + PathSegment@637..638 + Ident@637..638 "S" + Newline@638..639 "\n" + WhiteSpace@639..643 " " + StructFieldDef@643..647 + Ident@643..644 "y" + Colon@644..645 ":" + WhiteSpace@645..646 " " + Path@646..647 + PathSegment@646..647 + Ident@646..647 "T" + Newline@647..648 "\n" + WhiteSpace@648..652 " " + StructFieldDef@652..656 + Ident@652..653 "z" + Colon@653..654 ":" + WhiteSpace@654..655 " " + Path@655..656 + PathSegment@655..656 + Ident@655..656 "U" + Newline@656..657 "\n" + RBrace@657..658 "}" diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 04478bedd2..2b2910c065 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -7,6 +7,7 @@ fn build_cst(input: &str) -> SyntaxNode { println!("{}", error.msg); } assert! {errors.is_empty()} + assert!(input == cst.to_string()); cst } From d2963803894c961434506e935c971fd5971e36b9 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 19 Jan 2023 15:35:37 +0100 Subject: [PATCH 020/678] Add parser for `type` --- crates/parser2/src/parser/tuple.rs | 35 -- crates/parser2/src/parser/type_.rs | 78 +++++ .../test_files/syntax_node/struct_def.fe | 2 +- .../test_files/syntax_node/struct_def.snap | 317 ++++++++++-------- 4 files changed, 253 insertions(+), 179 deletions(-) delete mode 100644 crates/parser2/src/parser/tuple.rs create mode 100644 crates/parser2/src/parser/type_.rs diff --git a/crates/parser2/src/parser/tuple.rs b/crates/parser2/src/parser/tuple.rs deleted file mode 100644 index 2e7a0ef35c..0000000000 --- a/crates/parser2/src/parser/tuple.rs +++ /dev/null @@ -1,35 +0,0 @@ -use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser}; - -use crate::SyntaxKind; - -define_scope! 
{ - TupleDefScope, - TupleDef, - Override( - RParen, - Comma - ) -} -impl super::Parse for TupleDefScope { - fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::LParen); - parser.bump_trivias(true); - if parser.bump_if(SyntaxKind::RParen) { - return; - } - - parser.parse(PathScope::default(), None); - parser.bump_trivias(true); - while parser.bump_if(SyntaxKind::Comma) { - parser.bump_trivias(true); - parser.parse(PathScope::default(), None); - parser.bump_trivias(true); - } - - parser.bump_trivias(true); - if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_recover("expected `)`", None); - parser.bump_if(SyntaxKind::RParen); - } - } -} diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs new file mode 100644 index 0000000000..b7d4392df0 --- /dev/null +++ b/crates/parser2/src/parser/type_.rs @@ -0,0 +1,78 @@ +use crate::SyntaxKind; + +use super::{ + define_scope, param::GenericArgListScope, path::PathScope, token_stream::TokenStream, + Checkpoint, Parser, +}; + +pub(super) fn parse_type( + parser: &mut Parser, + checkpoint: Option, +) -> bool { + match parser.current_kind() { + Some(SyntaxKind::Star) => parser.parse(PtrTypeScope::default(), checkpoint), + Some(SyntaxKind::SelfKw) => parser.parse(SelfTypeScope::default(), checkpoint), + Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::default(), checkpoint), + _ => parser.parse(PathTypeScope::default(), checkpoint), + } +} + +define_scope!(PtrTypeScope, PtrType, Inheritance); +impl super::Parse for PtrTypeScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Star); + parser.bump_trivias(false); + parse_type(parser, None); + } +} + +define_scope!(PathTypeScope, PathType, Inheritance); +impl super::Parse for PathTypeScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.parse(PathScope::default(), None) { + return; + } + + if parser.peek_non_trivia(false) == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default(), None); + } + } +} + +define_scope!(SelfTypeScope, SelfType, Inheritance); +impl super::Parse for SelfTypeScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::SelfKw); + } +} +define_scope! 
{ + TupleTypeScope, + TupleType, + Override( + RParen, + Comma + ) +} +impl super::Parse for TupleTypeScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LParen); + parser.bump_trivias(true); + if parser.bump_if(SyntaxKind::RParen) { + return; + } + + parse_type(parser, None); + parser.bump_trivias(true); + while parser.bump_if(SyntaxKind::Comma) { + parser.bump_trivias(true); + parse_type(parser, None); + parser.bump_trivias(true); + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::RParen) { + parser.error_and_recover("expected `)`", None); + parser.bump_if(SyntaxKind::RParen); + } + } +} diff --git a/crates/parser2/test_files/syntax_node/struct_def.fe b/crates/parser2/test_files/syntax_node/struct_def.fe index d8423d952d..2a3086cb7d 100644 --- a/crates/parser2/test_files/syntax_node/struct_def.fe +++ b/crates/parser2/test_files/syntax_node/struct_def.fe @@ -41,7 +41,7 @@ pub struct StructWithGenericParam2< T: foo::Trait, U > { - x: S + x: *(S, *i32) y: T z: U } diff --git a/crates/parser2/test_files/syntax_node/struct_def.snap b/crates/parser2/test_files/syntax_node/struct_def.snap index 216e4d5ca4..1cd83d3a50 100644 --- a/crates/parser2/test_files/syntax_node/struct_def.snap +++ b/crates/parser2/test_files/syntax_node/struct_def.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..658 - ItemList@0..658 +Root@0..667 + ItemList@0..667 Struct@0..27 PubKw@0..3 "pub" WhiteSpace@3..4 " " @@ -44,21 +44,23 @@ Root@0..658 Ident@113..114 "x" Colon@114..115 ":" WhiteSpace@115..116 " " - Path@116..124 - PathSegment@116..119 - Ident@116..119 "foo" - Colon2@119..121 "::" - PathSegment@121..124 - Ident@121..124 "Bar" + PathType@116..124 + Path@116..124 + PathSegment@116..119 + Ident@116..119 "foo" + Colon2@119..121 "::" + PathSegment@121..124 + Ident@121..124 "Bar" Newline@124..125 "\n" WhiteSpace@125..129 " " StructFieldDef@129..135 Ident@129..130 "y" Colon@130..131 ":" WhiteSpace@131..132 " " - Path@132..135 - PathSegment@132..135 - Ident@132..135 "i32" + PathType@132..135 + Path@132..135 + PathSegment@132..135 + Ident@132..135 "i32" Newline@135..136 "\n" RBrace@136..137 "}" Newline@137..139 "\n\n" @@ -92,18 +94,20 @@ Root@0..658 Ident@214..215 "x" Colon@215..216 ":" WhiteSpace@216..217 " " - Path@217..220 - PathSegment@217..220 - Ident@217..220 "i32" + PathType@217..220 + Path@217..220 + PathSegment@217..220 + Ident@217..220 "i32" Newline@220..222 "\n\n" WhiteSpace@222..226 " " StructFieldDef@226..232 Ident@226..227 "y" Colon@227..228 ":" WhiteSpace@228..229 " " - Path@229..232 - PathSegment@229..232 - Ident@229..232 "u32" + PathType@229..232 + Path@229..232 + PathSegment@229..232 + Ident@229..232 "u32" Newline@232..233 "\n" RBrace@233..234 "}" Newline@234..236 "\n\n" @@ -120,16 +124,18 @@ Root@0..658 Ident@270..271 "x" Colon@271..272 ":" WhiteSpace@272..273 " " - TupleDef@273..283 + TupleType@273..283 LParen@273..274 "(" - Path@274..277 - PathSegment@274..277 - Ident@274..277 "i32" + PathType@274..277 + Path@274..277 + PathSegment@274..277 + Ident@274..277 "i32" Comma@277..278 "," WhiteSpace@278..279 " " - Path@279..282 - PathSegment@279..282 - Ident@279..282 "u32" + PathType@279..282 + Path@279..282 + PathSegment@279..282 + Ident@279..282 "u32" RParen@282..283 ")" Newline@283..284 "\n" WhiteSpace@284..288 " " @@ -137,28 +143,31 @@ Root@0..658 Ident@288..289 "y" Colon@289..290 ":" WhiteSpace@290..291 " " - TupleDef@291..341 + TupleType@291..341 LParen@291..292 "(" Newline@292..293 "\n" 
WhiteSpace@293..301 " " - Path@301..304 - PathSegment@301..304 - Ident@301..304 "i32" + PathType@301..304 + Path@301..304 + PathSegment@301..304 + Ident@301..304 "i32" Comma@304..305 "," Newline@305..306 "\n" WhiteSpace@306..314 " " - Path@314..322 - PathSegment@314..317 - Ident@314..317 "foo" - Colon2@317..319 "::" - PathSegment@319..322 - Ident@319..322 "Bar" + PathType@314..322 + Path@314..322 + PathSegment@314..317 + Ident@314..317 "foo" + Colon2@317..319 "::" + PathSegment@319..322 + Ident@319..322 "Bar" Comma@322..323 "," Newline@323..324 "\n" WhiteSpace@324..332 " " - Path@332..335 - PathSegment@332..335 - Ident@332..335 "u32" + PathType@332..335 + Path@332..335 + PathSegment@332..335 + Ident@332..335 "u32" Newline@335..336 "\n" WhiteSpace@336..340 " " RParen@340..341 ")" @@ -168,7 +177,7 @@ Root@0..658 Ident@346..347 "z" Colon@347..348 ":" WhiteSpace@348..349 " " - TupleDef@349..351 + TupleType@349..351 LParen@349..350 "(" RParen@350..351 ")" Newline@351..352 "\n" @@ -202,33 +211,36 @@ Root@0..658 Ident@404..405 "x" Colon@405..406 ":" WhiteSpace@406..407 " " - Path@407..408 - PathSegment@407..408 - Ident@407..408 "S" + PathType@407..408 + Path@407..408 + PathSegment@407..408 + Ident@407..408 "S" Newline@408..409 "\n" WhiteSpace@409..413 " " StructFieldDef@413..417 Ident@413..414 "y" Colon@414..415 ":" WhiteSpace@415..416 " " - Path@416..417 - PathSegment@416..417 - Ident@416..417 "T" + PathType@416..417 + Path@416..417 + PathSegment@416..417 + Ident@416..417 "T" Newline@417..418 "\n" WhiteSpace@418..422 " " StructFieldDef@422..426 Ident@422..423 "z" Colon@423..424 ":" WhiteSpace@424..425 " " - Path@425..426 - PathSegment@425..426 - Ident@425..426 "U" + PathType@425..426 + Path@425..426 + PathSegment@425..426 + Ident@425..426 "U" Newline@426..427 "\n" RBrace@427..428 "}" Newline@428..429 "\n" WhiteSpace@429..430 " " Newline@430..431 "\n" - Struct@431..531 + Struct@431..540 PubKw@431..434 "pub" WhiteSpace@434..435 " " StructKw@435..441 "struct" @@ -263,122 +275,141 @@ Root@0..658 Newline@498..499 "\n" Gt@499..500 ">" WhiteSpace@500..501 " " - StructFieldDefList@501..531 + StructFieldDefList@501..540 LBrace@501..502 "{" Newline@502..503 "\n" WhiteSpace@503..507 " " - StructFieldDef@507..511 + StructFieldDef@507..520 Ident@507..508 "x" Colon@508..509 ":" WhiteSpace@509..510 " " - Path@510..511 - PathSegment@510..511 - Ident@510..511 "S" - Newline@511..512 "\n" - WhiteSpace@512..516 " " - StructFieldDef@516..520 - Ident@516..517 "y" - Colon@517..518 ":" - WhiteSpace@518..519 " " - Path@519..520 - PathSegment@519..520 - Ident@519..520 "T" + PtrType@510..520 + Star@510..511 "*" + TupleType@511..520 + LParen@511..512 "(" + PathType@512..513 + Path@512..513 + PathSegment@512..513 + Ident@512..513 "S" + Comma@513..514 "," + WhiteSpace@514..515 " " + PtrType@515..519 + Star@515..516 "*" + PathType@516..519 + Path@516..519 + PathSegment@516..519 + Ident@516..519 "i32" + RParen@519..520 ")" Newline@520..521 "\n" WhiteSpace@521..525 " " StructFieldDef@525..529 - Ident@525..526 "z" + Ident@525..526 "y" Colon@526..527 ":" WhiteSpace@527..528 " " - Path@528..529 - PathSegment@528..529 - Ident@528..529 "U" + PathType@528..529 + Path@528..529 + PathSegment@528..529 + Ident@528..529 "T" Newline@529..530 "\n" - RBrace@530..531 "}" - Newline@531..533 "\n\n" - Struct@533..658 - PubKw@533..536 "pub" - WhiteSpace@536..537 " " - StructKw@537..543 "struct" - WhiteSpace@543..544 " " - Ident@544..567 "StructWithGenericParam3" - GenericParamList@567..627 - Lt@567..568 "<" - Newline@568..569 "\n" - 
WhiteSpace@569..573 " " - GenericParam@573..599 - Ident@573..574 "S" - Colon@574..575 ":" - WhiteSpace@575..576 " " - TraitBoundList@576..599 - TraitBound@576..586 - Path@576..586 - PathSegment@576..579 - Ident@576..579 "foo" - Colon2@579..581 "::" - PathSegment@581..586 - Ident@581..586 "Trait" - WhiteSpace@586..587 " " - Plus@587..588 "+" - WhiteSpace@588..589 " " - TraitBound@589..599 - Path@589..599 - PathSegment@589..592 - Ident@589..592 "bar" - Colon2@592..594 "::" - PathSegment@594..599 - Ident@594..599 "Trait" - Comma@599..600 "," - Newline@600..601 "\n" - WhiteSpace@601..605 " " - GenericParam@605..606 - Ident@605..606 "T" - Comma@606..607 "," - Newline@607..608 "\n" - WhiteSpace@608..612 " " - GenericParam@612..625 - Ident@612..613 "U" - Colon@613..614 ":" - WhiteSpace@614..615 " " - TraitBoundList@615..625 - TraitBound@615..625 - Path@615..625 - PathSegment@615..618 - Ident@615..618 "bar" - Colon2@618..620 "::" - PathSegment@620..625 - Ident@620..625 "Trait" - Newline@625..626 "\n" - Gt@626..627 ">" - WhiteSpace@627..628 " " - StructFieldDefList@628..658 - LBrace@628..629 "{" - Newline@629..630 "\n" - WhiteSpace@630..634 " " - StructFieldDef@634..638 - Ident@634..635 "x" - Colon@635..636 ":" - WhiteSpace@636..637 " " - Path@637..638 - PathSegment@637..638 - Ident@637..638 "S" + WhiteSpace@530..534 " " + StructFieldDef@534..538 + Ident@534..535 "z" + Colon@535..536 ":" + WhiteSpace@536..537 " " + PathType@537..538 + Path@537..538 + PathSegment@537..538 + Ident@537..538 "U" + Newline@538..539 "\n" + RBrace@539..540 "}" + Newline@540..542 "\n\n" + Struct@542..667 + PubKw@542..545 "pub" + WhiteSpace@545..546 " " + StructKw@546..552 "struct" + WhiteSpace@552..553 " " + Ident@553..576 "StructWithGenericParam3" + GenericParamList@576..636 + Lt@576..577 "<" + Newline@577..578 "\n" + WhiteSpace@578..582 " " + GenericParam@582..608 + Ident@582..583 "S" + Colon@583..584 ":" + WhiteSpace@584..585 " " + TraitBoundList@585..608 + TraitBound@585..595 + Path@585..595 + PathSegment@585..588 + Ident@585..588 "foo" + Colon2@588..590 "::" + PathSegment@590..595 + Ident@590..595 "Trait" + WhiteSpace@595..596 " " + Plus@596..597 "+" + WhiteSpace@597..598 " " + TraitBound@598..608 + Path@598..608 + PathSegment@598..601 + Ident@598..601 "bar" + Colon2@601..603 "::" + PathSegment@603..608 + Ident@603..608 "Trait" + Comma@608..609 "," + Newline@609..610 "\n" + WhiteSpace@610..614 " " + GenericParam@614..615 + Ident@614..615 "T" + Comma@615..616 "," + Newline@616..617 "\n" + WhiteSpace@617..621 " " + GenericParam@621..634 + Ident@621..622 "U" + Colon@622..623 ":" + WhiteSpace@623..624 " " + TraitBoundList@624..634 + TraitBound@624..634 + Path@624..634 + PathSegment@624..627 + Ident@624..627 "bar" + Colon2@627..629 "::" + PathSegment@629..634 + Ident@629..634 "Trait" + Newline@634..635 "\n" + Gt@635..636 ">" + WhiteSpace@636..637 " " + StructFieldDefList@637..667 + LBrace@637..638 "{" Newline@638..639 "\n" WhiteSpace@639..643 " " StructFieldDef@643..647 - Ident@643..644 "y" + Ident@643..644 "x" Colon@644..645 ":" WhiteSpace@645..646 " " - Path@646..647 - PathSegment@646..647 - Ident@646..647 "T" + PathType@646..647 + Path@646..647 + PathSegment@646..647 + Ident@646..647 "S" Newline@647..648 "\n" WhiteSpace@648..652 " " StructFieldDef@652..656 - Ident@652..653 "z" + Ident@652..653 "y" Colon@653..654 ":" WhiteSpace@654..655 " " - Path@655..656 - PathSegment@655..656 - Ident@655..656 "U" + PathType@655..656 + Path@655..656 + PathSegment@655..656 + Ident@655..656 "T" Newline@656..657 "\n" - RBrace@657..658 "}" 
+ WhiteSpace@657..661 " " + StructFieldDef@661..665 + Ident@661..662 "z" + Colon@662..663 ":" + WhiteSpace@663..664 " " + PathType@664..665 + Path@664..665 + PathSegment@664..665 + Ident@664..665 "U" + Newline@665..666 "\n" + RBrace@666..667 "}" From ed4219d090da51aa3023f576cea4b85e542f330a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 19 Jan 2023 15:48:46 +0100 Subject: [PATCH 021/678] Add directory for `struct` parsing test --- .../test_files/syntax_node/struct_def.fe | 57 --- .../test_files/syntax_node/struct_def.snap | 415 ------------------ .../test_files/syntax_node/structs/attr.fe | 11 + .../test_files/syntax_node/structs/attr.snap | 77 ++++ .../test_files/syntax_node/structs/empty.fe | 2 + .../test_files/syntax_node/structs/empty.snap | 18 + .../syntax_node/structs/generics.fe | 25 ++ .../syntax_node/structs/generics.snap | 236 ++++++++++ .../syntax_node/structs/tupel_field.fe | 9 + .../syntax_node/structs/tupel_field.snap | 78 ++++ crates/parser2/tests/syntax_node.rs | 8 +- 11 files changed, 460 insertions(+), 476 deletions(-) delete mode 100644 crates/parser2/test_files/syntax_node/struct_def.fe delete mode 100644 crates/parser2/test_files/syntax_node/struct_def.snap create mode 100644 crates/parser2/test_files/syntax_node/structs/attr.fe create mode 100644 crates/parser2/test_files/syntax_node/structs/attr.snap create mode 100644 crates/parser2/test_files/syntax_node/structs/empty.fe create mode 100644 crates/parser2/test_files/syntax_node/structs/empty.snap create mode 100644 crates/parser2/test_files/syntax_node/structs/generics.fe create mode 100644 crates/parser2/test_files/syntax_node/structs/generics.snap create mode 100644 crates/parser2/test_files/syntax_node/structs/tupel_field.fe create mode 100644 crates/parser2/test_files/syntax_node/structs/tupel_field.snap diff --git a/crates/parser2/test_files/syntax_node/struct_def.fe b/crates/parser2/test_files/syntax_node/struct_def.fe deleted file mode 100644 index 2a3086cb7d..0000000000 --- a/crates/parser2/test_files/syntax_node/struct_def.fe +++ /dev/null @@ -1,57 +0,0 @@ -pub struct EmptyStruct { - -} - -/// DocComment1 -#attr -// normal comment -/// DocComment2 -pub struct StructAttr { - x: foo::Bar - y: i32 -} - -#Event -pub struct StructFieldAttr { - /// `x` is a topic - #topic - x: i32 - - y: u32 -} - -struct StructWithTupleField { - x: (i32, u32) - y: ( - i32, - foo::Bar, - u32 - ) - z: () -} - -pub struct StructWithGenericParam { - x: S - y: T - z: U -} - -pub struct StructWithGenericParam2< - S, - T: foo::Trait, - U -> { - x: *(S, *i32) - y: T - z: U -} - -pub struct StructWithGenericParam3< - S: foo::Trait + bar::Trait, - T, - U: bar::Trait -> { - x: S - y: T - z: U -} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/struct_def.snap b/crates/parser2/test_files/syntax_node/struct_def.snap deleted file mode 100644 index 1cd83d3a50..0000000000 --- a/crates/parser2/test_files/syntax_node/struct_def.snap +++ /dev/null @@ -1,415 +0,0 @@ ---- -source: crates/parser2/tests/syntax_node.rs -expression: snapshot ---- -Root@0..667 - ItemList@0..667 - Struct@0..27 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - StructKw@4..10 "struct" - WhiteSpace@10..11 " " - Ident@11..22 "EmptyStruct" - WhiteSpace@22..23 " " - StructFieldDefList@23..27 - LBrace@23..24 "{" - Newline@24..26 "\n\n" - RBrace@26..27 "}" - Newline@27..29 "\n\n" - Struct@29..137 - AttrList@29..85 - DocCommentAttr@29..45 - DocComment@29..44 "/// DocComment1" - Newline@44..45 "\n" - Attr@45..50 - Pound@45..46 "#" - Ident@46..50 
"attr" - Newline@50..51 "\n" - Comment@51..68 "// normal comment" - Newline@68..69 "\n" - DocCommentAttr@69..85 - DocComment@69..84 "/// DocComment2" - Newline@84..85 "\n" - PubKw@85..88 "pub" - WhiteSpace@88..89 " " - StructKw@89..95 "struct" - WhiteSpace@95..96 " " - Ident@96..106 "StructAttr" - WhiteSpace@106..107 " " - StructFieldDefList@107..137 - LBrace@107..108 "{" - Newline@108..109 "\n" - WhiteSpace@109..113 " " - StructFieldDef@113..124 - Ident@113..114 "x" - Colon@114..115 ":" - WhiteSpace@115..116 " " - PathType@116..124 - Path@116..124 - PathSegment@116..119 - Ident@116..119 "foo" - Colon2@119..121 "::" - PathSegment@121..124 - Ident@121..124 "Bar" - Newline@124..125 "\n" - WhiteSpace@125..129 " " - StructFieldDef@129..135 - Ident@129..130 "y" - Colon@130..131 ":" - WhiteSpace@131..132 " " - PathType@132..135 - Path@132..135 - PathSegment@132..135 - Ident@132..135 "i32" - Newline@135..136 "\n" - RBrace@136..137 "}" - Newline@137..139 "\n\n" - Struct@139..234 - AttrList@139..146 - Attr@139..145 - Pound@139..140 "#" - Ident@140..145 "Event" - Newline@145..146 "\n" - PubKw@146..149 "pub" - WhiteSpace@149..150 " " - StructKw@150..156 "struct" - WhiteSpace@156..157 " " - Ident@157..172 "StructFieldAttr" - WhiteSpace@172..173 " " - StructFieldDefList@173..234 - LBrace@173..174 "{" - Newline@174..175 "\n" - WhiteSpace@175..179 " " - StructFieldDef@179..220 - AttrList@179..214 - DocCommentAttr@179..199 - DocComment@179..198 "/// `x` is a topic " - Newline@198..199 "\n" - WhiteSpace@199..203 " " - Attr@203..209 - Pound@203..204 "#" - Ident@204..209 "topic" - Newline@209..210 "\n" - WhiteSpace@210..214 " " - Ident@214..215 "x" - Colon@215..216 ":" - WhiteSpace@216..217 " " - PathType@217..220 - Path@217..220 - PathSegment@217..220 - Ident@217..220 "i32" - Newline@220..222 "\n\n" - WhiteSpace@222..226 " " - StructFieldDef@226..232 - Ident@226..227 "y" - Colon@227..228 ":" - WhiteSpace@228..229 " " - PathType@229..232 - Path@229..232 - PathSegment@229..232 - Ident@229..232 "u32" - Newline@232..233 "\n" - RBrace@233..234 "}" - Newline@234..236 "\n\n" - Struct@236..353 - StructKw@236..242 "struct" - WhiteSpace@242..243 " " - Ident@243..263 "StructWithTupleField" - WhiteSpace@263..264 " " - StructFieldDefList@264..353 - LBrace@264..265 "{" - Newline@265..266 "\n" - WhiteSpace@266..270 " " - StructFieldDef@270..283 - Ident@270..271 "x" - Colon@271..272 ":" - WhiteSpace@272..273 " " - TupleType@273..283 - LParen@273..274 "(" - PathType@274..277 - Path@274..277 - PathSegment@274..277 - Ident@274..277 "i32" - Comma@277..278 "," - WhiteSpace@278..279 " " - PathType@279..282 - Path@279..282 - PathSegment@279..282 - Ident@279..282 "u32" - RParen@282..283 ")" - Newline@283..284 "\n" - WhiteSpace@284..288 " " - StructFieldDef@288..341 - Ident@288..289 "y" - Colon@289..290 ":" - WhiteSpace@290..291 " " - TupleType@291..341 - LParen@291..292 "(" - Newline@292..293 "\n" - WhiteSpace@293..301 " " - PathType@301..304 - Path@301..304 - PathSegment@301..304 - Ident@301..304 "i32" - Comma@304..305 "," - Newline@305..306 "\n" - WhiteSpace@306..314 " " - PathType@314..322 - Path@314..322 - PathSegment@314..317 - Ident@314..317 "foo" - Colon2@317..319 "::" - PathSegment@319..322 - Ident@319..322 "Bar" - Comma@322..323 "," - Newline@323..324 "\n" - WhiteSpace@324..332 " " - PathType@332..335 - Path@332..335 - PathSegment@332..335 - Ident@332..335 "u32" - Newline@335..336 "\n" - WhiteSpace@336..340 " " - RParen@340..341 ")" - Newline@341..342 "\n" - WhiteSpace@342..346 " " - StructFieldDef@346..351 - 
Ident@346..347 "z" - Colon@347..348 ":" - WhiteSpace@348..349 " " - TupleType@349..351 - LParen@349..350 "(" - RParen@350..351 ")" - Newline@351..352 "\n" - RBrace@352..353 "}" - Newline@353..355 "\n\n" - Struct@355..428 - PubKw@355..358 "pub" - WhiteSpace@358..359 " " - StructKw@359..365 "struct" - WhiteSpace@365..366 " " - Ident@366..388 "StructWithGenericParam" - GenericParamList@388..397 - Lt@388..389 "<" - GenericParam@389..390 - Ident@389..390 "S" - Comma@390..391 "," - WhiteSpace@391..392 " " - GenericParam@392..393 - Ident@392..393 "T" - Comma@393..394 "," - WhiteSpace@394..395 " " - GenericParam@395..396 - Ident@395..396 "U" - Gt@396..397 ">" - WhiteSpace@397..398 " " - StructFieldDefList@398..428 - LBrace@398..399 "{" - Newline@399..400 "\n" - WhiteSpace@400..404 " " - StructFieldDef@404..408 - Ident@404..405 "x" - Colon@405..406 ":" - WhiteSpace@406..407 " " - PathType@407..408 - Path@407..408 - PathSegment@407..408 - Ident@407..408 "S" - Newline@408..409 "\n" - WhiteSpace@409..413 " " - StructFieldDef@413..417 - Ident@413..414 "y" - Colon@414..415 ":" - WhiteSpace@415..416 " " - PathType@416..417 - Path@416..417 - PathSegment@416..417 - Ident@416..417 "T" - Newline@417..418 "\n" - WhiteSpace@418..422 " " - StructFieldDef@422..426 - Ident@422..423 "z" - Colon@423..424 ":" - WhiteSpace@424..425 " " - PathType@425..426 - Path@425..426 - PathSegment@425..426 - Ident@425..426 "U" - Newline@426..427 "\n" - RBrace@427..428 "}" - Newline@428..429 "\n" - WhiteSpace@429..430 " " - Newline@430..431 "\n" - Struct@431..540 - PubKw@431..434 "pub" - WhiteSpace@434..435 " " - StructKw@435..441 "struct" - WhiteSpace@441..442 " " - Ident@442..465 "StructWithGenericParam2" - GenericParamList@465..500 - Lt@465..466 "<" - Newline@466..467 "\n" - WhiteSpace@467..471 " " - GenericParam@471..472 - Ident@471..472 "S" - Comma@472..473 "," - Newline@473..474 "\n" - WhiteSpace@474..478 " " - GenericParam@478..491 - Ident@478..479 "T" - Colon@479..480 ":" - WhiteSpace@480..481 " " - TraitBoundList@481..491 - TraitBound@481..491 - Path@481..491 - PathSegment@481..484 - Ident@481..484 "foo" - Colon2@484..486 "::" - PathSegment@486..491 - Ident@486..491 "Trait" - Comma@491..492 "," - Newline@492..493 "\n" - WhiteSpace@493..497 " " - GenericParam@497..498 - Ident@497..498 "U" - Newline@498..499 "\n" - Gt@499..500 ">" - WhiteSpace@500..501 " " - StructFieldDefList@501..540 - LBrace@501..502 "{" - Newline@502..503 "\n" - WhiteSpace@503..507 " " - StructFieldDef@507..520 - Ident@507..508 "x" - Colon@508..509 ":" - WhiteSpace@509..510 " " - PtrType@510..520 - Star@510..511 "*" - TupleType@511..520 - LParen@511..512 "(" - PathType@512..513 - Path@512..513 - PathSegment@512..513 - Ident@512..513 "S" - Comma@513..514 "," - WhiteSpace@514..515 " " - PtrType@515..519 - Star@515..516 "*" - PathType@516..519 - Path@516..519 - PathSegment@516..519 - Ident@516..519 "i32" - RParen@519..520 ")" - Newline@520..521 "\n" - WhiteSpace@521..525 " " - StructFieldDef@525..529 - Ident@525..526 "y" - Colon@526..527 ":" - WhiteSpace@527..528 " " - PathType@528..529 - Path@528..529 - PathSegment@528..529 - Ident@528..529 "T" - Newline@529..530 "\n" - WhiteSpace@530..534 " " - StructFieldDef@534..538 - Ident@534..535 "z" - Colon@535..536 ":" - WhiteSpace@536..537 " " - PathType@537..538 - Path@537..538 - PathSegment@537..538 - Ident@537..538 "U" - Newline@538..539 "\n" - RBrace@539..540 "}" - Newline@540..542 "\n\n" - Struct@542..667 - PubKw@542..545 "pub" - WhiteSpace@545..546 " " - StructKw@546..552 "struct" - WhiteSpace@552..553 " " 
- Ident@553..576 "StructWithGenericParam3" - GenericParamList@576..636 - Lt@576..577 "<" - Newline@577..578 "\n" - WhiteSpace@578..582 " " - GenericParam@582..608 - Ident@582..583 "S" - Colon@583..584 ":" - WhiteSpace@584..585 " " - TraitBoundList@585..608 - TraitBound@585..595 - Path@585..595 - PathSegment@585..588 - Ident@585..588 "foo" - Colon2@588..590 "::" - PathSegment@590..595 - Ident@590..595 "Trait" - WhiteSpace@595..596 " " - Plus@596..597 "+" - WhiteSpace@597..598 " " - TraitBound@598..608 - Path@598..608 - PathSegment@598..601 - Ident@598..601 "bar" - Colon2@601..603 "::" - PathSegment@603..608 - Ident@603..608 "Trait" - Comma@608..609 "," - Newline@609..610 "\n" - WhiteSpace@610..614 " " - GenericParam@614..615 - Ident@614..615 "T" - Comma@615..616 "," - Newline@616..617 "\n" - WhiteSpace@617..621 " " - GenericParam@621..634 - Ident@621..622 "U" - Colon@622..623 ":" - WhiteSpace@623..624 " " - TraitBoundList@624..634 - TraitBound@624..634 - Path@624..634 - PathSegment@624..627 - Ident@624..627 "bar" - Colon2@627..629 "::" - PathSegment@629..634 - Ident@629..634 "Trait" - Newline@634..635 "\n" - Gt@635..636 ">" - WhiteSpace@636..637 " " - StructFieldDefList@637..667 - LBrace@637..638 "{" - Newline@638..639 "\n" - WhiteSpace@639..643 " " - StructFieldDef@643..647 - Ident@643..644 "x" - Colon@644..645 ":" - WhiteSpace@645..646 " " - PathType@646..647 - Path@646..647 - PathSegment@646..647 - Ident@646..647 "S" - Newline@647..648 "\n" - WhiteSpace@648..652 " " - StructFieldDef@652..656 - Ident@652..653 "y" - Colon@653..654 ":" - WhiteSpace@654..655 " " - PathType@655..656 - Path@655..656 - PathSegment@655..656 - Ident@655..656 "T" - Newline@656..657 "\n" - WhiteSpace@657..661 " " - StructFieldDef@661..665 - Ident@661..662 "z" - Colon@662..663 ":" - WhiteSpace@663..664 " " - PathType@664..665 - Path@664..665 - PathSegment@664..665 - Ident@664..665 "U" - Newline@665..666 "\n" - RBrace@666..667 "}" - diff --git a/crates/parser2/test_files/syntax_node/structs/attr.fe b/crates/parser2/test_files/syntax_node/structs/attr.fe new file mode 100644 index 0000000000..b0af646fe5 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/attr.fe @@ -0,0 +1,11 @@ +/// DocComment1 +#attr +// normal comment +/// DocComment2 +pub struct StructAttr { + /// This is `x` + x: foo::Bar + /// This is `y` + #cfg(target: evm) + y: i32 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/attr.snap b/crates/parser2/test_files/syntax_node/structs/attr.snap new file mode 100644 index 0000000000..00e4751b07 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/attr.snap @@ -0,0 +1,77 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..170 + ItemList@0..170 + Struct@0..170 + AttrList@0..56 + DocCommentAttr@0..16 + DocComment@0..15 "/// DocComment1" + Newline@15..16 "\n" + Attr@16..21 + Pound@16..17 "#" + Ident@17..21 "attr" + Newline@21..22 "\n" + Comment@22..39 "// normal comment" + Newline@39..40 "\n" + DocCommentAttr@40..56 + DocComment@40..55 "/// DocComment2" + Newline@55..56 "\n" + PubKw@56..59 "pub" + WhiteSpace@59..60 " " + StructKw@60..66 "struct" + WhiteSpace@66..67 " " + Ident@67..77 "StructAttr" + WhiteSpace@77..78 " " + StructFieldDefList@78..170 + LBrace@78..79 "{" + Newline@79..80 "\n" + WhiteSpace@80..84 " " + StructFieldDef@84..115 + AttrList@84..104 + DocCommentAttr@84..100 + DocComment@84..99 "/// This is `x`" + Newline@99..100 "\n" + WhiteSpace@100..104 " " + Ident@104..105 "x" + Colon@105..106 ":" + 
WhiteSpace@106..107 " " + PathType@107..115 + Path@107..115 + PathSegment@107..110 + Ident@107..110 "foo" + Colon2@110..112 "::" + PathSegment@112..115 + Ident@112..115 "Bar" + Newline@115..116 "\n" + WhiteSpace@116..120 " " + StructFieldDef@120..168 + AttrList@120..162 + DocCommentAttr@120..136 + DocComment@120..135 "/// This is `y`" + Newline@135..136 "\n" + WhiteSpace@136..140 " " + Attr@140..157 + Pound@140..141 "#" + Ident@141..144 "cfg" + AttrParamList@144..157 + LParen@144..145 "(" + AttrParam@145..156 + Ident@145..151 "target" + Colon@151..152 ":" + WhiteSpace@152..153 " " + Ident@153..156 "evm" + RParen@156..157 ")" + Newline@157..158 "\n" + WhiteSpace@158..162 " " + Ident@162..163 "y" + Colon@163..164 ":" + WhiteSpace@164..165 " " + PathType@165..168 + Path@165..168 + PathSegment@165..168 + Ident@165..168 "i32" + Newline@168..169 "\n" + RBrace@169..170 "}" + diff --git a/crates/parser2/test_files/syntax_node/structs/empty.fe b/crates/parser2/test_files/syntax_node/structs/empty.fe new file mode 100644 index 0000000000..901512de18 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/empty.fe @@ -0,0 +1,2 @@ +pub struct EmptyStruct { +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/empty.snap b/crates/parser2/test_files/syntax_node/structs/empty.snap new file mode 100644 index 0000000000..8771aafd02 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/empty.snap @@ -0,0 +1,18 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..26 + ItemList@0..26 + Struct@0..26 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + WhiteSpace@10..11 " " + Ident@11..22 "EmptyStruct" + WhiteSpace@22..23 " " + StructFieldDefList@23..26 + LBrace@23..24 "{" + Newline@24..25 "\n" + RBrace@25..26 "}" + diff --git a/crates/parser2/test_files/syntax_node/structs/generics.fe b/crates/parser2/test_files/syntax_node/structs/generics.fe new file mode 100644 index 0000000000..80106a0984 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/generics.fe @@ -0,0 +1,25 @@ +pub struct StructWithGenericParam { + x: S + y: T + z: U +} + +pub struct StructWithGenericParam2< + S, + T: foo::Trait, + U +> { + x: *(S, *i32) + y: T + z: U +} + +pub struct StructWithGenericParam3< + S: foo::Trait + bar::Trait, + T, + U: bar::Trait +> { + x: S + y: T + z: U +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap new file mode 100644 index 0000000000..8e01ad7c04 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -0,0 +1,236 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..312 + ItemList@0..312 + Struct@0..73 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + WhiteSpace@10..11 " " + Ident@11..33 "StructWithGenericParam" + GenericParamList@33..42 + Lt@33..34 "<" + GenericParam@34..35 + Ident@34..35 "S" + Comma@35..36 "," + WhiteSpace@36..37 " " + GenericParam@37..38 + Ident@37..38 "T" + Comma@38..39 "," + WhiteSpace@39..40 " " + GenericParam@40..41 + Ident@40..41 "U" + Gt@41..42 ">" + WhiteSpace@42..43 " " + StructFieldDefList@43..73 + LBrace@43..44 "{" + Newline@44..45 "\n" + WhiteSpace@45..49 " " + StructFieldDef@49..53 + Ident@49..50 "x" + Colon@50..51 ":" + WhiteSpace@51..52 " " + PathType@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "S" + Newline@53..54 "\n" + WhiteSpace@54..58 " " + 
StructFieldDef@58..62 + Ident@58..59 "y" + Colon@59..60 ":" + WhiteSpace@60..61 " " + PathType@61..62 + Path@61..62 + PathSegment@61..62 + Ident@61..62 "T" + Newline@62..63 "\n" + WhiteSpace@63..67 " " + StructFieldDef@67..71 + Ident@67..68 "z" + Colon@68..69 ":" + WhiteSpace@69..70 " " + PathType@70..71 + Path@70..71 + PathSegment@70..71 + Ident@70..71 "U" + Newline@71..72 "\n" + RBrace@72..73 "}" + Newline@73..74 "\n" + WhiteSpace@74..75 " " + Newline@75..76 "\n" + Struct@76..185 + PubKw@76..79 "pub" + WhiteSpace@79..80 " " + StructKw@80..86 "struct" + WhiteSpace@86..87 " " + Ident@87..110 "StructWithGenericParam2" + GenericParamList@110..145 + Lt@110..111 "<" + Newline@111..112 "\n" + WhiteSpace@112..116 " " + GenericParam@116..117 + Ident@116..117 "S" + Comma@117..118 "," + Newline@118..119 "\n" + WhiteSpace@119..123 " " + GenericParam@123..136 + Ident@123..124 "T" + Colon@124..125 ":" + WhiteSpace@125..126 " " + TraitBoundList@126..136 + TraitBound@126..136 + Path@126..136 + PathSegment@126..129 + Ident@126..129 "foo" + Colon2@129..131 "::" + PathSegment@131..136 + Ident@131..136 "Trait" + Comma@136..137 "," + Newline@137..138 "\n" + WhiteSpace@138..142 " " + GenericParam@142..143 + Ident@142..143 "U" + Newline@143..144 "\n" + Gt@144..145 ">" + WhiteSpace@145..146 " " + StructFieldDefList@146..185 + LBrace@146..147 "{" + Newline@147..148 "\n" + WhiteSpace@148..152 " " + StructFieldDef@152..165 + Ident@152..153 "x" + Colon@153..154 ":" + WhiteSpace@154..155 " " + PtrType@155..165 + Star@155..156 "*" + TupleType@156..165 + LParen@156..157 "(" + PathType@157..158 + Path@157..158 + PathSegment@157..158 + Ident@157..158 "S" + Comma@158..159 "," + WhiteSpace@159..160 " " + PtrType@160..164 + Star@160..161 "*" + PathType@161..164 + Path@161..164 + PathSegment@161..164 + Ident@161..164 "i32" + RParen@164..165 ")" + Newline@165..166 "\n" + WhiteSpace@166..170 " " + StructFieldDef@170..174 + Ident@170..171 "y" + Colon@171..172 ":" + WhiteSpace@172..173 " " + PathType@173..174 + Path@173..174 + PathSegment@173..174 + Ident@173..174 "T" + Newline@174..175 "\n" + WhiteSpace@175..179 " " + StructFieldDef@179..183 + Ident@179..180 "z" + Colon@180..181 ":" + WhiteSpace@181..182 " " + PathType@182..183 + Path@182..183 + PathSegment@182..183 + Ident@182..183 "U" + Newline@183..184 "\n" + RBrace@184..185 "}" + Newline@185..187 "\n\n" + Struct@187..312 + PubKw@187..190 "pub" + WhiteSpace@190..191 " " + StructKw@191..197 "struct" + WhiteSpace@197..198 " " + Ident@198..221 "StructWithGenericParam3" + GenericParamList@221..281 + Lt@221..222 "<" + Newline@222..223 "\n" + WhiteSpace@223..227 " " + GenericParam@227..253 + Ident@227..228 "S" + Colon@228..229 ":" + WhiteSpace@229..230 " " + TraitBoundList@230..253 + TraitBound@230..240 + Path@230..240 + PathSegment@230..233 + Ident@230..233 "foo" + Colon2@233..235 "::" + PathSegment@235..240 + Ident@235..240 "Trait" + WhiteSpace@240..241 " " + Plus@241..242 "+" + WhiteSpace@242..243 " " + TraitBound@243..253 + Path@243..253 + PathSegment@243..246 + Ident@243..246 "bar" + Colon2@246..248 "::" + PathSegment@248..253 + Ident@248..253 "Trait" + Comma@253..254 "," + Newline@254..255 "\n" + WhiteSpace@255..259 " " + GenericParam@259..260 + Ident@259..260 "T" + Comma@260..261 "," + Newline@261..262 "\n" + WhiteSpace@262..266 " " + GenericParam@266..279 + Ident@266..267 "U" + Colon@267..268 ":" + WhiteSpace@268..269 " " + TraitBoundList@269..279 + TraitBound@269..279 + Path@269..279 + PathSegment@269..272 + Ident@269..272 "bar" + Colon2@272..274 "::" + 
PathSegment@274..279 + Ident@274..279 "Trait" + Newline@279..280 "\n" + Gt@280..281 ">" + WhiteSpace@281..282 " " + StructFieldDefList@282..312 + LBrace@282..283 "{" + Newline@283..284 "\n" + WhiteSpace@284..288 " " + StructFieldDef@288..292 + Ident@288..289 "x" + Colon@289..290 ":" + WhiteSpace@290..291 " " + PathType@291..292 + Path@291..292 + PathSegment@291..292 + Ident@291..292 "S" + Newline@292..293 "\n" + WhiteSpace@293..297 " " + StructFieldDef@297..301 + Ident@297..298 "y" + Colon@298..299 ":" + WhiteSpace@299..300 " " + PathType@300..301 + Path@300..301 + PathSegment@300..301 + Ident@300..301 "T" + Newline@301..302 "\n" + WhiteSpace@302..306 " " + StructFieldDef@306..310 + Ident@306..307 "z" + Colon@307..308 ":" + WhiteSpace@308..309 " " + PathType@309..310 + Path@309..310 + PathSegment@309..310 + Ident@309..310 "U" + Newline@310..311 "\n" + RBrace@311..312 "}" + diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.fe b/crates/parser2/test_files/syntax_node/structs/tupel_field.fe new file mode 100644 index 0000000000..bd5ea442d3 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.fe @@ -0,0 +1,9 @@ +struct StructWithTupleField { + x: (i32, u32) + y: ( + i32, + foo::Bar, + u32 + ) + z: () +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap new file mode 100644 index 0000000000..3ad6e68851 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap @@ -0,0 +1,78 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..117 + ItemList@0..117 + Struct@0..117 + StructKw@0..6 "struct" + WhiteSpace@6..7 " " + Ident@7..27 "StructWithTupleField" + WhiteSpace@27..28 " " + StructFieldDefList@28..117 + LBrace@28..29 "{" + Newline@29..30 "\n" + WhiteSpace@30..34 " " + StructFieldDef@34..47 + Ident@34..35 "x" + Colon@35..36 ":" + WhiteSpace@36..37 " " + TupleType@37..47 + LParen@37..38 "(" + PathType@38..41 + Path@38..41 + PathSegment@38..41 + Ident@38..41 "i32" + Comma@41..42 "," + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "u32" + RParen@46..47 ")" + Newline@47..48 "\n" + WhiteSpace@48..52 " " + StructFieldDef@52..105 + Ident@52..53 "y" + Colon@53..54 ":" + WhiteSpace@54..55 " " + TupleType@55..105 + LParen@55..56 "(" + Newline@56..57 "\n" + WhiteSpace@57..65 " " + PathType@65..68 + Path@65..68 + PathSegment@65..68 + Ident@65..68 "i32" + Comma@68..69 "," + Newline@69..70 "\n" + WhiteSpace@70..78 " " + PathType@78..86 + Path@78..86 + PathSegment@78..81 + Ident@78..81 "foo" + Colon2@81..83 "::" + PathSegment@83..86 + Ident@83..86 "Bar" + Comma@86..87 "," + Newline@87..88 "\n" + WhiteSpace@88..96 " " + PathType@96..99 + Path@96..99 + PathSegment@96..99 + Ident@96..99 "u32" + Newline@99..100 "\n" + WhiteSpace@100..104 " " + RParen@104..105 ")" + Newline@105..106 "\n" + WhiteSpace@106..110 " " + StructFieldDef@110..115 + Ident@110..111 "z" + Colon@111..112 ":" + WhiteSpace@112..113 " " + TupleType@113..115 + LParen@113..114 "(" + RParen@114..115 ")" + Newline@115..116 "\n" + RBrace@116..117 "}" + diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 2b2910c065..58887a7042 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -1,7 +1,7 @@ use fe_parser2::syntax_node::SyntaxNode; #[allow(unused)] -fn build_cst(input: &str) -> SyntaxNode { +fn 
build_root_cst(input: &str) -> SyntaxNode { let (cst, errors) = fe_parser2::parse_source_file(input); for error in &errors { println!("{}", error.msg); @@ -12,7 +12,7 @@ fn build_cst(input: &str) -> SyntaxNode { } fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node", - "parser2/test_files/syntax_node", - build_cst + "parser2/test_files/syntax_node/structs", + "parser2/test_files/syntax_node/structs", + build_root_cst } From b31326daa5cbdf1bb33ef6c172836718359371e1 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 19 Jan 2023 17:48:03 +0100 Subject: [PATCH 022/678] Add parser for `stmt` --- crates/parser2/src/parser/attr.rs | 14 ++- crates/parser2/src/parser/mod.rs | 75 ++++++++++-- crates/parser2/src/parser/stmt.rs | 168 +++++++++++++++++++++++++++ crates/parser2/src/parser/struct_.rs | 18 +-- crates/parser2/src/syntax_kind.rs | 31 ++++- 5 files changed, 275 insertions(+), 31 deletions(-) create mode 100644 crates/parser2/src/parser/stmt.rs diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index c9554c4a05..338938e540 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -1,7 +1,17 @@ -use super::{define_scope, token_stream::TokenStream, Parser}; +use super::{define_scope, token_stream::TokenStream, Checkpoint, Parser}; use crate::SyntaxKind; +pub(super) fn parse_attr_list(parser: &mut Parser) -> Option { + if let Some(SyntaxKind::DocComment) | Some(SyntaxKind::Pound) = parser.current_kind() { + let checkpoint = parser.checkpoint(); + parser.parse(super::attr::AttrListScope::default(), None); + Some(checkpoint) + } else { + None + } +} + define_scope! { AttrListScope, AttrList, @@ -19,7 +29,7 @@ impl super::Parse for AttrListScope { Some(Pound) => parser.parse(AttrScope::default(), None), Some(DocComment) => parser.parse(DocCommentAttrScope::default(), None), _ => break, - } + }; } } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 34fc084f23..bfe7e6c865 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -11,12 +11,17 @@ use self::token_stream::{BackTrackableTokenStream, SyntaxToken, TokenStream}; pub mod token_stream; mod attr; +mod expr; mod func; mod item; mod param; +mod pat; mod path; +mod stmt; mod struct_; -mod tuple; +mod type_; + +type Checkpoint = rowan::Checkpoint; /// Parser to build a rowan syntax tree. pub struct Parser { @@ -29,6 +34,9 @@ pub struct Parser { current_pos: rowan::TextSize, next_trivias: VecDeque, + /// The dry run states which holds the each state of the parser when it + /// enters dry run mode. + dry_run_states: Vec, } impl Parser { @@ -41,6 +49,7 @@ impl Parser { errors: Vec::new(), current_pos: rowan::TextSize::from(0), next_trivias: VecDeque::new(), + dry_run_states: Vec::new(), } } @@ -61,30 +70,34 @@ impl Parser { /// Finish the parsing and return the syntax tree. pub fn finish(self) -> (SyntaxNode, Vec) { debug_assert!(self.scopes.is_empty()); + debug_assert!(!self.is_dry_run()); (SyntaxNode::new_root(self.builder.finish()), self.errors) } /// Invoke the scope to parse. The scope is wrapped up by the node specified /// by the scope. + /// Returns `true` if parse succeeded, otherwise `false`. /// /// * If the checkpoint is `Some`, the marked branch is wrapped up by the /// node. /// * If the checkpoint is `None`, the current branch is wrapped up by the /// node. 
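    // Editor's note: an illustrative sketch, not part of this patch, of the
    // checkpoint convention documented above. A caller marks the branch before
    // parsing an attribute list, then hands the checkpoint to `parse` so the
    // item's node also wraps the attributes. A `parser` value and the
    // `attr`/`func` scopes from this module tree are assumed to be in scope.
    //
    //     let checkpoint = parser.checkpoint();
    //     parser.parse(attr::AttrListScope::default(), None);
    //     // `FnScope`'s node starts at the checkpoint, so the attribute list
    //     // parsed above becomes part of the `Fn` node, not a preceding sibling.
    //     parser.parse(func::FnScope::default(), Some(checkpoint));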
- pub fn parse(&mut self, mut scope: T, checkpoint: Option) + pub fn parse(&mut self, mut scope: T, checkpoint: Option) -> bool where T: Parse + 'static, { let checkpoint = self.enter(scope.clone(), checkpoint); + let error_len = self.errors.len(); scope.parse(self); self.leave(checkpoint); + error_len == self.errors.len() } /// Marks the current branch as a checkpoint. /// The checked branch is wrapped up later when [`parse]` is /// called with the `checkpoint`. - pub fn checkpoint(&mut self) -> rowan::Checkpoint { + pub fn checkpoint(&mut self) -> Checkpoint { self.builder.checkpoint() } @@ -95,7 +108,7 @@ impl Parser { /// node. /// * If checkpoint is `None`, the current branch is wrapped up by an error /// node. - pub fn error_and_recover(&mut self, msg: &str, checkpoint: Option) { + pub fn error_and_recover(&mut self, msg: &str, checkpoint: Option) { let err_scope = self.error(msg); let checkpoint = self.enter(err_scope, checkpoint); self.recover(); @@ -112,6 +125,30 @@ impl Parser { self.leave(checkpoint); } + /// Starts the dry run mode. + /// When the parser is in the dry run mode, the parser does not build the + /// syntax tree. + /// + /// When the [`end_dry_run`] is called, all errors occurred in the dry + /// run mode are discarded, and all tokens which are consumed in the + /// dry run mode are backtracked. + pub fn start_dry_run(&mut self) { + self.stream.set_bt_point(); + self.dry_run_states.push(DryRunState { + pos: self.current_pos, + err_num: self.errors.len(), + }); + } + + /// Ends the dry run mode. + /// See `[start_dry_run]` for more details. + pub fn end_dry_run(&mut self) { + self.stream.backtrack(); + let state = self.dry_run_states.pop().unwrap(); + self.errors.truncate(state.err_num); + self.current_pos = state.pos; + } + /// Bumps the current token and /// current branch. pub fn bump(&mut self) { @@ -121,7 +158,9 @@ impl Parser { }; self.current_pos += rowan::TextSize::of(tok.text()); - self.builder.token(tok.syntax_kind().into(), tok.text()); + if !self.is_dry_run() { + self.builder.token(tok.syntax_kind().into(), tok.text()); + } } /// Peek the next non-trivia token. @@ -242,7 +281,12 @@ impl Parser { ErrorScope::default() } - fn enter(&mut self, scope: T, checkpoint: Option) -> rowan::Checkpoint + /// Returns `true` if the parser is in the dry run mode. + fn is_dry_run(&self) -> bool { + !self.dry_run_states.is_empty() + } + + fn enter(&mut self, scope: T, checkpoint: Option) -> Checkpoint where T: ParsingScope + 'static, { @@ -250,11 +294,13 @@ impl Parser { checkpoint.unwrap_or_else(|| self.checkpoint()) } - fn leave(&mut self, checkpoint: rowan::Checkpoint) { + fn leave(&mut self, checkpoint: Checkpoint) { let scope = self.scopes.pop().unwrap(); - self.builder - .start_node_at(checkpoint, scope.syntax_kind().into()); - self.builder.finish_node(); + if !self.is_dry_run() { + self.builder + .start_node_at(checkpoint, scope.syntax_kind().into()); + self.builder.finish_node(); + } } } @@ -369,7 +415,7 @@ macro_rules! define_scope { use crate::SyntaxKind::*; let set: fxhash::FxHashSet = vec![ $($recoveries), * - ].into_iter().map(|kind: SyntaxKind| kind.into()).collect(); + ].into_iter().map(|kind: $crate::SyntaxKind| kind.into()).collect(); crate::parser::RecoveryMethod::Override(set) }; @@ -386,4 +432,11 @@ macro_rules! define_scope { }; } +struct DryRunState { + /// The text position is the position when the dry run started. + pos: rowan::TextSize, + /// The number of errors when the dry run started. 
+ err_num: usize, +} + use define_scope; diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs new file mode 100644 index 0000000000..f01d7b22d0 --- /dev/null +++ b/crates/parser2/src/parser/stmt.rs @@ -0,0 +1,168 @@ +use crate::SyntaxKind; + +use super::{ + define_scope, + expr::{parse_expr, BlockExprScope}, + pat::parse_pat, + token_stream::TokenStream, + type_::parse_type, + Checkpoint, Parser, +}; + +pub(super) fn parse_stmt( + parser: &mut Parser, + checkpoint: Option, +) -> bool { + use SyntaxKind::*; + + match parser.current_kind() { + Some(LetKw) => parser.parse(LetStmtScope::default(), checkpoint), + Some(ForKw) => parser.parse(ForStmtScope::default(), checkpoint), + Some(WhileKw) => parser.parse(WhileStmtScope::default(), checkpoint), + Some(ContinueKw) => parser.parse(ContinueStmtScope::default(), checkpoint), + Some(BreakKw) => parser.parse(BreakStmtScope::default(), checkpoint), + Some(AssertKw) => parser.parse(AssertStmtScope::default(), checkpoint), + Some(ReturnKw) => parser.parse(ReturnStmtScope::default(), checkpoint), + _ => { + parser.start_dry_run(); + if parser.parse(AssignStmtScope::default(), checkpoint) { + parser.end_dry_run(); + assert!(parser.parse(AssignStmtScope::default(), checkpoint)); + true + } else { + parser.end_dry_run(); + parser.parse(ExprStmtScope::default(), checkpoint) + } + } + } +} + +define_scope! { LetStmtScope, LetStmt, Inheritance } +impl super::Parse for LetStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LetKw); + parser.bump_trivias(false); + if !parse_pat(parser, None) { + parser.error_and_recover("expected pattern", None); + return; + } + if parser.peek_non_trivia(false) == Some(SyntaxKind::Colon) { + parser.bump_trivias(false); + parser.bump_expected(SyntaxKind::Colon); + parser.bump_trivias(false); + if !parse_type(parser, None) { + return; + } + } + + if parser.peek_non_trivia(false) == Some(SyntaxKind::Eq) { + parser.bump_trivias(false); + parser.bump_expected(SyntaxKind::Eq); + parser.bump_trivias(false); + if !parse_expr(parser, None) { + return; + } + } + } +} + +define_scope! { ForStmtScope, ForStmt, Inheritance } +impl super::Parse for ForStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ForKw); + parser.bump_trivias(true); + if !parse_pat(parser, None) { + return; + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::InKw) { + parser.error_and_recover("expected `in` keyword", None); + return; + } + parser.bump_trivias(true); + if !parse_expr(parser, None) { + return; + } + + if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected block", None); + return; + } + parser.parse(BlockExprScope::default(), None); + } +} + +define_scope! { WhileStmtScope, WhileStmt, Inheritance } +impl super::Parse for WhileStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::WhileKw); + parser.bump_trivias(true); + if !parse_expr(parser, None) { + return; + } + + if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected block", None); + return; + } + parser.parse(BlockExprScope::default(), None); + } +} + +define_scope! { ContinueStmtScope, ContinueStmt, Inheritance } +impl super::Parse for ContinueStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ContinueKw); + } +} + +define_scope! 
{ BreakStmtScope, BreakStmt, Inheritance } +impl super::Parse for BreakStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::BreakKw); + } +} + +define_scope! { AssertStmtScope, AssertStmt, Inheritance } +impl super::Parse for AssertStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::AssertKw); + parser.bump_trivias(false); + parse_expr(parser, None); + } +} + +define_scope! { ReturnStmtScope, ReturnStmt, Inheritance } +impl super::Parse for ReturnStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ReturnStmt); + parser.bump_trivias(false); + parse_expr(parser, None); + } +} + +define_scope! { AssignStmtScope, AssignStmt, Inheritance } +impl super::Parse for AssignStmtScope { + fn parse(&mut self, parser: &mut Parser) { + if !parse_pat(parser, None) { + return; + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::Eq) { + parser.error_and_recover("expected `=` keyword", None); + return; + } + if !parse_expr(parser, None) { + return; + } + } +} + +define_scope! { ExprStmtScope, ExprStmt, Inheritance } +impl super::Parse for ExprStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parse_expr(parser, None); + } +} diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 7e062d9c86..a1fb97d593 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -1,8 +1,8 @@ use crate::SyntaxKind; use super::{ - define_scope, param::GenericParamListScope, token_stream::TokenStream, tuple::TupleDefScope, - Parser, + attr::parse_attr_list, define_scope, param::GenericParamListScope, token_stream::TokenStream, + type_::parse_type, Parser, }; define_scope! { @@ -66,12 +66,7 @@ define_scope! { } impl super::Parse for StructFieldDefScope { fn parse(&mut self, parser: &mut Parser) { - if matches!( - parser.current_kind(), - Some(SyntaxKind::Pound | SyntaxKind::DocComment) - ) { - parser.parse(super::attr::AttrListScope::default(), None); - } + parse_attr_list(parser); parser.bump_trivias(true); parser.bump_if(SyntaxKind::PubKw); @@ -84,16 +79,11 @@ impl super::Parse for StructFieldDefScope { parser.error_and_recover("expected `name: type` for the field definition", None); } parser.bump_trivias(false); - if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(TupleDefScope::default(), None); - } else { - parser.parse(super::path::PathScope::default(), None); - } + parse_type(parser, None); if !matches!( parser.peek_non_trivia(false), Some(SyntaxKind::Newline) | Some(SyntaxKind::RBrace) ) { - println!("{:?}", parser.peek_non_trivia(false)); parser.error_and_recover("expected newline after the field definition", None); } } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 065659b12d..038b290c46 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -174,9 +174,15 @@ pub enum SyntaxKind { /// `for` #[token("for")] ForKw, + /// `in` + #[token("in")] + InKw, /// `while` #[token("while")] WhileKw, + /// `assert` + #[token("assert")] + AssertKw, /// `pub` #[token("pub")] PubKw, @@ -218,6 +224,8 @@ pub enum SyntaxKind { UnsafeKw, // Expressions. These are non-leaf nodes. + /// { statement-list } + BlockExpr, /// `x + 1` BinExpr, /// `!x` @@ -254,15 +262,23 @@ pub enum SyntaxKind { // Statements. These are non-leaf nodes. 
/// `let x = 1` LetStmt, + /// `return 1` + AssignStmt, /// `for x in y {..}` ForStmt, + /// `while expr {..}` + WhileStmt, + /// `continue` + ContinueStmt, + /// `break` + BreakStmt, + /// `assert x == 2` AssertStmt, /// `return 1` ReturnStmt, /// `1` ExprStmt, - StmtList, // Patterns. These are non-leaf nodes. /// `_` @@ -308,6 +324,16 @@ pub enum SyntaxKind { Extern, ItemList, + // Types. These are non-leaf nodes. + /// `*i32` + PtrType, + /// `foo::Type` + PathType, + /// `Self` + SelfType, + /// `(i32, foo::Bar)` + TupleType, + // Paths. These are non-leaf nodes. /// `Segment1::Segment2` Path, @@ -331,9 +357,6 @@ pub enum SyntaxKind { StructFieldDef, StructFieldDefList, - /// `(i32, u32)` - TupleDef, - VariantDef, VariantDefList, From 3572c150f9e1e04c39bc35982ac2897187dcc19b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 20 Jan 2023 00:03:56 +0100 Subject: [PATCH 023/678] Add parser for `pat` --- crates/parser2/src/lib.rs | 7 +- crates/parser2/src/parser/item.rs | 56 +++--- crates/parser2/src/parser/mod.rs | 111 +++++++---- crates/parser2/src/parser/pat.rs | 121 ++++++++++++ crates/parser2/src/parser/stmt.rs | 14 +- crates/parser2/src/parser/type_.rs | 1 - crates/parser2/src/syntax_kind.rs | 11 +- .../test_files/syntax_node/pats/lit.fe | 2 + .../test_files/syntax_node/pats/lit.snap | 11 ++ .../parser2/test_files/syntax_node/pats/or.fe | 5 + .../test_files/syntax_node/pats/or.snap | 126 ++++++++++++ .../test_files/syntax_node/pats/path.fe | 1 + .../test_files/syntax_node/pats/path.snap | 13 ++ .../test_files/syntax_node/pats/path_tuple.fe | 16 ++ .../syntax_node/pats/path_tuple.snap | 181 ++++++++++++++++++ .../syntax_node/pats/rest_pattern.fe | 1 + .../syntax_node/pats/rest_pattern.snap | 8 + .../test_files/syntax_node/pats/wilecard.fe | 1 + .../test_files/syntax_node/pats/wilecard.snap | 8 + crates/parser2/tests/syntax_node.rs | 76 ++++++-- 20 files changed, 680 insertions(+), 90 deletions(-) create mode 100644 crates/parser2/src/parser/pat.rs create mode 100644 crates/parser2/test_files/syntax_node/pats/lit.fe create mode 100644 crates/parser2/test_files/syntax_node/pats/lit.snap create mode 100644 crates/parser2/test_files/syntax_node/pats/or.fe create mode 100644 crates/parser2/test_files/syntax_node/pats/or.snap create mode 100644 crates/parser2/test_files/syntax_node/pats/path.fe create mode 100644 crates/parser2/test_files/syntax_node/pats/path.snap create mode 100644 crates/parser2/test_files/syntax_node/pats/path_tuple.fe create mode 100644 crates/parser2/test_files/syntax_node/pats/path_tuple.snap create mode 100644 crates/parser2/test_files/syntax_node/pats/rest_pattern.fe create mode 100644 crates/parser2/test_files/syntax_node/pats/rest_pattern.snap create mode 100644 crates/parser2/test_files/syntax_node/pats/wilecard.fe create mode 100644 crates/parser2/test_files/syntax_node/pats/wilecard.snap diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index d2d41970e8..9c6848140a 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -4,6 +4,8 @@ pub mod syntax_kind; pub mod syntax_node; pub use syntax_kind::SyntaxKind; + +use parser::RootScope; use syntax_node::SyntaxNode; pub type TextRange = rowan::TextRange; @@ -11,8 +13,11 @@ pub type TextRange = rowan::TextRange; pub fn parse_source_file(text: &str) -> (SyntaxNode, Vec) { let lexer = lexer::Lexer::new(text); let mut parser = parser::Parser::new(lexer); + let checkpoint = parser.enter(RootScope::default(), None); + + parser.parse(parser::ItemListScope::default(), None); - 
parser.parse(parser::RootScope::default(), None); + parser.leave(checkpoint); parser.finish() } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index c272f5724f..0a1b3b2682 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -2,18 +2,7 @@ use std::cell::RefCell; use crate::SyntaxKind; -use super::{define_scope, token_stream::TokenStream, Parser}; - -define_scope! { - RootScope, - Root, - Override() -} -impl super::Parse for RootScope { - fn parse(&mut self, parser: &mut Parser) { - parser.parse(ItemListScope::default(), None); - } -} +use super::{attr, define_scope, token_stream::TokenStream, Parser}; define_scope! { ItemListScope, @@ -34,7 +23,6 @@ define_scope! { Pound ) } - impl super::Parse for ItemListScope { fn parse(&mut self, parser: &mut Parser) { use crate::SyntaxKind::*; @@ -45,12 +33,8 @@ impl super::Parse for ItemListScope { break; } - let mut checkpoint = None; parser.bump_trivias(true); - if let Some(DocComment) | Some(Pound) = parser.current_kind() { - checkpoint.get_or_insert_with(|| parser.checkpoint()); - parser.parse(super::attr::AttrListScope::default(), None); - } + let mut checkpoint = attr::parse_attr_list(parser); parser.bump_trivias(true); let modifier = match parser.current_kind() { @@ -89,15 +73,33 @@ impl super::Parse for ItemListScope { } match parser.current_kind() { - Some(FnKw) => parser.parse(super::func::FnScope::default(), checkpoint), - Some(StructKw) => parser.parse(super::struct_::StructScope::default(), checkpoint), - Some(EnumKw) => parser.parse(EnumScope::default(), checkpoint), - Some(TraitKw) => parser.parse(TraitScope::default(), checkpoint), - Some(ImplKw) => parser.parse(ImplScope::default(), checkpoint), - Some(UseKw) => parser.parse(UseScope::default(), checkpoint), - Some(ConstKw) => parser.parse(ConstScope::default(), checkpoint), - Some(ExternKw) => parser.parse(ExternScope::default(), checkpoint), - Some(TypeKw) => parser.parse(TypeAliasScope::default(), checkpoint), + Some(FnKw) => { + parser.parse(super::func::FnScope::default(), checkpoint); + } + Some(StructKw) => { + parser.parse(super::struct_::StructScope::default(), checkpoint); + } + Some(EnumKw) => { + parser.parse(EnumScope::default(), checkpoint); + } + Some(TraitKw) => { + parser.parse(TraitScope::default(), checkpoint); + } + Some(ImplKw) => { + parser.parse(ImplScope::default(), checkpoint); + } + Some(UseKw) => { + parser.parse(UseScope::default(), checkpoint); + } + Some(ConstKw) => { + parser.parse(ConstScope::default(), checkpoint); + } + Some(ExternKw) => { + parser.parse(ExternScope::default(), checkpoint); + } + Some(TypeKw) => { + parser.parse(TypeAliasScope::default(), checkpoint); + } tok => parser .error_and_recover(&format! 
{"expected item: but got {:?}", tok}, checkpoint), } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index bfe7e6c865..2270e1bbbe 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -1,6 +1,6 @@ use std::collections::VecDeque; -pub(crate) use item::RootScope; +pub(crate) use item::ItemListScope; use fxhash::FxHashSet; @@ -10,16 +10,18 @@ use self::token_stream::{BackTrackableTokenStream, SyntaxToken, TokenStream}; pub mod token_stream; -mod attr; -mod expr; -mod func; -mod item; -mod param; -mod pat; -mod path; -mod stmt; -mod struct_; -mod type_; +pub use pat::parse_pat; + +pub mod attr; +pub mod expr; +pub mod func; +pub mod item; +pub mod param; +pub mod pat; +pub mod path; +pub mod stmt; +pub mod struct_; +pub mod type_; type Checkpoint = rowan::Checkpoint; @@ -94,6 +96,30 @@ impl Parser { error_len == self.errors.len() } + #[doc(hidden)] + /// Enter the scope and return the checkpoint. The checkpoint branch will be + /// wrapped up by the scope's node when [`leave`] is called. + // NOTE: This method is limited to testing and internal usage. + pub fn enter(&mut self, scope: T, checkpoint: Option) -> Checkpoint + where + T: ParsingScope + 'static, + { + self.scopes.push(Box::new(scope)); + checkpoint.unwrap_or_else(|| self.checkpoint()) + } + + #[doc(hidden)] + /// Leave the scope and wrap up the checkpoint by the scope's node. + // NOTE: This method is limited to testing and internal usage. + pub fn leave(&mut self, checkpoint: Checkpoint) { + let scope = self.scopes.pop().unwrap(); + if !self.is_dry_run() { + self.builder + .start_node_at(checkpoint, scope.syntax_kind().into()); + self.builder.finish_node(); + } + } + /// Marks the current branch as a checkpoint. /// The checked branch is wrapped up later when [`parse]` is /// called with the `checkpoint`. @@ -115,6 +141,30 @@ impl Parser { self.leave(checkpoint); } + /// Add `msg` as an error to the error list, then bumps consecutive tokens + /// until a `tok` is found or the end of the file is reached. + /// + /// * If checkpoint is `Some`, the marked branch is wrapped up by an error + /// node. + /// * If checkpoint is `None`, the current branch is wrapped up by an error + /// node. + pub fn error_and_bump_until( + &mut self, + msg: &str, + checkpoint: Option, + kind: SyntaxKind, + ) { + let err_scope = self.error(msg); + let checkpoint = self.enter(err_scope, checkpoint); + loop { + if self.current_kind() == Some(kind) || self.current_kind().is_none() { + break; + } + self.bump() + } + self.leave(checkpoint); + } + /// Add the `msg` to the error list and bumps n token in the error branch. pub fn error_and_bump(&mut self, msg: &str, bump_n: usize) { let error = self.error(msg); @@ -285,23 +335,6 @@ impl Parser { fn is_dry_run(&self) -> bool { !self.dry_run_states.is_empty() } - - fn enter(&mut self, scope: T, checkpoint: Option) -> Checkpoint - where - T: ParsingScope + 'static, - { - self.scopes.push(Box::new(scope)); - checkpoint.unwrap_or_else(|| self.checkpoint()) - } - - fn leave(&mut self, checkpoint: Checkpoint) { - let scope = self.scopes.pop().unwrap(); - if !self.is_dry_run() { - self.builder - .start_node_at(checkpoint, scope.syntax_kind().into()); - self.builder.finish_node(); - } - } } /// The current scope of parsing. 
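// Editor's note: an illustrative sketch, not part of this patch, of how a scope
// could use the `error_and_bump_until` helper added above to resynchronize on a
// known delimiter. A `parser` of this `Parser` type is assumed to be in scope
// inside some scope's `parse` method; the message text and the choice of
// `RBrace` as the recovery target are assumptions for the example.
//
//     if parser.current_kind() != Some(SyntaxKind::RBrace) {
//         // Skips tokens into an error node until `}` (or EOF) is reached.
//         parser.error_and_bump_until("expected `}`", None, SyntaxKind::RBrace);
//     }
//     // Consume the `}` itself if one was found.
//     parser.bump_if(SyntaxKind::RBrace);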
@@ -335,7 +368,11 @@ pub enum RecoveryMethod { impl RecoveryMethod { fn inheritance_empty() -> Self { - RecoveryMethod::Inheritance(fxhash::FxHashSet::default()) + Self::Inheritance(fxhash::FxHashSet::default()) + } + + fn inheritance(tokens: &[SyntaxKind]) -> Self { + Self::Inheritance(tokens.iter().copied().collect()) } } @@ -352,16 +389,22 @@ where } } +define_scope! { + RootScope, + Root, + Override() +} + macro_rules! define_scope { - ($scope_name: ident, $kind: path ,Inheritance) => { + ($scope_name: ident, $kind: path, Inheritance) => { #[derive(Default, Debug, Clone, Copy)] - pub(crate) struct $scope_name {} + pub struct $scope_name {} impl crate::parser::ParsingScope for $scope_name { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { lazy_static::lazy_static! { pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { - crate::parser::RecoveryMethod::Inheritance(fxhash::FxHashSet::default()) + crate::parser::RecoveryMethod::inheritance_empty() }; } @@ -377,7 +420,7 @@ macro_rules! define_scope { ($scope_name: ident, $kind: path, Inheritance($($recoveries: path), *)) => { #[derive(Default, Debug, Clone, Copy)] - pub(crate) struct $scope_name {} + pub struct $scope_name {} impl crate::parser::ParsingScope for $scope_name { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { @@ -405,7 +448,7 @@ macro_rules! define_scope { ($scope_name: ident, $kind: path, Override($($recoveries: path), *)) => { #[derive(Default, Debug, Clone, Copy)] - pub(crate) struct $scope_name {} + pub struct $scope_name {} impl crate::parser::ParsingScope for $scope_name { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs new file mode 100644 index 0000000000..a7ff453d91 --- /dev/null +++ b/crates/parser2/src/parser/pat.rs @@ -0,0 +1,121 @@ +use std::cell::RefCell; + +use crate::SyntaxKind; + +use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser, RecoveryMethod}; + +pub fn parse_pat(parser: &mut Parser) -> bool { + use SyntaxKind::*; + let checkpoint = parser.checkpoint(); + let mut success = match parser.current_kind() { + Some(Underscore) => parser.parse(WildCardPatScope::default(), None), + Some(Dot2) => parser.parse(RestPatScope::default(), None), + Some(LParen) => parser.parse(TuplePatScope::default(), None), + Some(Int | String) => parser.parse(LitPatScope::default(), None), + _ => parser.parse(PathPatScope::default(), None), + }; + + if parser.peek_non_trivia(true) == Some(SyntaxKind::Pipe) { + parser.bump_trivias(true); + success = parser.parse(OrPatScope::default(), Some(checkpoint)) && success; + } + + success +} + +define_scope! { WildCardPatScope, WildCardPat, Inheritance(SyntaxKind::Pipe) } +impl super::Parse for WildCardPatScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Underscore); + } +} + +define_scope! { RestPatScope, RestPat, Inheritance } +impl super::Parse for RestPatScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Dot2); + parser.bump_trivias(true); + } +} + +define_scope! { LitPatScope, LitPat, Inheritance(SyntaxKind::Pipe) } +impl super::Parse for LitPatScope { + fn parse(&mut self, parser: &mut Parser) { + match parser.current_kind() { + Some(SyntaxKind::Int | SyntaxKind::String) => parser.bump(), + _ => unreachable!(), + } + } +} + +define_scope! 
{ TuplePatScope, TuplePat, Override(RParen) } +impl super::Parse for TuplePatScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LParen); + parser.bump_trivias(true); + if parser.bump_if(SyntaxKind::RParen) { + return; + } + + parse_pat(parser); + parser.bump_trivias(true); + while parser.bump_if(SyntaxKind::Comma) { + parser.bump_trivias(true); + parse_pat(parser); + parser.bump_trivias(true); + } + + if !parser.bump_if(SyntaxKind::RParen) { + parser.error_and_recover("expected `)`", None); + parser.bump_if(SyntaxKind::RParen); + } + } +} + +// We can't use `define_scope` here since the `syntax_kind` of the scope can be +// determined after parsing. +#[derive(Debug, Clone)] +struct PathPatScope { + syntax_kind: RefCell, + recovery_method: RecoveryMethod, +} +impl Default for PathPatScope { + fn default() -> Self { + Self { + syntax_kind: SyntaxKind::PathPat.into(), + recovery_method: RecoveryMethod::inheritance(&[SyntaxKind::Pipe]), + } + } +} +impl super::ParsingScope for PathPatScope { + /// Returns the recovery method of the current scope. + fn recovery_method(&self) -> &RecoveryMethod { + &self.recovery_method + } + + fn syntax_kind(&self) -> SyntaxKind { + *self.syntax_kind.borrow() + } +} +impl super::Parse for PathPatScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.parse(PathScope::default(), None) { + return; + } + + if parser.peek_non_trivia(false) == Some(SyntaxKind::LParen) { + parser.bump_trivias(false); + parser.parse(TuplePatScope::default(), None); + *self.syntax_kind.borrow_mut() = SyntaxKind::PathTuplePat; + } + } +} + +define_scope! { OrPatScope, OrPat, Inheritance(SyntaxKind::Pipe) } +impl super::Parse for OrPatScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Pipe); + parser.bump_trivias(true); + parse_pat(parser); + } +} diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index f01d7b22d0..292ac42226 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -42,7 +42,7 @@ impl super::Parse for LetStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LetKw); parser.bump_trivias(false); - if !parse_pat(parser, None) { + if !parse_pat(parser) { parser.error_and_recover("expected pattern", None); return; } @@ -59,9 +59,7 @@ impl super::Parse for LetStmtScope { parser.bump_trivias(false); parser.bump_expected(SyntaxKind::Eq); parser.bump_trivias(false); - if !parse_expr(parser, None) { - return; - } + parse_expr(parser, None); } } } @@ -71,7 +69,7 @@ impl super::Parse for ForStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ForKw); parser.bump_trivias(true); - if !parse_pat(parser, None) { + if !parse_pat(parser) { return; } @@ -145,7 +143,7 @@ impl super::Parse for ReturnStmtScope { define_scope! 
{ AssignStmtScope, AssignStmt, Inheritance } impl super::Parse for AssignStmtScope { fn parse(&mut self, parser: &mut Parser) { - if !parse_pat(parser, None) { + if !parse_pat(parser) { return; } @@ -154,9 +152,7 @@ impl super::Parse for AssignStmtScope { parser.error_and_recover("expected `=` keyword", None); return; } - if !parse_expr(parser, None) { - return; - } + parse_expr(parser, None); } } diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index b7d4392df0..faccc95863 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -69,7 +69,6 @@ impl super::Parse for TupleTypeScope { parser.bump_trivias(true); } - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::RParen) { parser.error_and_recover("expected `)`", None); parser.bump_if(SyntaxKind::RParen); diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 038b290c46..d14c5b989c 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -69,7 +69,7 @@ pub enum SyntaxKind { FatArrow, /// `_` #[token("_")] - UnderScore, + Underscore, /// `#` #[token("#")] Pound, @@ -253,7 +253,7 @@ pub enum SyntaxKind { /// `[x; 1]` ArrayExpr, /// `1` - LiteralExpr, + LitExpr, /// `if x { 1 } else { 2 }` IfExpr, /// `match x { pat => { .. } }` @@ -286,13 +286,15 @@ pub enum SyntaxKind { /// `..` RestPat, /// `x` - LiteralPat, + LitPat, /// `(x, y)` TuplePat, /// `Enum::Variant` PathPat, /// `Enum::Variant(x, y)` PathTuplePat, + /// `pat1 | pat2` + OrPat, // MatchArms. // `pat => { stmtlist }` @@ -371,9 +373,6 @@ pub enum SyntaxKind { /// `Trait1` TraitBound, - /// `1`, `"foo"` - Literal, - /// Root node of the input source. Root, diff --git a/crates/parser2/test_files/syntax_node/pats/lit.fe b/crates/parser2/test_files/syntax_node/pats/lit.fe new file mode 100644 index 0000000000..73671527c0 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/lit.fe @@ -0,0 +1,2 @@ +0x1 +"String" \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/lit.snap b/crates/parser2/test_files/syntax_node/pats/lit.snap new file mode 100644 index 0000000000..2608ab927b --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/lit.snap @@ -0,0 +1,11 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..12 + LitPat@0..3 + Int@0..3 "0x1" + Newline@3..4 "\n" + LitPat@4..12 + String@4..12 "\"String\"" + diff --git a/crates/parser2/test_files/syntax_node/pats/or.fe b/crates/parser2/test_files/syntax_node/pats/or.fe new file mode 100644 index 0000000000..8cd692f4c1 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/or.fe @@ -0,0 +1,5 @@ +Foo::Bar | FOO::Baz + +Foo::Bar(1 | 2) | Foo::Baz(..) 
+ +Foo::Bar(1 | 2) | Foo::Baz(Foo::Bar(1 | 2) | Bar::Baz("STRING")) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/or.snap b/crates/parser2/test_files/syntax_node/pats/or.snap new file mode 100644 index 0000000000..ba8befa9a6 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/or.snap @@ -0,0 +1,126 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..117 + OrPat@0..19 + PathPat@0..8 + Path@0..8 + PathSegment@0..3 + Ident@0..3 "Foo" + Colon2@3..5 "::" + PathSegment@5..8 + Ident@5..8 "Bar" + WhiteSpace@8..9 " " + Pipe@9..10 "|" + WhiteSpace@10..11 " " + PathPat@11..19 + Path@11..19 + PathSegment@11..14 + Ident@11..14 "FOO" + Colon2@14..16 "::" + PathSegment@16..19 + Ident@16..19 "Baz" + Newline@19..21 "\n\n" + OrPat@21..51 + PathPat@21..36 + Path@21..29 + PathSegment@21..24 + Ident@21..24 "Foo" + Colon2@24..26 "::" + PathSegment@26..29 + Ident@26..29 "Bar" + TuplePat@29..36 + LParen@29..30 "(" + OrPat@30..35 + LitPat@30..31 + Int@30..31 "1" + WhiteSpace@31..32 " " + Pipe@32..33 "|" + WhiteSpace@33..34 " " + LitPat@34..35 + Int@34..35 "2" + RParen@35..36 ")" + WhiteSpace@36..37 " " + Pipe@37..38 "|" + WhiteSpace@38..39 " " + PathPat@39..51 + Path@39..47 + PathSegment@39..42 + Ident@39..42 "Foo" + Colon2@42..44 "::" + PathSegment@44..47 + Ident@44..47 "Baz" + TuplePat@47..51 + LParen@47..48 "(" + RestPat@48..50 + Dot2@48..50 ".." + RParen@50..51 ")" + Newline@51..53 "\n\n" + OrPat@53..117 + PathPat@53..68 + Path@53..61 + PathSegment@53..56 + Ident@53..56 "Foo" + Colon2@56..58 "::" + PathSegment@58..61 + Ident@58..61 "Bar" + TuplePat@61..68 + LParen@61..62 "(" + OrPat@62..67 + LitPat@62..63 + Int@62..63 "1" + WhiteSpace@63..64 " " + Pipe@64..65 "|" + WhiteSpace@65..66 " " + LitPat@66..67 + Int@66..67 "2" + RParen@67..68 ")" + WhiteSpace@68..69 " " + Pipe@69..70 "|" + WhiteSpace@70..71 " " + PathPat@71..117 + Path@71..79 + PathSegment@71..74 + Ident@71..74 "Foo" + Colon2@74..76 "::" + PathSegment@76..79 + Ident@76..79 "Baz" + TuplePat@79..117 + LParen@79..80 "(" + OrPat@80..116 + PathPat@80..95 + Path@80..88 + PathSegment@80..83 + Ident@80..83 "Foo" + Colon2@83..85 "::" + PathSegment@85..88 + Ident@85..88 "Bar" + TuplePat@88..95 + LParen@88..89 "(" + OrPat@89..94 + LitPat@89..90 + Int@89..90 "1" + WhiteSpace@90..91 " " + Pipe@91..92 "|" + WhiteSpace@92..93 " " + LitPat@93..94 + Int@93..94 "2" + RParen@94..95 ")" + WhiteSpace@95..96 " " + Pipe@96..97 "|" + WhiteSpace@97..98 " " + PathPat@98..116 + Path@98..106 + PathSegment@98..101 + Ident@98..101 "Bar" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Baz" + TuplePat@106..116 + LParen@106..107 "(" + LitPat@107..115 + String@107..115 "\"STRING\"" + RParen@115..116 ")" + RParen@116..117 ")" + diff --git a/crates/parser2/test_files/syntax_node/pats/path.fe b/crates/parser2/test_files/syntax_node/pats/path.fe new file mode 100644 index 0000000000..7e28afe8ee --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/path.fe @@ -0,0 +1 @@ +MyEnum::Foo \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/path.snap b/crates/parser2/test_files/syntax_node/pats/path.snap new file mode 100644 index 0000000000..1e7f37ef48 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/path.snap @@ -0,0 +1,13 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..11 + PathPat@0..11 + Path@0..11 + PathSegment@0..6 + Ident@0..6 "MyEnum" + Colon2@6..8 "::" + PathSegment@8..11 + Ident@8..11 "Foo" 
+ diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.fe b/crates/parser2/test_files/syntax_node/pats/path_tuple.fe new file mode 100644 index 0000000000..8a87ada08d --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.fe @@ -0,0 +1,16 @@ +Empty() + +MyEnum::Empty() + +MyEnum::Foo(X::Foo, Z::Bar(1, 2), _, ..) + +MyEnum::Foo2( + X::Foo, + Z::Bar(1, 2), + _, + .. +) + +MyEnum::Bind(x) + +MyEnum::OrTuple(Int::I32 | Int::I64 | Int::Any(10)) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap new file mode 100644 index 0000000000..a9b9516d7d --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap @@ -0,0 +1,181 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..203 + PathPat@0..7 + Path@0..5 + PathSegment@0..5 + Ident@0..5 "Empty" + TuplePat@5..7 + LParen@5..6 "(" + RParen@6..7 ")" + Newline@7..9 "\n\n" + PathPat@9..24 + Path@9..22 + PathSegment@9..15 + Ident@9..15 "MyEnum" + Colon2@15..17 "::" + PathSegment@17..22 + Ident@17..22 "Empty" + TuplePat@22..24 + LParen@22..23 "(" + RParen@23..24 ")" + Newline@24..26 "\n\n" + PathPat@26..67 + Path@26..37 + PathSegment@26..32 + Ident@26..32 "MyEnum" + Colon2@32..34 "::" + PathSegment@34..37 + Ident@34..37 "Foo" + TuplePat@37..67 + LParen@37..38 "(" + PathPat@38..44 + Path@38..44 + PathSegment@38..39 + Ident@38..39 "X" + Colon2@39..41 "::" + PathSegment@41..44 + Ident@41..44 "Foo" + Comma@44..45 "," + WhiteSpace@45..46 " " + PathPat@46..58 + Path@46..52 + PathSegment@46..47 + Ident@46..47 "Z" + Colon2@47..49 "::" + PathSegment@49..52 + Ident@49..52 "Bar" + TuplePat@52..58 + LParen@52..53 "(" + LitPat@53..54 + Int@53..54 "1" + Comma@54..55 "," + WhiteSpace@55..56 " " + LitPat@56..57 + Int@56..57 "2" + RParen@57..58 ")" + Comma@58..59 "," + WhiteSpace@59..61 " " + WildCardPat@61..62 + Underscore@61..62 "_" + Comma@62..63 "," + WhiteSpace@63..64 " " + RestPat@64..66 + Dot2@64..66 ".." + RParen@66..67 ")" + Newline@67..69 "\n\n" + PathPat@69..133 + Path@69..81 + PathSegment@69..75 + Ident@69..75 "MyEnum" + Colon2@75..77 "::" + PathSegment@77..81 + Ident@77..81 "Foo2" + TuplePat@81..133 + LParen@81..82 "(" + Newline@82..83 "\n" + WhiteSpace@83..87 " " + PathPat@87..93 + Path@87..93 + PathSegment@87..88 + Ident@87..88 "X" + Colon2@88..90 "::" + PathSegment@90..93 + Ident@90..93 "Foo" + Comma@93..94 "," + WhiteSpace@94..95 " " + Newline@95..96 "\n" + WhiteSpace@96..100 " " + PathPat@100..112 + Path@100..106 + PathSegment@100..101 + Ident@100..101 "Z" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Bar" + TuplePat@106..112 + LParen@106..107 "(" + LitPat@107..108 + Int@107..108 "1" + Comma@108..109 "," + WhiteSpace@109..110 " " + LitPat@110..111 + Int@110..111 "2" + RParen@111..112 ")" + Comma@112..113 "," + WhiteSpace@113..114 " " + Newline@114..115 "\n" + WhiteSpace@115..120 " " + WildCardPat@120..121 + Underscore@120..121 "_" + Comma@121..122 "," + WhiteSpace@122..123 " " + Newline@123..124 "\n" + WhiteSpace@124..129 " " + RestPat@129..132 + Dot2@129..131 ".." 
+ Newline@131..132 "\n" + RParen@132..133 ")" + Newline@133..135 "\n\n" + PathPat@135..150 + Path@135..147 + PathSegment@135..141 + Ident@135..141 "MyEnum" + Colon2@141..143 "::" + PathSegment@143..147 + Ident@143..147 "Bind" + TuplePat@147..150 + LParen@147..148 "(" + PathPat@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "x" + RParen@149..150 ")" + Newline@150..152 "\n\n" + PathPat@152..203 + Path@152..167 + PathSegment@152..158 + Ident@152..158 "MyEnum" + Colon2@158..160 "::" + PathSegment@160..167 + Ident@160..167 "OrTuple" + TuplePat@167..203 + LParen@167..168 "(" + OrPat@168..202 + PathPat@168..176 + Path@168..176 + PathSegment@168..171 + Ident@168..171 "Int" + Colon2@171..173 "::" + PathSegment@173..176 + Ident@173..176 "I32" + WhiteSpace@176..177 " " + Pipe@177..178 "|" + WhiteSpace@178..179 " " + OrPat@179..202 + PathPat@179..187 + Path@179..187 + PathSegment@179..182 + Ident@179..182 "Int" + Colon2@182..184 "::" + PathSegment@184..187 + Ident@184..187 "I64" + WhiteSpace@187..188 " " + Pipe@188..189 "|" + WhiteSpace@189..190 " " + PathPat@190..202 + Path@190..198 + PathSegment@190..193 + Ident@190..193 "Int" + Colon2@193..195 "::" + PathSegment@195..198 + Ident@195..198 "Any" + TuplePat@198..202 + LParen@198..199 "(" + LitPat@199..201 + Int@199..201 "10" + RParen@201..202 ")" + RParen@202..203 ")" + diff --git a/crates/parser2/test_files/syntax_node/pats/rest_pattern.fe b/crates/parser2/test_files/syntax_node/pats/rest_pattern.fe new file mode 100644 index 0000000000..a96aa0ea9d --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/rest_pattern.fe @@ -0,0 +1 @@ +.. \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/rest_pattern.snap b/crates/parser2/test_files/syntax_node/pats/rest_pattern.snap new file mode 100644 index 0000000000..caf7a706ba --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/rest_pattern.snap @@ -0,0 +1,8 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..2 + RestPat@0..2 + Dot2@0..2 ".." + diff --git a/crates/parser2/test_files/syntax_node/pats/wilecard.fe b/crates/parser2/test_files/syntax_node/pats/wilecard.fe new file mode 100644 index 0000000000..c9cdc63b07 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/wilecard.fe @@ -0,0 +1 @@ +_ \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/wilecard.snap b/crates/parser2/test_files/syntax_node/pats/wilecard.snap new file mode 100644 index 0000000000..e1751174b5 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/wilecard.snap @@ -0,0 +1,8 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..1 + WildCardPat@0..1 + Underscore@0..1 "_" + diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 58887a7042..18e87a17f0 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -1,18 +1,70 @@ -use fe_parser2::syntax_node::SyntaxNode; +use fe_parser2::{ + lexer, + parser::{item::ItemListScope, parse_pat, Parser, RootScope}, + syntax_node::SyntaxNode, +}; -#[allow(unused)] -fn build_root_cst(input: &str) -> SyntaxNode { - let (cst, errors) = fe_parser2::parse_source_file(input); - for error in &errors { - println!("{}", error.msg); - } - assert! 
{errors.is_empty()} - assert!(input == cst.to_string()); - cst +fn test_item_list(input: &str) -> SyntaxNode { + let runner = TestRunner::new(|parser| { + while parser.current_kind().is_some() { + parser.bump_trivias(true); + parser.parse(ItemListScope::default(), None); + } + }); + runner.run(input) } - fe_compiler_test_utils::build_debug_snap_tests! { "parser2/test_files/syntax_node/structs", "parser2/test_files/syntax_node/structs", - build_root_cst + test_item_list +} + +fn test_pat(input: &str) -> SyntaxNode { + let runner = TestRunner::new(|parser| { + while parser.current_kind().is_some() { + parser.bump_trivias(true); + parse_pat(parser); + } + }); + runner.run(input) +} +fe_compiler_test_utils::build_debug_snap_tests! { + "parser2/test_files/syntax_node/pats", + "parser2/test_files/syntax_node/pats", + test_pat +} + +struct TestRunner +where + F: Fn(&mut Parser), +{ + f: F, +} + +impl TestRunner +where + F: Fn(&mut Parser), +{ + fn new(f: F) -> Self { + Self { f } + } + + fn run(&self, input: &str) -> SyntaxNode { + let lexer = lexer::Lexer::new(input); + let mut parser = Parser::new(lexer); + + let checkpoint = parser.enter(RootScope::default(), None); + (self.f)(&mut parser); + parser.leave(checkpoint); + + let (cst, errors) = parser.finish(); + + for error in &errors { + println!("{}", error.msg); + } + assert! {errors.is_empty()} + assert!(input == cst.to_string()); + + cst + } } From 303616960044189c2b0cc555b68ffb6435c14a5a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 20 Jan 2023 23:51:41 +0100 Subject: [PATCH 024/678] Add parser for `expr_atom` --- crates/parser2/src/parser/expr.rs | 12 + crates/parser2/src/parser/expr_atom.rs | 340 +++++++++++++++++++++++++ crates/parser2/src/parser/mod.rs | 6 + crates/parser2/src/parser/path.rs | 12 +- crates/parser2/src/parser/type_.rs | 4 +- crates/parser2/src/syntax_kind.rs | 8 +- 6 files changed, 376 insertions(+), 6 deletions(-) create mode 100644 crates/parser2/src/parser/expr.rs create mode 100644 crates/parser2/src/parser/expr_atom.rs diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs new file mode 100644 index 0000000000..fb2a667c49 --- /dev/null +++ b/crates/parser2/src/parser/expr.rs @@ -0,0 +1,12 @@ +use std::cell::RefCell; + +use crate::{parser::path, SyntaxKind}; + +use super::{ + attr::parse_attr_list, define_scope, parse_pat, stmt::parse_stmt, token_stream::TokenStream, + Parser, +}; + +pub(super) fn parse_expr(_parser: &mut Parser) -> bool { + todo!() +} diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs new file mode 100644 index 0000000000..2448abdd34 --- /dev/null +++ b/crates/parser2/src/parser/expr_atom.rs @@ -0,0 +1,340 @@ +use std::cell::RefCell; + +use crate::{parser::path, SyntaxKind}; + +use super::{ + attr::parse_attr_list, define_scope, expr::parse_expr, parse_pat, stmt::parse_stmt, + token_stream::TokenStream, Parser, +}; + +pub(super) fn _parse_expr_atom(parser: &mut Parser) -> bool { + use SyntaxKind::*; + match parser.current_kind() { + Some(Int | String) => parser.parse(LitExprScope::default(), None), + Some(IfKw) => parser.parse(IfExprScope::default(), None), + Some(MatchKw) => parser.parse(MatchExprScope::default(), None), + Some(LBrace) => parser.parse(BlockExprScope::default(), None), + Some(LParen) => parser.parse(ParenScope::default(), None), + Some(LBracket) => parser.parse(ArrayScope::default(), None), + Some(kind) if path::is_path_header(kind) => { + let checkpoint = parser.checkpoint(); + let success = 
parser.parse(path::PathScope::default(), None); + if success && parser.peek_non_trivia(true) == Some(LBrace) { + parser.bump_trivias(true); + parser.parse(RecordInitExprScope::default(), Some(checkpoint)) + } else { + success + } + } + _ => { + parser.error_and_recover("expected expression", None); + false + } + } +} + +define_scope! { + BlockExprScope, + BlockExpr, + Override( + RBrace, + Newline, + LetKw, + ForKw, + WhileKw, + ContinueKw, + BreakKw, + AssertKw, + ReturnKw + ) +} +impl super::Parse for BlockExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + parser.bump_trivias(true); + + loop { + if parser.current_kind() == Some(SyntaxKind::RBrace) || parser.current_kind().is_none() + { + break; + } + let checkpoint = parse_attr_list(parser); + if !parse_stmt(parser, checkpoint) { + continue; + } + + parser.bump_trivias(false); + if !parser.bump_if(SyntaxKind::Newline) + && parser.peek_non_trivia(true) != Some(SyntaxKind::RBrace) + { + parser.error_and_recover("expected newline after statement", None); + } + parser.bump_trivias(true); + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_bump_until("expected `}`", None, SyntaxKind::RBrace); + } + } +} + +define_scope! { IfExprScope, IfExpr, Inheritance } +impl super::Parse for IfExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::IfKw); + + if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected `{`", None); + return; + } + parser.bump_trivias(true); + parser.parse(BlockExprScope::default(), None); + + if parser.peek_non_trivia(true) == Some(SyntaxKind::ElseKw) { + parser.bump_trivias(true); + parser.bump_expected(SyntaxKind::ElseKw); + + if !matches!( + parser.peek_non_trivia(true), + Some(SyntaxKind::LBrace | SyntaxKind::IfKw) + ) { + parser.error_and_recover("expected `{` or `if` after `else`", None); + parse_expr(parser); + } + } + } +} + +define_scope! { MatchExprScope, MatchExpr, Inheritance } +impl super::Parse for MatchExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::MatchKw); + + parser.bump_trivias(true); + parse_expr(parser); + + if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected `{`", None); + } + parser.bump_trivias(true); + parser.parse(MatchArmListScope::default(), None); + } +} + +define_scope! { MatchArmListScope, MatchArmList, Override(SyntaxKind::Newline, SyntaxKind::RBrace) } +impl super::Parse for MatchArmListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + + loop { + parser.bump_trivias(true); + if matches!( + parser.peek_non_trivia(true), + Some(SyntaxKind::RBrace) | None + ) { + break; + } + parser.parse(MatchArmScope::default(), None); + parser.bump_trivias(true); + } + + parser.bump_trivias(true); + } +} + +define_scope! 
{ MatchArmScope, MatchArm, Inheritance } +impl super::Parse for MatchArmScope { + fn parse(&mut self, parser: &mut Parser) { + if !parse_pat(parser) { + return; + } + + if parser.peek_non_trivia(true) != Some(SyntaxKind::FatArrow) { + parser.error_and_recover("expected `=>`", None); + return; + } + parser.bump_trivias(true); + parser.bump_expected(SyntaxKind::FatArrow); + + parser.bump_trivias(true); + parse_expr(parser); + + if parser.peek_non_trivia(false) != Some(SyntaxKind::Newline) { + parser.error_and_bump_until( + "expected newline after match arm", + None, + SyntaxKind::Newline, + ); + } + } +} + +define_scope! { LitExprScope, LitExpr, Inheritance } +impl super::Parse for LitExprScope { + fn parse(&mut self, parser: &mut Parser) { + match parser.current_kind() { + Some(SyntaxKind::Int | SyntaxKind::String) => parser.bump(), + _ => unreachable!(), + } + } +} + +define_scope! { RecordInitExprScope, RecordInitExpr, Inheritance } +impl super::Parse for RecordInitExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.parse(RecordFieldListScope::default(), None); + } +} + +define_scope! { RecordFieldListScope, RecordFieldList, Override(SyntaxKind::RBrace, SyntaxKind::Comma) } +impl super::Parse for RecordFieldListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + parser.bump_trivias(true); + + if parser.bump_if(SyntaxKind::LBrace) { + return; + } + + parser.parse(RecordFieldScope::default(), None); + parser.bump_trivias(true); + while parser.bump_if(SyntaxKind::Comma) { + parser.bump_trivias(true); + parser.parse(RecordFieldScope::default(), None); + parser.bump_trivias(true); + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_bump_until("expected `}`", None, SyntaxKind::RBrace); + } + } +} + +define_scope! { RecordFieldScope, RecordField, Inheritance } +impl super::Parse for RecordFieldScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected identifier", None); + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::Colon) { + parser.error_and_recover("expected `:`", None); + } + + parser.bump_trivias(true); + parse_expr(parser); + } +} + +// We can't use `define_scope` here since the `syntax_kind` of the scope can be +// determined after parsing. 
+#[derive(Debug, Clone)] +struct ParenScope { + syntax_kind: RefCell, + recovery_method: super::RecoveryMethod, +} +impl Default for ParenScope { + fn default() -> Self { + Self { + syntax_kind: SyntaxKind::ParenExpr.into(), + recovery_method: super::RecoveryMethod::override_(&[ + SyntaxKind::RParen, + SyntaxKind::Comma, + ]), + } + } +} +impl super::ParsingScope for ParenScope { + fn recovery_method(&self) -> &super::RecoveryMethod { + &self.recovery_method + } + fn syntax_kind(&self) -> SyntaxKind { + *self.syntax_kind.borrow() + } +} +impl super::Parse for ParenScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LParen); + parser.bump_trivias(true); + + if parser.bump_if(SyntaxKind::RParen) { + *self.syntax_kind.borrow_mut() = SyntaxKind::TupleExpr; + return; + } + + parse_expr(parser); + parser.bump_trivias(true); + while parser.bump_if(SyntaxKind::Comma) { + *self.syntax_kind.borrow_mut() = SyntaxKind::TupleExpr; + if parser.peek_non_trivia(true) == Some(SyntaxKind::RParen) { + parser.bump_trivias(true); + break; + } + parse_expr(parser); + parser.bump_trivias(true); + } + + if !parser.bump_if(SyntaxKind::RParen) { + parser.error_and_bump_until("expected `)`", None, SyntaxKind::RParen); + } + } +} + +// We can't use `define_scope` here since the `syntax_kind` of the scope can be +// determined after parsing. +#[derive(Debug, Clone)] +struct ArrayScope { + syntax_kind: RefCell, + recovery_method: super::RecoveryMethod, +} +impl Default for ArrayScope { + fn default() -> Self { + Self { + syntax_kind: SyntaxKind::ArrayExpr.into(), + recovery_method: super::RecoveryMethod::override_(&[ + SyntaxKind::RBracket, + SyntaxKind::Comma, + SyntaxKind::SemiColon, + ]), + } + } +} +impl super::ParsingScope for ArrayScope { + fn recovery_method(&self) -> &super::RecoveryMethod { + &self.recovery_method + } + fn syntax_kind(&self) -> SyntaxKind { + *self.syntax_kind.borrow() + } +} +impl super::Parse for ArrayScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBracket); + parser.bump_trivias(true); + + if parser.bump_if(SyntaxKind::RBracket) { + return; + } + + parse_expr(parser); + parser.bump_trivias(true); + + if parser.bump_if(SyntaxKind::SemiColon) { + parser.bump_trivias(true); + *self.syntax_kind.borrow_mut() = SyntaxKind::ArrayRepExpr; + parse_expr(parser); + } else { + while parser.bump_if(SyntaxKind::Comma) { + parser.bump_trivias(true); + parse_expr(parser); + parser.bump_trivias(true); + } + } + + parser.bump_trivias(true); + if !parser.bump_if(SyntaxKind::RBracket) { + parser.error_and_bump_until("expected `]`", None, SyntaxKind::RBracket); + } + } +} diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 2270e1bbbe..485b4d45bc 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -23,6 +23,8 @@ pub mod stmt; pub mod struct_; pub mod type_; +mod expr_atom; + type Checkpoint = rowan::Checkpoint; /// Parser to build a rowan syntax tree. @@ -374,6 +376,10 @@ impl RecoveryMethod { fn inheritance(tokens: &[SyntaxKind]) -> Self { Self::Inheritance(tokens.iter().copied().collect()) } + + fn override_(tokens: &[SyntaxKind]) -> Self { + Self::Override(tokens.iter().copied().collect()) + } } trait TextSize { diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 8b990ecdd8..5206759a48 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -25,8 +25,14 @@ define_scope! 
{ } impl super::Parse for PathSegmentScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected path segment", None); - } + debug_assert!(is_path_header(parser.current_kind().unwrap())); + parser.bump() } } + +pub(super) fn is_path_header(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::SelfType | SyntaxKind::SelfKw | SyntaxKind::Ident + ) +} diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index faccc95863..1289cfc185 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -11,7 +11,7 @@ pub(super) fn parse_type( ) -> bool { match parser.current_kind() { Some(SyntaxKind::Star) => parser.parse(PtrTypeScope::default(), checkpoint), - Some(SyntaxKind::SelfKw) => parser.parse(SelfTypeScope::default(), checkpoint), + Some(SyntaxKind::SelfTypeKw) => parser.parse(SelfTypeScope::default(), checkpoint), Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::default(), checkpoint), _ => parser.parse(PathTypeScope::default(), checkpoint), } @@ -42,7 +42,7 @@ impl super::Parse for PathTypeScope { define_scope!(SelfTypeScope, SelfType, Inheritance); impl super::Parse for SelfTypeScope { fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::SelfKw); + parser.bump_expected(SyntaxKind::SelfTypeKw); } } define_scope! { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index d14c5b989c..6c4be99aa4 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -192,6 +192,8 @@ pub enum SyntaxKind { /// `self` #[token("self")] SelfKw, + #[token("Self")] + SelfTypeKw, /// `struct` #[token("struct")] StructKw, @@ -250,14 +252,18 @@ pub enum SyntaxKind { IndexExpr, /// `(x ,y)` TupleExpr, - /// `[x; 1]` + /// `[x, y, z]` ArrayExpr, + /// `[x; 4]` + ArrayRepExpr, /// `1` LitExpr, /// `if x { 1 } else { 2 }` IfExpr, /// `match x { pat => { .. } }` MatchExpr, + /// `(1 + 2)` + ParenExpr, // Statements. These are non-leaf nodes. 
/// `let x = 1` From cb23f3fad3de39e42a5bc9e402dbbf57aad8dfb0 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 22 Jan 2023 16:50:24 +0100 Subject: [PATCH 025/678] Change the way of handling trivias in `Parser` --- crates/parser2/src/parser/attr.rs | 25 +- crates/parser2/src/parser/expr_atom.rs | 143 ++------ crates/parser2/src/parser/item.rs | 135 +++----- crates/parser2/src/parser/mod.rs | 319 +++++++++--------- crates/parser2/src/parser/param.rs | 50 ++- crates/parser2/src/parser/pat.rs | 27 +- crates/parser2/src/parser/path.rs | 5 +- crates/parser2/src/parser/stmt.rs | 55 ++- crates/parser2/src/parser/struct_.rs | 29 +- crates/parser2/src/parser/type_.rs | 12 +- crates/parser2/src/syntax_kind.rs | 27 +- .../syntax_node/pats/path_tuple.snap | 4 +- .../test_files/syntax_node/structs/attr.snap | 27 +- .../test_files/syntax_node/structs/empty.snap | 3 +- .../syntax_node/structs/generics.snap | 9 +- .../syntax_node/structs/tupel_field.snap | 1 + crates/parser2/tests/syntax_node.rs | 2 - 17 files changed, 388 insertions(+), 485 deletions(-) diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index 338938e540..420c4234dd 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -4,9 +4,7 @@ use crate::SyntaxKind; pub(super) fn parse_attr_list(parser: &mut Parser) -> Option { if let Some(SyntaxKind::DocComment) | Some(SyntaxKind::Pound) = parser.current_kind() { - let checkpoint = parser.checkpoint(); - parser.parse(super::attr::AttrListScope::default(), None); - Some(checkpoint) + Some(parser.parse(super::attr::AttrListScope::default(), None).1) } else { None } @@ -21,15 +19,17 @@ define_scope! { } impl super::Parse for AttrListScope { fn parse(&mut self, parser: &mut Parser) { - use SyntaxKind::*; - loop { - parser.bump_trivias(true); + parser.set_newline_as_trivia(true); match parser.current_kind() { - Some(Pound) => parser.parse(AttrScope::default(), None), - Some(DocComment) => parser.parse(DocCommentAttrScope::default(), None), + Some(SyntaxKind::Pound) => parser.parse(AttrScope::default(), None), + Some(SyntaxKind::DocComment) => parser.parse(DocCommentAttrScope::default(), None), _ => break, }; + parser.set_newline_as_trivia(false); + if !parser.bump_if(SyntaxKind::Newline) { + parser.error_and_recover("expected newline after Attribute", None) + } } } } @@ -41,6 +41,7 @@ define_scope! { } impl super::Parse for AttrScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::Pound); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected attribute name", None); @@ -63,19 +64,15 @@ define_scope! 
{ impl super::Parse for AttrParamListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::RParen) { return; } parser.parse(AttrParam::default(), None); - parser.bump_trivias(true); while parser.bump_if(SyntaxKind::Comma) { - parser.bump_trivias(true); parser.parse(AttrParam::default(), None); } - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::RParen) { parser.error_and_recover("expected `)`", None); } @@ -96,17 +93,15 @@ impl super::Parse for AttrParam { parser.error_and_recover("expected `key: value`", None); } - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::Colon) { parser.error_and_recover("expected `key: value`", None); } - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected `ident`", None) } - match parser.peek_non_trivia(true) { + match parser.current_kind() { Some(SyntaxKind::Comma) | Some(SyntaxKind::RParen) | None => {} _ => parser.error_and_recover("unexpected token", None), diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 2448abdd34..03bae71116 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -1,5 +1,3 @@ -use std::cell::RefCell; - use crate::{parser::path, SyntaxKind}; use super::{ @@ -10,18 +8,18 @@ use super::{ pub(super) fn _parse_expr_atom(parser: &mut Parser) -> bool { use SyntaxKind::*; match parser.current_kind() { - Some(Int | String) => parser.parse(LitExprScope::default(), None), - Some(IfKw) => parser.parse(IfExprScope::default(), None), - Some(MatchKw) => parser.parse(MatchExprScope::default(), None), - Some(LBrace) => parser.parse(BlockExprScope::default(), None), - Some(LParen) => parser.parse(ParenScope::default(), None), - Some(LBracket) => parser.parse(ArrayScope::default(), None), + Some(Int | String) => parser.parse(LitExprScope::default(), None).0, + Some(IfKw) => parser.parse(IfExprScope::default(), None).0, + Some(MatchKw) => parser.parse(MatchExprScope::default(), None).0, + Some(LBrace) => parser.parse(BlockExprScope::default(), None).0, + Some(LParen) => parser.parse(ParenScope::default(), None).0, + Some(LBracket) => parser.parse(ArrayScope::default(), None).0, Some(kind) if path::is_path_header(kind) => { - let checkpoint = parser.checkpoint(); - let success = parser.parse(path::PathScope::default(), None); - if success && parser.peek_non_trivia(true) == Some(LBrace) { - parser.bump_trivias(true); - parser.parse(RecordInitExprScope::default(), Some(checkpoint)) + let (success, checkpoint) = parser.parse(path::PathScope::default(), None); + if success && parser.current_kind() == Some(LBrace) { + parser + .parse(RecordInitExprScope::default(), Some(checkpoint)) + .0 } else { success } @@ -51,9 +49,10 @@ define_scope! 
{ impl super::Parse for BlockExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); - parser.bump_trivias(true); + parser.set_newline_as_trivia(false); loop { + parser.set_newline_as_trivia(true); if parser.current_kind() == Some(SyntaxKind::RBrace) || parser.current_kind().is_none() { break; @@ -63,13 +62,12 @@ impl super::Parse for BlockExprScope { continue; } - parser.bump_trivias(false); + parser.set_newline_as_trivia(false); if !parser.bump_if(SyntaxKind::Newline) - && parser.peek_non_trivia(true) != Some(SyntaxKind::RBrace) + && parser.current_kind() != Some(SyntaxKind::RBrace) { parser.error_and_recover("expected newline after statement", None); } - parser.bump_trivias(true); } if !parser.bump_if(SyntaxKind::RBrace) { @@ -83,19 +81,17 @@ impl super::Parse for IfExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::IfKw); - if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); return; } - parser.bump_trivias(true); parser.parse(BlockExprScope::default(), None); - if parser.peek_non_trivia(true) == Some(SyntaxKind::ElseKw) { - parser.bump_trivias(true); + if parser.current_kind() == Some(SyntaxKind::ElseKw) { parser.bump_expected(SyntaxKind::ElseKw); if !matches!( - parser.peek_non_trivia(true), + parser.current_kind(), Some(SyntaxKind::LBrace | SyntaxKind::IfKw) ) { parser.error_and_recover("expected `{` or `if` after `else`", None); @@ -110,13 +106,11 @@ impl super::Parse for MatchExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::MatchKw); - parser.bump_trivias(true); parse_expr(parser); - if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); } - parser.bump_trivias(true); parser.parse(MatchArmListScope::default(), None); } } @@ -127,18 +121,11 @@ impl super::Parse for MatchArmListScope { parser.bump_expected(SyntaxKind::LBrace); loop { - parser.bump_trivias(true); - if matches!( - parser.peek_non_trivia(true), - Some(SyntaxKind::RBrace) | None - ) { + if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { break; } parser.parse(MatchArmScope::default(), None); - parser.bump_trivias(true); } - - parser.bump_trivias(true); } } @@ -149,17 +136,16 @@ impl super::Parse for MatchArmScope { return; } - if parser.peek_non_trivia(true) != Some(SyntaxKind::FatArrow) { + if parser.current_kind() != Some(SyntaxKind::FatArrow) { parser.error_and_recover("expected `=>`", None); return; } - parser.bump_trivias(true); parser.bump_expected(SyntaxKind::FatArrow); - parser.bump_trivias(true); parse_expr(parser); - if parser.peek_non_trivia(false) != Some(SyntaxKind::Newline) { + parser.set_newline_as_trivia(false); + if parser.current_kind() != Some(SyntaxKind::Newline) { parser.error_and_bump_until( "expected newline after match arm", None, @@ -186,22 +172,18 @@ impl super::Parse for RecordInitExprScope { } } -define_scope! { RecordFieldListScope, RecordFieldList, Override(SyntaxKind::RBrace, SyntaxKind::Comma) } +define_scope! 
{ RecordFieldListScope, RecordFieldList, Override(RBrace, Comma) } impl super::Parse for RecordFieldListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::LBrace) { return; } parser.parse(RecordFieldScope::default(), None); - parser.bump_trivias(true); while parser.bump_if(SyntaxKind::Comma) { - parser.bump_trivias(true); parser.parse(RecordFieldScope::default(), None); - parser.bump_trivias(true); } if !parser.bump_if(SyntaxKind::RBrace) { @@ -217,62 +199,31 @@ impl super::Parse for RecordFieldScope { parser.error_and_recover("expected identifier", None); } - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::Colon) { parser.error_and_recover("expected `:`", None); } - parser.bump_trivias(true); parse_expr(parser); } } -// We can't use `define_scope` here since the `syntax_kind` of the scope can be -// determined after parsing. -#[derive(Debug, Clone)] -struct ParenScope { - syntax_kind: RefCell, - recovery_method: super::RecoveryMethod, -} -impl Default for ParenScope { - fn default() -> Self { - Self { - syntax_kind: SyntaxKind::ParenExpr.into(), - recovery_method: super::RecoveryMethod::override_(&[ - SyntaxKind::RParen, - SyntaxKind::Comma, - ]), - } - } -} -impl super::ParsingScope for ParenScope { - fn recovery_method(&self) -> &super::RecoveryMethod { - &self.recovery_method - } - fn syntax_kind(&self) -> SyntaxKind { - *self.syntax_kind.borrow() - } -} +define_scope! { ParenScope, ParenExpr, Override(RParen, Comma) } impl super::Parse for ParenScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::RParen) { - *self.syntax_kind.borrow_mut() = SyntaxKind::TupleExpr; + self.set_kind(SyntaxKind::TupleExpr); return; } parse_expr(parser); - parser.bump_trivias(true); while parser.bump_if(SyntaxKind::Comma) { - *self.syntax_kind.borrow_mut() = SyntaxKind::TupleExpr; - if parser.peek_non_trivia(true) == Some(SyntaxKind::RParen) { - parser.bump_trivias(true); + self.set_kind(SyntaxKind::TupleExpr); + if parser.current_kind() == Some(SyntaxKind::RParen) { break; } parse_expr(parser); - parser.bump_trivias(true); } if !parser.bump_if(SyntaxKind::RParen) { @@ -281,58 +232,30 @@ impl super::Parse for ParenScope { } } -// We can't use `define_scope` here since the `syntax_kind` of the scope can be -// determined after parsing. -#[derive(Debug, Clone)] -struct ArrayScope { - syntax_kind: RefCell, - recovery_method: super::RecoveryMethod, -} -impl Default for ArrayScope { - fn default() -> Self { - Self { - syntax_kind: SyntaxKind::ArrayExpr.into(), - recovery_method: super::RecoveryMethod::override_(&[ - SyntaxKind::RBracket, - SyntaxKind::Comma, - SyntaxKind::SemiColon, - ]), - } - } -} -impl super::ParsingScope for ArrayScope { - fn recovery_method(&self) -> &super::RecoveryMethod { - &self.recovery_method - } - fn syntax_kind(&self) -> SyntaxKind { - *self.syntax_kind.borrow() - } +define_scope! 
{ + ArrayScope, + ArrayExpr, + Override(RBracket, Comma, SemiColon) } impl super::Parse for ArrayScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBracket); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::RBracket) { return; } parse_expr(parser); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::SemiColon) { - parser.bump_trivias(true); - *self.syntax_kind.borrow_mut() = SyntaxKind::ArrayRepExpr; + self.set_kind(SyntaxKind::ArrayRepExpr); parse_expr(parser); } else { while parser.bump_if(SyntaxKind::Comma) { - parser.bump_trivias(true); parse_expr(parser); - parser.bump_trivias(true); } } - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::RBracket) { parser.error_and_bump_until("expected `]`", None, SyntaxKind::RBracket); } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 0a1b3b2682..24f4a031e4 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -1,4 +1,4 @@ -use std::cell::RefCell; +use std::cell::Cell; use crate::SyntaxKind; @@ -28,43 +28,15 @@ impl super::Parse for ItemListScope { use crate::SyntaxKind::*; loop { - parser.bump_trivias(true); if parser.current_kind().is_none() { break; } - parser.bump_trivias(true); let mut checkpoint = attr::parse_attr_list(parser); - - parser.bump_trivias(true); - let modifier = match parser.current_kind() { - Some(PubKw) => { - checkpoint.get_or_insert_with(|| parser.checkpoint()); - parser.bump(); - parser.bump_trivias(true); - - if parser.current_kind() == Some(UnsafeKw) { - parser.bump(); - Modifier::PubAndUnsafe - } else { - Modifier::Pub - } - } - - Some(UnsafeKw) => { - checkpoint.get_or_insert_with(|| parser.checkpoint()); - parser.bump(); - Modifier::Unsafe - } - - Some(_) => Modifier::None, - - None => { - parser.error_and_recover("expected item", checkpoint); - continue; - } - }; - parser.bump_trivias(true); + let modifier_scope = ItemModifierScope::default(); + let (_, modifier_checkpoint) = parser.parse(modifier_scope.clone(), None); + checkpoint.get_or_insert(modifier_checkpoint); + let modifier = modifier_scope.kind.get(); if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { parser.error("expected `fn` after `unsafe` keyword"); @@ -107,113 +79,92 @@ impl super::Parse for ItemListScope { } } -enum Modifier { +define_scope! { + ItemModifierScope {kind: Cell}, + ItemModifier, + Inheritance +} +impl super::Parse for ItemModifierScope { + fn parse(&mut self, parser: &mut Parser) { + if parser.bump_if(SyntaxKind::PubKw) { + if parser.bump_if(SyntaxKind::UnsafeKw) { + self.kind.set(ModifierKind::PubAndUnsafe); + } else { + self.kind.set(ModifierKind::Pub); + } + } else if parser.bump_if(SyntaxKind::UnsafeKw) { + self.kind.set(ModifierKind::Unsafe); + } else { + self.kind.set(ModifierKind::None); + } + } +} + +#[derive(Debug, Clone, Copy)] +enum ModifierKind { None, Pub, Unsafe, PubAndUnsafe, } - -impl Modifier { +impl Default for ModifierKind { + fn default() -> Self { + Self::None + } +} +impl ModifierKind { fn is_pub(&self) -> bool { - matches!(self, Modifier::Pub | Modifier::PubAndUnsafe) + matches!(self, Self::Pub | Self::PubAndUnsafe) } fn is_unsafe(&self) -> bool { - matches!(self, Modifier::Unsafe | Modifier::PubAndUnsafe) + matches!(self, Self::Unsafe | Self::PubAndUnsafe) } } -define_scope! { - EnumScope, - Enum, - Inheritance -} +define_scope! { EnumScope, Enum, Inheritance } impl super::Parse for EnumScope { fn parse(&mut self, _parser: &mut Parser) { todo!() } } -define_scope! 
{ - TraitScope, - Trait, - Inheritance -} +define_scope! { TraitScope, Trait, Inheritance } impl super::Parse for TraitScope { fn parse(&mut self, _parser: &mut Parser) { todo!() } } -// We can't use `define_scope` here since the `syntax_kind` of the scope can be -// determined after parsing. -#[derive(Debug, Clone)] -struct ImplScope { - syntax_kind: RefCell, - recovery_method: super::RecoveryMethod, -} -impl Default for ImplScope { - fn default() -> Self { - Self { - syntax_kind: SyntaxKind::Impl.into(), - recovery_method: super::RecoveryMethod::inheritance_empty(), - } - } -} -impl super::ParsingScope for ImplScope { - fn recovery_method(&self) -> &super::RecoveryMethod { - &self.recovery_method - } - - fn syntax_kind(&self) -> SyntaxKind { - *self.syntax_kind.borrow() - } -} +define_scope! { ImplScope, Impl, Inheritance } impl super::Parse for ImplScope { fn parse(&mut self, _parser: &mut Parser) { todo!() } } -define_scope! { - UseScope, - Use, - Inheritance -} +define_scope! { UseScope, Use, Inheritance } impl super::Parse for UseScope { fn parse(&mut self, _parser: &mut Parser) { todo!() } } -define_scope! { - ConstScope, - Const, - Inheritance -} +define_scope! { ConstScope, Const, Inheritance } impl super::Parse for ConstScope { fn parse(&mut self, _parser: &mut Parser) { todo!() } } -define_scope! { - ExternScope, - Extern, - Inheritance -} +define_scope! { ExternScope, Extern, Inheritance } impl super::Parse for ExternScope { fn parse(&mut self, _parser: &mut Parser) { todo!() } } -define_scope! { - TypeAliasScope, - TypeAlias, - Inheritance -} +define_scope! { TypeAliasScope, TypeAlias, Inheritance } impl super::Parse for TypeAliasScope { fn parse(&mut self, _parser: &mut Parser) { todo!() diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 485b4d45bc..37baa33b07 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -33,11 +33,16 @@ pub struct Parser { stream: BackTrackableTokenStream, builder: rowan::GreenNodeBuilder<'static>, - scopes: Vec>, + /// The second element holds `is_newline_trivia` of the parent. + parents: Vec<(Box, bool)>, errors: Vec, - current_pos: rowan::TextSize, next_trivias: VecDeque, + /// if `is_newline_trivia` is `true`, `Newline` is also regarded as a trivia + /// token. + is_newline_trivia: bool, + + current_pos: rowan::TextSize, /// The dry run states which holds the each state of the parser when it /// enters dry run mode. dry_run_states: Vec, @@ -49,31 +54,39 @@ impl Parser { Self { stream: BackTrackableTokenStream::new(stream), builder: rowan::GreenNodeBuilder::new(), - scopes: Vec::new(), + parents: Vec::new(), errors: Vec::new(), current_pos: rowan::TextSize::from(0), + is_newline_trivia: true, next_trivias: VecDeque::new(), dry_run_states: Vec::new(), } } /// Returns the current token of the parser. - pub fn current_token(&mut self) -> Option<&S::Token> { - if !self.next_trivias.is_empty() { - Some(&self.next_trivias[0]) - } else { - self.stream.peek() - } + pub fn current_token(&mut self) -> Option { + self.peek_non_trivia() + // if !self.next_trivias.is_empty() { + // Some(&self.next_trivias[0]) + // } else { + // self.stream.peek() + // } } - /// Returns the current token kind of the parser. + /// Returns the current non-trivia token kind of the parser. pub fn current_kind(&mut self) -> Option { - self.current_token().map(|token| token.syntax_kind()) + self.current_token().map(|tok| tok.syntax_kind()) + } + + /// Sets the newline kind as trivia if `is_trivia` is `true`. 
Otherwise, the + /// newline kind is not regarded as a trivia. + pub fn set_newline_as_trivia(&mut self, is_trivia: bool) { + self.is_newline_trivia = is_trivia; } /// Finish the parsing and return the syntax tree. pub fn finish(self) -> (SyntaxNode, Vec) { - debug_assert!(self.scopes.is_empty()); + debug_assert!(self.parents.is_empty()); debug_assert!(!self.is_dry_run()); (SyntaxNode::new_root(self.builder.finish()), self.errors) @@ -81,21 +94,28 @@ impl Parser { /// Invoke the scope to parse. The scope is wrapped up by the node specified /// by the scope. - /// Returns `true` if parse succeeded, otherwise `false`. /// - /// * If the checkpoint is `Some`, the marked branch is wrapped up by the + /// # Arguments + /// * If the `checkpoint` is `Some`, the marked branch is wrapped up by the /// node. - /// * If the checkpoint is `None`, the current branch is wrapped up by the + /// * If the `checkpoint` is `None`, the current branch is wrapped up by the /// node. - pub fn parse(&mut self, mut scope: T, checkpoint: Option) -> bool + /// + /// # Returns + /// * If the parsing succeeds, the first element of the return value is + /// `true`. otherwise, the first element is `false`. + /// * The second element of the return value is the checkpoint of the start + /// of the node. + pub fn parse(&mut self, mut scope: T, checkpoint: Option) -> (bool, Checkpoint) where T: Parse + 'static, { let checkpoint = self.enter(scope.clone(), checkpoint); let error_len = self.errors.len(); + let start_checkpoint = self.builder.checkpoint(); scope.parse(self); self.leave(checkpoint); - error_len == self.errors.len() + (error_len == self.errors.len(), start_checkpoint) } #[doc(hidden)] @@ -106,15 +126,29 @@ impl Parser { where T: ParsingScope + 'static, { - self.scopes.push(Box::new(scope)); - checkpoint.unwrap_or_else(|| self.checkpoint()) + // Ensure the leading trivias are added to the parent node. + if !self.parents.is_empty() { + self.bump_trivias(); + } + self.parents.push((Box::new(scope), self.is_newline_trivia)); + // `is_newline_trivia` is always `true` when entering a scope. + self.is_newline_trivia = true; + checkpoint.unwrap_or_else(|| self.builder.checkpoint()) } #[doc(hidden)] /// Leave the scope and wrap up the checkpoint by the scope's node. // NOTE: This method is limited to testing and internal usage. pub fn leave(&mut self, checkpoint: Checkpoint) { - let scope = self.scopes.pop().unwrap(); + let (scope, is_newline_trivia) = self.parents.pop().unwrap(); + self.is_newline_trivia = is_newline_trivia; + + // Ensure the trailing trivias are added to the current node if the current + // scope is the root. + if self.parents.is_empty() { + self.bump_trivias() + } + if !self.is_dry_run() { self.builder .start_node_at(checkpoint, scope.syntax_kind().into()); @@ -122,13 +156,6 @@ impl Parser { } } - /// Marks the current branch as a checkpoint. - /// The checked branch is wrapped up later when [`parse]` is - /// called with the `checkpoint`. - pub fn checkpoint(&mut self) -> Checkpoint { - self.builder.checkpoint() - } - /// Add `msg` as an error to the error list, then bumps consecutive tokens /// until a token in the recovery set is found. /// @@ -167,16 +194,6 @@ impl Parser { self.leave(checkpoint); } - /// Add the `msg` to the error list and bumps n token in the error branch. 
- pub fn error_and_bump(&mut self, msg: &str, bump_n: usize) { - let error = self.error(msg); - let checkpoint = self.enter(error, None); - for _ in 0..bump_n { - self.bump(); - } - self.leave(checkpoint); - } - /// Starts the dry run mode. /// When the parser is in the dry run mode, the parser does not build the /// syntax tree. @@ -201,42 +218,12 @@ impl Parser { self.current_pos = state.pos; } - /// Bumps the current token and - /// current branch. + /// Bumps the current token and its leading trivias. pub fn bump(&mut self) { - let tok = match self.next_trivias.pop_front() { - Some(tok) => tok, - None => self.stream.next().unwrap(), - }; + // Bump leading trivias. + self.bump_trivias(); - self.current_pos += rowan::TextSize::of(tok.text()); - if !self.is_dry_run() { - self.builder.token(tok.syntax_kind().into(), tok.text()); - } - } - - /// Peek the next non-trivia token. - /// If `skip_newlines` is `true`, newlines are also treated as trivia. - pub fn peek_non_trivia(&mut self, skip_newlines: bool) -> Option { - if !skip_newlines { - for tok in &self.next_trivias { - if tok.syntax_kind() == SyntaxKind::Newline { - return Some(SyntaxKind::Newline); - } - } - } - - while let Some(next) = self.stream.peek() { - let kind = next.syntax_kind(); - if kind.is_trivia() || (skip_newlines && kind == SyntaxKind::Newline) { - self.next_trivias.push_back(self.stream.next().unwrap()); - continue; - } else { - return Some(kind); - } - } - - None + self.bump_raw(); } /// Bumps the current token if the current token is the `expected` kind. @@ -259,39 +246,15 @@ impl Parser { } } - /// Bumps consecutive trivia tokens. - /// If `bump_newlines` is true, newlines are also bumped. - pub fn bump_trivias(&mut self, bump_newlines: bool) { - while let Some(tok) = self.current_token() { - let kind = tok.syntax_kind(); - if kind.is_trivia() || (bump_newlines && kind == SyntaxKind::Newline) { - self.bump(); - } else { - break; - } - } - } - - /// Bump consecutive newlines. - pub fn bump_newlines(&mut self) { - while let Some(tok) = self.current_token() { - if tok.syntax_kind() == SyntaxKind::Newline { - self.bump(); - } else { - break; - } - } - } - /// Proceeds the parser to the recovery token of the current scope. pub fn recover(&mut self) { let mut recovery_set: FxHashSet = fxhash::FxHashSet::default(); - let mut scope_index = self.scopes.len() - 1; + let mut scope_index = self.parents.len() - 1; loop { match self - .scopes + .parents .get(scope_index) - .map(|scope| scope.recovery_method()) + .map(|scope| scope.0.recovery_method()) { Some(RecoveryMethod::Inheritance(set)) => { recovery_set.extend(set.iter()); @@ -306,9 +269,8 @@ impl Parser { } } - while let Some(tok) = self.stream.peek() { - let syntax_kind = tok.syntax_kind(); - if recovery_set.contains(&syntax_kind) { + while let Some(kind) = self.current_kind() { + if recovery_set.contains(&kind) { break; } else { self.bump(); @@ -316,6 +278,61 @@ impl Parser { } } + /// Bumps the current token and + /// current branch. + fn bump_raw(&mut self) { + let tok = match self.next_trivias.pop_front() { + Some(tok) => tok, + None => self.stream.next().unwrap(), + }; + + self.current_pos += rowan::TextSize::of(tok.text()); + if !self.is_dry_run() { + self.builder.token(tok.syntax_kind().into(), tok.text()); + } + } + + fn bump_trivias(&mut self) { + // Bump trivias. + loop { + match self.peek_raw() { + Some(tok) if self.is_trivia(tok.syntax_kind()) => self.bump_raw(), + _ => break, + } + } + } + + /// Peek the next non-trivia token. 
+ fn peek_non_trivia(&mut self) -> Option { + if !self.is_newline_trivia { + for tok in &self.next_trivias { + if tok.syntax_kind() == SyntaxKind::Newline { + return Some(tok.clone()); + } + } + } + + while let Some(next) = self.stream.peek().map(|tok| tok.syntax_kind()) { + if self.is_trivia(next) { + let next = self.stream.next().unwrap(); + self.next_trivias.push_back(next); + continue; + } else { + return self.stream.peek().cloned(); + } + } + + None + } + + fn peek_raw(&mut self) -> Option { + if let Some(tok) = self.next_trivias.front() { + Some(tok.clone()) + } else { + self.stream.peek().cloned() + } + } + /// Add the `msg` to the error list. fn error(&mut self, msg: &str) -> ErrorScope { let start = self.current_pos; @@ -337,6 +354,10 @@ impl Parser { fn is_dry_run(&self) -> bool { !self.dry_run_states.is_empty() } + + fn is_trivia(&self, kind: SyntaxKind) -> bool { + kind.is_trivia() || (self.is_newline_trivia && kind == SyntaxKind::Newline) + } } /// The current scope of parsing. @@ -351,10 +372,11 @@ pub trait Parse: ParsingScope + Clone { fn parse(&mut self, parser: &mut Parser); } -define_scope! { - ErrorScope, - Error, - Inheritance +struct DryRunState { + /// The text position is the position when the dry run started. + pos: rowan::TextSize, + /// The number of errors when the dry run started. + err_num: usize, } /// Represents the recovery method of the current scope. @@ -369,10 +391,6 @@ pub enum RecoveryMethod { } impl RecoveryMethod { - fn inheritance_empty() -> Self { - Self::Inheritance(fxhash::FxHashSet::default()) - } - fn inheritance(tokens: &[SyntaxKind]) -> Self { Self::Inheritance(tokens.iter().copied().collect()) } @@ -395,6 +413,12 @@ where } } +define_scope! { + ErrorScope, + Error, + Inheritance +} + define_scope! { RootScope, Root, @@ -402,15 +426,15 @@ define_scope! { } macro_rules! define_scope { - ($scope_name: ident, $kind: path, Inheritance) => { - #[derive(Default, Debug, Clone, Copy)] - pub struct $scope_name {} - + ($scope_name: ident $({ $($field: ident: $ty: ty),* })?, $kind: path, Inheritance $(($($recoveries: path), *))?) => { + crate::parser::define_scope_struct! {$scope_name {$($($field: $ty), *)?}, $kind} impl crate::parser::ParsingScope for $scope_name { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { lazy_static::lazy_static! { pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { - crate::parser::RecoveryMethod::inheritance_empty() + #[allow(unused)] + use crate::SyntaxKind::*; + crate::parser::RecoveryMethod::inheritance(&[$($($recoveries), *)?]) }; } @@ -418,15 +442,13 @@ macro_rules! define_scope { } fn syntax_kind(&self) -> crate::SyntaxKind { - use crate::SyntaxKind::*; - $kind + self.__inner.get() } } }; - ($scope_name: ident, $kind: path, Inheritance($($recoveries: path), *)) => { - #[derive(Default, Debug, Clone, Copy)] - pub struct $scope_name {} + ($scope_name: ident $({ $($field: ident: $ty: ty),* })?, $kind: path, Override($($recoveries: path), *)) => { + crate::parser::define_scope_struct! {$scope_name {$($($field: $ty), *)?}, $kind} impl crate::parser::ParsingScope for $scope_name { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { @@ -434,11 +456,7 @@ macro_rules! 
define_scope { pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { #[allow(unused)] use crate::SyntaxKind::*; - let set: fxhash::FxHashSet = vec![ - $($recoveries), * - ].into_iter().map(|kind: SyntaxKind| kind.into()).collect(); - - crate::parser::RecoveryMethod::Inheritance(set) + crate::parser::RecoveryMethod::override_(&[$($recoveries), *]) }; } @@ -446,46 +464,37 @@ macro_rules! define_scope { } fn syntax_kind(&self) -> crate::SyntaxKind { - use crate::SyntaxKind::*; - $kind + self.__inner.get() } } }; +} - ($scope_name: ident, $kind: path, Override($($recoveries: path), *)) => { - #[derive(Default, Debug, Clone, Copy)] - pub struct $scope_name {} - - impl crate::parser::ParsingScope for $scope_name { - fn recovery_method(&self) -> &crate::parser::RecoveryMethod { - lazy_static::lazy_static! { - pub(super) static ref RECOVERY_METHOD: crate::parser::RecoveryMethod = { - #[allow(unused)] - use crate::SyntaxKind::*; - let set: fxhash::FxHashSet = vec![ - $($recoveries), * - ].into_iter().map(|kind: $crate::SyntaxKind| kind.into()).collect(); - - crate::parser::RecoveryMethod::Override(set) - }; - } - - &RECOVERY_METHOD +macro_rules! define_scope_struct { + ($scope_name: ident { $($field: ident: $ty: ty),* } , $kind: path) => { + #[derive(Debug, Clone)] + pub struct $scope_name { + __inner: std::cell::Cell, + $($field: $ty),* + } + impl $scope_name { + #[allow(unused)] + fn set_kind(&mut self, kind: crate::SyntaxKind) { + self.__inner.set(kind); } - - fn syntax_kind(&self) -> crate::SyntaxKind { + } + impl Default for $scope_name { + fn default() -> Self { use crate::SyntaxKind::*; - $kind + Self { + __inner: std::cell::Cell::new($kind), + $($field: Default::default()),* + } } } }; } -struct DryRunState { - /// The text position is the position when the dry run started. - pos: rowan::TextSize, - /// The number of errors when the dry run started. - err_num: usize, -} - use define_scope; +#[doc(hidden)] +use define_scope_struct; diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index f11744b0a4..6f9bf4d86b 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -1,6 +1,6 @@ use crate::SyntaxKind; -use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser}; +use super::{define_scope, expr::parse_expr, path::PathScope, token_stream::TokenStream, Parser}; define_scope! { GenericParamListScope, @@ -10,21 +10,17 @@ define_scope! { impl super::Parse for GenericParamListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::Lt); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::Gt) { return; } parser.parse(GenericParamScope::default(), None); - parser.bump_trivias(true); while parser.bump_if(SyntaxKind::Comma) { - parser.bump_trivias(true); parser.parse(GenericParamScope::default(), None); - parser.bump_trivias(true); } if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_recover("expected closing `>`", None); + parser.error_and_bump_until("expected closing `>`", None, SyntaxKind::Gt); parser.bump_if(SyntaxKind::Gt); } } @@ -41,10 +37,8 @@ impl super::Parse for GenericParamScope { parser.error_and_recover("expected type parameter", None); } - if parser.peek_non_trivia(true) == Some(SyntaxKind::Colon) { - parser.bump_trivias(true); + if parser.current_kind() == Some(SyntaxKind::Colon) { parser.bump_expected(SyntaxKind::Colon); - parser.bump_trivias(true); parser.parse(TraitBoundListScope::default(), None); } } @@ -58,10 +52,8 @@ define_scope! 
{ impl super::Parse for TraitBoundListScope { fn parse(&mut self, parser: &mut Parser) { parser.parse(TraitBoundScope::default(), None); - while parser.peek_non_trivia(true) == Some(SyntaxKind::Plus) { - parser.bump_trivias(true); + while parser.current_kind() == Some(SyntaxKind::Plus) { parser.bump_expected(SyntaxKind::Plus); - parser.bump_trivias(true); parser.parse(TraitBoundScope::default(), None); } } @@ -75,5 +67,39 @@ define_scope! { impl super::Parse for TraitBoundScope { fn parse(&mut self, parser: &mut Parser) { parser.parse(PathScope::default(), None); + // TODO: Allow trait bound with associated type bound. + // `Trait`. + } +} + +define_scope! { + GenericArgListScope, + GenericParamList, + Override(Gt, Comma) +} +impl super::Parse for GenericArgListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Lt); + + if parser.bump_if(SyntaxKind::Gt) { + return; + } + + parser.parse(GenericArgScope::default(), None); + while parser.bump_if(SyntaxKind::Comma) { + parser.parse(GenericArgScope::default(), None); + } + + if !parser.bump_if(SyntaxKind::Gt) { + parser.error_and_bump_until("expected closing `>`", None, SyntaxKind::Gt); + parser.bump_if(SyntaxKind::Gt); + } + } +} + +define_scope! { GenericArgScope, GenericParam, Inheritance} +impl super::Parse for GenericArgScope { + fn parse(&mut self, parser: &mut Parser) { + parse_expr(parser); } } diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index a7ff453d91..6dab9c8acb 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -6,8 +6,7 @@ use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser, Re pub fn parse_pat(parser: &mut Parser) -> bool { use SyntaxKind::*; - let checkpoint = parser.checkpoint(); - let mut success = match parser.current_kind() { + let (success, checkpoint) = match parser.current_kind() { Some(Underscore) => parser.parse(WildCardPatScope::default(), None), Some(Dot2) => parser.parse(RestPatScope::default(), None), Some(LParen) => parser.parse(TuplePatScope::default(), None), @@ -15,17 +14,17 @@ pub fn parse_pat(parser: &mut Parser) -> bool { _ => parser.parse(PathPatScope::default(), None), }; - if parser.peek_non_trivia(true) == Some(SyntaxKind::Pipe) { - parser.bump_trivias(true); - success = parser.parse(OrPatScope::default(), Some(checkpoint)) && success; + if parser.current_kind() == Some(SyntaxKind::Pipe) { + parser.parse(OrPatScope::default(), Some(checkpoint)).0 && success + } else { + success } - - success } define_scope! { WildCardPatScope, WildCardPat, Inheritance(SyntaxKind::Pipe) } impl super::Parse for WildCardPatScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::Underscore); } } @@ -33,14 +32,15 @@ impl super::Parse for WildCardPatScope { define_scope! { RestPatScope, RestPat, Inheritance } impl super::Parse for RestPatScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::Dot2); - parser.bump_trivias(true); } } define_scope! { LitPatScope, LitPat, Inheritance(SyntaxKind::Pipe) } impl super::Parse for LitPatScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); match parser.current_kind() { Some(SyntaxKind::Int | SyntaxKind::String) => parser.bump(), _ => unreachable!(), @@ -52,17 +52,13 @@ define_scope! 
{ TuplePatScope, TuplePat, Override(RParen) } impl super::Parse for TuplePatScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::RParen) { return; } parse_pat(parser); - parser.bump_trivias(true); while parser.bump_if(SyntaxKind::Comma) { - parser.bump_trivias(true); parse_pat(parser); - parser.bump_trivias(true); } if !parser.bump_if(SyntaxKind::RParen) { @@ -99,12 +95,12 @@ impl super::ParsingScope for PathPatScope { } impl super::Parse for PathPatScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.parse(PathScope::default(), None) { + if !parser.parse(PathScope::default(), None).0 { return; } - if parser.peek_non_trivia(false) == Some(SyntaxKind::LParen) { - parser.bump_trivias(false); + parser.set_newline_as_trivia(false); + if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(TuplePatScope::default(), None); *self.syntax_kind.borrow_mut() = SyntaxKind::PathTuplePat; } @@ -115,7 +111,6 @@ define_scope! { OrPatScope, OrPat, Inheritance(SyntaxKind::Pipe) } impl super::Parse for OrPatScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::Pipe); - parser.bump_trivias(true); parse_pat(parser); } } diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 5206759a48..7664522ad9 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -9,9 +9,9 @@ define_scope! { } impl super::Parse for PathScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.parse(PathSegmentScope::default(), None); - while parser.peek_non_trivia(false) == Some(SyntaxKind::Colon2) { - parser.bump_trivias(false); + while parser.current_kind() == Some(SyntaxKind::Colon2) { parser.bump_expected(SyntaxKind::Colon2); parser.parse(PathSegmentScope::default(), None); } @@ -25,6 +25,7 @@ define_scope! { } impl super::Parse for PathSegmentScope { fn parse(&mut self, parser: &mut Parser) { + dbg! {"{:?}", parser.current_kind()}; debug_assert!(is_path_header(parser.current_kind().unwrap())); parser.bump() } diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index 292ac42226..782ba78d7b 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -1,11 +1,7 @@ use crate::SyntaxKind; use super::{ - define_scope, - expr::{parse_expr, BlockExprScope}, - pat::parse_pat, - token_stream::TokenStream, - type_::parse_type, + define_scope, expr::parse_expr, pat::parse_pat, token_stream::TokenStream, type_::parse_type, Checkpoint, Parser, }; @@ -25,41 +21,37 @@ pub(super) fn parse_stmt( Some(ReturnKw) => parser.parse(ReturnStmtScope::default(), checkpoint), _ => { parser.start_dry_run(); - if parser.parse(AssignStmtScope::default(), checkpoint) { + if parser.parse(AssignStmtScope::default(), checkpoint).0 { parser.end_dry_run(); - assert!(parser.parse(AssignStmtScope::default(), checkpoint)); - true + parser.parse(AssignStmtScope::default(), checkpoint) } else { parser.end_dry_run(); parser.parse(ExprStmtScope::default(), checkpoint) } } } + .0 } define_scope! 
{ LetStmtScope, LetStmt, Inheritance } impl super::Parse for LetStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LetKw); - parser.bump_trivias(false); + parser.set_newline_as_trivia(false); if !parse_pat(parser) { parser.error_and_recover("expected pattern", None); return; } - if parser.peek_non_trivia(false) == Some(SyntaxKind::Colon) { - parser.bump_trivias(false); + if parser.current_kind() == Some(SyntaxKind::Colon) { parser.bump_expected(SyntaxKind::Colon); - parser.bump_trivias(false); if !parse_type(parser, None) { return; } } - if parser.peek_non_trivia(false) == Some(SyntaxKind::Eq) { - parser.bump_trivias(false); + if parser.current_kind() == Some(SyntaxKind::Eq) { parser.bump_expected(SyntaxKind::Eq); - parser.bump_trivias(false); - parse_expr(parser, None); + parse_expr(parser); } } } @@ -68,26 +60,23 @@ define_scope! { ForStmtScope, ForStmt, Inheritance } impl super::Parse for ForStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ForKw); - parser.bump_trivias(true); if !parse_pat(parser) { return; } - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::InKw) { parser.error_and_recover("expected `in` keyword", None); return; } - parser.bump_trivias(true); - if !parse_expr(parser, None) { + if !parse_expr(parser) { return; } - if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); return; } - parser.parse(BlockExprScope::default(), None); + parse_expr(parser); } } @@ -95,16 +84,15 @@ define_scope! { WhileStmtScope, WhileStmt, Inheritance } impl super::Parse for WhileStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::WhileKw); - parser.bump_trivias(true); - if !parse_expr(parser, None) { + if !parse_expr(parser) { return; } - if parser.peek_non_trivia(true) != Some(SyntaxKind::LBrace) { + if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); return; } - parser.parse(BlockExprScope::default(), None); + parse_expr(parser); } } @@ -126,8 +114,8 @@ define_scope! { AssertStmtScope, AssertStmt, Inheritance } impl super::Parse for AssertStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::AssertKw); - parser.bump_trivias(false); - parse_expr(parser, None); + parser.set_newline_as_trivia(false); + parse_expr(parser); } } @@ -135,8 +123,8 @@ define_scope! { ReturnStmtScope, ReturnStmt, Inheritance } impl super::Parse for ReturnStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ReturnStmt); - parser.bump_trivias(false); - parse_expr(parser, None); + parser.set_newline_as_trivia(false); + parse_expr(parser); } } @@ -146,19 +134,18 @@ impl super::Parse for AssignStmtScope { if !parse_pat(parser) { return; } - - parser.bump_trivias(true); + parser.set_newline_as_trivia(false); if !parser.bump_if(SyntaxKind::Eq) { parser.error_and_recover("expected `=` keyword", None); return; } - parse_expr(parser, None); + parse_expr(parser); } } define_scope! 
{ ExprStmtScope, ExprStmt, Inheritance } impl super::Parse for ExprStmtScope { fn parse(&mut self, parser: &mut Parser) { - parse_expr(parser, None); + parse_expr(parser); } } diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index a1fb97d593..6f5867f5cf 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -14,17 +14,14 @@ impl super::Parse for StructScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::StructKw); - parser.bump_trivias(true); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected ident for the struct name", None) } - parser.bump_trivias(true); if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericParamListScope::default(), None); } - parser.bump_trivias(true); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(StructFieldDefListScope::default(), None); } else { @@ -44,10 +41,20 @@ define_scope! { impl super::Parse for StructFieldDefListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); - parser.bump_trivias(true); - while !matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { + + loop { + parser.set_newline_as_trivia(true); + if parser.current_kind() == Some(SyntaxKind::RBrace) || parser.current_kind().is_none() + { + break; + } parser.parse(StructFieldDefScope::default(), None); - parser.bump_trivias(true); + parser.set_newline_as_trivia(false); + if !parser.bump_if(SyntaxKind::Newline) + && parser.current_kind() != Some(SyntaxKind::RBrace) + { + parser.error_and_recover("expected newline after field definition", None); + } } if !parser.bump_if(SyntaxKind::RBrace) { @@ -67,24 +74,14 @@ define_scope! { impl super::Parse for StructFieldDefScope { fn parse(&mut self, parser: &mut Parser) { parse_attr_list(parser); - parser.bump_trivias(true); parser.bump_if(SyntaxKind::PubKw); - parser.bump_trivias(false); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected ident for the field name", None); } - parser.bump_trivias(false); if !parser.bump_if(SyntaxKind::Colon) { parser.error_and_recover("expected `name: type` for the field definition", None); } - parser.bump_trivias(false); parse_type(parser, None); - if !matches!( - parser.peek_non_trivia(false), - Some(SyntaxKind::Newline) | Some(SyntaxKind::RBrace) - ) { - parser.error_and_recover("expected newline after the field definition", None); - } } } diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index 1289cfc185..351d5ef85d 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -15,13 +15,14 @@ pub(super) fn parse_type( Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::default(), checkpoint), _ => parser.parse(PathTypeScope::default(), checkpoint), } + .0 } define_scope!(PtrTypeScope, PtrType, Inheritance); impl super::Parse for PtrTypeScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::Star); - parser.bump_trivias(false); + parser.set_newline_as_trivia(false); parse_type(parser, None); } } @@ -29,11 +30,12 @@ impl super::Parse for PtrTypeScope { define_scope!(PathTypeScope, PathType, Inheritance); impl super::Parse for PathTypeScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.parse(PathScope::default(), None) { + if !parser.parse(PathScope::default(), None).0 { return; } - if parser.peek_non_trivia(false) == Some(SyntaxKind::Lt) { + 
parser.set_newline_as_trivia(false); + if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericArgListScope::default(), None); } } @@ -56,17 +58,13 @@ define_scope! { impl super::Parse for TupleTypeScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); - parser.bump_trivias(true); if parser.bump_if(SyntaxKind::RParen) { return; } parse_type(parser, None); - parser.bump_trivias(true); while parser.bump_if(SyntaxKind::Comma) { - parser.bump_trivias(true); parse_type(parser, None); - parser.bump_trivias(true); } if !parser.bump_if(SyntaxKind::RParen) { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 6c4be99aa4..0edbe9a585 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -89,12 +89,20 @@ pub enum SyntaxKind { /// `*` #[token("*")] Star, + #[token("**")] + Star2, /// `/` #[token("/")] Slash, /// `%` #[token("%")] Percent, + #[token("~")] + Tilde, + #[token("!")] + Not, + #[token("^")] + Hat, /// `&` #[token("&")] Amp, @@ -139,7 +147,7 @@ pub enum SyntaxKind { Eq2, /// `!=` #[token("!=")] - NonEq, + NotEq, /// `true' #[token("true")] @@ -238,15 +246,21 @@ pub enum SyntaxKind { CallArgList, /// `arg: 1`, `y` CallArg, - /// `` + /// `foo.bar(x, y)` + MethodCallExpr, + /// `` GenericArgList, /// `T` GenericArg, /// `FOO::Bar` PathExpr, - /// `foo.bar(x, y)` - MethodCallExpr, - /// `foo.bar` + /// `Foo { x: 1, y: "String"` }` + RecordInitExpr, + /// `{ x: 1, y: "String"` }` + RecordFieldList, + /// `x: 1` + RecordField, + /// `foo.bar` or `foo.0` FieldExpr, /// `foo[1]` IndexExpr, @@ -332,6 +346,9 @@ pub enum SyntaxKind { Extern, ItemList, + /// `pub unsafe ` + ItemModifier, + // Types. These are non-leaf nodes. /// `*i32` PtrType, diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap index a9b9516d7d..9c94d2dc53 100644 --- a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap @@ -114,9 +114,9 @@ Root@0..203 WhiteSpace@122..123 " " Newline@123..124 "\n" WhiteSpace@124..129 " " - RestPat@129..132 + RestPat@129..131 Dot2@129..131 ".." 
- Newline@131..132 "\n" + Newline@131..132 "\n" RParen@132..133 ")" Newline@133..135 "\n\n" PathPat@135..150 diff --git a/crates/parser2/test_files/syntax_node/structs/attr.snap b/crates/parser2/test_files/syntax_node/structs/attr.snap index 00e4751b07..91b53df1eb 100644 --- a/crates/parser2/test_files/syntax_node/structs/attr.snap +++ b/crates/parser2/test_files/syntax_node/structs/attr.snap @@ -6,19 +6,20 @@ Root@0..170 ItemList@0..170 Struct@0..170 AttrList@0..56 - DocCommentAttr@0..16 + DocCommentAttr@0..15 DocComment@0..15 "/// DocComment1" - Newline@15..16 "\n" + Newline@15..16 "\n" Attr@16..21 Pound@16..17 "#" Ident@17..21 "attr" Newline@21..22 "\n" Comment@22..39 "// normal comment" Newline@39..40 "\n" - DocCommentAttr@40..56 + DocCommentAttr@40..55 DocComment@40..55 "/// DocComment2" - Newline@55..56 "\n" - PubKw@56..59 "pub" + Newline@55..56 "\n" + ItemModifier@56..59 + PubKw@56..59 "pub" WhiteSpace@59..60 " " StructKw@60..66 "struct" WhiteSpace@66..67 " " @@ -29,11 +30,11 @@ Root@0..170 Newline@79..80 "\n" WhiteSpace@80..84 " " StructFieldDef@84..115 - AttrList@84..104 - DocCommentAttr@84..100 + AttrList@84..100 + DocCommentAttr@84..99 DocComment@84..99 "/// This is `x`" - Newline@99..100 "\n" - WhiteSpace@100..104 " " + Newline@99..100 "\n" + WhiteSpace@100..104 " " Ident@104..105 "x" Colon@105..106 ":" WhiteSpace@106..107 " " @@ -47,10 +48,10 @@ Root@0..170 Newline@115..116 "\n" WhiteSpace@116..120 " " StructFieldDef@120..168 - AttrList@120..162 - DocCommentAttr@120..136 + AttrList@120..158 + DocCommentAttr@120..135 DocComment@120..135 "/// This is `y`" - Newline@135..136 "\n" + Newline@135..136 "\n" WhiteSpace@136..140 " " Attr@140..157 Pound@140..141 "#" @@ -64,7 +65,7 @@ Root@0..170 Ident@153..156 "evm" RParen@156..157 ")" Newline@157..158 "\n" - WhiteSpace@158..162 " " + WhiteSpace@158..162 " " Ident@162..163 "y" Colon@163..164 ":" WhiteSpace@164..165 " " diff --git a/crates/parser2/test_files/syntax_node/structs/empty.snap b/crates/parser2/test_files/syntax_node/structs/empty.snap index 8771aafd02..13c29b9854 100644 --- a/crates/parser2/test_files/syntax_node/structs/empty.snap +++ b/crates/parser2/test_files/syntax_node/structs/empty.snap @@ -5,7 +5,8 @@ expression: snapshot Root@0..26 ItemList@0..26 Struct@0..26 - PubKw@0..3 "pub" + ItemModifier@0..3 + PubKw@0..3 "pub" WhiteSpace@3..4 " " StructKw@4..10 "struct" WhiteSpace@10..11 " " diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index 8e01ad7c04..5b120748b7 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -5,7 +5,8 @@ expression: snapshot Root@0..312 ItemList@0..312 Struct@0..73 - PubKw@0..3 "pub" + ItemModifier@0..3 + PubKw@0..3 "pub" WhiteSpace@3..4 " " StructKw@4..10 "struct" WhiteSpace@10..11 " " @@ -62,7 +63,8 @@ Root@0..312 WhiteSpace@74..75 " " Newline@75..76 "\n" Struct@76..185 - PubKw@76..79 "pub" + ItemModifier@76..79 + PubKw@76..79 "pub" WhiteSpace@79..80 " " StructKw@80..86 "struct" WhiteSpace@86..87 " " @@ -145,7 +147,8 @@ Root@0..312 RBrace@184..185 "}" Newline@185..187 "\n\n" Struct@187..312 - PubKw@187..190 "pub" + ItemModifier@187..190 + PubKw@187..190 "pub" WhiteSpace@190..191 " " StructKw@191..197 "struct" WhiteSpace@197..198 " " diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap index 3ad6e68851..5a289d2fb1 100644 --- 
a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap @@ -5,6 +5,7 @@ expression: snapshot Root@0..117 ItemList@0..117 Struct@0..117 + ItemModifier@0..0 StructKw@0..6 "struct" WhiteSpace@6..7 " " Ident@7..27 "StructWithTupleField" diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 18e87a17f0..c55b13016a 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -7,7 +7,6 @@ use fe_parser2::{ fn test_item_list(input: &str) -> SyntaxNode { let runner = TestRunner::new(|parser| { while parser.current_kind().is_some() { - parser.bump_trivias(true); parser.parse(ItemListScope::default(), None); } }); @@ -22,7 +21,6 @@ fe_compiler_test_utils::build_debug_snap_tests! { fn test_pat(input: &str) -> SyntaxNode { let runner = TestRunner::new(|parser| { while parser.current_kind().is_some() { - parser.bump_trivias(true); parse_pat(parser); } }); From a919729f83705b5057395f46be044f63abd07bf4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 23 Jan 2023 17:52:47 +0100 Subject: [PATCH 026/678] Implement parser for `expr` --- crates/parser2/src/parser/expr.rs | 265 +++++++++++++++++++++- crates/parser2/src/parser/expr_atom.rs | 91 +++++--- crates/parser2/src/parser/mod.rs | 41 +++- crates/parser2/src/parser/param.rs | 65 +++++- crates/parser2/src/parser/path.rs | 11 +- crates/parser2/src/parser/stmt.rs | 55 +++-- crates/parser2/src/parser/token_stream.rs | 41 ++-- crates/parser2/src/parser/type_.rs | 33 ++- crates/parser2/src/syntax_kind.rs | 12 + 9 files changed, 521 insertions(+), 93 deletions(-) diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index fb2a667c49..69b788d09f 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -1,12 +1,263 @@ -use std::cell::RefCell; - -use crate::{parser::path, SyntaxKind}; +use crate::SyntaxKind; use super::{ - attr::parse_attr_list, define_scope, parse_pat, stmt::parse_stmt, token_stream::TokenStream, - Parser, + define_scope, expr_atom, + param::{CallArgListScope, GenericArgListScope}, + token_stream::{SyntaxToken, TokenStream}, + Checkpoint, Parser, }; -pub(super) fn parse_expr(_parser: &mut Parser) -> bool { - todo!() +pub fn parse_expr(parser: &mut Parser) -> bool { + parse_expr_with_min_bp(parser, 0, true) +} + +pub fn parse_expr_no_struct(parser: &mut Parser) -> bool { + parse_expr_with_min_bp(parser, 0, false) +} + +// Expressions are parsed in Pratt's top-down operator precedence style. +// +/// Parse an expression, stopping if/when we reach an operator that binds less +/// tightly than given binding power. +/// +/// Returns `true` if parsing succeeded, `false` otherwise. +fn parse_expr_with_min_bp( + parser: &mut Parser, + min_bp: u8, + allow_struct_init: bool, +) -> bool { + let (ok, checkpoint) = parse_expr_atom(parser, allow_struct_init); + if !ok { + return false; + } + + loop { + let Some(kind) = parser.current_kind() else { + break + }; + + // Parse postfix operators. + match postfix_binding_power(kind) { + Some(lbp) if lbp < min_bp => break, + Some(_) => { + match kind { + SyntaxKind::LBracket => { + if parser.parse(IndexExprScope::default(), Some(checkpoint)).0 { + continue; + } else { + return false; + } + } + + SyntaxKind::LParen => { + if parser.parse(CallExprScope::default(), Some(checkpoint)).0 { + continue; + } else { + return false; + } + } + + // `expr()`. 
+ SyntaxKind::Lt => { + parser.start_dry_run(); + if parser.parse(CallExprScope::default(), Some(checkpoint)).0 { + parser.end_dry_run(); + parser.parse(CallExprScope::default(), Some(checkpoint)); + continue; + } else { + parser.end_dry_run(); + } + } + + // `expr.method()` + SyntaxKind::Dot => { + parser.start_dry_run(); + if parser.parse(MethodExprScope::default(), Some(checkpoint)).0 { + parser.end_dry_run(); + parser.parse(MethodExprScope::default(), Some(checkpoint)); + continue; + } else { + parser.end_dry_run(); + } + } + _ => unreachable!(), + } + } + None => {} + } + if let Some((lbp, _)) = infix_binding_power(kind) { + if lbp < min_bp { + break; + } + + if !match kind { + // Method call is already handled as the postfix operator. + SyntaxKind::Dot => parser.parse(FieldExprScope::default(), Some(checkpoint)).0, + _ => parser.parse(BinExprScope::default(), Some(checkpoint)).0, + } { + return false; + } + + continue; + } + break; + } + + true +} + +fn parse_expr_atom( + parser: &mut Parser, + allow_struct_init: bool, +) -> (bool, Checkpoint) { + match parser.current_kind() { + Some(kind) if prefix_binding_power(kind).is_some() => { + parser.parse(UnExprScope::default(), None) + } + Some(_) => expr_atom::parse_expr_atom(parser, allow_struct_init), + None => { + parser.error_and_recover("expected expression", None); + (false, parser.checkpoint()) + } + } +} + +/// Specifies how tightly a prefix unary operator binds to its operand. +fn prefix_binding_power(kind: SyntaxKind) -> Option { + use SyntaxKind::*; + match kind { + Not | Plus | Minus | Tilde => Some(145), + _ => None, + } +} + +/// Specifies how tightly a postfix operator binds to its operand. +fn postfix_binding_power(kind: SyntaxKind) -> Option { + use SyntaxKind::*; + match kind { + LBracket | LParen | Lt => Some(147), + Dot => Some(151), + _ => None, + } +} + +/// Specifies how tightly does an infix operator bind to its left and right +/// operands. +fn infix_binding_power(kind: SyntaxKind) -> Option<(u8, u8)> { + use SyntaxKind::*; + + let bp = match kind { + Pipe2 => (50, 51), + Amp2 => (60, 61), + + // all comparisons are the same + Lt | LtEq | Gt | GtEq | NotEq | Eq2 => (70, 71), + + Pipe => (80, 81), + Hat => (90, 91), + Amp => (100, 101), + Lt2 | Gt2 => (110, 111), + Plus | Minus => (120, 121), + Star | Slash | Percent => (130, 131), + Star2 => (141, 140), + Dot => (151, 150), + _ => return None, + }; + Some(bp) +} + +define_scope! { UnExprScope, UnExpr, Inheritance } +impl super::Parse for UnExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + let kind = parser.current_kind().unwrap(); + let bp = prefix_binding_power(kind).unwrap(); + parser.bump(); + parse_expr_with_min_bp(parser, bp, true); + } +} + +define_scope! { BinExprScope, BinExpr, Inheritance } +impl super::Parse for BinExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + let kind = parser.current_kind().unwrap(); + let (_, rbp) = infix_binding_power(kind).unwrap(); + parser.bump(); + parse_expr_with_min_bp(parser, rbp, true); + } +} + +define_scope! { IndexExprScope, IndexExpr, Override(RBracket) } +impl super::Parse for IndexExprScope { + fn parse(&mut self, parser: &mut Parser) { + parse_expr(parser); + if !parser.bump_if(SyntaxKind::RBracket) { + parser.error_and_recover("expected `]`", None); + } + } +} + +define_scope! 
{ CallExprScope, CallExpr, Inheritance } +impl super::Parse for CallExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default(), None); + } + + if parser.current_kind() != Some(SyntaxKind::LParen) { + parser.error_and_recover("expected `(`", None); + return; + } + parser.parse(CallArgListScope::default(), None); + } +} + +define_scope! { MethodExprScope, MethodCallExpr, Inheritance } +impl super::Parse for MethodExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::Dot); + + if parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected identifier", None); + } + + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default(), None); + } + + if parser.current_kind() != Some(SyntaxKind::LParen) { + parser.error_and_recover("expected `(`", None); + return; + } + parser.parse(CallArgListScope::default(), None); + } +} + +define_scope! { FieldExprScope, FieldExpr, Inheritance } +impl super::Parse for FieldExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::Dot); + + match parser.current_token() { + Some(token) if token.syntax_kind() == SyntaxKind::Ident => { + parser.bump(); + } + Some(token) if token.syntax_kind() == SyntaxKind::Int => { + let text = token.text(); + if !text.chars().all(|c| c.is_ascii_digit()) { + parser + .error_and_recover("expected integer decimal literal without prefix", None); + return; + } + parser.bump(); + } + _ => { + parser.error_and_recover("expected identifier or integer literal", None); + } + } + } } diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 03bae71116..602b282d13 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -1,32 +1,41 @@ +use rowan::Checkpoint; + use crate::{parser::path, SyntaxKind}; use super::{ - attr::parse_attr_list, define_scope, expr::parse_expr, parse_pat, stmt::parse_stmt, - token_stream::TokenStream, Parser, + attr::parse_attr_list, + define_scope, + expr::{parse_expr, parse_expr_no_struct}, + parse_pat, + stmt::parse_stmt, + token_stream::TokenStream, + Parser, }; -pub(super) fn _parse_expr_atom(parser: &mut Parser) -> bool { +pub(super) fn parse_expr_atom( + parser: &mut Parser, + allow_struct_init: bool, +) -> (bool, Checkpoint) { use SyntaxKind::*; match parser.current_kind() { - Some(Int | String) => parser.parse(LitExprScope::default(), None).0, - Some(IfKw) => parser.parse(IfExprScope::default(), None).0, - Some(MatchKw) => parser.parse(MatchExprScope::default(), None).0, - Some(LBrace) => parser.parse(BlockExprScope::default(), None).0, - Some(LParen) => parser.parse(ParenScope::default(), None).0, - Some(LBracket) => parser.parse(ArrayScope::default(), None).0, - Some(kind) if path::is_path_header(kind) => { + Some(Int | String | TrueKw | FalseKw) => parser.parse(LitExprScope::default(), None), + Some(IfKw) => parser.parse(IfExprScope::default(), None), + Some(MatchKw) => parser.parse(MatchExprScope::default(), None), + Some(LBrace) => parser.parse(BlockExprScope::default(), None), + Some(LParen) => parser.parse(ParenScope::default(), None), + Some(LBracket) => parser.parse(ArrayScope::default(), None), + Some(kind) if path::is_path_segment(kind) => { let (success, checkpoint) = 
parser.parse(path::PathScope::default(), None); - if success && parser.current_kind() == Some(LBrace) { - parser - .parse(RecordInitExprScope::default(), Some(checkpoint)) - .0 + if success && parser.current_kind() == Some(LBrace) && allow_struct_init { + let (success, _) = parser.parse(RecordInitExprScope::default(), Some(checkpoint)); + (success, checkpoint) } else { - success + (success, checkpoint) } } _ => { parser.error_and_recover("expected expression", None); - false + (false, parser.checkpoint()) } } } @@ -81,6 +90,10 @@ impl super::Parse for IfExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::IfKw); + parser.add_recovery_token(SyntaxKind::LBrace); + parse_expr_no_struct(parser); + parser.remove_recovery_token(SyntaxKind::LBrace); + if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); return; @@ -95,7 +108,6 @@ impl super::Parse for IfExprScope { Some(SyntaxKind::LBrace | SyntaxKind::IfKw) ) { parser.error_and_recover("expected `{` or `if` after `else`", None); - parse_expr(parser); } } } @@ -106,10 +118,13 @@ impl super::Parse for MatchExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::MatchKw); - parse_expr(parser); + parser.add_recovery_token(SyntaxKind::LBrace); + parse_expr_no_struct(parser); + parser.remove_recovery_token(SyntaxKind::LBrace); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); + return; } parser.parse(MatchArmListScope::default(), None); } @@ -121,10 +136,24 @@ impl super::Parse for MatchArmListScope { parser.bump_expected(SyntaxKind::LBrace); loop { - if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { + parser.set_newline_as_trivia(true); + if parser.current_kind() == Some(SyntaxKind::RBrace) || parser.current_kind().is_none() + { break; } + parser.parse(MatchArmScope::default(), None); + + parser.set_newline_as_trivia(false); + if !parser.bump_if(SyntaxKind::Newline) + && parser.current_kind() != Some(SyntaxKind::RBrace) + { + parser.error_and_recover("expected newline after match arm", None); + } + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_bump_until("expected }", None, SyntaxKind::RBrace) } } } @@ -132,26 +161,18 @@ impl super::Parse for MatchArmListScope { define_scope! { MatchArmScope, MatchArm, Inheritance } impl super::Parse for MatchArmScope { fn parse(&mut self, parser: &mut Parser) { - if !parse_pat(parser) { - return; - } + parser.set_newline_as_trivia(false); + + parser.add_recovery_token(SyntaxKind::FatArrow); + parse_pat(parser); + parser.remove_recovery_token(SyntaxKind::FatArrow); - if parser.current_kind() != Some(SyntaxKind::FatArrow) { + if !parser.bump_if(SyntaxKind::FatArrow) { parser.error_and_recover("expected `=>`", None); return; } - parser.bump_expected(SyntaxKind::FatArrow); parse_expr(parser); - - parser.set_newline_as_trivia(false); - if parser.current_kind() != Some(SyntaxKind::Newline) { - parser.error_and_bump_until( - "expected newline after match arm", - None, - SyntaxKind::Newline, - ); - } } } @@ -159,7 +180,9 @@ define_scope! 
{ LitExprScope, LitExpr, Inheritance } impl super::Parse for LitExprScope { fn parse(&mut self, parser: &mut Parser) { match parser.current_kind() { - Some(SyntaxKind::Int | SyntaxKind::String) => parser.bump(), + Some( + SyntaxKind::Int | SyntaxKind::String | SyntaxKind::TrueKw | SyntaxKind::FalseKw, + ) => parser.bump(), _ => unreachable!(), } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 37baa33b07..93539ac40b 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -45,7 +45,9 @@ pub struct Parser { current_pos: rowan::TextSize, /// The dry run states which holds the each state of the parser when it /// enters dry run mode. - dry_run_states: Vec, + dry_run_states: Vec>, + + auxiliary_recovery_set: FxHashSet, } impl Parser { @@ -60,6 +62,7 @@ impl Parser { is_newline_trivia: true, next_trivias: VecDeque::new(), dry_run_states: Vec::new(), + auxiliary_recovery_set: FxHashSet::default(), } } @@ -80,8 +83,10 @@ impl Parser { /// Sets the newline kind as trivia if `is_trivia` is `true`. Otherwise, the /// newline kind is not regarded as a trivia. - pub fn set_newline_as_trivia(&mut self, is_trivia: bool) { - self.is_newline_trivia = is_trivia; + /// + /// Returns previous value. + pub fn set_newline_as_trivia(&mut self, is_trivia: bool) -> bool { + std::mem::replace(&mut self.is_newline_trivia, is_trivia) } /// Finish the parsing and return the syntax tree. @@ -112,7 +117,7 @@ impl Parser { { let checkpoint = self.enter(scope.clone(), checkpoint); let error_len = self.errors.len(); - let start_checkpoint = self.builder.checkpoint(); + let start_checkpoint = self.checkpoint(); scope.parse(self); self.leave(checkpoint); (error_len == self.errors.len(), start_checkpoint) @@ -133,7 +138,15 @@ impl Parser { self.parents.push((Box::new(scope), self.is_newline_trivia)); // `is_newline_trivia` is always `true` when entering a scope. self.is_newline_trivia = true; - checkpoint.unwrap_or_else(|| self.builder.checkpoint()) + checkpoint.unwrap_or_else(|| self.checkpoint()) + } + + pub fn add_recovery_token(&mut self, token: SyntaxKind) { + self.auxiliary_recovery_set.insert(token); + } + + pub fn remove_recovery_token(&mut self, token: SyntaxKind) { + self.auxiliary_recovery_set.remove(&token); } #[doc(hidden)] @@ -149,6 +162,7 @@ impl Parser { self.bump_trivias() } + self.auxiliary_recovery_set.clear(); if !self.is_dry_run() { self.builder .start_node_at(checkpoint, scope.syntax_kind().into()); @@ -206,6 +220,8 @@ impl Parser { self.dry_run_states.push(DryRunState { pos: self.current_pos, err_num: self.errors.len(), + next_trivias: self.next_trivias.clone(), + auxiliary_recovery_set: self.auxiliary_recovery_set.clone(), }); } @@ -216,6 +232,8 @@ impl Parser { let state = self.dry_run_states.pop().unwrap(); self.errors.truncate(state.err_num); self.current_pos = state.pos; + self.next_trivias = state.next_trivias; + self.auxiliary_recovery_set = state.auxiliary_recovery_set; } /// Bumps the current token and its leading trivias. @@ -278,6 +296,10 @@ impl Parser { } } + fn checkpoint(&mut self) -> Checkpoint { + self.builder.checkpoint() + } + /// Bumps the current token and /// current branch. fn bump_raw(&mut self) { @@ -372,11 +394,14 @@ pub trait Parse: ParsingScope + Clone { fn parse(&mut self, parser: &mut Parser); } -struct DryRunState { +struct DryRunState { /// The text position is the position when the dry run started. pos: rowan::TextSize, /// The number of errors when the dry run started. 
err_num: usize, + /// The stored trivias when the dry run started. + next_trivias: VecDeque, + auxiliary_recovery_set: FxHashSet, } /// Represents the recovery method of the current scope. @@ -474,7 +499,7 @@ macro_rules! define_scope_struct { ($scope_name: ident { $($field: ident: $ty: ty),* } , $kind: path) => { #[derive(Debug, Clone)] pub struct $scope_name { - __inner: std::cell::Cell, + __inner: std::rc::Rc>, $($field: $ty),* } impl $scope_name { @@ -487,7 +512,7 @@ macro_rules! define_scope_struct { fn default() -> Self { use crate::SyntaxKind::*; Self { - __inner: std::cell::Cell::new($kind), + __inner: std::cell::Cell::new($kind).into(), $($field: Default::default()),* } } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 6f9bf4d86b..b64202e7b9 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -1,7 +1,14 @@ use crate::SyntaxKind; -use super::{define_scope, expr::parse_expr, path::PathScope, token_stream::TokenStream, Parser}; - +use super::{ + define_scope, + expr::parse_expr, + expr_atom::{BlockExprScope, LitExprScope}, + path::PathScope, + token_stream::TokenStream, + type_::parse_type, + Parser, +}; define_scope! { GenericParamListScope, GenericParamList, @@ -100,6 +107,60 @@ impl super::Parse for GenericArgListScope { define_scope! { GenericArgScope, GenericParam, Inheritance} impl super::Parse for GenericArgScope { fn parse(&mut self, parser: &mut Parser) { + match parser.current_kind() { + Some(SyntaxKind::LBrace) => { + parser.parse(BlockExprScope::default(), None); + } + + Some(SyntaxKind::Star | SyntaxKind::LBracket | SyntaxKind::LParen) => { + parse_type(parser, None); + } + + Some(kind) if kind.is_literal_leaf() => { + parser.parse(LitExprScope::default(), None); + } + + _ => { + parser.parse(PathScope::default(), None); + } + } + } +} + +define_scope! { CallArgListScope, CallArgList, Override(RParen, Comma) } +impl super::Parse for CallArgListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LParen); + + if parser.bump_if(SyntaxKind::RParen) { + return; + } + + parser.parse(CallArgScope::default(), None); + while parser.bump_if(SyntaxKind::Comma) { + parser.parse(CallArgScope::default(), None); + } + + if !parser.bump_if(SyntaxKind::RParen) { + parser.error_and_bump_until("expected closing `)`", None, SyntaxKind::RParen); + parser.bump_if(SyntaxKind::RParen); + } + } +} + +define_scope! { CallArgScope, CallArg, Inheritance } +impl super::Parse for CallArgScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + + parser.start_dry_run(); + let has_label = parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon); + parser.end_dry_run(); + + if has_label { + parser.bump_expected(SyntaxKind::Ident); + parser.bump_expected(SyntaxKind::Colon); + } parse_expr(parser); } } diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 7664522ad9..d470aab226 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -25,13 +25,16 @@ define_scope! { } impl super::Parse for PathSegmentScope { fn parse(&mut self, parser: &mut Parser) { - dbg! 
{"{:?}", parser.current_kind()}; - debug_assert!(is_path_header(parser.current_kind().unwrap())); - parser.bump() + match parser.current_kind() { + Some(kind) if is_path_segment(kind) => { + parser.bump(); + } + _ => parser.error_and_recover("expected path segment", None), + } } } -pub(super) fn is_path_header(kind: SyntaxKind) -> bool { +pub(super) fn is_path_segment(kind: SyntaxKind) -> bool { matches!( kind, SyntaxKind::SelfType | SyntaxKind::SelfKw | SyntaxKind::Ident diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index 782ba78d7b..7c0caef90d 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -1,7 +1,11 @@ use crate::SyntaxKind; use super::{ - define_scope, expr::parse_expr, pat::parse_pat, token_stream::TokenStream, type_::parse_type, + define_scope, + expr::{parse_expr, parse_expr_no_struct}, + pat::parse_pat, + token_stream::TokenStream, + type_::parse_type, Checkpoint, Parser, }; @@ -44,13 +48,12 @@ impl super::Parse for LetStmtScope { } if parser.current_kind() == Some(SyntaxKind::Colon) { parser.bump_expected(SyntaxKind::Colon); - if !parse_type(parser, None) { - return; - } + parser.add_recovery_token(SyntaxKind::Eq); + parse_type(parser, None); + parser.remove_recovery_token(SyntaxKind::Eq); } - if parser.current_kind() == Some(SyntaxKind::Eq) { - parser.bump_expected(SyntaxKind::Eq); + if parser.bump_if(SyntaxKind::Eq) { parse_expr(parser); } } @@ -60,17 +63,19 @@ define_scope! { ForStmtScope, ForStmt, Inheritance } impl super::Parse for ForStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ForKw); - if !parse_pat(parser) { - return; - } + + parser.add_recovery_token(SyntaxKind::InKw); + parse_pat(parser); + parser.remove_recovery_token(SyntaxKind::InKw); if !parser.bump_if(SyntaxKind::InKw) { parser.error_and_recover("expected `in` keyword", None); return; } - if !parse_expr(parser) { - return; - } + + parser.add_recovery_token(SyntaxKind::LBrace); + parse_expr_no_struct(parser); + parser.remove_recovery_token(SyntaxKind::LBrace); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); @@ -84,9 +89,10 @@ define_scope! { WhileStmtScope, WhileStmt, Inheritance } impl super::Parse for WhileStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::WhileKw); - if !parse_expr(parser) { - return; - } + + parser.add_recovery_token(SyntaxKind::LBrace); + parse_expr_no_struct(parser); + parser.remove_recovery_token(SyntaxKind::LBrace); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); @@ -122,23 +128,32 @@ impl super::Parse for AssertStmtScope { define_scope! { ReturnStmtScope, ReturnStmt, Inheritance } impl super::Parse for ReturnStmtScope { fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::ReturnStmt); + parser.bump_expected(SyntaxKind::ReturnKw); parser.set_newline_as_trivia(false); - parse_expr(parser); + + parser.start_dry_run(); + if parse_expr(parser) { + parser.end_dry_run(); + parse_expr(parser); + } else { + parser.end_dry_run(); + } } } define_scope! 
{ AssignStmtScope, AssignStmt, Inheritance } impl super::Parse for AssignStmtScope { fn parse(&mut self, parser: &mut Parser) { - if !parse_pat(parser) { - return; - } + parser.add_recovery_token(SyntaxKind::Eq); + parse_pat(parser); + parser.remove_recovery_token(SyntaxKind::Eq); + parser.set_newline_as_trivia(false); if !parser.bump_if(SyntaxKind::Eq) { parser.error_and_recover("expected `=` keyword", None); return; } + parse_expr(parser); } } diff --git a/crates/parser2/src/parser/token_stream.rs b/crates/parser2/src/parser/token_stream.rs index 10a758fec2..c6ade355ad 100644 --- a/crates/parser2/src/parser/token_stream.rs +++ b/crates/parser2/src/parser/token_stream.rs @@ -1,3 +1,5 @@ +use std::collections::VecDeque; + use crate::SyntaxKind; /// This trait works as an abstraction layer to encapsulate the differences @@ -28,7 +30,7 @@ pub trait SyntaxToken: Clone { pub struct BackTrackableTokenStream { stream: T, /// Backtrack buffer which stores tokens that have been already consumed. - bt_buffer: Vec, + bt_buffer: VecDeque, bt_points: Vec, /// Points to the current position of the backtrack buffer. bt_cursor: Option, @@ -39,7 +41,7 @@ impl BackTrackableTokenStream { pub fn new(stream: T) -> Self { Self { stream, - bt_buffer: Vec::new(), + bt_buffer: VecDeque::new(), bt_points: Vec::new(), bt_cursor: None, } @@ -48,21 +50,29 @@ impl BackTrackableTokenStream { /// Returns the next token in the stream. #[allow(clippy::should_implement_trait)] pub fn next(&mut self) -> Option { + if !self.has_parent() { + if let Some(bt_buffer) = self.bt_buffer.pop_front() { + return Some(bt_buffer); + } else { + return self.stream.next(); + } + } + if let Some(cursor) = self.bt_cursor { if cursor < self.bt_buffer.len() { let token = self.bt_buffer.get(cursor).cloned(); self.bt_cursor = Some(cursor + 1); return token; + } else { + self.bt_cursor = Some(cursor + 1); } } let token = self.stream.next()?; - if self.has_bt_point() { - self.bt_buffer.push(token.clone()); - } - if let Some(cursor) = self.bt_cursor { - self.bt_cursor = Some(cursor + 1); + if self.has_parent() { + self.bt_buffer.push_back(token.clone()); } + Some(token) } @@ -80,15 +90,10 @@ impl BackTrackableTokenStream { /// Set a backtrack point which allows the parser to backtrack to this /// point. pub fn set_bt_point(&mut self) { - println!("{}", self.bt_buffer.len()); - self.bt_points.push(self.bt_buffer.len()); - } - - /// Remove the last backtrack point. 
- pub fn complete(&mut self) { - self.bt_cursor = None; - if !self.has_bt_point() { - self.bt_buffer.clear(); + if self.has_parent() { + self.bt_points.push(self.bt_buffer.len()); + } else { + self.bt_points.push(0); } } @@ -105,4 +110,8 @@ impl BackTrackableTokenStream { pub fn has_bt_point(&mut self) -> bool { !self.bt_points.is_empty() } + + pub fn has_parent(&mut self) -> bool { + !self.bt_points.is_empty() + } } diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index 351d5ef85d..3c5176e741 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -1,8 +1,8 @@ use crate::SyntaxKind; use super::{ - define_scope, param::GenericArgListScope, path::PathScope, token_stream::TokenStream, - Checkpoint, Parser, + define_scope, expr::parse_expr, param::GenericArgListScope, path::PathScope, + token_stream::TokenStream, Checkpoint, Parser, }; pub(super) fn parse_type( @@ -13,6 +13,7 @@ pub(super) fn parse_type( Some(SyntaxKind::Star) => parser.parse(PtrTypeScope::default(), checkpoint), Some(SyntaxKind::SelfTypeKw) => parser.parse(SelfTypeScope::default(), checkpoint), Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::default(), checkpoint), + Some(SyntaxKind::LBracket) => parser.parse(ArrayTypeScope::default(), checkpoint), _ => parser.parse(PathTypeScope::default(), checkpoint), } .0 @@ -73,3 +74,31 @@ impl super::Parse for TupleTypeScope { } } } + +define_scope! { + ArrayTypeScope, + ArrayType, + Override(RBracket) +} +impl super::Parse for ArrayTypeScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBracket); + + parser.add_recovery_token(SyntaxKind::SemiColon); + parse_type(parser, None); + parser.remove_recovery_token(SyntaxKind::SemiColon); + + if !parser.bump_if(SyntaxKind::SemiColon) { + parser.error_and_recover("expected `;`", None); + parser.bump_if(SyntaxKind::LBracket); + return; + } + + parse_expr(parser); + + if !parser.bump_if(SyntaxKind::RBracket) { + parser.error_and_recover("expected closing `]`", None); + parser.bump_if(SyntaxKind::RBracket); + } + } +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 0edbe9a585..3c5bb91fa2 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -313,6 +313,8 @@ pub enum SyntaxKind { PathPat, /// `Enum::Variant(x, y)` PathTuplePat, + /// `Struct {x, y}` + RecordPat, /// `pat1 | pat2` OrPat, @@ -358,6 +360,8 @@ pub enum SyntaxKind { SelfType, /// `(i32, foo::Bar)` TupleType, + /// `[i32; 4]` + ArrayType, // Paths. These are non-leaf nodes. /// `Segment1::Segment2` @@ -408,6 +412,14 @@ impl SyntaxKind { pub fn is_trivia(self) -> bool { matches!(self, SyntaxKind::WhiteSpace | SyntaxKind::Comment) } + + /// Returns `true` if the token is a literal leaf. 
+ pub fn is_literal_leaf(self) -> bool { + matches!( + self, + SyntaxKind::Int | SyntaxKind::String | SyntaxKind::TrueKw | SyntaxKind::FalseKw + ) + } } impl From for rowan::SyntaxKind { From fb125b38ced115dd0d09188a7707f7fc068005ea Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 23 Jan 2023 20:38:33 +0100 Subject: [PATCH 027/678] Modify pattern syntax node --- crates/parser2/src/parser/pat.rs | 86 ++++--- crates/parser2/src/syntax_kind.rs | 8 + .../test_files/syntax_node/pats/or.snap | 126 +++++----- .../syntax_node/pats/path_tuple.snap | 223 ++++++++++-------- .../test_files/syntax_node/pats/record.fe | 7 + .../test_files/syntax_node/pats/record.snap | 130 ++++++++++ 6 files changed, 386 insertions(+), 194 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/pats/record.fe create mode 100644 crates/parser2/test_files/syntax_node/pats/record.snap diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index 6dab9c8acb..a3834064d4 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -1,8 +1,6 @@ -use std::cell::RefCell; - use crate::SyntaxKind; -use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser, RecoveryMethod}; +use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser}; pub fn parse_pat(parser: &mut Parser) -> bool { use SyntaxKind::*; @@ -48,17 +46,24 @@ impl super::Parse for LitPatScope { } } -define_scope! { TuplePatScope, TuplePat, Override(RParen) } +define_scope! { TuplePatScope, TuplePat, Inheritance } impl super::Parse for TuplePatScope { + fn parse(&mut self, parser: &mut Parser) { + parser.parse(TuplePatElemListScope::default(), None); + } +} + +define_scope! { TuplePatElemListScope, TuplePatElemList, Override(RParen) } +impl super::Parse for TuplePatElemListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); if parser.bump_if(SyntaxKind::RParen) { return; } - parse_pat(parser); + parser.parse(TuplePatElemScope::default(), None); while parser.bump_if(SyntaxKind::Comma) { - parse_pat(parser); + parser.parse(TuplePatElemScope::default(), None); } if !parser.bump_if(SyntaxKind::RParen) { @@ -68,31 +73,14 @@ impl super::Parse for TuplePatScope { } } -// We can't use `define_scope` here since the `syntax_kind` of the scope can be -// determined after parsing. -#[derive(Debug, Clone)] -struct PathPatScope { - syntax_kind: RefCell, - recovery_method: RecoveryMethod, -} -impl Default for PathPatScope { - fn default() -> Self { - Self { - syntax_kind: SyntaxKind::PathPat.into(), - recovery_method: RecoveryMethod::inheritance(&[SyntaxKind::Pipe]), - } +define_scope! { TuplePatElemScope, TuplePatElem, Inheritance } +impl super::Parse for TuplePatElemScope { + fn parse(&mut self, parser: &mut Parser) { + parse_pat(parser); } } -impl super::ParsingScope for PathPatScope { - /// Returns the recovery method of the current scope. - fn recovery_method(&self) -> &RecoveryMethod { - &self.recovery_method - } - fn syntax_kind(&self) -> SyntaxKind { - *self.syntax_kind.borrow() - } -} +define_scope! 
{ PathPatScope, PathPat, Inheritance(Pipe) } impl super::Parse for PathPatScope { fn parse(&mut self, parser: &mut Parser) { if !parser.parse(PathScope::default(), None).0 { @@ -101,9 +89,47 @@ impl super::Parse for PathPatScope { parser.set_newline_as_trivia(false); if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(TuplePatScope::default(), None); - *self.syntax_kind.borrow_mut() = SyntaxKind::PathTuplePat; + self.set_kind(SyntaxKind::PathTuplePat); + parser.parse(TuplePatElemListScope::default(), None); + } else if parser.current_kind() == Some(SyntaxKind::LBrace) { + self.set_kind(SyntaxKind::RecordPat); + parser.parse(RecordPatFieldListScope::default(), None); + } + } +} + +define_scope! { RecordPatFieldListScope, RecordPatFieldList, Override(Comma, RBrace) } +impl super::Parse for RecordPatFieldListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + if parser.bump_if(SyntaxKind::RBrace) { + return; } + + parser.parse(RecordPatFieldScope::default(), None); + while parser.bump_if(SyntaxKind::Comma) { + parser.parse(RecordPatFieldScope::default(), None); + } + parser.remove_recovery_token(SyntaxKind::Comma); + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_recover("expected `}`", None); + parser.bump_if(SyntaxKind::RBrace); + } + } +} + +define_scope! { RecordPatFieldScope, RecordPatField, Override(Comma, RBrace) } +impl super::Parse for RecordPatFieldScope { + fn parse(&mut self, parser: &mut Parser) { + parser.start_dry_run(); + let has_label = parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon); + parser.end_dry_run(); + if has_label { + parser.bump_expected(SyntaxKind::Ident); + parser.bump_expected(SyntaxKind::Colon); + } + parse_pat(parser); } } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 3c5bb91fa2..f11e76e49a 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -309,12 +309,20 @@ pub enum SyntaxKind { LitPat, /// `(x, y)` TuplePat, + /// `(x, y)` + TuplePatElemList, + /// `x` + TuplePatElem, /// `Enum::Variant` PathPat, /// `Enum::Variant(x, y)` PathTuplePat, /// `Struct {x, y}` RecordPat, + /// `{a: b, y}` + RecordPatFieldList, + /// `a: b` + RecordPatField, /// `pat1 | pat2` OrPat, diff --git a/crates/parser2/test_files/syntax_node/pats/or.snap b/crates/parser2/test_files/syntax_node/pats/or.snap index ba8befa9a6..acabcd61ff 100644 --- a/crates/parser2/test_files/syntax_node/pats/or.snap +++ b/crates/parser2/test_files/syntax_node/pats/or.snap @@ -23,104 +23,110 @@ Root@0..117 Ident@16..19 "Baz" Newline@19..21 "\n\n" OrPat@21..51 - PathPat@21..36 + PathTuplePat@21..36 Path@21..29 PathSegment@21..24 Ident@21..24 "Foo" Colon2@24..26 "::" PathSegment@26..29 Ident@26..29 "Bar" - TuplePat@29..36 + TuplePatElemList@29..36 LParen@29..30 "(" - OrPat@30..35 - LitPat@30..31 - Int@30..31 "1" - WhiteSpace@31..32 " " - Pipe@32..33 "|" - WhiteSpace@33..34 " " - LitPat@34..35 - Int@34..35 "2" + TuplePatElem@30..35 + OrPat@30..35 + LitPat@30..31 + Int@30..31 "1" + WhiteSpace@31..32 " " + Pipe@32..33 "|" + WhiteSpace@33..34 " " + LitPat@34..35 + Int@34..35 "2" RParen@35..36 ")" WhiteSpace@36..37 " " Pipe@37..38 "|" WhiteSpace@38..39 " " - PathPat@39..51 + PathTuplePat@39..51 Path@39..47 PathSegment@39..42 Ident@39..42 "Foo" Colon2@42..44 "::" PathSegment@44..47 Ident@44..47 "Baz" - TuplePat@47..51 + TuplePatElemList@47..51 LParen@47..48 "(" - RestPat@48..50 - Dot2@48..50 ".." 
+ TuplePatElem@48..50 + RestPat@48..50 + Dot2@48..50 ".." RParen@50..51 ")" Newline@51..53 "\n\n" OrPat@53..117 - PathPat@53..68 + PathTuplePat@53..68 Path@53..61 PathSegment@53..56 Ident@53..56 "Foo" Colon2@56..58 "::" PathSegment@58..61 Ident@58..61 "Bar" - TuplePat@61..68 + TuplePatElemList@61..68 LParen@61..62 "(" - OrPat@62..67 - LitPat@62..63 - Int@62..63 "1" - WhiteSpace@63..64 " " - Pipe@64..65 "|" - WhiteSpace@65..66 " " - LitPat@66..67 - Int@66..67 "2" + TuplePatElem@62..67 + OrPat@62..67 + LitPat@62..63 + Int@62..63 "1" + WhiteSpace@63..64 " " + Pipe@64..65 "|" + WhiteSpace@65..66 " " + LitPat@66..67 + Int@66..67 "2" RParen@67..68 ")" WhiteSpace@68..69 " " Pipe@69..70 "|" WhiteSpace@70..71 " " - PathPat@71..117 + PathTuplePat@71..117 Path@71..79 PathSegment@71..74 Ident@71..74 "Foo" Colon2@74..76 "::" PathSegment@76..79 Ident@76..79 "Baz" - TuplePat@79..117 + TuplePatElemList@79..117 LParen@79..80 "(" - OrPat@80..116 - PathPat@80..95 - Path@80..88 - PathSegment@80..83 - Ident@80..83 "Foo" - Colon2@83..85 "::" - PathSegment@85..88 - Ident@85..88 "Bar" - TuplePat@88..95 - LParen@88..89 "(" - OrPat@89..94 - LitPat@89..90 - Int@89..90 "1" - WhiteSpace@90..91 " " - Pipe@91..92 "|" - WhiteSpace@92..93 " " - LitPat@93..94 - Int@93..94 "2" - RParen@94..95 ")" - WhiteSpace@95..96 " " - Pipe@96..97 "|" - WhiteSpace@97..98 " " - PathPat@98..116 - Path@98..106 - PathSegment@98..101 - Ident@98..101 "Bar" - Colon2@101..103 "::" - PathSegment@103..106 - Ident@103..106 "Baz" - TuplePat@106..116 - LParen@106..107 "(" - LitPat@107..115 - String@107..115 "\"STRING\"" - RParen@115..116 ")" + TuplePatElem@80..116 + OrPat@80..116 + PathTuplePat@80..95 + Path@80..88 + PathSegment@80..83 + Ident@80..83 "Foo" + Colon2@83..85 "::" + PathSegment@85..88 + Ident@85..88 "Bar" + TuplePatElemList@88..95 + LParen@88..89 "(" + TuplePatElem@89..94 + OrPat@89..94 + LitPat@89..90 + Int@89..90 "1" + WhiteSpace@90..91 " " + Pipe@91..92 "|" + WhiteSpace@92..93 " " + LitPat@93..94 + Int@93..94 "2" + RParen@94..95 ")" + WhiteSpace@95..96 " " + Pipe@96..97 "|" + WhiteSpace@97..98 " " + PathTuplePat@98..116 + Path@98..106 + PathSegment@98..101 + Ident@98..101 "Bar" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Baz" + TuplePatElemList@106..116 + LParen@106..107 "(" + TuplePatElem@107..115 + LitPat@107..115 + String@107..115 "\"STRING\"" + RParen@115..116 ")" RParen@116..117 ")" diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap index 9c94d2dc53..1c6db30315 100644 --- a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap @@ -3,179 +3,194 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- Root@0..203 - PathPat@0..7 + PathTuplePat@0..7 Path@0..5 PathSegment@0..5 Ident@0..5 "Empty" - TuplePat@5..7 + TuplePatElemList@5..7 LParen@5..6 "(" RParen@6..7 ")" Newline@7..9 "\n\n" - PathPat@9..24 + PathTuplePat@9..24 Path@9..22 PathSegment@9..15 Ident@9..15 "MyEnum" Colon2@15..17 "::" PathSegment@17..22 Ident@17..22 "Empty" - TuplePat@22..24 + TuplePatElemList@22..24 LParen@22..23 "(" RParen@23..24 ")" Newline@24..26 "\n\n" - PathPat@26..67 + PathTuplePat@26..67 Path@26..37 PathSegment@26..32 Ident@26..32 "MyEnum" Colon2@32..34 "::" PathSegment@34..37 Ident@34..37 "Foo" - TuplePat@37..67 + TuplePatElemList@37..67 LParen@37..38 "(" - PathPat@38..44 - Path@38..44 - PathSegment@38..39 - Ident@38..39 "X" - Colon2@39..41 "::" - PathSegment@41..44 - 
Ident@41..44 "Foo" + TuplePatElem@38..44 + PathPat@38..44 + Path@38..44 + PathSegment@38..39 + Ident@38..39 "X" + Colon2@39..41 "::" + PathSegment@41..44 + Ident@41..44 "Foo" Comma@44..45 "," WhiteSpace@45..46 " " - PathPat@46..58 - Path@46..52 - PathSegment@46..47 - Ident@46..47 "Z" - Colon2@47..49 "::" - PathSegment@49..52 - Ident@49..52 "Bar" - TuplePat@52..58 - LParen@52..53 "(" - LitPat@53..54 - Int@53..54 "1" - Comma@54..55 "," - WhiteSpace@55..56 " " - LitPat@56..57 - Int@56..57 "2" - RParen@57..58 ")" + TuplePatElem@46..58 + PathTuplePat@46..58 + Path@46..52 + PathSegment@46..47 + Ident@46..47 "Z" + Colon2@47..49 "::" + PathSegment@49..52 + Ident@49..52 "Bar" + TuplePatElemList@52..58 + LParen@52..53 "(" + TuplePatElem@53..54 + LitPat@53..54 + Int@53..54 "1" + Comma@54..55 "," + WhiteSpace@55..56 " " + TuplePatElem@56..57 + LitPat@56..57 + Int@56..57 "2" + RParen@57..58 ")" Comma@58..59 "," WhiteSpace@59..61 " " - WildCardPat@61..62 - Underscore@61..62 "_" + TuplePatElem@61..62 + WildCardPat@61..62 + Underscore@61..62 "_" Comma@62..63 "," WhiteSpace@63..64 " " - RestPat@64..66 - Dot2@64..66 ".." + TuplePatElem@64..66 + RestPat@64..66 + Dot2@64..66 ".." RParen@66..67 ")" Newline@67..69 "\n\n" - PathPat@69..133 + PathTuplePat@69..133 Path@69..81 PathSegment@69..75 Ident@69..75 "MyEnum" Colon2@75..77 "::" PathSegment@77..81 Ident@77..81 "Foo2" - TuplePat@81..133 + TuplePatElemList@81..133 LParen@81..82 "(" Newline@82..83 "\n" WhiteSpace@83..87 " " - PathPat@87..93 - Path@87..93 - PathSegment@87..88 - Ident@87..88 "X" - Colon2@88..90 "::" - PathSegment@90..93 - Ident@90..93 "Foo" + TuplePatElem@87..93 + PathPat@87..93 + Path@87..93 + PathSegment@87..88 + Ident@87..88 "X" + Colon2@88..90 "::" + PathSegment@90..93 + Ident@90..93 "Foo" Comma@93..94 "," WhiteSpace@94..95 " " Newline@95..96 "\n" WhiteSpace@96..100 " " - PathPat@100..112 - Path@100..106 - PathSegment@100..101 - Ident@100..101 "Z" - Colon2@101..103 "::" - PathSegment@103..106 - Ident@103..106 "Bar" - TuplePat@106..112 - LParen@106..107 "(" - LitPat@107..108 - Int@107..108 "1" - Comma@108..109 "," - WhiteSpace@109..110 " " - LitPat@110..111 - Int@110..111 "2" - RParen@111..112 ")" + TuplePatElem@100..112 + PathTuplePat@100..112 + Path@100..106 + PathSegment@100..101 + Ident@100..101 "Z" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Bar" + TuplePatElemList@106..112 + LParen@106..107 "(" + TuplePatElem@107..108 + LitPat@107..108 + Int@107..108 "1" + Comma@108..109 "," + WhiteSpace@109..110 " " + TuplePatElem@110..111 + LitPat@110..111 + Int@110..111 "2" + RParen@111..112 ")" Comma@112..113 "," WhiteSpace@113..114 " " Newline@114..115 "\n" WhiteSpace@115..120 " " - WildCardPat@120..121 - Underscore@120..121 "_" + TuplePatElem@120..121 + WildCardPat@120..121 + Underscore@120..121 "_" Comma@121..122 "," WhiteSpace@122..123 " " Newline@123..124 "\n" WhiteSpace@124..129 " " - RestPat@129..131 - Dot2@129..131 ".." + TuplePatElem@129..131 + RestPat@129..131 + Dot2@129..131 ".." 
Newline@131..132 "\n" RParen@132..133 ")" Newline@133..135 "\n\n" - PathPat@135..150 + PathTuplePat@135..150 Path@135..147 PathSegment@135..141 Ident@135..141 "MyEnum" Colon2@141..143 "::" PathSegment@143..147 Ident@143..147 "Bind" - TuplePat@147..150 + TuplePatElemList@147..150 LParen@147..148 "(" - PathPat@148..149 - Path@148..149 - PathSegment@148..149 - Ident@148..149 "x" + TuplePatElem@148..149 + PathPat@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "x" RParen@149..150 ")" Newline@150..152 "\n\n" - PathPat@152..203 + PathTuplePat@152..203 Path@152..167 PathSegment@152..158 Ident@152..158 "MyEnum" Colon2@158..160 "::" PathSegment@160..167 Ident@160..167 "OrTuple" - TuplePat@167..203 + TuplePatElemList@167..203 LParen@167..168 "(" - OrPat@168..202 - PathPat@168..176 - Path@168..176 - PathSegment@168..171 - Ident@168..171 "Int" - Colon2@171..173 "::" - PathSegment@173..176 - Ident@173..176 "I32" - WhiteSpace@176..177 " " - Pipe@177..178 "|" - WhiteSpace@178..179 " " - OrPat@179..202 - PathPat@179..187 - Path@179..187 - PathSegment@179..182 - Ident@179..182 "Int" - Colon2@182..184 "::" - PathSegment@184..187 - Ident@184..187 "I64" - WhiteSpace@187..188 " " - Pipe@188..189 "|" - WhiteSpace@189..190 " " - PathPat@190..202 - Path@190..198 - PathSegment@190..193 - Ident@190..193 "Int" - Colon2@193..195 "::" - PathSegment@195..198 - Ident@195..198 "Any" - TuplePat@198..202 - LParen@198..199 "(" - LitPat@199..201 - Int@199..201 "10" - RParen@201..202 ")" + TuplePatElem@168..202 + OrPat@168..202 + PathPat@168..176 + Path@168..176 + PathSegment@168..171 + Ident@168..171 "Int" + Colon2@171..173 "::" + PathSegment@173..176 + Ident@173..176 "I32" + WhiteSpace@176..177 " " + Pipe@177..178 "|" + WhiteSpace@178..179 " " + OrPat@179..202 + PathPat@179..187 + Path@179..187 + PathSegment@179..182 + Ident@179..182 "Int" + Colon2@182..184 "::" + PathSegment@184..187 + Ident@184..187 "I64" + WhiteSpace@187..188 " " + Pipe@188..189 "|" + WhiteSpace@189..190 " " + PathTuplePat@190..202 + Path@190..198 + PathSegment@190..193 + Ident@190..193 "Int" + Colon2@193..195 "::" + PathSegment@195..198 + Ident@195..198 "Any" + TuplePatElemList@198..202 + LParen@198..199 "(" + TuplePatElem@199..201 + LitPat@199..201 + Int@199..201 "10" + RParen@201..202 ")" RParen@202..203 ")" diff --git a/crates/parser2/test_files/syntax_node/pats/record.fe b/crates/parser2/test_files/syntax_node/pats/record.fe new file mode 100644 index 0000000000..e207b4de03 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/record.fe @@ -0,0 +1,7 @@ +Record {} + +foo::Empty { } + +Record { a, b } +Record { a: x, b: y } +Record {x: (1, a), Foo {x, y} } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/record.snap b/crates/parser2/test_files/syntax_node/pats/record.snap new file mode 100644 index 0000000000..01ff9af07b --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/record.snap @@ -0,0 +1,130 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..96 + RecordPat@0..9 + Path@0..6 + PathSegment@0..6 + Ident@0..6 "Record" + WhiteSpace@6..7 " " + RecordPatFieldList@7..9 + LBrace@7..8 "{" + RBrace@8..9 "}" + Newline@9..11 "\n\n" + RecordPat@11..25 + Path@11..21 + PathSegment@11..14 + Ident@11..14 "foo" + Colon2@14..16 "::" + PathSegment@16..21 + Ident@16..21 "Empty" + WhiteSpace@21..22 " " + RecordPatFieldList@22..25 + LBrace@22..23 "{" + WhiteSpace@23..24 " " + RBrace@24..25 "}" + Newline@25..27 "\n\n" + RecordPat@27..42 + Path@27..33 + 
PathSegment@27..33 + Ident@27..33 "Record" + WhiteSpace@33..34 " " + RecordPatFieldList@34..42 + LBrace@34..35 "{" + WhiteSpace@35..36 " " + RecordPatField@36..37 + PathPat@36..37 + Path@36..37 + PathSegment@36..37 + Ident@36..37 "a" + Comma@37..38 "," + WhiteSpace@38..39 " " + RecordPatField@39..40 + PathPat@39..40 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "b" + WhiteSpace@40..41 " " + RBrace@41..42 "}" + Newline@42..43 "\n" + RecordPat@43..64 + Path@43..49 + PathSegment@43..49 + Ident@43..49 "Record" + WhiteSpace@49..50 " " + RecordPatFieldList@50..64 + LBrace@50..51 "{" + WhiteSpace@51..52 " " + RecordPatField@52..56 + Ident@52..53 "a" + Colon@53..54 ":" + WhiteSpace@54..55 " " + PathPat@55..56 + Path@55..56 + PathSegment@55..56 + Ident@55..56 "x" + Comma@56..57 "," + WhiteSpace@57..58 " " + RecordPatField@58..62 + Ident@58..59 "b" + Colon@59..60 ":" + WhiteSpace@60..61 " " + PathPat@61..62 + Path@61..62 + PathSegment@61..62 + Ident@61..62 "y" + WhiteSpace@62..63 " " + RBrace@63..64 "}" + Newline@64..65 "\n" + RecordPat@65..96 + Path@65..71 + PathSegment@65..71 + Ident@65..71 "Record" + WhiteSpace@71..72 " " + RecordPatFieldList@72..96 + LBrace@72..73 "{" + RecordPatField@73..82 + Ident@73..74 "x" + Colon@74..75 ":" + WhiteSpace@75..76 " " + TuplePat@76..82 + TuplePatElemList@76..82 + LParen@76..77 "(" + TuplePatElem@77..78 + LitPat@77..78 + Int@77..78 "1" + Comma@78..79 "," + WhiteSpace@79..80 " " + TuplePatElem@80..81 + PathPat@80..81 + Path@80..81 + PathSegment@80..81 + Ident@80..81 "a" + RParen@81..82 ")" + Comma@82..83 "," + WhiteSpace@83..84 " " + RecordPatField@84..94 + RecordPat@84..94 + Path@84..87 + PathSegment@84..87 + Ident@84..87 "Foo" + WhiteSpace@87..88 " " + RecordPatFieldList@88..94 + LBrace@88..89 "{" + RecordPatField@89..90 + PathPat@89..90 + Path@89..90 + PathSegment@89..90 + Ident@89..90 "x" + Comma@90..91 "," + WhiteSpace@91..92 " " + RecordPatField@92..93 + PathPat@92..93 + Path@92..93 + PathSegment@92..93 + Ident@92..93 "y" + RBrace@93..94 "}" + WhiteSpace@94..95 " " + RBrace@95..96 "}" + From 4e73332e673852407c3355749369ac8fbd0336a9 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 23 Jan 2023 20:51:31 +0100 Subject: [PATCH 028/678] Add tests for `expr` --- crates/parser2/src/parser/expr.rs | 3 +- crates/parser2/src/parser/token_stream.rs | 2 +- .../test_files/syntax_node/exprs/array.fe | 2 + .../test_files/syntax_node/exprs/array.snap | 35 ++ .../test_files/syntax_node/exprs/binop.fe | 11 + .../test_files/syntax_node/exprs/binop.snap | 186 ++++++ .../test_files/syntax_node/exprs/call.fe | 9 + .../test_files/syntax_node/exprs/call.snap | 212 +++++++ .../test_files/syntax_node/exprs/if_.fe | 33 + .../test_files/syntax_node/exprs/if_.snap | 256 ++++++++ .../test_files/syntax_node/exprs/match_.fe | 38 ++ .../test_files/syntax_node/exprs/match_.snap | 583 ++++++++++++++++++ .../test_files/syntax_node/exprs/method.fe | 7 + .../test_files/syntax_node/exprs/method.snap | 127 ++++ crates/parser2/tests/syntax_node.rs | 27 +- 15 files changed, 1528 insertions(+), 3 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/exprs/array.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/array.snap create mode 100644 crates/parser2/test_files/syntax_node/exprs/binop.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/binop.snap create mode 100644 crates/parser2/test_files/syntax_node/exprs/call.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/call.snap create mode 100644 
crates/parser2/test_files/syntax_node/exprs/if_.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/if_.snap create mode 100644 crates/parser2/test_files/syntax_node/exprs/match_.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/match_.snap create mode 100644 crates/parser2/test_files/syntax_node/exprs/method.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/method.snap diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index 69b788d09f..13824f2096 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -191,6 +191,7 @@ impl super::Parse for BinExprScope { define_scope! { IndexExprScope, IndexExpr, Override(RBracket) } impl super::Parse for IndexExprScope { fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBracket); parse_expr(parser); if !parser.bump_if(SyntaxKind::RBracket) { parser.error_and_recover("expected `]`", None); @@ -220,7 +221,7 @@ impl super::Parse for MethodExprScope { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::Dot); - if parser.bump_if(SyntaxKind::Ident) { + if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected identifier", None); } diff --git a/crates/parser2/src/parser/token_stream.rs b/crates/parser2/src/parser/token_stream.rs index c6ade355ad..aa123ba84b 100644 --- a/crates/parser2/src/parser/token_stream.rs +++ b/crates/parser2/src/parser/token_stream.rs @@ -91,7 +91,7 @@ impl BackTrackableTokenStream { /// point. pub fn set_bt_point(&mut self) { if self.has_parent() { - self.bt_points.push(self.bt_buffer.len()); + self.bt_points.push(self.bt_cursor.unwrap()); } else { self.bt_points.push(0); } diff --git a/crates/parser2/test_files/syntax_node/exprs/array.fe b/crates/parser2/test_files/syntax_node/exprs/array.fe new file mode 100644 index 0000000000..df749291b9 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/array.fe @@ -0,0 +1,2 @@ +[1, {1 + 2}] +[1; 16] \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/array.snap b/crates/parser2/test_files/syntax_node/exprs/array.snap new file mode 100644 index 0000000000..8eb26cd584 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/array.snap @@ -0,0 +1,35 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..20 + ArrayExpr@0..12 + LBracket@0..1 "[" + LitExpr@1..2 + Int@1..2 "1" + Comma@2..3 "," + WhiteSpace@3..4 " " + BlockExpr@4..11 + LBrace@4..5 "{" + ExprStmt@5..10 + BinExpr@5..10 + LitExpr@5..6 + Int@5..6 "1" + WhiteSpace@6..7 " " + Plus@7..8 "+" + WhiteSpace@8..9 " " + LitExpr@9..10 + Int@9..10 "2" + RBrace@10..11 "}" + RBracket@11..12 "]" + Newline@12..13 "\n" + ArrayRepExpr@13..20 + LBracket@13..14 "[" + LitExpr@14..15 + Int@14..15 "1" + SemiColon@15..16 ";" + WhiteSpace@16..17 " " + LitExpr@17..19 + Int@17..19 "16" + RBracket@19..20 "]" + diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.fe b/crates/parser2/test_files/syntax_node/exprs/binop.fe new file mode 100644 index 0000000000..e83e6a9ad9 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/binop.fe @@ -0,0 +1,11 @@ +1 + 2 * 3 +1 * 2 + 3 +1 < 2 +1 < (2 + 3) +true || false && 1 < 2 +true || false && (1 < 2) > 3 ^ 2 +a ** 2 ** 3 +1 - 2 - 3 +1 << 3 >> 2 +a.b.c +a.0.c diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.snap b/crates/parser2/test_files/syntax_node/exprs/binop.snap new file mode 100644 index 0000000000..126f588e59 --- /dev/null +++ 
b/crates/parser2/test_files/syntax_node/exprs/binop.snap @@ -0,0 +1,186 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..140 + BinExpr@0..9 + LitExpr@0..1 + Int@0..1 "1" + WhiteSpace@1..2 " " + Plus@2..3 "+" + WhiteSpace@3..4 " " + BinExpr@4..9 + LitExpr@4..5 + Int@4..5 "2" + WhiteSpace@5..6 " " + Star@6..7 "*" + WhiteSpace@7..8 " " + LitExpr@8..9 + Int@8..9 "3" + Newline@9..10 "\n" + BinExpr@10..19 + BinExpr@10..15 + LitExpr@10..11 + Int@10..11 "1" + WhiteSpace@11..12 " " + Star@12..13 "*" + WhiteSpace@13..14 " " + LitExpr@14..15 + Int@14..15 "2" + WhiteSpace@15..16 " " + Plus@16..17 "+" + WhiteSpace@17..18 " " + LitExpr@18..19 + Int@18..19 "3" + Newline@19..20 "\n" + BinExpr@20..25 + LitExpr@20..21 + Int@20..21 "1" + WhiteSpace@21..22 " " + Lt@22..23 "<" + WhiteSpace@23..24 " " + LitExpr@24..25 + Int@24..25 "2" + Newline@25..26 "\n" + BinExpr@26..37 + LitExpr@26..27 + Int@26..27 "1" + WhiteSpace@27..28 " " + Lt@28..29 "<" + WhiteSpace@29..30 " " + ParenExpr@30..37 + LParen@30..31 "(" + BinExpr@31..36 + LitExpr@31..32 + Int@31..32 "2" + WhiteSpace@32..33 " " + Plus@33..34 "+" + WhiteSpace@34..35 " " + LitExpr@35..36 + Int@35..36 "3" + RParen@36..37 ")" + Newline@37..38 "\n" + BinExpr@38..60 + LitExpr@38..42 + TrueKw@38..42 "true" + WhiteSpace@42..43 " " + Pipe2@43..45 "||" + WhiteSpace@45..46 " " + BinExpr@46..60 + LitExpr@46..51 + FalseKw@46..51 "false" + WhiteSpace@51..52 " " + Amp2@52..54 "&&" + WhiteSpace@54..55 " " + BinExpr@55..60 + LitExpr@55..56 + Int@55..56 "1" + WhiteSpace@56..57 " " + Lt@57..58 "<" + WhiteSpace@58..59 " " + LitExpr@59..60 + Int@59..60 "2" + Newline@60..61 "\n" + BinExpr@61..93 + LitExpr@61..65 + TrueKw@61..65 "true" + WhiteSpace@65..66 " " + Pipe2@66..68 "||" + WhiteSpace@68..69 " " + BinExpr@69..93 + LitExpr@69..74 + FalseKw@69..74 "false" + WhiteSpace@74..75 " " + Amp2@75..77 "&&" + WhiteSpace@77..78 " " + BinExpr@78..93 + ParenExpr@78..85 + LParen@78..79 "(" + BinExpr@79..84 + LitExpr@79..80 + Int@79..80 "1" + WhiteSpace@80..81 " " + Lt@81..82 "<" + WhiteSpace@82..83 " " + LitExpr@83..84 + Int@83..84 "2" + RParen@84..85 ")" + WhiteSpace@85..86 " " + Gt@86..87 ">" + WhiteSpace@87..88 " " + BinExpr@88..93 + LitExpr@88..89 + Int@88..89 "3" + WhiteSpace@89..90 " " + Hat@90..91 "^" + WhiteSpace@91..92 " " + LitExpr@92..93 + Int@92..93 "2" + Newline@93..94 "\n" + BinExpr@94..105 + Path@94..95 + PathSegment@94..95 + Ident@94..95 "a" + WhiteSpace@95..96 " " + Star2@96..98 "**" + WhiteSpace@98..99 " " + BinExpr@99..105 + LitExpr@99..100 + Int@99..100 "2" + WhiteSpace@100..101 " " + Star2@101..103 "**" + WhiteSpace@103..104 " " + LitExpr@104..105 + Int@104..105 "3" + Newline@105..106 "\n" + BinExpr@106..115 + BinExpr@106..111 + LitExpr@106..107 + Int@106..107 "1" + WhiteSpace@107..108 " " + Minus@108..109 "-" + WhiteSpace@109..110 " " + LitExpr@110..111 + Int@110..111 "2" + WhiteSpace@111..112 " " + Minus@112..113 "-" + WhiteSpace@113..114 " " + LitExpr@114..115 + Int@114..115 "3" + Newline@115..116 "\n" + BinExpr@116..127 + BinExpr@116..122 + LitExpr@116..117 + Int@116..117 "1" + WhiteSpace@117..118 " " + Lt2@118..120 "<<" + WhiteSpace@120..121 " " + LitExpr@121..122 + Int@121..122 "3" + WhiteSpace@122..123 " " + Gt2@123..125 ">>" + WhiteSpace@125..126 " " + LitExpr@126..127 + Int@126..127 "2" + Newline@127..128 "\n" + FieldExpr@128..133 + FieldExpr@128..131 + Path@128..129 + PathSegment@128..129 + Ident@128..129 "a" + Dot@129..130 "." + Ident@130..131 "b" + Dot@131..132 "." 
+ Ident@132..133 "c" + Newline@133..134 "\n" + FieldExpr@134..139 + FieldExpr@134..137 + Path@134..135 + PathSegment@134..135 + Ident@134..135 "a" + Dot@135..136 "." + Int@136..137 "0" + Dot@137..138 "." + Ident@138..139 "c" + Newline@139..140 "\n" + diff --git a/crates/parser2/test_files/syntax_node/exprs/call.fe b/crates/parser2/test_files/syntax_node/exprs/call.fe new file mode 100644 index 0000000000..3b0fa29d55 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/call.fe @@ -0,0 +1,9 @@ +foo() +foo::Bar() +foo(x: 1, z: 3) +foo(x: 1, z: 3) +foo(x: 1, 2, z: 3) +foo(1, y: 2, z: 3) + +foo(val1: 2, val2: "String") +foo<[u32; 1], {3 + 4}>(x: 1, y: 2) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/call.snap b/crates/parser2/test_files/syntax_node/exprs/call.snap new file mode 100644 index 0000000000..948017628c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/call.snap @@ -0,0 +1,212 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..169 + CallExpr@0..5 + Path@0..3 + PathSegment@0..3 + Ident@0..3 "foo" + CallArgList@3..5 + LParen@3..4 "(" + RParen@4..5 ")" + Newline@5..6 "\n" + CallExpr@6..16 + Path@6..14 + PathSegment@6..9 + Ident@6..9 "foo" + Colon2@9..11 "::" + PathSegment@11..14 + Ident@11..14 "Bar" + CallArgList@14..16 + LParen@14..15 "(" + RParen@15..16 ")" + Newline@16..17 "\n" + CallExpr@17..32 + Path@17..20 + PathSegment@17..20 + Ident@17..20 "foo" + CallArgList@20..32 + LParen@20..21 "(" + CallArg@21..25 + Ident@21..22 "x" + Colon@22..23 ":" + WhiteSpace@23..24 " " + LitExpr@24..25 + Int@24..25 "1" + Comma@25..26 "," + WhiteSpace@26..27 " " + CallArg@27..31 + Ident@27..28 "z" + Colon@28..29 ":" + WhiteSpace@29..30 " " + LitExpr@30..31 + Int@30..31 "3" + RParen@31..32 ")" + Newline@32..33 "\n" + CallExpr@33..48 + Path@33..36 + PathSegment@33..36 + Ident@33..36 "foo" + CallArgList@36..48 + LParen@36..37 "(" + CallArg@37..41 + Ident@37..38 "x" + Colon@38..39 ":" + WhiteSpace@39..40 " " + LitExpr@40..41 + Int@40..41 "1" + Comma@41..42 "," + WhiteSpace@42..43 " " + CallArg@43..47 + Ident@43..44 "z" + Colon@44..45 ":" + WhiteSpace@45..46 " " + LitExpr@46..47 + Int@46..47 "3" + RParen@47..48 ")" + Newline@48..49 "\n" + CallExpr@49..67 + Path@49..52 + PathSegment@49..52 + Ident@49..52 "foo" + CallArgList@52..67 + LParen@52..53 "(" + CallArg@53..57 + Ident@53..54 "x" + Colon@54..55 ":" + WhiteSpace@55..56 " " + LitExpr@56..57 + Int@56..57 "1" + Comma@57..58 "," + WhiteSpace@58..59 " " + CallArg@59..60 + LitExpr@59..60 + Int@59..60 "2" + Comma@60..61 "," + WhiteSpace@61..62 " " + CallArg@62..66 + Ident@62..63 "z" + Colon@63..64 ":" + WhiteSpace@64..65 " " + LitExpr@65..66 + Int@65..66 "3" + RParen@66..67 ")" + Newline@67..68 "\n" + CallExpr@68..86 + Path@68..71 + PathSegment@68..71 + Ident@68..71 "foo" + CallArgList@71..86 + LParen@71..72 "(" + CallArg@72..73 + LitExpr@72..73 + Int@72..73 "1" + Comma@73..74 "," + WhiteSpace@74..75 " " + CallArg@75..79 + Ident@75..76 "y" + Colon@76..77 ":" + WhiteSpace@77..78 " " + LitExpr@78..79 + Int@78..79 "2" + Comma@79..80 "," + WhiteSpace@80..81 " " + CallArg@81..85 + Ident@81..82 "z" + Colon@82..83 ":" + WhiteSpace@83..84 " " + LitExpr@84..85 + Int@84..85 "3" + RParen@85..86 ")" + Newline@86..88 "\n\n" + CallExpr@88..134 + Path@88..91 + PathSegment@88..91 + Ident@88..91 "foo" + GenericParamList@91..109 + Lt@91..92 "<" + GenericParam@92..95 + Path@92..95 + PathSegment@92..95 + Ident@92..95 "i32" + Comma@95..96 "," + WhiteSpace@96..97 " " + 
GenericParam@97..108 + Path@97..108 + PathSegment@97..100 + Ident@97..100 "foo" + Colon2@100..102 "::" + PathSegment@102..108 + Ident@102..108 "MyType" + Gt@108..109 ">" + CallArgList@109..134 + LParen@109..110 "(" + CallArg@110..117 + Ident@110..114 "val1" + Colon@114..115 ":" + WhiteSpace@115..116 " " + LitExpr@116..117 + Int@116..117 "2" + Comma@117..118 "," + WhiteSpace@118..119 " " + CallArg@119..133 + Ident@119..123 "val2" + Colon@123..124 ":" + WhiteSpace@124..125 " " + LitExpr@125..133 + String@125..133 "\"String\"" + RParen@133..134 ")" + Newline@134..135 "\n" + CallExpr@135..169 + Path@135..138 + PathSegment@135..138 + Ident@135..138 "foo" + GenericParamList@138..157 + Lt@138..139 "<" + GenericParam@139..147 + ArrayType@139..147 + LBracket@139..140 "[" + PathType@140..143 + Path@140..143 + PathSegment@140..143 + Ident@140..143 "u32" + SemiColon@143..144 ";" + WhiteSpace@144..145 " " + LitExpr@145..146 + Int@145..146 "1" + RBracket@146..147 "]" + Comma@147..148 "," + WhiteSpace@148..149 " " + GenericParam@149..156 + BlockExpr@149..156 + LBrace@149..150 "{" + ExprStmt@150..155 + BinExpr@150..155 + LitExpr@150..151 + Int@150..151 "3" + WhiteSpace@151..152 " " + Plus@152..153 "+" + WhiteSpace@153..154 " " + LitExpr@154..155 + Int@154..155 "4" + RBrace@155..156 "}" + Gt@156..157 ">" + CallArgList@157..169 + LParen@157..158 "(" + CallArg@158..162 + Ident@158..159 "x" + Colon@159..160 ":" + WhiteSpace@160..161 " " + LitExpr@161..162 + Int@161..162 "1" + Comma@162..163 "," + WhiteSpace@163..164 " " + CallArg@164..168 + Ident@164..165 "y" + Colon@165..166 ":" + WhiteSpace@166..167 " " + LitExpr@167..168 + Int@167..168 "2" + RParen@168..169 ")" + diff --git a/crates/parser2/test_files/syntax_node/exprs/if_.fe b/crates/parser2/test_files/syntax_node/exprs/if_.fe new file mode 100644 index 0000000000..d823bc0d5c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/if_.fe @@ -0,0 +1,33 @@ +if b {} else {} + +if b {} else { + let x = 1 + x +} + +if b { + let x = 1 + x +} else {} + +if b { + let x = 1 + x +} + +if b { + let x = 1 + x +} else { + let y = 1 + y +} + +if match x { + Scope::Parent => true + Scope::Child => false +} { + return +} else { + 1 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/if_.snap b/crates/parser2/test_files/syntax_node/exprs/if_.snap new file mode 100644 index 0000000000..3b560b3ca4 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/if_.snap @@ -0,0 +1,256 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..279 + IfExpr@0..12 + IfKw@0..2 "if" + WhiteSpace@2..3 " " + Path@3..4 + PathSegment@3..4 + Ident@3..4 "b" + WhiteSpace@4..5 " " + BlockExpr@5..7 + LBrace@5..6 "{" + RBrace@6..7 "}" + WhiteSpace@7..8 " " + ElseKw@8..12 "else" + WhiteSpace@12..13 " " + BlockExpr@13..15 + LBrace@13..14 "{" + RBrace@14..15 "}" + Newline@15..17 "\n\n" + IfExpr@17..29 + IfKw@17..19 "if" + WhiteSpace@19..20 " " + Path@20..21 + PathSegment@20..21 + Ident@20..21 "b" + WhiteSpace@21..22 " " + BlockExpr@22..24 + LBrace@22..23 "{" + RBrace@23..24 "}" + WhiteSpace@24..25 " " + ElseKw@25..29 "else" + WhiteSpace@29..30 " " + BlockExpr@30..53 + LBrace@30..31 "{" + Newline@31..32 "\n" + WhiteSpace@32..36 " " + LetStmt@36..45 + LetKw@36..39 "let" + WhiteSpace@39..40 " " + PathPat@40..41 + Path@40..41 + PathSegment@40..41 + Ident@40..41 "x" + WhiteSpace@41..42 " " + Eq@42..43 "=" + WhiteSpace@43..44 " " + LitExpr@44..45 + Int@44..45 "1" + Newline@45..46 "\n" + WhiteSpace@46..50 " " + ExprStmt@50..51 
+ Path@50..51 + PathSegment@50..51 + Ident@50..51 "x" + Newline@51..52 "\n" + RBrace@52..53 "}" + Newline@53..55 "\n\n" + IfExpr@55..88 + IfKw@55..57 "if" + WhiteSpace@57..58 " " + Path@58..59 + PathSegment@58..59 + Ident@58..59 "b" + WhiteSpace@59..60 " " + BlockExpr@60..83 + LBrace@60..61 "{" + Newline@61..62 "\n" + WhiteSpace@62..66 " " + LetStmt@66..75 + LetKw@66..69 "let" + WhiteSpace@69..70 " " + PathPat@70..71 + Path@70..71 + PathSegment@70..71 + Ident@70..71 "x" + WhiteSpace@71..72 " " + Eq@72..73 "=" + WhiteSpace@73..74 " " + LitExpr@74..75 + Int@74..75 "1" + Newline@75..76 "\n" + WhiteSpace@76..80 " " + ExprStmt@80..81 + Path@80..81 + PathSegment@80..81 + Ident@80..81 "x" + Newline@81..82 "\n" + RBrace@82..83 "}" + WhiteSpace@83..84 " " + ElseKw@84..88 "else" + WhiteSpace@88..89 " " + BlockExpr@89..91 + LBrace@89..90 "{" + RBrace@90..91 "}" + Newline@91..93 "\n\n" + IfExpr@93..121 + IfKw@93..95 "if" + WhiteSpace@95..96 " " + Path@96..97 + PathSegment@96..97 + Ident@96..97 "b" + WhiteSpace@97..98 " " + BlockExpr@98..121 + LBrace@98..99 "{" + Newline@99..100 "\n" + WhiteSpace@100..104 " " + LetStmt@104..113 + LetKw@104..107 "let" + WhiteSpace@107..108 " " + PathPat@108..109 + Path@108..109 + PathSegment@108..109 + Ident@108..109 "x" + WhiteSpace@109..110 " " + Eq@110..111 "=" + WhiteSpace@111..112 " " + LitExpr@112..113 + Int@112..113 "1" + Newline@113..114 "\n" + WhiteSpace@114..118 " " + ExprStmt@118..119 + Path@118..119 + PathSegment@118..119 + Ident@118..119 "x" + Newline@119..120 "\n" + RBrace@120..121 "}" + Newline@121..123 "\n\n" + IfExpr@123..156 + IfKw@123..125 "if" + WhiteSpace@125..126 " " + Path@126..127 + PathSegment@126..127 + Ident@126..127 "b" + WhiteSpace@127..128 " " + BlockExpr@128..151 + LBrace@128..129 "{" + Newline@129..130 "\n" + WhiteSpace@130..134 " " + LetStmt@134..143 + LetKw@134..137 "let" + WhiteSpace@137..138 " " + PathPat@138..139 + Path@138..139 + PathSegment@138..139 + Ident@138..139 "x" + WhiteSpace@139..140 " " + Eq@140..141 "=" + WhiteSpace@141..142 " " + LitExpr@142..143 + Int@142..143 "1" + Newline@143..144 "\n" + WhiteSpace@144..148 " " + ExprStmt@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "x" + Newline@149..150 "\n" + RBrace@150..151 "}" + WhiteSpace@151..152 " " + ElseKw@152..156 "else" + WhiteSpace@156..157 " " + BlockExpr@157..180 + LBrace@157..158 "{" + Newline@158..159 "\n" + WhiteSpace@159..163 " " + LetStmt@163..172 + LetKw@163..166 "let" + WhiteSpace@166..167 " " + PathPat@167..168 + Path@167..168 + PathSegment@167..168 + Ident@167..168 "y" + WhiteSpace@168..169 " " + Eq@169..170 "=" + WhiteSpace@170..171 " " + LitExpr@171..172 + Int@171..172 "1" + Newline@172..173 "\n" + WhiteSpace@173..177 " " + ExprStmt@177..178 + Path@177..178 + PathSegment@177..178 + Ident@177..178 "y" + Newline@178..179 "\n" + RBrace@179..180 "}" + Newline@180..182 "\n\n" + IfExpr@182..269 + IfKw@182..184 "if" + WhiteSpace@184..185 " " + MatchExpr@185..248 + MatchKw@185..190 "match" + WhiteSpace@190..191 " " + Path@191..192 + PathSegment@191..192 + Ident@191..192 "x" + WhiteSpace@192..193 " " + MatchArmList@193..248 + LBrace@193..194 "{" + Newline@194..195 "\n" + WhiteSpace@195..199 " " + MatchArm@199..220 + PathPat@199..212 + Path@199..212 + PathSegment@199..204 + Ident@199..204 "Scope" + Colon2@204..206 "::" + PathSegment@206..212 + Ident@206..212 "Parent" + WhiteSpace@212..213 " " + FatArrow@213..215 "=>" + WhiteSpace@215..216 " " + LitExpr@216..220 + TrueKw@216..220 "true" + Newline@220..221 "\n" + WhiteSpace@221..225 " " + 
MatchArm@225..246 + PathPat@225..237 + Path@225..237 + PathSegment@225..230 + Ident@225..230 "Scope" + Colon2@230..232 "::" + PathSegment@232..237 + Ident@232..237 "Child" + WhiteSpace@237..238 " " + FatArrow@238..240 "=>" + WhiteSpace@240..241 " " + LitExpr@241..246 + FalseKw@241..246 "false" + Newline@246..247 "\n" + RBrace@247..248 "}" + WhiteSpace@248..249 " " + BlockExpr@249..264 + LBrace@249..250 "{" + Newline@250..251 "\n" + WhiteSpace@251..255 " " + ReturnStmt@255..261 + ReturnKw@255..261 "return" + WhiteSpace@261..262 " " + Newline@262..263 "\n" + RBrace@263..264 "}" + WhiteSpace@264..265 " " + ElseKw@265..269 "else" + WhiteSpace@269..270 " " + BlockExpr@270..279 + LBrace@270..271 "{" + Newline@271..272 "\n" + WhiteSpace@272..276 " " + ExprStmt@276..277 + LitExpr@276..277 + Int@276..277 "1" + Newline@277..278 "\n" + RBrace@278..279 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/match_.fe b/crates/parser2/test_files/syntax_node/exprs/match_.fe new file mode 100644 index 0000000000..6ddf0c7210 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/match_.fe @@ -0,0 +1,38 @@ +match e {} + +match e { + Enum::Add(x, y) => x + y + Enum::Sub(x, y) => x - y +} + +match (S {x: 1, y: 2}) { + _ => 1 +} + +match e { + Enum::Add(x, y) => x + y + Enum::Sub(x, y) => x - y +} + +match e { + Enum::Add(x, y) => { + x + y + } + Enum::Sub(x, y) => x - y + Enum::Mul(x, y) => { x * y } +} + +match e { Enum::Var(s) => s } + +match { + let x = 1 + Enum::Var(x) + } +{ + Enum::Var(s) => s +} + +match (S {x: Foo::Bar(x), y: 2}) { + S {x: Boo::Bar(x), y} => true + _ => false +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/match_.snap b/crates/parser2/test_files/syntax_node/exprs/match_.snap new file mode 100644 index 0000000000..80a8790a4f --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/match_.snap @@ -0,0 +1,583 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..516 + MatchExpr@0..10 + MatchKw@0..5 "match" + WhiteSpace@5..6 " " + Path@6..7 + PathSegment@6..7 + Ident@6..7 "e" + WhiteSpace@7..8 " " + MatchArmList@8..10 + LBrace@8..9 "{" + RBrace@9..10 "}" + Newline@10..12 "\n\n" + MatchExpr@12..81 + MatchKw@12..17 "match" + WhiteSpace@17..18 " " + Path@18..19 + PathSegment@18..19 + Ident@18..19 "e" + WhiteSpace@19..20 " " + MatchArmList@20..81 + LBrace@20..21 "{" + Newline@21..22 "\n" + WhiteSpace@22..26 " " + MatchArm@26..50 + PathTuplePat@26..41 + Path@26..35 + PathSegment@26..30 + Ident@26..30 "Enum" + Colon2@30..32 "::" + PathSegment@32..35 + Ident@32..35 "Add" + TuplePatElemList@35..41 + LParen@35..36 "(" + TuplePatElem@36..37 + PathPat@36..37 + Path@36..37 + PathSegment@36..37 + Ident@36..37 "x" + Comma@37..38 "," + WhiteSpace@38..39 " " + TuplePatElem@39..40 + PathPat@39..40 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "y" + RParen@40..41 ")" + WhiteSpace@41..42 " " + FatArrow@42..44 "=>" + WhiteSpace@44..45 " " + BinExpr@45..50 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "x" + WhiteSpace@46..47 " " + Plus@47..48 "+" + WhiteSpace@48..49 " " + Path@49..50 + PathSegment@49..50 + Ident@49..50 "y" + Newline@50..51 "\n" + WhiteSpace@51..55 " " + MatchArm@55..79 + PathTuplePat@55..70 + Path@55..64 + PathSegment@55..59 + Ident@55..59 "Enum" + Colon2@59..61 "::" + PathSegment@61..64 + Ident@61..64 "Sub" + TuplePatElemList@64..70 + LParen@64..65 "(" + TuplePatElem@65..66 + PathPat@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "x" + Comma@66..67 "," + WhiteSpace@67..68 " " 
+ TuplePatElem@68..69 + PathPat@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "y" + RParen@69..70 ")" + WhiteSpace@70..71 " " + FatArrow@71..73 "=>" + WhiteSpace@73..74 " " + BinExpr@74..79 + Path@74..75 + PathSegment@74..75 + Ident@74..75 "x" + WhiteSpace@75..76 " " + Minus@76..77 "-" + WhiteSpace@77..78 " " + Path@78..79 + PathSegment@78..79 + Ident@78..79 "y" + Newline@79..80 "\n" + RBrace@80..81 "}" + Newline@81..83 "\n\n" + MatchExpr@83..120 + MatchKw@83..88 "match" + WhiteSpace@88..89 " " + ParenExpr@89..105 + LParen@89..90 "(" + RecordInitExpr@90..104 + Path@90..91 + PathSegment@90..91 + Ident@90..91 "S" + WhiteSpace@91..92 " " + RecordFieldList@92..104 + LBrace@92..93 "{" + RecordField@93..97 + Ident@93..94 "x" + Colon@94..95 ":" + WhiteSpace@95..96 " " + LitExpr@96..97 + Int@96..97 "1" + Comma@97..98 "," + WhiteSpace@98..99 " " + RecordField@99..103 + Ident@99..100 "y" + Colon@100..101 ":" + WhiteSpace@101..102 " " + LitExpr@102..103 + Int@102..103 "2" + RBrace@103..104 "}" + RParen@104..105 ")" + WhiteSpace@105..106 " " + MatchArmList@106..120 + LBrace@106..107 "{" + Newline@107..108 "\n" + WhiteSpace@108..112 " " + MatchArm@112..118 + WildCardPat@112..113 + Underscore@112..113 "_" + WhiteSpace@113..114 " " + FatArrow@114..116 "=>" + WhiteSpace@116..117 " " + LitExpr@117..118 + Int@117..118 "1" + Newline@118..119 "\n" + RBrace@119..120 "}" + Newline@120..122 "\n\n" + MatchExpr@122..191 + MatchKw@122..127 "match" + WhiteSpace@127..128 " " + Path@128..129 + PathSegment@128..129 + Ident@128..129 "e" + WhiteSpace@129..130 " " + MatchArmList@130..191 + LBrace@130..131 "{" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + MatchArm@136..160 + PathTuplePat@136..151 + Path@136..145 + PathSegment@136..140 + Ident@136..140 "Enum" + Colon2@140..142 "::" + PathSegment@142..145 + Ident@142..145 "Add" + TuplePatElemList@145..151 + LParen@145..146 "(" + TuplePatElem@146..147 + PathPat@146..147 + Path@146..147 + PathSegment@146..147 + Ident@146..147 "x" + Comma@147..148 "," + WhiteSpace@148..149 " " + TuplePatElem@149..150 + PathPat@149..150 + Path@149..150 + PathSegment@149..150 + Ident@149..150 "y" + RParen@150..151 ")" + WhiteSpace@151..152 " " + FatArrow@152..154 "=>" + WhiteSpace@154..155 " " + BinExpr@155..160 + Path@155..156 + PathSegment@155..156 + Ident@155..156 "x" + WhiteSpace@156..157 " " + Plus@157..158 "+" + WhiteSpace@158..159 " " + Path@159..160 + PathSegment@159..160 + Ident@159..160 "y" + Newline@160..161 "\n" + WhiteSpace@161..165 " " + MatchArm@165..189 + PathTuplePat@165..180 + Path@165..174 + PathSegment@165..169 + Ident@165..169 "Enum" + Colon2@169..171 "::" + PathSegment@171..174 + Ident@171..174 "Sub" + TuplePatElemList@174..180 + LParen@174..175 "(" + TuplePatElem@175..176 + PathPat@175..176 + Path@175..176 + PathSegment@175..176 + Ident@175..176 "x" + Comma@176..177 "," + WhiteSpace@177..178 " " + TuplePatElem@178..179 + PathPat@178..179 + Path@178..179 + PathSegment@178..179 + Ident@178..179 "y" + RParen@179..180 ")" + WhiteSpace@180..181 " " + FatArrow@181..183 "=>" + WhiteSpace@183..184 " " + BinExpr@184..189 + Path@184..185 + PathSegment@184..185 + Ident@184..185 "x" + WhiteSpace@185..186 " " + Minus@186..187 "-" + WhiteSpace@187..188 " " + Path@188..189 + PathSegment@188..189 + Ident@188..189 "y" + Newline@189..190 "\n" + RBrace@190..191 "}" + Newline@191..193 "\n\n" + MatchExpr@193..313 + MatchKw@193..198 "match" + WhiteSpace@198..199 " " + Path@199..200 + PathSegment@199..200 + Ident@199..200 "e" + WhiteSpace@200..201 " " + MatchArmList@201..313 + 
LBrace@201..202 "{" + Newline@202..203 "\n" + WhiteSpace@203..207 " " + MatchArm@207..249 + PathTuplePat@207..222 + Path@207..216 + PathSegment@207..211 + Ident@207..211 "Enum" + Colon2@211..213 "::" + PathSegment@213..216 + Ident@213..216 "Add" + TuplePatElemList@216..222 + LParen@216..217 "(" + TuplePatElem@217..218 + PathPat@217..218 + Path@217..218 + PathSegment@217..218 + Ident@217..218 "x" + Comma@218..219 "," + WhiteSpace@219..220 " " + TuplePatElem@220..221 + PathPat@220..221 + Path@220..221 + PathSegment@220..221 + Ident@220..221 "y" + RParen@221..222 ")" + WhiteSpace@222..223 " " + FatArrow@223..225 "=>" + WhiteSpace@225..226 " " + BlockExpr@226..249 + LBrace@226..227 "{" + WhiteSpace@227..228 " " + Newline@228..229 "\n" + WhiteSpace@229..237 " " + ExprStmt@237..242 + BinExpr@237..242 + Path@237..238 + PathSegment@237..238 + Ident@237..238 "x" + WhiteSpace@238..239 " " + Plus@239..240 "+" + WhiteSpace@240..241 " " + Path@241..242 + PathSegment@241..242 + Ident@241..242 "y" + WhiteSpace@242..243 " " + Newline@243..244 "\n" + WhiteSpace@244..248 " " + RBrace@248..249 "}" + Newline@249..250 "\n" + WhiteSpace@250..254 " " + MatchArm@254..278 + PathTuplePat@254..269 + Path@254..263 + PathSegment@254..258 + Ident@254..258 "Enum" + Colon2@258..260 "::" + PathSegment@260..263 + Ident@260..263 "Sub" + TuplePatElemList@263..269 + LParen@263..264 "(" + TuplePatElem@264..265 + PathPat@264..265 + Path@264..265 + PathSegment@264..265 + Ident@264..265 "x" + Comma@265..266 "," + WhiteSpace@266..267 " " + TuplePatElem@267..268 + PathPat@267..268 + Path@267..268 + PathSegment@267..268 + Ident@267..268 "y" + RParen@268..269 ")" + WhiteSpace@269..270 " " + FatArrow@270..272 "=>" + WhiteSpace@272..273 " " + BinExpr@273..278 + Path@273..274 + PathSegment@273..274 + Ident@273..274 "x" + WhiteSpace@274..275 " " + Minus@275..276 "-" + WhiteSpace@276..277 " " + Path@277..278 + PathSegment@277..278 + Ident@277..278 "y" + Newline@278..279 "\n" + WhiteSpace@279..283 " " + MatchArm@283..311 + PathTuplePat@283..298 + Path@283..292 + PathSegment@283..287 + Ident@283..287 "Enum" + Colon2@287..289 "::" + PathSegment@289..292 + Ident@289..292 "Mul" + TuplePatElemList@292..298 + LParen@292..293 "(" + TuplePatElem@293..294 + PathPat@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "x" + Comma@294..295 "," + WhiteSpace@295..296 " " + TuplePatElem@296..297 + PathPat@296..297 + Path@296..297 + PathSegment@296..297 + Ident@296..297 "y" + RParen@297..298 ")" + WhiteSpace@298..299 " " + FatArrow@299..301 "=>" + WhiteSpace@301..302 " " + BlockExpr@302..311 + LBrace@302..303 "{" + WhiteSpace@303..304 " " + ExprStmt@304..309 + BinExpr@304..309 + Path@304..305 + PathSegment@304..305 + Ident@304..305 "x" + WhiteSpace@305..306 " " + Star@306..307 "*" + WhiteSpace@307..308 " " + Path@308..309 + PathSegment@308..309 + Ident@308..309 "y" + WhiteSpace@309..310 " " + RBrace@310..311 "}" + Newline@311..312 "\n" + RBrace@312..313 "}" + Newline@313..315 "\n\n" + MatchExpr@315..344 + MatchKw@315..320 "match" + WhiteSpace@320..321 " " + Path@321..322 + PathSegment@321..322 + Ident@321..322 "e" + WhiteSpace@322..323 " " + MatchArmList@323..344 + LBrace@323..324 "{" + WhiteSpace@324..325 " " + MatchArm@325..342 + PathTuplePat@325..337 + Path@325..334 + PathSegment@325..329 + Ident@325..329 "Enum" + Colon2@329..331 "::" + PathSegment@331..334 + Ident@331..334 "Var" + TuplePatElemList@334..337 + LParen@334..335 "(" + TuplePatElem@335..336 + PathPat@335..336 + Path@335..336 + PathSegment@335..336 + Ident@335..336 "s" + 
RParen@336..337 ")" + WhiteSpace@337..338 " " + FatArrow@338..340 "=>" + WhiteSpace@340..341 " " + Path@341..342 + PathSegment@341..342 + Ident@341..342 "s" + WhiteSpace@342..343 " " + RBrace@343..344 "}" + Newline@344..346 "\n\n" + MatchExpr@346..429 + MatchKw@346..351 "match" + WhiteSpace@351..352 " " + BlockExpr@352..400 + LBrace@352..353 "{" + Newline@353..354 "\n" + WhiteSpace@354..362 " " + LetStmt@362..371 + LetKw@362..365 "let" + WhiteSpace@365..366 " " + PathPat@366..367 + Path@366..367 + PathSegment@366..367 + Ident@366..367 "x" + WhiteSpace@367..368 " " + Eq@368..369 "=" + WhiteSpace@369..370 " " + LitExpr@370..371 + Int@370..371 "1" + Newline@371..372 "\n" + WhiteSpace@372..380 " " + ExprStmt@380..392 + CallExpr@380..392 + Path@380..389 + PathSegment@380..384 + Ident@380..384 "Enum" + Colon2@384..386 "::" + PathSegment@386..389 + Ident@386..389 "Var" + CallArgList@389..392 + LParen@389..390 "(" + CallArg@390..391 + Path@390..391 + PathSegment@390..391 + Ident@390..391 "x" + RParen@391..392 ")" + Newline@392..393 "\n" + WhiteSpace@393..399 " " + RBrace@399..400 "}" + WhiteSpace@400..401 " " + Newline@401..402 "\n" + MatchArmList@402..429 + LBrace@402..403 "{" + WhiteSpace@403..404 " " + Newline@404..405 "\n" + WhiteSpace@405..409 " " + MatchArm@409..426 + PathTuplePat@409..421 + Path@409..418 + PathSegment@409..413 + Ident@409..413 "Enum" + Colon2@413..415 "::" + PathSegment@415..418 + Ident@415..418 "Var" + TuplePatElemList@418..421 + LParen@418..419 "(" + TuplePatElem@419..420 + PathPat@419..420 + Path@419..420 + PathSegment@419..420 + Ident@419..420 "s" + RParen@420..421 ")" + WhiteSpace@421..422 " " + FatArrow@422..424 "=>" + WhiteSpace@424..425 " " + Path@425..426 + PathSegment@425..426 + Ident@425..426 "s" + WhiteSpace@426..427 " " + Newline@427..428 "\n" + RBrace@428..429 "}" + Newline@429..431 "\n\n" + MatchExpr@431..516 + MatchKw@431..436 "match" + WhiteSpace@436..437 " " + ParenExpr@437..463 + LParen@437..438 "(" + RecordInitExpr@438..462 + Path@438..439 + PathSegment@438..439 + Ident@438..439 "S" + WhiteSpace@439..440 " " + RecordFieldList@440..462 + LBrace@440..441 "{" + RecordField@441..455 + Ident@441..442 "x" + Colon@442..443 ":" + WhiteSpace@443..444 " " + CallExpr@444..455 + Path@444..452 + PathSegment@444..447 + Ident@444..447 "Foo" + Colon2@447..449 "::" + PathSegment@449..452 + Ident@449..452 "Bar" + CallArgList@452..455 + LParen@452..453 "(" + CallArg@453..454 + Path@453..454 + PathSegment@453..454 + Ident@453..454 "x" + RParen@454..455 ")" + Comma@455..456 "," + WhiteSpace@456..457 " " + RecordField@457..461 + Ident@457..458 "y" + Colon@458..459 ":" + WhiteSpace@459..460 " " + LitExpr@460..461 + Int@460..461 "2" + RBrace@461..462 "}" + RParen@462..463 ")" + WhiteSpace@463..464 " " + MatchArmList@464..516 + LBrace@464..465 "{" + Newline@465..466 "\n" + WhiteSpace@466..470 " " + MatchArm@470..499 + RecordPat@470..491 + Path@470..471 + PathSegment@470..471 + Ident@470..471 "S" + WhiteSpace@471..472 " " + RecordPatFieldList@472..491 + LBrace@472..473 "{" + RecordPatField@473..487 + Ident@473..474 "x" + Colon@474..475 ":" + WhiteSpace@475..476 " " + PathTuplePat@476..487 + Path@476..484 + PathSegment@476..479 + Ident@476..479 "Boo" + Colon2@479..481 "::" + PathSegment@481..484 + Ident@481..484 "Bar" + TuplePatElemList@484..487 + LParen@484..485 "(" + TuplePatElem@485..486 + PathPat@485..486 + Path@485..486 + PathSegment@485..486 + Ident@485..486 "x" + RParen@486..487 ")" + Comma@487..488 "," + WhiteSpace@488..489 " " + RecordPatField@489..490 + PathPat@489..490 
+ Path@489..490 + PathSegment@489..490 + Ident@489..490 "y" + RBrace@490..491 "}" + WhiteSpace@491..492 " " + FatArrow@492..494 "=>" + WhiteSpace@494..495 " " + LitExpr@495..499 + TrueKw@495..499 "true" + Newline@499..500 "\n" + WhiteSpace@500..504 " " + MatchArm@504..514 + WildCardPat@504..505 + Underscore@504..505 "_" + WhiteSpace@505..506 " " + FatArrow@506..508 "=>" + WhiteSpace@508..509 " " + LitExpr@509..514 + FalseKw@509..514 "false" + Newline@514..515 "\n" + RBrace@515..516 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/method.fe b/crates/parser2/test_files/syntax_node/exprs/method.fe new file mode 100644 index 0000000000..36684ffa42 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/method.fe @@ -0,0 +1,7 @@ +x.y() +x.y(1, 2) + +x.y.z(x: 1, y: 2) +x[0].z(x: 1) + +x.y(x: 1, y) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/method.snap b/crates/parser2/test_files/syntax_node/exprs/method.snap new file mode 100644 index 0000000000..0f7f7118ac --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/method.snap @@ -0,0 +1,127 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..75 + MethodCallExpr@0..5 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + Dot@1..2 "." + Ident@2..3 "y" + CallArgList@3..5 + LParen@3..4 "(" + RParen@4..5 ")" + Newline@5..6 "\n" + MethodCallExpr@6..15 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "x" + Dot@7..8 "." + Ident@8..9 "y" + CallArgList@9..15 + LParen@9..10 "(" + CallArg@10..11 + LitExpr@10..11 + Int@10..11 "1" + Comma@11..12 "," + WhiteSpace@12..13 " " + CallArg@13..14 + LitExpr@13..14 + Int@13..14 "2" + RParen@14..15 ")" + Newline@15..17 "\n\n" + MethodCallExpr@17..34 + FieldExpr@17..20 + Path@17..18 + PathSegment@17..18 + Ident@17..18 "x" + Dot@18..19 "." + Ident@19..20 "y" + Dot@20..21 "." + Ident@21..22 "z" + CallArgList@22..34 + LParen@22..23 "(" + CallArg@23..27 + Ident@23..24 "x" + Colon@24..25 ":" + WhiteSpace@25..26 " " + LitExpr@26..27 + Int@26..27 "1" + Comma@27..28 "," + WhiteSpace@28..29 " " + CallArg@29..33 + Ident@29..30 "y" + Colon@30..31 ":" + WhiteSpace@31..32 " " + LitExpr@32..33 + Int@32..33 "2" + RParen@33..34 ")" + Newline@34..35 "\n" + MethodCallExpr@35..47 + IndexExpr@35..39 + Path@35..36 + PathSegment@35..36 + Ident@35..36 "x" + LBracket@36..37 "[" + LitExpr@37..38 + Int@37..38 "0" + RBracket@38..39 "]" + Dot@39..40 "." + Ident@40..41 "z" + CallArgList@41..47 + LParen@41..42 "(" + CallArg@42..46 + Ident@42..43 "x" + Colon@43..44 ":" + WhiteSpace@44..45 " " + LitExpr@45..46 + Int@45..46 "1" + RParen@46..47 ")" + Newline@47..49 "\n\n" + MethodCallExpr@49..75 + Path@49..50 + PathSegment@49..50 + Ident@49..50 "x" + Dot@50..51 "." 
+ Ident@51..52 "y" + GenericParamList@52..66 + Lt@52..53 "<" + GenericParam@53..56 + Path@53..56 + PathSegment@53..56 + Ident@53..56 "i32" + Comma@56..57 "," + WhiteSpace@57..58 " " + GenericParam@58..65 + BlockExpr@58..65 + LBrace@58..59 "{" + ExprStmt@59..64 + BinExpr@59..64 + Path@59..60 + PathSegment@59..60 + Ident@59..60 "x" + WhiteSpace@60..61 " " + Plus@61..62 "+" + WhiteSpace@62..63 " " + Path@63..64 + PathSegment@63..64 + Ident@63..64 "y" + RBrace@64..65 "}" + Gt@65..66 ">" + CallArgList@66..75 + LParen@66..67 "(" + CallArg@67..71 + Ident@67..68 "x" + Colon@68..69 ":" + WhiteSpace@69..70 " " + LitExpr@70..71 + Int@70..71 "1" + Comma@71..72 "," + WhiteSpace@72..73 " " + CallArg@73..74 + Path@73..74 + PathSegment@73..74 + Ident@73..74 "y" + RParen@74..75 ")" + diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index c55b13016a..a6d3e437ed 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -1,7 +1,8 @@ use fe_parser2::{ lexer, - parser::{item::ItemListScope, parse_pat, Parser, RootScope}, + parser::{expr::parse_expr, item::ItemListScope, parse_pat, Parser, RootScope}, syntax_node::SyntaxNode, + SyntaxKind, }; fn test_item_list(input: &str) -> SyntaxNode { @@ -32,6 +33,30 @@ fe_compiler_test_utils::build_debug_snap_tests! { test_pat } +fn test_expr(input: &str) -> SyntaxNode { + let runner = TestRunner::new(|parser| { + parser.set_newline_as_trivia(false); + + fn bump_newlines(parser: &mut Parser) { + while parser.current_kind() == Some(SyntaxKind::Newline) { + parser.bump(); + } + } + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_expr(parser); + bump_newlines(parser); + } + }); + runner.run(input) +} +fe_compiler_test_utils::build_debug_snap_tests! 
{ + "parser2/test_files/syntax_node/exprs", + "parser2/test_files/syntax_node/exprs", + test_expr +} + struct TestRunner where F: Fn(&mut Parser), From 1fed224fc309643b6037713f1d7791f24c6cc3f2 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 23 Jan 2023 21:26:25 +0100 Subject: [PATCH 029/678] Add tests for `stmt` --- crates/parser2/src/parser/mod.rs | 2 +- crates/parser2/src/parser/stmt.rs | 25 +- crates/parser2/src/syntax_kind.rs | 10 +- .../test_files/syntax_node/stmts/for_.fe | 7 + .../test_files/syntax_node/stmts/for_.snap | 112 +++++++ .../test_files/syntax_node/stmts/let_.fe | 23 ++ .../test_files/syntax_node/stmts/let_.snap | 284 ++++++++++++++++++ .../test_files/syntax_node/stmts/while_.fe | 4 + .../test_files/syntax_node/stmts/while_.snap | 60 ++++ crates/parser2/tests/syntax_node.rs | 35 ++- 10 files changed, 543 insertions(+), 19 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/stmts/for_.fe create mode 100644 crates/parser2/test_files/syntax_node/stmts/for_.snap create mode 100644 crates/parser2/test_files/syntax_node/stmts/let_.fe create mode 100644 crates/parser2/test_files/syntax_node/stmts/let_.snap create mode 100644 crates/parser2/test_files/syntax_node/stmts/while_.fe create mode 100644 crates/parser2/test_files/syntax_node/stmts/while_.snap diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 93539ac40b..354a7d586a 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -288,7 +288,7 @@ impl Parser { } while let Some(kind) = self.current_kind() { - if recovery_set.contains(&kind) { + if recovery_set.contains(&kind) | self.auxiliary_recovery_set.contains(&kind) { break; } else { self.bump(); diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index 7c0caef90d..a5b6d1148b 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -9,10 +9,7 @@ use super::{ Checkpoint, Parser, }; -pub(super) fn parse_stmt( - parser: &mut Parser, - checkpoint: Option, -) -> bool { +pub fn parse_stmt(parser: &mut Parser, checkpoint: Option) -> bool { use SyntaxKind::*; match parser.current_kind() { @@ -42,6 +39,7 @@ impl super::Parse for LetStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LetKw); parser.set_newline_as_trivia(false); + parser.bump_if(SyntaxKind::MutKw); if !parse_pat(parser) { parser.error_and_recover("expected pattern", None); return; @@ -149,8 +147,17 @@ impl super::Parse for AssignStmtScope { parser.remove_recovery_token(SyntaxKind::Eq); parser.set_newline_as_trivia(false); + if parser + .current_kind() + .map(|kind| is_aug_assign_kind(kind)) + .unwrap_or_default() + { + parser.bump(); + self.set_kind(SyntaxKind::AugAssignStmt); + } + if !parser.bump_if(SyntaxKind::Eq) { - parser.error_and_recover("expected `=` keyword", None); + parser.error_and_recover("expected `=`", None); return; } @@ -164,3 +171,11 @@ impl super::Parse for ExprStmtScope { parse_expr(parser); } } + +fn is_aug_assign_kind(kind: SyntaxKind) -> bool { + use SyntaxKind::*; + matches!( + kind, + Pipe | Hat | Amp | Lt2 | Gt2 | Plus | Minus | Star | Slash | Percent | Star2 + ) +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index f11e76e49a..ce7d620f81 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -124,9 +124,6 @@ pub enum SyntaxKind { /// `<=` #[token("<=")] LtEq, - /// `<<=` - #[token("<<=")] - Lt2Eq, /// `>` #[token(">")] Gt, 
@@ -136,9 +133,6 @@ pub enum SyntaxKind { /// `>=` #[token(">=")] GtEq, - /// `>>=` - #[token(">>=")] - Gt2Eq, /// `=` #[token("=")] Eq, @@ -282,8 +276,10 @@ pub enum SyntaxKind { // Statements. These are non-leaf nodes. /// `let x = 1` LetStmt, - /// `return 1` + /// `x = 1` AssignStmt, + /// `x += 1` + AugAssignStmt, /// `for x in y {..}` ForStmt, /// `while expr {..}` diff --git a/crates/parser2/test_files/syntax_node/stmts/for_.fe b/crates/parser2/test_files/syntax_node/stmts/for_.fe new file mode 100644 index 0000000000..3804fce951 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/for_.fe @@ -0,0 +1,7 @@ +for i in arr { + sum = sum + i +} + +for Struct {x, y} in s_list.iter() { + sum = sum + x + y +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/for_.snap b/crates/parser2/test_files/syntax_node/stmts/for_.snap new file mode 100644 index 0000000000..e411c28968 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/for_.snap @@ -0,0 +1,112 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..96 + ForStmt@0..34 + ForKw@0..3 "for" + WhiteSpace@3..4 " " + PathPat@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "i" + WhiteSpace@5..6 " " + InKw@6..8 "in" + WhiteSpace@8..9 " " + Path@9..12 + PathSegment@9..12 + Ident@9..12 "arr" + WhiteSpace@12..13 " " + BlockExpr@13..34 + LBrace@13..14 "{" + Newline@14..15 "\n" + WhiteSpace@15..19 " " + AssignStmt@19..32 + PathPat@19..22 + Path@19..22 + PathSegment@19..22 + Ident@19..22 "sum" + WhiteSpace@22..23 " " + Eq@23..24 "=" + WhiteSpace@24..25 " " + BinExpr@25..32 + Path@25..28 + PathSegment@25..28 + Ident@25..28 "sum" + WhiteSpace@28..29 " " + Plus@29..30 "+" + WhiteSpace@30..31 " " + Path@31..32 + PathSegment@31..32 + Ident@31..32 "i" + Newline@32..33 "\n" + RBrace@33..34 "}" + Newline@34..36 "\n\n" + ForStmt@36..96 + ForKw@36..39 "for" + WhiteSpace@39..40 " " + RecordPat@40..53 + Path@40..46 + PathSegment@40..46 + Ident@40..46 "Struct" + WhiteSpace@46..47 " " + RecordPatFieldList@47..53 + LBrace@47..48 "{" + RecordPatField@48..49 + PathPat@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "x" + Comma@49..50 "," + WhiteSpace@50..51 " " + RecordPatField@51..52 + PathPat@51..52 + Path@51..52 + PathSegment@51..52 + Ident@51..52 "y" + RBrace@52..53 "}" + WhiteSpace@53..54 " " + InKw@54..56 "in" + WhiteSpace@56..57 " " + MethodCallExpr@57..70 + Path@57..63 + PathSegment@57..63 + Ident@57..63 "s_list" + Dot@63..64 "." 
+ Ident@64..68 "iter" + CallArgList@68..70 + LParen@68..69 "(" + RParen@69..70 ")" + WhiteSpace@70..71 " " + BlockExpr@71..96 + LBrace@71..72 "{" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + AssignStmt@77..94 + PathPat@77..80 + Path@77..80 + PathSegment@77..80 + Ident@77..80 "sum" + WhiteSpace@80..81 " " + Eq@81..82 "=" + WhiteSpace@82..83 " " + BinExpr@83..94 + BinExpr@83..90 + Path@83..86 + PathSegment@83..86 + Ident@83..86 "sum" + WhiteSpace@86..87 " " + Plus@87..88 "+" + WhiteSpace@88..89 " " + Path@89..90 + PathSegment@89..90 + Ident@89..90 "x" + WhiteSpace@90..91 " " + Plus@91..92 "+" + WhiteSpace@92..93 " " + Path@93..94 + PathSegment@93..94 + Ident@93..94 "y" + Newline@94..95 "\n" + RBrace@95..96 "}" + diff --git a/crates/parser2/test_files/syntax_node/stmts/let_.fe b/crates/parser2/test_files/syntax_node/stmts/let_.fe new file mode 100644 index 0000000000..4582f9b69c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/let_.fe @@ -0,0 +1,23 @@ +let x + +let x = 1 +let x: i32 = 1 +let mut x: i32 = 1 + +x += 1 + 1 +y <<= 1 >> 2 + +let MyEnum::Foo(x, y) = e + +let S {x, y: z} = s + +let x = if b { + y +} else { + z +} + +let x = match b { + MyEnum::A(x) | MyEnum::B(x) => x + _ => 0 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/let_.snap b/crates/parser2/test_files/syntax_node/stmts/let_.snap new file mode 100644 index 0000000000..0c68276ab8 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/let_.snap @@ -0,0 +1,284 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..231 + LetStmt@0..5 + LetKw@0..3 "let" + WhiteSpace@3..4 " " + PathPat@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "x" + Newline@5..7 "\n\n" + LetStmt@7..16 + LetKw@7..10 "let" + WhiteSpace@10..11 " " + PathPat@11..12 + Path@11..12 + PathSegment@11..12 + Ident@11..12 "x" + WhiteSpace@12..13 " " + Eq@13..14 "=" + WhiteSpace@14..15 " " + LitExpr@15..16 + Int@15..16 "1" + Newline@16..17 "\n" + LetStmt@17..31 + LetKw@17..20 "let" + WhiteSpace@20..21 " " + PathPat@21..22 + Path@21..22 + PathSegment@21..22 + Ident@21..22 "x" + Colon@22..23 ":" + WhiteSpace@23..24 " " + PathType@24..27 + Path@24..27 + PathSegment@24..27 + Ident@24..27 "i32" + WhiteSpace@27..28 " " + Eq@28..29 "=" + WhiteSpace@29..30 " " + LitExpr@30..31 + Int@30..31 "1" + Newline@31..32 "\n" + LetStmt@32..50 + LetKw@32..35 "let" + WhiteSpace@35..36 " " + MutKw@36..39 "mut" + WhiteSpace@39..40 " " + PathPat@40..41 + Path@40..41 + PathSegment@40..41 + Ident@40..41 "x" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "i32" + WhiteSpace@46..47 " " + Eq@47..48 "=" + WhiteSpace@48..49 " " + LitExpr@49..50 + Int@49..50 "1" + Newline@50..52 "\n\n" + AugAssignStmt@52..62 + PathPat@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "x" + WhiteSpace@53..54 " " + Plus@54..55 "+" + Eq@55..56 "=" + WhiteSpace@56..57 " " + BinExpr@57..62 + LitExpr@57..58 + Int@57..58 "1" + WhiteSpace@58..59 " " + Plus@59..60 "+" + WhiteSpace@60..61 " " + LitExpr@61..62 + Int@61..62 "1" + Newline@62..63 "\n" + AugAssignStmt@63..75 + PathPat@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "y" + WhiteSpace@64..65 " " + Lt2@65..67 "<<" + Eq@67..68 "=" + WhiteSpace@68..69 " " + BinExpr@69..75 + LitExpr@69..70 + Int@69..70 "1" + WhiteSpace@70..71 " " + Gt2@71..73 ">>" + WhiteSpace@73..74 " " + LitExpr@74..75 + Int@74..75 "2" + Newline@75..77 "\n\n" + LetStmt@77..102 + LetKw@77..80 "let" + WhiteSpace@80..81 " " + 
PathTuplePat@81..98 + Path@81..92 + PathSegment@81..87 + Ident@81..87 "MyEnum" + Colon2@87..89 "::" + PathSegment@89..92 + Ident@89..92 "Foo" + TuplePatElemList@92..98 + LParen@92..93 "(" + TuplePatElem@93..94 + PathPat@93..94 + Path@93..94 + PathSegment@93..94 + Ident@93..94 "x" + Comma@94..95 "," + WhiteSpace@95..96 " " + TuplePatElem@96..97 + PathPat@96..97 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "y" + RParen@97..98 ")" + WhiteSpace@98..99 " " + Eq@99..100 "=" + WhiteSpace@100..101 " " + Path@101..102 + PathSegment@101..102 + Ident@101..102 "e" + Newline@102..104 "\n\n" + LetStmt@104..123 + LetKw@104..107 "let" + WhiteSpace@107..108 " " + RecordPat@108..119 + Path@108..109 + PathSegment@108..109 + Ident@108..109 "S" + WhiteSpace@109..110 " " + RecordPatFieldList@110..119 + LBrace@110..111 "{" + RecordPatField@111..112 + PathPat@111..112 + Path@111..112 + PathSegment@111..112 + Ident@111..112 "x" + Comma@112..113 "," + WhiteSpace@113..114 " " + RecordPatField@114..118 + Ident@114..115 "y" + Colon@115..116 ":" + WhiteSpace@116..117 " " + PathPat@117..118 + Path@117..118 + PathSegment@117..118 + Ident@117..118 "z" + RBrace@118..119 "}" + WhiteSpace@119..120 " " + Eq@120..121 "=" + WhiteSpace@121..122 " " + Path@122..123 + PathSegment@122..123 + Ident@122..123 "s" + Newline@123..125 "\n\n" + LetStmt@125..152 + LetKw@125..128 "let" + WhiteSpace@128..129 " " + PathPat@129..130 + Path@129..130 + PathSegment@129..130 + Ident@129..130 "x" + WhiteSpace@130..131 " " + Eq@131..132 "=" + WhiteSpace@132..133 " " + IfExpr@133..152 + IfKw@133..135 "if" + WhiteSpace@135..136 " " + Path@136..137 + PathSegment@136..137 + Ident@136..137 "b" + WhiteSpace@137..138 " " + BlockExpr@138..147 + LBrace@138..139 "{" + Newline@139..140 "\n" + WhiteSpace@140..144 " " + ExprStmt@144..145 + Path@144..145 + PathSegment@144..145 + Ident@144..145 "y" + Newline@145..146 "\n" + RBrace@146..147 "}" + WhiteSpace@147..148 " " + ElseKw@148..152 "else" + WhiteSpace@152..153 " " + ExprStmt@153..162 + BlockExpr@153..162 + LBrace@153..154 "{" + Newline@154..155 "\n" + WhiteSpace@155..159 " " + ExprStmt@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "z" + Newline@160..161 "\n" + RBrace@161..162 "}" + Newline@162..164 "\n\n" + LetStmt@164..231 + LetKw@164..167 "let" + WhiteSpace@167..168 " " + PathPat@168..169 + Path@168..169 + PathSegment@168..169 + Ident@168..169 "x" + WhiteSpace@169..170 " " + Eq@170..171 "=" + WhiteSpace@171..172 " " + MatchExpr@172..231 + MatchKw@172..177 "match" + WhiteSpace@177..178 " " + Path@178..179 + PathSegment@178..179 + Ident@178..179 "b" + WhiteSpace@179..180 " " + MatchArmList@180..231 + LBrace@180..181 "{" + Newline@181..182 "\n" + WhiteSpace@182..186 " " + MatchArm@186..218 + OrPat@186..213 + PathTuplePat@186..198 + Path@186..195 + PathSegment@186..192 + Ident@186..192 "MyEnum" + Colon2@192..194 "::" + PathSegment@194..195 + Ident@194..195 "A" + TuplePatElemList@195..198 + LParen@195..196 "(" + TuplePatElem@196..197 + PathPat@196..197 + Path@196..197 + PathSegment@196..197 + Ident@196..197 "x" + RParen@197..198 ")" + WhiteSpace@198..199 " " + Pipe@199..200 "|" + WhiteSpace@200..201 " " + PathTuplePat@201..213 + Path@201..210 + PathSegment@201..207 + Ident@201..207 "MyEnum" + Colon2@207..209 "::" + PathSegment@209..210 + Ident@209..210 "B" + TuplePatElemList@210..213 + LParen@210..211 "(" + TuplePatElem@211..212 + PathPat@211..212 + Path@211..212 + PathSegment@211..212 + Ident@211..212 "x" + RParen@212..213 ")" + WhiteSpace@213..214 " " + FatArrow@214..216 "=>" + 
WhiteSpace@216..217 " " + Path@217..218 + PathSegment@217..218 + Ident@217..218 "x" + Newline@218..219 "\n" + WhiteSpace@219..223 " " + MatchArm@223..229 + WildCardPat@223..224 + Underscore@223..224 "_" + WhiteSpace@224..225 " " + FatArrow@225..227 "=>" + WhiteSpace@227..228 " " + LitExpr@228..229 + Int@228..229 "0" + Newline@229..230 "\n" + RBrace@230..231 "}" + diff --git a/crates/parser2/test_files/syntax_node/stmts/while_.fe b/crates/parser2/test_files/syntax_node/stmts/while_.fe new file mode 100644 index 0000000000..6af89ff648 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/while_.fe @@ -0,0 +1,4 @@ +while i < 10 { + sum = 1 + 2 + i = i + 1 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/while_.snap b/crates/parser2/test_files/syntax_node/stmts/while_.snap new file mode 100644 index 0000000000..70c6c59ff3 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/while_.snap @@ -0,0 +1,60 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..46 + WhileStmt@0..46 + WhileKw@0..5 "while" + WhiteSpace@5..6 " " + BinExpr@6..12 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "i" + WhiteSpace@7..8 " " + Lt@8..9 "<" + WhiteSpace@9..10 " " + LitExpr@10..12 + Int@10..12 "10" + WhiteSpace@12..13 " " + BlockExpr@13..46 + LBrace@13..14 "{" + Newline@14..15 "\n" + WhiteSpace@15..19 " " + AssignStmt@19..30 + PathPat@19..22 + Path@19..22 + PathSegment@19..22 + Ident@19..22 "sum" + WhiteSpace@22..23 " " + Eq@23..24 "=" + WhiteSpace@24..25 " " + BinExpr@25..30 + LitExpr@25..26 + Int@25..26 "1" + WhiteSpace@26..27 " " + Plus@27..28 "+" + WhiteSpace@28..29 " " + LitExpr@29..30 + Int@29..30 "2" + Newline@30..31 "\n" + WhiteSpace@31..35 " " + AssignStmt@35..44 + PathPat@35..36 + Path@35..36 + PathSegment@35..36 + Ident@35..36 "i" + WhiteSpace@36..37 " " + Eq@37..38 "=" + WhiteSpace@38..39 " " + BinExpr@39..44 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "i" + WhiteSpace@40..41 " " + Plus@41..42 "+" + WhiteSpace@42..43 " " + LitExpr@43..44 + Int@43..44 "1" + Newline@44..45 "\n" + RBrace@45..46 "}" + diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index a6d3e437ed..fb838522ca 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -1,6 +1,8 @@ use fe_parser2::{ lexer, - parser::{expr::parse_expr, item::ItemListScope, parse_pat, Parser, RootScope}, + parser::{ + expr::parse_expr, item::ItemListScope, parse_pat, stmt::parse_stmt, Parser, RootScope, + }, syntax_node::SyntaxNode, SyntaxKind, }; @@ -37,11 +39,6 @@ fn test_expr(input: &str) -> SyntaxNode { let runner = TestRunner::new(|parser| { parser.set_newline_as_trivia(false); - fn bump_newlines(parser: &mut Parser) { - while parser.current_kind() == Some(SyntaxKind::Newline) { - parser.bump(); - } - } bump_newlines(parser); while parser.current_kind().is_some() { bump_newlines(parser); @@ -51,12 +48,32 @@ fn test_expr(input: &str) -> SyntaxNode { }); runner.run(input) } + fe_compiler_test_utils::build_debug_snap_tests! { "parser2/test_files/syntax_node/exprs", "parser2/test_files/syntax_node/exprs", test_expr } +fn test_stmt(input: &str) -> SyntaxNode { + let runner = TestRunner::new(|parser| { + parser.set_newline_as_trivia(false); + + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_stmt(parser, None); + bump_newlines(parser); + } + }); + runner.run(input) +} +fe_compiler_test_utils::build_debug_snap_tests! 
{ + "parser2/test_files/syntax_node/stmts", + "parser2/test_files/syntax_node/stmts", + test_stmt +} + struct TestRunner where F: Fn(&mut Parser), @@ -91,3 +108,9 @@ where cst } } + +fn bump_newlines(parser: &mut Parser) { + while parser.current_kind() == Some(SyntaxKind::Newline) { + parser.bump(); + } +} From 31de6b29bac5efbd503e1108a03e24b31b7c7143 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 15:28:32 +0100 Subject: [PATCH 030/678] Allow to specify visibility and attributes in `define_scope` macro --- crates/parser2/src/parser/expr.rs | 2 ++ crates/parser2/src/parser/expr_atom.rs | 4 +-- crates/parser2/src/parser/func.rs | 2 +- crates/parser2/src/parser/item.rs | 34 +++++++++++++++++++++++--- crates/parser2/src/parser/mod.rs | 30 +++++++++++++++++------ crates/parser2/src/parser/param.rs | 6 ++--- crates/parser2/src/parser/path.rs | 3 ++- crates/parser2/src/parser/struct_.rs | 2 +- crates/parser2/src/syntax_kind.rs | 9 ++++++- 9 files changed, 71 insertions(+), 21 deletions(-) diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index 13824f2096..75c26cad76 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -7,10 +7,12 @@ use super::{ Checkpoint, Parser, }; +/// Parses expression. pub fn parse_expr(parser: &mut Parser) -> bool { parse_expr_with_min_bp(parser, 0, true) } +/// Parses expression except for `struct` initialization expression. pub fn parse_expr_no_struct(parser: &mut Parser) -> bool { parse_expr_with_min_bp(parser, 0, false) } diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 602b282d13..7cbeb80427 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -41,7 +41,7 @@ pub(super) fn parse_expr_atom( } define_scope! { - BlockExprScope, + pub(crate) BlockExprScope, BlockExpr, Override( RBrace, @@ -176,7 +176,7 @@ impl super::Parse for MatchArmScope { } } -define_scope! { LitExprScope, LitExpr, Inheritance } +define_scope! { pub(crate) LitExprScope, LitExpr, Inheritance } impl super::Parse for LitExprScope { fn parse(&mut self, parser: &mut Parser) { match parser.current_kind() { diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index deb926e81e..d16d0488c8 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -1,7 +1,7 @@ use super::{define_scope, token_stream::TokenStream, Parser}; define_scope! { - FnScope, + pub(crate) FnScope, Fn, Inheritance } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 24f4a031e4..e18662542d 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -2,10 +2,13 @@ use std::cell::Cell; use crate::SyntaxKind; -use super::{attr, define_scope, token_stream::TokenStream, Parser}; +use super::{ + attr, define_scope, expr::parse_expr, token_stream::TokenStream, type_::parse_type, Parser, +}; define_scope! { - ItemListScope, + #[doc(hidden)] + pub ItemListScope, ItemList, Override( FnKw, @@ -152,8 +155,31 @@ impl super::Parse for UseScope { define_scope! 
{ ConstScope, Const, Inheritance } impl super::Parse for ConstScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ConstKw); + + parser.set_newline_as_trivia(true); + + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected identifier", None); + return; + } + + if !parser.bump_if(SyntaxKind::Colon) { + parser.error_and_recover("expected type annotation for `const`", None); + return; + } + + if !parse_type(parser, None) { + return; + } + + if !parser.bump_if(SyntaxKind::Eq) { + parser.error_and_recover("expected `=` for const value definition", None); + return; + } + + parse_expr(parser); } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 354a7d586a..b797238677 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -382,7 +382,6 @@ impl Parser { } } -/// The current scope of parsing. pub trait ParsingScope { /// Returns the recovery method of the current scope. fn recovery_method(&self) -> &RecoveryMethod; @@ -445,14 +444,19 @@ define_scope! { } define_scope! { - RootScope, + pub RootScope, Root, Override() } macro_rules! define_scope { - ($scope_name: ident $({ $($field: ident: $ty: ty),* })?, $kind: path, Inheritance $(($($recoveries: path), *))?) => { - crate::parser::define_scope_struct! {$scope_name {$($($field: $ty), *)?}, $kind} + ( + $(#[$attrs: expr])* + $visibility: vis $scope_name: ident $({ $($field: ident: $ty: ty),* })?, + $kind: path, + Inheritance $(($($recoveries: path), *))? + ) => { + crate::parser::define_scope_struct! {$visibility $scope_name {$($($field: $ty), *)?}, $kind} impl crate::parser::ParsingScope for $scope_name { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { lazy_static::lazy_static! { @@ -472,8 +476,13 @@ macro_rules! define_scope { } }; - ($scope_name: ident $({ $($field: ident: $ty: ty),* })?, $kind: path, Override($($recoveries: path), *)) => { - crate::parser::define_scope_struct! {$scope_name {$($($field: $ty), *)?}, $kind} + ( + $(#[$attrs: expr])* + $visibility: vis $scope_name: ident $({ $($field: ident: $ty: ty),* })?, + $kind: path, + Override($($recoveries: path), *) + ) => { + crate::parser::define_scope_struct! {$visibility $scope_name {$($($field: $ty), *)?}, $kind} impl crate::parser::ParsingScope for $scope_name { fn recovery_method(&self) -> &crate::parser::RecoveryMethod { @@ -496,9 +505,14 @@ macro_rules! define_scope { } macro_rules! define_scope_struct { - ($scope_name: ident { $($field: ident: $ty: ty),* } , $kind: path) => { + ( + $(#[$attrs: expr])* + $visibility: vis $scope_name: ident { $($field: ident: $ty: ty),* }, + $kind: path + ) => { + $(#[$attrs])* #[derive(Debug, Clone)] - pub struct $scope_name { + $visibility struct $scope_name { __inner: std::rc::Rc>, $($field: $ty),* } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index b64202e7b9..b431280566 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -10,7 +10,7 @@ use super::{ Parser, }; define_scope! { - GenericParamListScope, + pub(crate) GenericParamListScope, GenericParamList, Override(Gt) } @@ -80,7 +80,7 @@ impl super::Parse for TraitBoundScope { } define_scope! { - GenericArgListScope, + pub(crate) GenericArgListScope, GenericParamList, Override(Gt, Comma) } @@ -127,7 +127,7 @@ impl super::Parse for GenericArgScope { } } -define_scope! 
{ CallArgListScope, CallArgList, Override(RParen, Comma) } +define_scope! { pub(crate) CallArgListScope, CallArgList, Override(RParen, Comma) } impl super::Parse for CallArgListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index d470aab226..075d646ef9 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -3,7 +3,8 @@ use crate::SyntaxKind; use super::{define_scope, token_stream::TokenStream, Parser}; define_scope! { - PathScope, + #[doc(hidden)] + pub PathScope, Path, Inheritance } diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 6f5867f5cf..3bd41f059d 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -6,7 +6,7 @@ use super::{ }; define_scope! { - StructScope, + pub(crate) StructScope, Struct, Inheritance } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index ce7d620f81..9b9ac5627f 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -143,6 +143,9 @@ pub enum SyntaxKind { #[token("!=")] NotEq, + /// `as`' + #[token("as")] + AsKw, /// `true' #[token("true")] TrueKw, @@ -346,8 +349,12 @@ pub enum SyntaxKind { TraitImpl, /// `const FOO: i32 = 1` Const, - /// `use foo::bar` + /// `use foo::{Foo, bar::Baz}` Use, + /// `foo::{Foo, bar::Bar` + UseTree, + /// `{foo::bar, ` + UseTreeList, /// `extern { .. }` Extern, ItemList, From 83ab580df783f4437f4702d01ea4024a9574fd31 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 18:54:50 +0100 Subject: [PATCH 031/678] Implement parser for `use` --- crates/parser2/src/parser/item.rs | 14 +- crates/parser2/src/parser/mod.rs | 1 + crates/parser2/src/parser/path.rs | 2 +- crates/parser2/src/parser/stmt.rs | 2 +- crates/parser2/src/parser/use_tree.rs | 132 +++++++++ crates/parser2/src/syntax_kind.rs | 16 +- .../test_files/syntax_node/items/use_.fe | 13 + .../test_files/syntax_node/items/use_.snap | 267 ++++++++++++++++++ crates/parser2/tests/syntax_node.rs | 38 +-- 9 files changed, 459 insertions(+), 26 deletions(-) create mode 100644 crates/parser2/src/parser/use_tree.rs create mode 100644 crates/parser2/test_files/syntax_node/items/use_.fe create mode 100644 crates/parser2/test_files/syntax_node/items/use_.snap diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index e18662542d..8637dd66f9 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -3,7 +3,8 @@ use std::cell::Cell; use crate::SyntaxKind; use super::{ - attr, define_scope, expr::parse_expr, token_stream::TokenStream, type_::parse_type, Parser, + attr, define_scope, expr::parse_expr, token_stream::TokenStream, type_::parse_type, + use_tree::UseTreeScope, Parser, }; define_scope! { @@ -31,6 +32,7 @@ impl super::Parse for ItemListScope { use crate::SyntaxKind::*; loop { + parser.set_newline_as_trivia(true); if parser.current_kind().is_none() { break; } @@ -78,6 +80,11 @@ impl super::Parse for ItemListScope { tok => parser .error_and_recover(&format! {"expected item: but got {:?}", tok}, checkpoint), } + + parser.set_newline_as_trivia(false); + if parser.current_kind().is_some() && !parser.bump_if(SyntaxKind::Newline) { + parser.error_and_recover("expected newline after item definition", checkpoint) + } } } } @@ -148,8 +155,9 @@ impl super::Parse for ImplScope { define_scope! 
{ UseScope, Use, Inheritance } impl super::Parse for UseScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::UseKw); + parser.parse(UseTreeScope::default(), None); } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index b797238677..e7a66549eb 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -22,6 +22,7 @@ pub mod path; pub mod stmt; pub mod struct_; pub mod type_; +pub mod use_tree; mod expr_atom; diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 075d646ef9..3d6361e9d7 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -6,7 +6,7 @@ define_scope! { #[doc(hidden)] pub PathScope, Path, - Inheritance + Inheritance(Colon2) } impl super::Parse for PathScope { fn parse(&mut self, parser: &mut Parser) { diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index a5b6d1148b..c152631903 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -149,7 +149,7 @@ impl super::Parse for AssignStmtScope { parser.set_newline_as_trivia(false); if parser .current_kind() - .map(|kind| is_aug_assign_kind(kind)) + .map(is_aug_assign_kind) .unwrap_or_default() { parser.bump(); diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs new file mode 100644 index 0000000000..4b7f3b0a82 --- /dev/null +++ b/crates/parser2/src/parser/use_tree.rs @@ -0,0 +1,132 @@ +use crate::SyntaxKind; + +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! { + pub(crate) UseTreeScope, + UseTree, + Inheritance +} +impl super::Parse for UseTreeScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + match parser.current_kind() { + Some(SyntaxKind::LBrace) => { + parser.parse(UseTreeListScope::default(), None); + return; + } + Some(SyntaxKind::Star) => { + parser.bump(); + return; + } + _ => {} + } + + parser.parse(UsePathScope::default(), None); + + if !parser.bump_if(SyntaxKind::Colon2) { + if parser.current_kind() == Some(SyntaxKind::AsKw) { + parser.parse(UseTreeRenameScope::default(), None); + } + return; + } + + match parser.current_kind() { + Some(SyntaxKind::LBrace) => { + parser.parse(UseTreeListScope::default(), None); + } + Some(SyntaxKind::Star) => { + parser.bump(); + } + _ => { + parser.error_and_recover("expected identifier or `self`", None); + } + }; + } +} + +define_scope! { + UseTreeListScope, + UseTreeList, + Override(Comma, RBrace) +} +impl super::Parse for UseTreeListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + parser.parse(UseTreeScope::default(), None); + + while parser.bump_if(SyntaxKind::Comma) { + parser.parse(UseTreeScope::default(), None); + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_recover("expected `}`", None); + } + } +} + +define_scope! 
{ + UsePathScope, + UsePath, + Inheritance(Colon2) +} +impl super::Parse for UsePathScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + parser.parse(UsePathSegmentScope::default(), None); + + loop { + parser.start_dry_run(); + let is_path_segment = parser.bump_if(SyntaxKind::Colon2) + && parser.parse(UsePathSegmentScope::default(), None).0; + parser.end_dry_run(); + if is_path_segment { + parser.bump_expected(SyntaxKind::Colon2); + parser.parse(UsePathSegmentScope::default(), None); + } else { + break; + } + } + } +} + +define_scope! { + UsePathSegmentScope, + UsePathSegment, + Inheritance +} +impl super::Parse for UsePathSegmentScope { + fn parse(&mut self, parser: &mut Parser) { + match parser.current_kind() { + Some(kind) if is_use_path_segment(kind) => { + parser.bump(); + } + _ => { + parser.error_and_recover("expected identifier or `self`", None); + } + } + } +} + +define_scope! { + UseTreeRenameScope, + UseTreeRename, + Inheritance +} +impl super::Parse for UseTreeRenameScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::AsKw); + + match parser.current_kind() { + Some(SyntaxKind::Ident) => parser.bump_expected(SyntaxKind::Ident), + Some(SyntaxKind::Underscore) => parser.bump_expected(SyntaxKind::Underscore), + _ => parser.error_and_recover("expected identifier or `_`", None), + }; + } +} + +fn is_use_path_segment(kind: SyntaxKind) -> bool { + use SyntaxKind::*; + matches!(kind, Ident | SelfKw) +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 9b9ac5627f..9cf849f7d7 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -16,7 +16,7 @@ pub enum SyntaxKind { /// `foo` #[regex("[a-zA-Z_][a-zA-Z0-9_]*")] Ident, - /// `1` or `0b1010` or `0o77` or `0xff` + /// `1`, `0b1010`, `0o77`, `0xff` #[regex("[0-9]+(?:_[0-9]+)*")] #[regex("0[bB][0-1]+")] #[regex("0[oO][0-7]+")] @@ -257,7 +257,7 @@ pub enum SyntaxKind { RecordFieldList, /// `x: 1` RecordField, - /// `foo.bar` or `foo.0` + /// `foo.bar`, `foo.0` FieldExpr, /// `foo[1]` IndexExpr, @@ -349,12 +349,18 @@ pub enum SyntaxKind { TraitImpl, /// `const FOO: i32 = 1` Const, - /// `use foo::{Foo, bar::Baz}` + /// `use foo::{Foo as Foo1, bar::Baz}` Use, - /// `foo::{Foo, bar::Bar` + /// `foo::{Foo as Foo1, bar::Baz}` UseTree, - /// `{foo::bar, ` + /// `{Foo as Foo1, bar::Baz}` UseTreeList, + /// `Foo::Bar`, `Foo::*`,`*`. + UsePath, + /// `Foo`, `self` + UsePathSegment, + /// `as Foo` + UseTreeRename, /// `extern { .. 
}` Extern, ItemList, diff --git a/crates/parser2/test_files/syntax_node/items/use_.fe b/crates/parser2/test_files/syntax_node/items/use_.fe new file mode 100644 index 0000000000..5975bd9edd --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/use_.fe @@ -0,0 +1,13 @@ +use Foo::Bar +pub use Foo::Bar +use Foo::* +use Foo::Bar as Bar1 +use Foo::Trait as _ + +use Foo::{Foo, Bar} +use Foo::{self, Bar} +use Foo::{self, Bar as Bar1} +use Foo::{self as self_, Bar::{Bar as _, Baz}, *} + +use {Foo::Bar as Bar1, Bar::Bar as Bar2, Baz::Bar as Bar3, Trait::T} +use * \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/use_.snap b/crates/parser2/test_files/syntax_node/items/use_.snap new file mode 100644 index 0000000000..367ef8d7f4 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/use_.snap @@ -0,0 +1,267 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..278 + ItemList@0..278 + Use@0..12 + ItemModifier@0..0 + UseKw@0..3 "use" + WhiteSpace@3..4 " " + UseTree@4..12 + UsePath@4..12 + UsePathSegment@4..7 + Ident@4..7 "Foo" + Colon2@7..9 "::" + UsePathSegment@9..12 + Ident@9..12 "Bar" + Newline@12..13 "\n" + Use@13..29 + ItemModifier@13..16 + PubKw@13..16 "pub" + WhiteSpace@16..17 " " + UseKw@17..20 "use" + WhiteSpace@20..21 " " + UseTree@21..29 + UsePath@21..29 + UsePathSegment@21..24 + Ident@21..24 "Foo" + Colon2@24..26 "::" + UsePathSegment@26..29 + Ident@26..29 "Bar" + Newline@29..30 "\n" + Use@30..40 + ItemModifier@30..30 + UseKw@30..33 "use" + WhiteSpace@33..34 " " + UseTree@34..40 + UsePath@34..37 + UsePathSegment@34..37 + Ident@34..37 "Foo" + Colon2@37..39 "::" + Star@39..40 "*" + Newline@40..41 "\n" + Use@41..61 + ItemModifier@41..41 + UseKw@41..44 "use" + WhiteSpace@44..45 " " + UseTree@45..61 + UsePath@45..53 + UsePathSegment@45..48 + Ident@45..48 "Foo" + Colon2@48..50 "::" + UsePathSegment@50..53 + Ident@50..53 "Bar" + WhiteSpace@53..54 " " + UseTreeRename@54..61 + AsKw@54..56 "as" + WhiteSpace@56..57 " " + Ident@57..61 "Bar1" + Newline@61..62 "\n" + Use@62..81 + ItemModifier@62..62 + UseKw@62..65 "use" + WhiteSpace@65..66 " " + UseTree@66..81 + UsePath@66..76 + UsePathSegment@66..69 + Ident@66..69 "Foo" + Colon2@69..71 "::" + UsePathSegment@71..76 + Ident@71..76 "Trait" + WhiteSpace@76..77 " " + UseTreeRename@77..81 + AsKw@77..79 "as" + WhiteSpace@79..80 " " + Underscore@80..81 "_" + Newline@81..83 "\n\n" + Use@83..102 + ItemModifier@83..83 + UseKw@83..86 "use" + WhiteSpace@86..87 " " + UseTree@87..102 + UsePath@87..90 + UsePathSegment@87..90 + Ident@87..90 "Foo" + Colon2@90..92 "::" + UseTreeList@92..102 + LBrace@92..93 "{" + UseTree@93..96 + UsePath@93..96 + UsePathSegment@93..96 + Ident@93..96 "Foo" + Comma@96..97 "," + WhiteSpace@97..98 " " + UseTree@98..101 + UsePath@98..101 + UsePathSegment@98..101 + Ident@98..101 "Bar" + RBrace@101..102 "}" + Newline@102..103 "\n" + Use@103..123 + ItemModifier@103..103 + UseKw@103..106 "use" + WhiteSpace@106..107 " " + UseTree@107..123 + UsePath@107..110 + UsePathSegment@107..110 + Ident@107..110 "Foo" + Colon2@110..112 "::" + UseTreeList@112..123 + LBrace@112..113 "{" + UseTree@113..117 + UsePath@113..117 + UsePathSegment@113..117 + SelfKw@113..117 "self" + Comma@117..118 "," + WhiteSpace@118..119 " " + UseTree@119..122 + UsePath@119..122 + UsePathSegment@119..122 + Ident@119..122 "Bar" + RBrace@122..123 "}" + Newline@123..124 "\n" + Use@124..152 + ItemModifier@124..124 + UseKw@124..127 "use" + WhiteSpace@127..128 " " + UseTree@128..152 + UsePath@128..131 + 
UsePathSegment@128..131 + Ident@128..131 "Foo" + Colon2@131..133 "::" + UseTreeList@133..152 + LBrace@133..134 "{" + UseTree@134..138 + UsePath@134..138 + UsePathSegment@134..138 + SelfKw@134..138 "self" + Comma@138..139 "," + WhiteSpace@139..140 " " + UseTree@140..151 + UsePath@140..143 + UsePathSegment@140..143 + Ident@140..143 "Bar" + WhiteSpace@143..144 " " + UseTreeRename@144..151 + AsKw@144..146 "as" + WhiteSpace@146..147 " " + Ident@147..151 "Bar1" + RBrace@151..152 "}" + Newline@152..153 "\n" + Use@153..202 + ItemModifier@153..153 + UseKw@153..156 "use" + WhiteSpace@156..157 " " + UseTree@157..202 + UsePath@157..160 + UsePathSegment@157..160 + Ident@157..160 "Foo" + Colon2@160..162 "::" + UseTreeList@162..202 + LBrace@162..163 "{" + UseTree@163..176 + UsePath@163..167 + UsePathSegment@163..167 + SelfKw@163..167 "self" + WhiteSpace@167..168 " " + UseTreeRename@168..176 + AsKw@168..170 "as" + WhiteSpace@170..171 " " + Ident@171..176 "self_" + Comma@176..177 "," + WhiteSpace@177..178 " " + UseTree@178..198 + UsePath@178..181 + UsePathSegment@178..181 + Ident@178..181 "Bar" + Colon2@181..183 "::" + UseTreeList@183..198 + LBrace@183..184 "{" + UseTree@184..192 + UsePath@184..187 + UsePathSegment@184..187 + Ident@184..187 "Bar" + WhiteSpace@187..188 " " + UseTreeRename@188..192 + AsKw@188..190 "as" + WhiteSpace@190..191 " " + Underscore@191..192 "_" + Comma@192..193 "," + WhiteSpace@193..194 " " + UseTree@194..197 + UsePath@194..197 + UsePathSegment@194..197 + Ident@194..197 "Baz" + RBrace@197..198 "}" + Comma@198..199 "," + WhiteSpace@199..200 " " + UseTree@200..201 + Star@200..201 "*" + RBrace@201..202 "}" + Newline@202..204 "\n\n" + Use@204..272 + ItemModifier@204..204 + UseKw@204..207 "use" + WhiteSpace@207..208 " " + UseTree@208..272 + UseTreeList@208..272 + LBrace@208..209 "{" + UseTree@209..225 + UsePath@209..217 + UsePathSegment@209..212 + Ident@209..212 "Foo" + Colon2@212..214 "::" + UsePathSegment@214..217 + Ident@214..217 "Bar" + WhiteSpace@217..218 " " + UseTreeRename@218..225 + AsKw@218..220 "as" + WhiteSpace@220..221 " " + Ident@221..225 "Bar1" + Comma@225..226 "," + WhiteSpace@226..227 " " + UseTree@227..243 + UsePath@227..235 + UsePathSegment@227..230 + Ident@227..230 "Bar" + Colon2@230..232 "::" + UsePathSegment@232..235 + Ident@232..235 "Bar" + WhiteSpace@235..236 " " + UseTreeRename@236..243 + AsKw@236..238 "as" + WhiteSpace@238..239 " " + Ident@239..243 "Bar2" + Comma@243..244 "," + WhiteSpace@244..245 " " + UseTree@245..261 + UsePath@245..253 + UsePathSegment@245..248 + Ident@245..248 "Baz" + Colon2@248..250 "::" + UsePathSegment@250..253 + Ident@250..253 "Bar" + WhiteSpace@253..254 " " + UseTreeRename@254..261 + AsKw@254..256 "as" + WhiteSpace@256..257 " " + Ident@257..261 "Bar3" + Comma@261..262 "," + WhiteSpace@262..263 " " + UseTree@263..271 + UsePath@263..271 + UsePathSegment@263..268 + Ident@263..268 "Trait" + Colon2@268..270 "::" + UsePathSegment@270..271 + Ident@270..271 "T" + RBrace@271..272 "}" + Newline@272..273 "\n" + Use@273..278 + ItemModifier@273..273 + UseKw@273..276 "use" + WhiteSpace@276..277 " " + UseTree@277..278 + Star@277..278 "*" + diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index fb838522ca..032e96eff4 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -7,6 +7,11 @@ use fe_parser2::{ SyntaxKind, }; +fe_compiler_test_utils::build_debug_snap_tests! 
{ + "parser2/test_files/syntax_node/structs", + "parser2/test_files/syntax_node/structs", + test_item_list +} fn test_item_list(input: &str) -> SyntaxNode { let runner = TestRunner::new(|parser| { while parser.current_kind().is_some() { @@ -15,12 +20,12 @@ fn test_item_list(input: &str) -> SyntaxNode { }); runner.run(input) } + fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/structs", - "parser2/test_files/syntax_node/structs", - test_item_list + "parser2/test_files/syntax_node/pats", + "parser2/test_files/syntax_node/pats", + test_pat } - fn test_pat(input: &str) -> SyntaxNode { let runner = TestRunner::new(|parser| { while parser.current_kind().is_some() { @@ -29,12 +34,12 @@ fn test_pat(input: &str) -> SyntaxNode { }); runner.run(input) } + fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/pats", - "parser2/test_files/syntax_node/pats", - test_pat + "parser2/test_files/syntax_node/exprs", + "parser2/test_files/syntax_node/exprs", + test_expr } - fn test_expr(input: &str) -> SyntaxNode { let runner = TestRunner::new(|parser| { parser.set_newline_as_trivia(false); @@ -50,9 +55,9 @@ fn test_expr(input: &str) -> SyntaxNode { } fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/exprs", - "parser2/test_files/syntax_node/exprs", - test_expr + "parser2/test_files/syntax_node/stmts", + "parser2/test_files/syntax_node/stmts", + test_stmt } fn test_stmt(input: &str) -> SyntaxNode { @@ -68,11 +73,12 @@ fn test_stmt(input: &str) -> SyntaxNode { }); runner.run(input) } -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/stmts", - "parser2/test_files/syntax_node/stmts", - test_stmt -} + +fe_compiler_test_utils::build_debug_snap_tests!( + "parser2/test_files/syntax_node/items", + "parser2/test_files/syntax_node/items", + test_item_list +); struct TestRunner where From d335afa238c0cf057e5ac81aa484014943fb1908 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 20:07:24 +0100 Subject: [PATCH 032/678] Add parser for `contract` --- crates/parser2/src/parser/item.rs | 25 +++++++- crates/parser2/src/parser/struct_.rs | 18 +++--- crates/parser2/src/syntax_kind.rs | 7 ++- .../test_files/syntax_node/items/contract.fe | 7 +++ .../syntax_node/items/contract.snap | 62 +++++++++++++++++++ .../test_files/syntax_node/structs/attr.snap | 6 +- .../test_files/syntax_node/structs/empty.snap | 2 +- .../syntax_node/structs/generics.snap | 24 +++---- .../syntax_node/structs/tupel_field.snap | 8 +-- 9 files changed, 125 insertions(+), 34 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/contract.fe create mode 100644 crates/parser2/test_files/syntax_node/items/contract.snap diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 8637dd66f9..b3c6a43fe0 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -3,8 +3,8 @@ use std::cell::Cell; use crate::SyntaxKind; use super::{ - attr, define_scope, expr::parse_expr, token_stream::TokenStream, type_::parse_type, - use_tree::UseTreeScope, Parser, + attr, define_scope, expr::parse_expr, struct_::RecordFieldDefListScope, + token_stream::TokenStream, type_::parse_type, use_tree::UseTreeScope, Parser, }; define_scope! { @@ -14,6 +14,7 @@ define_scope! 
{ Override( FnKw, StructKw, + ContractKw, EnumKw, TraitKw, ImplKw, @@ -56,6 +57,9 @@ impl super::Parse for ItemListScope { Some(StructKw) => { parser.parse(super::struct_::StructScope::default(), checkpoint); } + Some(ContractKw) => { + parser.parse(ContractScope::default(), checkpoint); + } Some(EnumKw) => { parser.parse(EnumScope::default(), checkpoint); } @@ -132,6 +136,23 @@ impl ModifierKind { } } +define_scope! { ContractScope, Contract, Inheritance } +impl super::Parse for ContractScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ContractKw); + + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the struct name", None) + } + + if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(RecordFieldDefListScope::default(), None); + } else { + parser.error_and_recover("expected contract field definition", None); + } + } +} + define_scope! { EnumScope, Enum, Inheritance } impl super::Parse for EnumScope { fn parse(&mut self, _parser: &mut Parser) { diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 3bd41f059d..86b37c63ff 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -23,22 +23,22 @@ impl super::Parse for StructScope { } if parser.current_kind() == Some(SyntaxKind::LBrace) { - parser.parse(StructFieldDefListScope::default(), None); + parser.parse(RecordFieldDefListScope::default(), None); } else { - parser.error_and_recover("expected the struct field definition", None); + parser.error_and_recover("expected struct field definition", None); } } } define_scope! { - StructFieldDefListScope, - StructFieldDefList, + pub(crate) RecordFieldDefListScope, + RecordFieldDefList, Override( RBrace, Newline ) } -impl super::Parse for StructFieldDefListScope { +impl super::Parse for RecordFieldDefListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); @@ -48,7 +48,7 @@ impl super::Parse for StructFieldDefListScope { { break; } - parser.parse(StructFieldDefScope::default(), None); + parser.parse(RecordFieldDefScope::default(), None); parser.set_newline_as_trivia(false); if !parser.bump_if(SyntaxKind::Newline) && parser.current_kind() != Some(SyntaxKind::RBrace) @@ -67,11 +67,11 @@ impl super::Parse for StructFieldDefListScope { } define_scope! { - StructFieldDefScope, - StructFieldDef, + RecordFieldDefScope, + RecordFieldDef, Inheritance } -impl super::Parse for StructFieldDefScope { +impl super::Parse for RecordFieldDefScope { fn parse(&mut self, parser: &mut Parser) { parse_attr_list(parser); diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 9cf849f7d7..3dfc382f80 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -336,7 +336,7 @@ pub enum SyntaxKind { /// `struct Foo { .. }` Struct, /// `contract Foo { .. }` - ContractDef, + Contract, /// `enum Foo { .. 
}` Enum, /// `type Foo = i32` @@ -400,8 +400,9 @@ pub enum SyntaxKind { Visibility, /// `x: i32` - StructFieldDef, - StructFieldDefList, + RecordFieldDef, + /// `{x: i32, y: u32}` + RecordFieldDefList, VariantDef, VariantDefList, diff --git a/crates/parser2/test_files/syntax_node/items/contract.fe b/crates/parser2/test_files/syntax_node/items/contract.fe new file mode 100644 index 0000000000..ef29266b0a --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/contract.fe @@ -0,0 +1,7 @@ +contract Empty {} + +pub contract C { + x: i32 + y: u256 + z: MyStruct::Encodable +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/contract.snap b/crates/parser2/test_files/syntax_node/items/contract.snap new file mode 100644 index 0000000000..e32cada52a --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/contract.snap @@ -0,0 +1,62 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..87 + ItemList@0..87 + Contract@0..17 + ItemModifier@0..0 + ContractKw@0..8 "contract" + WhiteSpace@8..9 " " + Ident@9..14 "Empty" + WhiteSpace@14..15 " " + RecordFieldDefList@15..17 + LBrace@15..16 "{" + RBrace@16..17 "}" + Newline@17..19 "\n\n" + Contract@19..87 + ItemModifier@19..22 + PubKw@19..22 "pub" + WhiteSpace@22..23 " " + ContractKw@23..31 "contract" + WhiteSpace@31..32 " " + Ident@32..33 "C" + WhiteSpace@33..34 " " + RecordFieldDefList@34..87 + LBrace@34..35 "{" + Newline@35..36 "\n" + WhiteSpace@36..40 " " + RecordFieldDef@40..46 + Ident@40..41 "x" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "i32" + Newline@46..47 "\n" + WhiteSpace@47..51 " " + RecordFieldDef@51..58 + Ident@51..52 "y" + Colon@52..53 ":" + WhiteSpace@53..54 " " + PathType@54..58 + Path@54..58 + PathSegment@54..58 + Ident@54..58 "u256" + Newline@58..59 "\n" + WhiteSpace@59..63 " " + RecordFieldDef@63..85 + Ident@63..64 "z" + Colon@64..65 ":" + WhiteSpace@65..66 " " + PathType@66..85 + Path@66..85 + PathSegment@66..74 + Ident@66..74 "MyStruct" + Colon2@74..76 "::" + PathSegment@76..85 + Ident@76..85 "Encodable" + Newline@85..86 "\n" + RBrace@86..87 "}" + diff --git a/crates/parser2/test_files/syntax_node/structs/attr.snap b/crates/parser2/test_files/syntax_node/structs/attr.snap index 91b53df1eb..ddacf555a7 100644 --- a/crates/parser2/test_files/syntax_node/structs/attr.snap +++ b/crates/parser2/test_files/syntax_node/structs/attr.snap @@ -25,11 +25,11 @@ Root@0..170 WhiteSpace@66..67 " " Ident@67..77 "StructAttr" WhiteSpace@77..78 " " - StructFieldDefList@78..170 + RecordFieldDefList@78..170 LBrace@78..79 "{" Newline@79..80 "\n" WhiteSpace@80..84 " " - StructFieldDef@84..115 + RecordFieldDef@84..115 AttrList@84..100 DocCommentAttr@84..99 DocComment@84..99 "/// This is `x`" @@ -47,7 +47,7 @@ Root@0..170 Ident@112..115 "Bar" Newline@115..116 "\n" WhiteSpace@116..120 " " - StructFieldDef@120..168 + RecordFieldDef@120..168 AttrList@120..158 DocCommentAttr@120..135 DocComment@120..135 "/// This is `y`" diff --git a/crates/parser2/test_files/syntax_node/structs/empty.snap b/crates/parser2/test_files/syntax_node/structs/empty.snap index 13c29b9854..fa786daf5c 100644 --- a/crates/parser2/test_files/syntax_node/structs/empty.snap +++ b/crates/parser2/test_files/syntax_node/structs/empty.snap @@ -12,7 +12,7 @@ Root@0..26 WhiteSpace@10..11 " " Ident@11..22 "EmptyStruct" WhiteSpace@22..23 " " - StructFieldDefList@23..26 + RecordFieldDefList@23..26 LBrace@23..24 "{" Newline@24..25 "\n" RBrace@25..26 "}" 
diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index 5b120748b7..d32c6b0d76 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -25,11 +25,11 @@ Root@0..312 Ident@40..41 "U" Gt@41..42 ">" WhiteSpace@42..43 " " - StructFieldDefList@43..73 + RecordFieldDefList@43..73 LBrace@43..44 "{" Newline@44..45 "\n" WhiteSpace@45..49 " " - StructFieldDef@49..53 + RecordFieldDef@49..53 Ident@49..50 "x" Colon@50..51 ":" WhiteSpace@51..52 " " @@ -39,7 +39,7 @@ Root@0..312 Ident@52..53 "S" Newline@53..54 "\n" WhiteSpace@54..58 " " - StructFieldDef@58..62 + RecordFieldDef@58..62 Ident@58..59 "y" Colon@59..60 ":" WhiteSpace@60..61 " " @@ -49,7 +49,7 @@ Root@0..312 Ident@61..62 "T" Newline@62..63 "\n" WhiteSpace@63..67 " " - StructFieldDef@67..71 + RecordFieldDef@67..71 Ident@67..68 "z" Colon@68..69 ":" WhiteSpace@69..70 " " @@ -98,11 +98,11 @@ Root@0..312 Newline@143..144 "\n" Gt@144..145 ">" WhiteSpace@145..146 " " - StructFieldDefList@146..185 + RecordFieldDefList@146..185 LBrace@146..147 "{" Newline@147..148 "\n" WhiteSpace@148..152 " " - StructFieldDef@152..165 + RecordFieldDef@152..165 Ident@152..153 "x" Colon@153..154 ":" WhiteSpace@154..155 " " @@ -125,7 +125,7 @@ Root@0..312 RParen@164..165 ")" Newline@165..166 "\n" WhiteSpace@166..170 " " - StructFieldDef@170..174 + RecordFieldDef@170..174 Ident@170..171 "y" Colon@171..172 ":" WhiteSpace@172..173 " " @@ -135,7 +135,7 @@ Root@0..312 Ident@173..174 "T" Newline@174..175 "\n" WhiteSpace@175..179 " " - StructFieldDef@179..183 + RecordFieldDef@179..183 Ident@179..180 "z" Colon@180..181 ":" WhiteSpace@181..182 " " @@ -202,11 +202,11 @@ Root@0..312 Newline@279..280 "\n" Gt@280..281 ">" WhiteSpace@281..282 " " - StructFieldDefList@282..312 + RecordFieldDefList@282..312 LBrace@282..283 "{" Newline@283..284 "\n" WhiteSpace@284..288 " " - StructFieldDef@288..292 + RecordFieldDef@288..292 Ident@288..289 "x" Colon@289..290 ":" WhiteSpace@290..291 " " @@ -216,7 +216,7 @@ Root@0..312 Ident@291..292 "S" Newline@292..293 "\n" WhiteSpace@293..297 " " - StructFieldDef@297..301 + RecordFieldDef@297..301 Ident@297..298 "y" Colon@298..299 ":" WhiteSpace@299..300 " " @@ -226,7 +226,7 @@ Root@0..312 Ident@300..301 "T" Newline@301..302 "\n" WhiteSpace@302..306 " " - StructFieldDef@306..310 + RecordFieldDef@306..310 Ident@306..307 "z" Colon@307..308 ":" WhiteSpace@308..309 " " diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap index 5a289d2fb1..7ceb8105a5 100644 --- a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap @@ -10,11 +10,11 @@ Root@0..117 WhiteSpace@6..7 " " Ident@7..27 "StructWithTupleField" WhiteSpace@27..28 " " - StructFieldDefList@28..117 + RecordFieldDefList@28..117 LBrace@28..29 "{" Newline@29..30 "\n" WhiteSpace@30..34 " " - StructFieldDef@34..47 + RecordFieldDef@34..47 Ident@34..35 "x" Colon@35..36 ":" WhiteSpace@36..37 " " @@ -33,7 +33,7 @@ Root@0..117 RParen@46..47 ")" Newline@47..48 "\n" WhiteSpace@48..52 " " - StructFieldDef@52..105 + RecordFieldDef@52..105 Ident@52..53 "y" Colon@53..54 ":" WhiteSpace@54..55 " " @@ -67,7 +67,7 @@ Root@0..117 RParen@104..105 ")" Newline@105..106 "\n" WhiteSpace@106..110 " " - StructFieldDef@110..115 + RecordFieldDef@110..115 Ident@110..111 "z" Colon@111..112 ":" 
WhiteSpace@112..113 " " From 78c2464a9f77e1c3a129caefe640a83649dab8b9 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 21:03:28 +0100 Subject: [PATCH 033/678] Implement parser for `const` --- crates/parser2/src/parser/expr_atom.rs | 2 + crates/parser2/src/parser/item.rs | 13 +- .../test_files/syntax_node/exprs/if_.snap | 146 +++++++++--------- .../test_files/syntax_node/items/const_.fe | 13 ++ .../test_files/syntax_node/items/const_.snap | 132 ++++++++++++++++ .../test_files/syntax_node/stmts/let_.snap | 27 ++-- 6 files changed, 239 insertions(+), 94 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/const_.fe create mode 100644 crates/parser2/test_files/syntax_node/items/const_.snap diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 7cbeb80427..aac3353576 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -108,7 +108,9 @@ impl super::Parse for IfExprScope { Some(SyntaxKind::LBrace | SyntaxKind::IfKw) ) { parser.error_and_recover("expected `{` or `if` after `else`", None); + return; } + parse_expr(parser); } } } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index b3c6a43fe0..5979109a81 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -187,21 +187,20 @@ impl super::Parse for ConstScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ConstKw); - parser.set_newline_as_trivia(true); + parser.set_newline_as_trivia(false); + parser.add_recovery_token(SyntaxKind::Colon); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected identifier", None); - return; } + parser.remove_recovery_token(SyntaxKind::Colon); + parser.add_recovery_token(SyntaxKind::Eq); if !parser.bump_if(SyntaxKind::Colon) { parser.error_and_recover("expected type annotation for `const`", None); - return; - } - - if !parse_type(parser, None) { - return; } + parse_type(parser, None); + parser.remove_recovery_token(SyntaxKind::Eq); if !parser.bump_if(SyntaxKind::Eq) { parser.error_and_recover("expected `=` for const value definition", None); diff --git a/crates/parser2/test_files/syntax_node/exprs/if_.snap b/crates/parser2/test_files/syntax_node/exprs/if_.snap index 3b560b3ca4..1513eb3dd8 100644 --- a/crates/parser2/test_files/syntax_node/exprs/if_.snap +++ b/crates/parser2/test_files/syntax_node/exprs/if_.snap @@ -3,7 +3,7 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- Root@0..279 - IfExpr@0..12 + IfExpr@0..15 IfKw@0..2 "if" WhiteSpace@2..3 " " Path@3..4 @@ -15,12 +15,12 @@ Root@0..279 RBrace@6..7 "}" WhiteSpace@7..8 " " ElseKw@8..12 "else" - WhiteSpace@12..13 " " - BlockExpr@13..15 - LBrace@13..14 "{" - RBrace@14..15 "}" + WhiteSpace@12..13 " " + BlockExpr@13..15 + LBrace@13..14 "{" + RBrace@14..15 "}" Newline@15..17 "\n\n" - IfExpr@17..29 + IfExpr@17..53 IfKw@17..19 "if" WhiteSpace@19..20 " " Path@20..21 @@ -32,33 +32,33 @@ Root@0..279 RBrace@23..24 "}" WhiteSpace@24..25 " " ElseKw@25..29 "else" - WhiteSpace@29..30 " " - BlockExpr@30..53 - LBrace@30..31 "{" - Newline@31..32 "\n" - WhiteSpace@32..36 " " - LetStmt@36..45 - LetKw@36..39 "let" - WhiteSpace@39..40 " " - PathPat@40..41 - Path@40..41 - PathSegment@40..41 - Ident@40..41 "x" - WhiteSpace@41..42 " " - Eq@42..43 "=" - WhiteSpace@43..44 " " - LitExpr@44..45 - Int@44..45 "1" - Newline@45..46 "\n" - WhiteSpace@46..50 " " - ExprStmt@50..51 - Path@50..51 - PathSegment@50..51 - 
Ident@50..51 "x" - Newline@51..52 "\n" - RBrace@52..53 "}" + WhiteSpace@29..30 " " + BlockExpr@30..53 + LBrace@30..31 "{" + Newline@31..32 "\n" + WhiteSpace@32..36 " " + LetStmt@36..45 + LetKw@36..39 "let" + WhiteSpace@39..40 " " + PathPat@40..41 + Path@40..41 + PathSegment@40..41 + Ident@40..41 "x" + WhiteSpace@41..42 " " + Eq@42..43 "=" + WhiteSpace@43..44 " " + LitExpr@44..45 + Int@44..45 "1" + Newline@45..46 "\n" + WhiteSpace@46..50 " " + ExprStmt@50..51 + Path@50..51 + PathSegment@50..51 + Ident@50..51 "x" + Newline@51..52 "\n" + RBrace@52..53 "}" Newline@53..55 "\n\n" - IfExpr@55..88 + IfExpr@55..91 IfKw@55..57 "if" WhiteSpace@57..58 " " Path@58..59 @@ -91,10 +91,10 @@ Root@0..279 RBrace@82..83 "}" WhiteSpace@83..84 " " ElseKw@84..88 "else" - WhiteSpace@88..89 " " - BlockExpr@89..91 - LBrace@89..90 "{" - RBrace@90..91 "}" + WhiteSpace@88..89 " " + BlockExpr@89..91 + LBrace@89..90 "{" + RBrace@90..91 "}" Newline@91..93 "\n\n" IfExpr@93..121 IfKw@93..95 "if" @@ -128,7 +128,7 @@ Root@0..279 Newline@119..120 "\n" RBrace@120..121 "}" Newline@121..123 "\n\n" - IfExpr@123..156 + IfExpr@123..180 IfKw@123..125 "if" WhiteSpace@125..126 " " Path@126..127 @@ -161,33 +161,33 @@ Root@0..279 RBrace@150..151 "}" WhiteSpace@151..152 " " ElseKw@152..156 "else" - WhiteSpace@156..157 " " - BlockExpr@157..180 - LBrace@157..158 "{" - Newline@158..159 "\n" - WhiteSpace@159..163 " " - LetStmt@163..172 - LetKw@163..166 "let" - WhiteSpace@166..167 " " - PathPat@167..168 - Path@167..168 - PathSegment@167..168 - Ident@167..168 "y" - WhiteSpace@168..169 " " - Eq@169..170 "=" - WhiteSpace@170..171 " " - LitExpr@171..172 - Int@171..172 "1" - Newline@172..173 "\n" - WhiteSpace@173..177 " " - ExprStmt@177..178 - Path@177..178 - PathSegment@177..178 - Ident@177..178 "y" - Newline@178..179 "\n" - RBrace@179..180 "}" + WhiteSpace@156..157 " " + BlockExpr@157..180 + LBrace@157..158 "{" + Newline@158..159 "\n" + WhiteSpace@159..163 " " + LetStmt@163..172 + LetKw@163..166 "let" + WhiteSpace@166..167 " " + PathPat@167..168 + Path@167..168 + PathSegment@167..168 + Ident@167..168 "y" + WhiteSpace@168..169 " " + Eq@169..170 "=" + WhiteSpace@170..171 " " + LitExpr@171..172 + Int@171..172 "1" + Newline@172..173 "\n" + WhiteSpace@173..177 " " + ExprStmt@177..178 + Path@177..178 + PathSegment@177..178 + Ident@177..178 "y" + Newline@178..179 "\n" + RBrace@179..180 "}" Newline@180..182 "\n\n" - IfExpr@182..269 + IfExpr@182..279 IfKw@182..184 "if" WhiteSpace@184..185 " " MatchExpr@185..248 @@ -243,14 +243,14 @@ Root@0..279 RBrace@263..264 "}" WhiteSpace@264..265 " " ElseKw@265..269 "else" - WhiteSpace@269..270 " " - BlockExpr@270..279 - LBrace@270..271 "{" - Newline@271..272 "\n" - WhiteSpace@272..276 " " - ExprStmt@276..277 - LitExpr@276..277 - Int@276..277 "1" - Newline@277..278 "\n" - RBrace@278..279 "}" + WhiteSpace@269..270 " " + BlockExpr@270..279 + LBrace@270..271 "{" + Newline@271..272 "\n" + WhiteSpace@272..276 " " + ExprStmt@276..277 + LitExpr@276..277 + Int@276..277 "1" + Newline@277..278 "\n" + RBrace@278..279 "}" diff --git a/crates/parser2/test_files/syntax_node/items/const_.fe b/crates/parser2/test_files/syntax_node/items/const_.fe new file mode 100644 index 0000000000..16aaad59fd --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/const_.fe @@ -0,0 +1,13 @@ +pub const FOO: i32 = 1 + +const BAR: u256 = { + let b = true + let x = 1 + if b { + 1 + } else if x == 1 { + 2 + } else { + 3 + } +} diff --git a/crates/parser2/test_files/syntax_node/items/const_.snap 
b/crates/parser2/test_files/syntax_node/items/const_.snap new file mode 100644 index 0000000000..c0068ff052 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/const_.snap @@ -0,0 +1,132 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..160 + ItemList@0..160 + Const@0..22 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + ConstKw@4..9 "const" + WhiteSpace@9..10 " " + Ident@10..13 "FOO" + Colon@13..14 ":" + WhiteSpace@14..15 " " + PathType@15..18 + Path@15..18 + PathSegment@15..18 + Ident@15..18 "i32" + WhiteSpace@18..19 " " + Eq@19..20 "=" + WhiteSpace@20..21 " " + LitExpr@21..22 + Int@21..22 "1" + Newline@22..24 "\n\n" + Const@24..159 + ItemModifier@24..24 + ConstKw@24..29 "const" + WhiteSpace@29..30 " " + Ident@30..33 "BAR" + Colon@33..34 ":" + WhiteSpace@34..35 " " + PathType@35..39 + Path@35..39 + PathSegment@35..39 + Ident@35..39 "u256" + WhiteSpace@39..40 " " + Eq@40..41 "=" + WhiteSpace@41..42 " " + BlockExpr@42..159 + LBrace@42..43 "{" + Newline@43..44 "\n" + WhiteSpace@44..48 " " + LetStmt@48..60 + LetKw@48..51 "let" + WhiteSpace@51..52 " " + PathPat@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "b" + WhiteSpace@53..54 " " + Eq@54..55 "=" + WhiteSpace@55..56 " " + LitExpr@56..60 + TrueKw@56..60 "true" + Newline@60..61 "\n" + WhiteSpace@61..65 " " + LetStmt@65..74 + LetKw@65..68 "let" + WhiteSpace@68..69 " " + PathPat@69..70 + Path@69..70 + PathSegment@69..70 + Ident@69..70 "x" + WhiteSpace@70..71 " " + Eq@71..72 "=" + WhiteSpace@72..73 " " + LitExpr@73..74 + Int@73..74 "1" + Newline@74..75 "\n" + WhiteSpace@75..79 " " + ExprStmt@79..157 + IfExpr@79..157 + IfKw@79..81 "if" + WhiteSpace@81..82 " " + Path@82..83 + PathSegment@82..83 + Ident@82..83 "b" + WhiteSpace@83..84 " " + BlockExpr@84..101 + LBrace@84..85 "{" + Newline@85..86 "\n" + WhiteSpace@86..94 " " + ExprStmt@94..95 + LitExpr@94..95 + Int@94..95 "1" + Newline@95..96 "\n" + WhiteSpace@96..100 " " + RBrace@100..101 "}" + WhiteSpace@101..102 " " + ElseKw@102..106 "else" + WhiteSpace@106..107 " " + IfExpr@107..157 + IfKw@107..109 "if" + WhiteSpace@109..110 " " + BinExpr@110..116 + Path@110..111 + PathSegment@110..111 + Ident@110..111 "x" + WhiteSpace@111..112 " " + Eq2@112..114 "==" + WhiteSpace@114..115 " " + LitExpr@115..116 + Int@115..116 "1" + WhiteSpace@116..117 " " + BlockExpr@117..134 + LBrace@117..118 "{" + Newline@118..119 "\n" + WhiteSpace@119..127 " " + ExprStmt@127..128 + LitExpr@127..128 + Int@127..128 "2" + Newline@128..129 "\n" + WhiteSpace@129..133 " " + RBrace@133..134 "}" + WhiteSpace@134..135 " " + ElseKw@135..139 "else" + WhiteSpace@139..140 " " + BlockExpr@140..157 + LBrace@140..141 "{" + Newline@141..142 "\n" + WhiteSpace@142..150 " " + ExprStmt@150..151 + LitExpr@150..151 + Int@150..151 "3" + Newline@151..152 "\n" + WhiteSpace@152..156 " " + RBrace@156..157 "}" + Newline@157..158 "\n" + RBrace@158..159 "}" + Newline@159..160 "\n" + diff --git a/crates/parser2/test_files/syntax_node/stmts/let_.snap b/crates/parser2/test_files/syntax_node/stmts/let_.snap index 0c68276ab8..dcbb3d017e 100644 --- a/crates/parser2/test_files/syntax_node/stmts/let_.snap +++ b/crates/parser2/test_files/syntax_node/stmts/let_.snap @@ -165,7 +165,7 @@ Root@0..231 PathSegment@122..123 Ident@122..123 "s" Newline@123..125 "\n\n" - LetStmt@125..152 + LetStmt@125..162 LetKw@125..128 "let" WhiteSpace@128..129 " " PathPat@129..130 @@ -175,7 +175,7 @@ Root@0..231 WhiteSpace@130..131 " " Eq@131..132 "=" WhiteSpace@132..133 " " - IfExpr@133..152 + IfExpr@133..162 
IfKw@133..135 "if" WhiteSpace@135..136 " " Path@136..137 @@ -194,18 +194,17 @@ Root@0..231 RBrace@146..147 "}" WhiteSpace@147..148 " " ElseKw@148..152 "else" - WhiteSpace@152..153 " " - ExprStmt@153..162 - BlockExpr@153..162 - LBrace@153..154 "{" - Newline@154..155 "\n" - WhiteSpace@155..159 " " - ExprStmt@159..160 - Path@159..160 - PathSegment@159..160 - Ident@159..160 "z" - Newline@160..161 "\n" - RBrace@161..162 "}" + WhiteSpace@152..153 " " + BlockExpr@153..162 + LBrace@153..154 "{" + Newline@154..155 "\n" + WhiteSpace@155..159 " " + ExprStmt@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "z" + Newline@160..161 "\n" + RBrace@161..162 "}" Newline@162..164 "\n\n" LetStmt@164..231 LetKw@164..167 "let" From a79c54fa50911e446ac83e3b57d3e535e7f464f4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 21:20:02 +0100 Subject: [PATCH 034/678] Add parer for type alias --- crates/parser2/src/parser/item.rs | 29 ++++++- crates/parser2/src/parser/struct_.rs | 4 + .../test_files/syntax_node/items/type_.fe | 5 ++ .../test_files/syntax_node/items/type_.snap | 85 +++++++++++++++++++ 4 files changed, 119 insertions(+), 4 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/type_.fe create mode 100644 crates/parser2/test_files/syntax_node/items/type_.snap diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 5979109a81..f4bfc00e8a 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -3,8 +3,9 @@ use std::cell::Cell; use crate::SyntaxKind; use super::{ - attr, define_scope, expr::parse_expr, struct_::RecordFieldDefListScope, - token_stream::TokenStream, type_::parse_type, use_tree::UseTreeScope, Parser, + attr, define_scope, expr::parse_expr, param::GenericParamListScope, + struct_::RecordFieldDefListScope, token_stream::TokenStream, type_::parse_type, + use_tree::UseTreeScope, Parser, }; define_scope! { @@ -220,7 +221,27 @@ impl super::Parse for ExternScope { define_scope! 
{ TypeAliasScope, TypeAlias, Inheritance } impl super::Parse for TypeAliasScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::TypeKw); + + parser.add_recovery_token(SyntaxKind::Lt); + parser.add_recovery_token(SyntaxKind::Eq); + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected identifier for type alias name", None) + } + parser.remove_recovery_token(SyntaxKind::Lt); + + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + parser.remove_recovery_token(SyntaxKind::Eq); + + if !parser.bump_if(SyntaxKind::Eq) { + parser.error_and_recover("expected `=` for type alias definition", None); + return; + } + + parse_type(parser, None); } } diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 86b37c63ff..2bfb416b1f 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -14,13 +14,17 @@ impl super::Parse for StructScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::StructKw); + parser.add_recovery_token(SyntaxKind::Lt); + parser.add_recovery_token(SyntaxKind::LBrace); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected ident for the struct name", None) } + parser.remove_recovery_token(SyntaxKind::Lt); if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericParamListScope::default(), None); } + parser.remove_recovery_token(SyntaxKind::LBrace); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(RecordFieldDefListScope::default(), None); diff --git a/crates/parser2/test_files/syntax_node/items/type_.fe b/crates/parser2/test_files/syntax_node/items/type_.fe new file mode 100644 index 0000000000..8245e8d296 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/type_.fe @@ -0,0 +1,5 @@ +pub type Int = i32 + +type Result = Result + +type WithBound = NoBound \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/type_.snap b/crates/parser2/test_files/syntax_node/items/type_.snap new file mode 100644 index 0000000000..770c868935 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/type_.snap @@ -0,0 +1,85 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..98 + ItemList@0..98 + TypeAlias@0..18 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + TypeKw@4..8 "type" + WhiteSpace@8..9 " " + Ident@9..12 "Int" + WhiteSpace@12..13 " " + Eq@13..14 "=" + WhiteSpace@14..15 " " + PathType@15..18 + Path@15..18 + PathSegment@15..18 + Ident@15..18 "i32" + WhiteSpace@18..19 " " + Newline@19..21 "\n\n" + TypeAlias@21..54 + ItemModifier@21..21 + TypeKw@21..25 "type" + WhiteSpace@25..26 " " + Ident@26..32 "Result" + GenericParamList@32..35 + Lt@32..33 "<" + GenericParam@33..34 + Ident@33..34 "T" + Gt@34..35 ">" + WhiteSpace@35..36 " " + Eq@36..37 "=" + WhiteSpace@37..38 " " + PathType@38..54 + Path@38..44 + PathSegment@38..44 + Ident@38..44 "Result" + GenericParamList@44..54 + Lt@44..45 "<" + GenericParam@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "T" + Comma@46..47 "," + WhiteSpace@47..48 " " + GenericParam@48..53 + Path@48..53 + PathSegment@48..53 + Ident@48..53 "Error" + Gt@53..54 ">" + Newline@54..56 "\n\n" + TypeAlias@56..98 + ItemModifier@56..56 + TypeKw@56..60 "type" + WhiteSpace@60..61 " " + Ident@61..70 "WithBound" + 
GenericParamList@70..85 + Lt@70..71 "<" + GenericParam@71..84 + Ident@71..72 "T" + Colon@72..73 ":" + WhiteSpace@73..74 " " + TraitBoundList@74..84 + TraitBound@74..84 + Path@74..84 + PathSegment@74..84 + Ident@74..84 "TraitBound" + Gt@84..85 ">" + WhiteSpace@85..86 " " + Eq@86..87 "=" + WhiteSpace@87..88 " " + PathType@88..98 + Path@88..95 + PathSegment@88..95 + Ident@88..95 "NoBound" + GenericParamList@95..98 + Lt@95..96 "<" + GenericParam@96..97 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "T" + Gt@97..98 ">" + From f289dfe1395ed46d6e0f27d0b9ea28e4ab298f1d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 22:59:39 +0100 Subject: [PATCH 035/678] Implement parser for `fn` --- crates/parser2/src/parser/func.rs | 46 ++- crates/parser2/src/parser/param.rs | 60 ++++ crates/parser2/src/syntax_kind.rs | 6 + .../test_files/syntax_node/items/func.fe | 17 ++ .../test_files/syntax_node/items/func.snap | 283 ++++++++++++++++++ 5 files changed, 408 insertions(+), 4 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/func.fe create mode 100644 crates/parser2/test_files/syntax_node/items/func.snap diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index d16d0488c8..847e26b3e6 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -1,13 +1,51 @@ -use super::{define_scope, token_stream::TokenStream, Parser}; +use crate::SyntaxKind; + +use super::{ + define_scope, + expr_atom::BlockExprScope, + param::{FnArgListScope, GenericParamListScope}, + token_stream::TokenStream, + type_::parse_type, + Parser, +}; define_scope! { pub(crate) FnScope, Fn, Inheritance } - impl super::Parse for FnScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::FnKw); + + parser.add_recovery_token(SyntaxKind::Lt); + parser.add_recovery_token(SyntaxKind::LParen); + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the function name", None) + } + parser.remove_recovery_token(SyntaxKind::Lt); + + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + parser.remove_recovery_token(SyntaxKind::LParen); + + parser.add_recovery_token(SyntaxKind::LBrace); + parser.add_recovery_token(SyntaxKind::Arrow); + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(FnArgListScope::default(), None); + } else { + parser.error_and_recover("expected `(` for the function arguments", None); + } + parser.remove_recovery_token(SyntaxKind::Arrow); + + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None); + } + parser.remove_recovery_token(SyntaxKind::LBrace); + + if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(BlockExprScope::default(), None); + } } } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index b431280566..8a4ab53829 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -9,6 +9,66 @@ use super::{ type_::parse_type, Parser, }; + +define_scope! 
{ + pub(crate) FnArgListScope, + FnArgList, + Override(RParen, Comma) +} +impl super::Parse for FnArgListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LParen); + if parser.bump_if(SyntaxKind::RParen) { + return; + } + + parser.parse(FnArgScope::default(), None); + while parser.bump_if(SyntaxKind::Comma) { + parser.parse(FnArgScope::default(), None); + } + + if !parser.bump_if(SyntaxKind::RParen) { + parser.error_and_bump_until("expected closing `)`", None, SyntaxKind::RParen); + parser.bump_if(SyntaxKind::LParen); + } + } +} + +define_scope! { + FnArgScope, + FnArg, + Inheritance +} +impl super::Parse for FnArgScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_if(SyntaxKind::MutKw); + + parser.add_recovery_token(SyntaxKind::Colon); + match parser.current_kind() { + Some(SyntaxKind::SelfKw) => { + parser.bump_expected(SyntaxKind::SelfKw); + return; + } + Some(SyntaxKind::Ident | SyntaxKind::Underscore) => { + parser.bump(); + if !parser.bump_if(SyntaxKind::Ident) { + parser.bump_if(SyntaxKind::Underscore); + } + } + _ => { + parser.error_and_recover("expected identifier for argument name", None); + } + } + parser.remove_recovery_token(SyntaxKind::Colon); + + if !parser.bump_if(SyntaxKind::Colon) { + parser.error_and_recover("expected `:` after argument name", None); + } + + parse_type(parser, None); + } +} + define_scope! { pub(crate) GenericParamListScope, GenericParamList, diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 3dfc382f80..84509c2267 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -413,6 +413,12 @@ pub enum SyntaxKind { /// `` GenericParamList, + /// `(x: i32, _ y: mut i32)` + FnArgList, + + /// `_ x: mut i32` + FnArg, + /// `foo::Trait1 + Trait2` TraitBoundList, /// `Trait1` diff --git a/crates/parser2/test_files/syntax_node/items/func.fe b/crates/parser2/test_files/syntax_node/items/func.fe new file mode 100644 index 0000000000..c1072cd66f --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/func.fe @@ -0,0 +1,17 @@ +pub fn foo() { + let x = 1 +} + +fn bar(bar: i32, mut baz: u256) -> i32 { + 1 +} + +fn baz(from sender: address, mut to recipient: address, _ val: u256, _ _: u256) -> i32 { + 1 +} + +fn generics1(t: T, u: Option) -> T { + t +} + +fn decl(t: MyStruct) -> Result \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap new file mode 100644 index 0000000000..14998b1d21 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -0,0 +1,283 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..292 + ItemList@0..292 + Fn@0..30 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + FnKw@4..6 "fn" + WhiteSpace@6..7 " " + Ident@7..10 "foo" + FnArgList@10..12 + LParen@10..11 "(" + RParen@11..12 ")" + WhiteSpace@12..13 " " + BlockExpr@13..30 + LBrace@13..14 "{" + Newline@14..15 "\n" + WhiteSpace@15..19 " " + LetStmt@19..28 + LetKw@19..22 "let" + WhiteSpace@22..23 " " + PathPat@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "x" + WhiteSpace@24..25 " " + Eq@25..26 "=" + WhiteSpace@26..27 " " + LitExpr@27..28 + Int@27..28 "1" + Newline@28..29 "\n" + RBrace@29..30 "}" + Newline@30..32 "\n\n" + Fn@32..80 + ItemModifier@32..32 + FnKw@32..34 "fn" + WhiteSpace@34..35 " " + Ident@35..38 "bar" + FnArgList@38..63 + LParen@38..39 "(" + FnArg@39..47 + Ident@39..42 "bar" + 
Colon@42..43 ":" + WhiteSpace@43..44 " " + PathType@44..47 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "i32" + Comma@47..48 "," + WhiteSpace@48..49 " " + FnArg@49..62 + MutKw@49..52 "mut" + WhiteSpace@52..53 " " + Ident@53..56 "baz" + Colon@56..57 ":" + WhiteSpace@57..58 " " + PathType@58..62 + Path@58..62 + PathSegment@58..62 + Ident@58..62 "u256" + RParen@62..63 ")" + WhiteSpace@63..64 " " + Arrow@64..66 "->" + WhiteSpace@66..67 " " + PathType@67..70 + Path@67..70 + PathSegment@67..70 + Ident@67..70 "i32" + WhiteSpace@70..71 " " + BlockExpr@71..80 + LBrace@71..72 "{" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + ExprStmt@77..78 + LitExpr@77..78 + Int@77..78 "1" + Newline@78..79 "\n" + RBrace@79..80 "}" + Newline@80..82 "\n\n" + Fn@82..178 + ItemModifier@82..82 + FnKw@82..84 "fn" + WhiteSpace@84..85 " " + Ident@85..88 "baz" + FnArgList@88..161 + LParen@88..89 "(" + FnArg@89..109 + Ident@89..93 "from" + WhiteSpace@93..94 " " + Ident@94..100 "sender" + Colon@100..101 ":" + WhiteSpace@101..102 " " + PathType@102..109 + Path@102..109 + PathSegment@102..109 + Ident@102..109 "address" + Comma@109..110 "," + WhiteSpace@110..111 " " + FnArg@111..136 + MutKw@111..114 "mut" + WhiteSpace@114..115 " " + Ident@115..117 "to" + WhiteSpace@117..118 " " + Ident@118..127 "recipient" + Colon@127..128 ":" + WhiteSpace@128..129 " " + PathType@129..136 + Path@129..136 + PathSegment@129..136 + Ident@129..136 "address" + Comma@136..137 "," + WhiteSpace@137..138 " " + FnArg@138..149 + Underscore@138..139 "_" + WhiteSpace@139..140 " " + Ident@140..143 "val" + Colon@143..144 ":" + WhiteSpace@144..145 " " + PathType@145..149 + Path@145..149 + PathSegment@145..149 + Ident@145..149 "u256" + Comma@149..150 "," + WhiteSpace@150..151 " " + FnArg@151..160 + Underscore@151..152 "_" + WhiteSpace@152..153 " " + Underscore@153..154 "_" + Colon@154..155 ":" + WhiteSpace@155..156 " " + PathType@156..160 + Path@156..160 + PathSegment@156..160 + Ident@156..160 "u256" + RParen@160..161 ")" + WhiteSpace@161..162 " " + Arrow@162..164 "->" + WhiteSpace@164..165 " " + PathType@165..168 + Path@165..168 + PathSegment@165..168 + Ident@165..168 "i32" + WhiteSpace@168..169 " " + BlockExpr@169..178 + LBrace@169..170 "{" + Newline@170..171 "\n" + WhiteSpace@171..175 " " + ExprStmt@175..176 + LitExpr@175..176 + Int@175..176 "1" + Newline@176..177 "\n" + RBrace@177..178 "}" + Newline@178..180 "\n\n" + Fn@180..240 + ItemModifier@180..180 + FnKw@180..182 "fn" + WhiteSpace@182..183 " " + Ident@183..192 "generics1" + GenericParamList@192..205 + Lt@192..193 "<" + GenericParam@193..201 + Ident@193..194 "T" + Colon@194..195 ":" + WhiteSpace@195..196 " " + TraitBoundList@196..201 + TraitBound@196..201 + Path@196..201 + PathSegment@196..201 + Ident@196..201 "Trait" + Comma@201..202 "," + WhiteSpace@202..203 " " + GenericParam@203..204 + Ident@203..204 "U" + Gt@204..205 ">" + FnArgList@205..225 + LParen@205..206 "(" + FnArg@206..210 + Ident@206..207 "t" + Colon@207..208 ":" + WhiteSpace@208..209 " " + PathType@209..210 + Path@209..210 + PathSegment@209..210 + Ident@209..210 "T" + Comma@210..211 "," + WhiteSpace@211..212 " " + FnArg@212..224 + Ident@212..213 "u" + Colon@213..214 ":" + WhiteSpace@214..215 " " + PathType@215..224 + Path@215..221 + PathSegment@215..221 + Ident@215..221 "Option" + GenericParamList@221..224 + Lt@221..222 "<" + GenericParam@222..223 + Path@222..223 + PathSegment@222..223 + Ident@222..223 "U" + Gt@223..224 ">" + RParen@224..225 ")" + WhiteSpace@225..226 " " + Arrow@226..228 "->" + WhiteSpace@228..229 " " + 
PathType@229..230 + Path@229..230 + PathSegment@229..230 + Ident@229..230 "T" + WhiteSpace@230..231 " " + BlockExpr@231..240 + LBrace@231..232 "{" + Newline@232..233 "\n" + WhiteSpace@233..237 " " + ExprStmt@237..238 + Path@237..238 + PathSegment@237..238 + Ident@237..238 "t" + Newline@238..239 "\n" + RBrace@239..240 "}" + Newline@240..242 "\n\n" + Fn@242..292 + ItemModifier@242..242 + FnKw@242..244 "fn" + WhiteSpace@244..245 " " + Ident@245..249 "decl" + GenericParamList@249..255 + Lt@249..250 "<" + GenericParam@250..251 + Ident@250..251 "T" + Comma@251..252 "," + WhiteSpace@252..253 " " + GenericParam@253..254 + Ident@253..254 "U" + Gt@254..255 ">" + FnArgList@255..274 + LParen@255..256 "(" + FnArg@256..273 + Ident@256..257 "t" + Colon@257..258 ":" + WhiteSpace@258..259 " " + PathType@259..273 + Path@259..267 + PathSegment@259..267 + Ident@259..267 "MyStruct" + GenericParamList@267..273 + Lt@267..268 "<" + GenericParam@268..269 + Path@268..269 + PathSegment@268..269 + Ident@268..269 "T" + Comma@269..270 "," + WhiteSpace@270..271 " " + GenericParam@271..272 + Path@271..272 + PathSegment@271..272 + Ident@271..272 "U" + Gt@272..273 ">" + RParen@273..274 ")" + WhiteSpace@274..275 " " + Arrow@275..277 "->" + WhiteSpace@277..278 " " + PathType@278..292 + Path@278..284 + PathSegment@278..284 + Ident@278..284 "Result" + GenericParamList@284..292 + Lt@284..285 "<" + GenericParam@285..286 + Path@285..286 + PathSegment@285..286 + Ident@285..286 "T" + Comma@286..287 "," + WhiteSpace@287..288 " " + GenericParam@288..291 + Path@288..291 + PathSegment@288..291 + Ident@288..291 "Err" + Gt@291..292 ">" + From 1c63f9f6c3a70a84fe8ac28c8326e263067cafe1 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 23:17:59 +0100 Subject: [PATCH 036/678] Implement parser for `trait` definition --- crates/parser2/src/parser/item.rs | 54 +++++- crates/parser2/src/parser/param.rs | 2 + crates/parser2/src/syntax_kind.rs | 6 +- .../test_files/syntax_node/items/trait_.fe | 9 + .../test_files/syntax_node/items/trait_.snap | 170 ++++++++++++++++++ 5 files changed, 235 insertions(+), 6 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/trait_.fe create mode 100644 crates/parser2/test_files/syntax_node/items/trait_.snap diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index f4bfc00e8a..ceeef0ddff 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -1,6 +1,6 @@ use std::cell::Cell; -use crate::SyntaxKind; +use crate::{parser::func::FnScope, SyntaxKind}; use super::{ attr, define_scope, expr::parse_expr, param::GenericParamListScope, @@ -53,7 +53,7 @@ impl super::Parse for ItemListScope { match parser.current_kind() { Some(FnKw) => { - parser.parse(super::func::FnScope::default(), checkpoint); + parser.parse(FnScope::default(), checkpoint); } Some(StructKw) => { parser.parse(super::struct_::StructScope::default(), checkpoint); @@ -163,8 +163,54 @@ impl super::Parse for EnumScope { define_scope! 
{ TraitScope, Trait, Inheritance } impl super::Parse for TraitScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::TraitKw); + + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the trait name", None) + } + + if parser.current_kind() != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected trait body", None) + } + + parser.parse(TraitItemListScope::default(), None); + } +} + +define_scope! { TraitItemListScope, TraitItemList, Override(RBrace, Newline) } +impl super::Parse for TraitItemListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + loop { + parser.set_newline_as_trivia(true); + if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { + break; + } + + let checkpoint = attr::parse_attr_list(parser); + + match parser.current_kind() { + Some(SyntaxKind::FnKw) => { + parser.parse(FnScope::default(), checkpoint); + } + _ => { + parser.error_and_recover("trait item is restricted to `fn`", checkpoint); + } + } + + parser.set_newline_as_trivia(false); + if !matches!( + parser.current_kind(), + Some(SyntaxKind::RBrace | SyntaxKind::Newline) + ) { + parser.error_and_recover("expected newline after trait item definition", checkpoint) + } + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_recover("expected `}` to close the trait body", None) + } } } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 8a4ab53829..6bdce5b23d 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -100,6 +100,8 @@ define_scope! { } impl super::Parse for GenericParamScope { fn parse(&mut self, parser: &mut Parser) { + parser.bump_if(SyntaxKind::ConstKw); + if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected type parameter", None); } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 84509c2267..e7ee8f000a 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -343,10 +343,12 @@ pub enum SyntaxKind { TypeAlias, /// `impl Foo { .. }` Impl, - /// `trait Foo { .. }` + /// `trait Foo {..}` Trait, + /// `{ fn foo() {..} }` + TraitItemList, /// `impl Trait for Foo { .. 
}` - TraitImpl, + ImplTrait, /// `const FOO: i32 = 1` Const, /// `use foo::{Foo as Foo1, bar::Baz}` diff --git a/crates/parser2/test_files/syntax_node/items/trait_.fe b/crates/parser2/test_files/syntax_node/items/trait_.fe new file mode 100644 index 0000000000..3202f6c16b --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/trait_.fe @@ -0,0 +1,9 @@ +trait Marker {} + +pub trait Foo { + fn foo(t: T, u: U) + + fn default_method(lhs: T, rhs: T) -> i32 { + lhs + lhs - (rhs + rhs) + } +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/trait_.snap b/crates/parser2/test_files/syntax_node/items/trait_.snap new file mode 100644 index 0000000000..76e847efbf --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/trait_.snap @@ -0,0 +1,170 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..182 + ItemList@0..182 + Trait@0..15 + ItemModifier@0..0 + TraitKw@0..5 "trait" + WhiteSpace@5..6 " " + Ident@6..12 "Marker" + WhiteSpace@12..13 " " + TraitItemList@13..15 + LBrace@13..14 "{" + RBrace@14..15 "}" + Newline@15..17 "\n\n" + Trait@17..182 + ItemModifier@17..20 + PubKw@17..20 "pub" + WhiteSpace@20..21 " " + TraitKw@21..26 "trait" + WhiteSpace@26..27 " " + Ident@27..30 "Foo" + WhiteSpace@30..31 " " + TraitItemList@31..182 + LBrace@31..32 "{" + Newline@32..33 "\n" + WhiteSpace@33..37 " " + Fn@37..79 + FnKw@37..39 "fn" + WhiteSpace@39..40 " " + Ident@40..43 "foo" + GenericParamList@43..67 + Lt@43..44 "<" + GenericParam@44..52 + Ident@44..45 "T" + Colon@45..46 ":" + WhiteSpace@46..47 " " + TraitBoundList@47..52 + TraitBound@47..52 + Path@47..52 + PathSegment@47..52 + Ident@47..52 "Trait" + Comma@52..53 "," + WhiteSpace@53..54 " " + GenericParam@54..66 + ConstKw@54..59 "const" + WhiteSpace@59..60 " " + Ident@60..61 "U" + Colon@61..62 ":" + WhiteSpace@62..63 " " + TraitBoundList@63..66 + TraitBound@63..66 + Path@63..66 + PathSegment@63..66 + Ident@63..66 "i32" + Gt@66..67 ">" + FnArgList@67..79 + LParen@67..68 "(" + FnArg@68..72 + Ident@68..69 "t" + Colon@69..70 ":" + WhiteSpace@70..71 " " + PathType@71..72 + Path@71..72 + PathSegment@71..72 + Ident@71..72 "T" + Comma@72..73 "," + WhiteSpace@73..74 " " + FnArg@74..78 + Ident@74..75 "u" + Colon@75..76 ":" + WhiteSpace@76..77 " " + PathType@77..78 + Path@77..78 + PathSegment@77..78 + Ident@77..78 "U" + RParen@78..79 ")" + Newline@79..81 "\n\n" + WhiteSpace@81..85 " " + Fn@85..180 + FnKw@85..87 "fn" + WhiteSpace@87..88 " " + Ident@88..102 "default_method" + GenericParamList@102..116 + Lt@102..103 "<" + GenericParam@103..115 + Ident@103..104 "T" + Colon@104..105 ":" + WhiteSpace@105..106 " " + TraitBoundList@106..115 + TraitBound@106..109 + Path@106..109 + PathSegment@106..109 + Ident@106..109 "Add" + WhiteSpace@109..110 " " + Plus@110..111 "+" + WhiteSpace@111..112 " " + TraitBound@112..115 + Path@112..115 + PathSegment@112..115 + Ident@112..115 "Sub" + Gt@115..116 ">" + FnArgList@116..132 + LParen@116..117 "(" + FnArg@117..123 + Ident@117..120 "lhs" + Colon@120..121 ":" + WhiteSpace@121..122 " " + PathType@122..123 + Path@122..123 + PathSegment@122..123 + Ident@122..123 "T" + Comma@123..124 "," + WhiteSpace@124..125 " " + FnArg@125..131 + Ident@125..128 "rhs" + Colon@128..129 ":" + WhiteSpace@129..130 " " + PathType@130..131 + Path@130..131 + PathSegment@130..131 + Ident@130..131 "T" + RParen@131..132 ")" + WhiteSpace@132..134 " " + Arrow@134..136 "->" + WhiteSpace@136..137 " " + PathType@137..140 + Path@137..140 + PathSegment@137..140 + Ident@137..140 "i32" + 
WhiteSpace@140..141 " " + BlockExpr@141..180 + LBrace@141..142 "{" + Newline@142..143 "\n" + WhiteSpace@143..151 " " + ExprStmt@151..174 + BinExpr@151..174 + BinExpr@151..160 + Path@151..154 + PathSegment@151..154 + Ident@151..154 "lhs" + WhiteSpace@154..155 " " + Plus@155..156 "+" + WhiteSpace@156..157 " " + Path@157..160 + PathSegment@157..160 + Ident@157..160 "lhs" + WhiteSpace@160..161 " " + Minus@161..162 "-" + WhiteSpace@162..163 " " + ParenExpr@163..174 + LParen@163..164 "(" + BinExpr@164..173 + Path@164..167 + PathSegment@164..167 + Ident@164..167 "rhs" + WhiteSpace@167..168 " " + Plus@168..169 "+" + WhiteSpace@169..170 " " + Path@170..173 + PathSegment@170..173 + Ident@170..173 "rhs" + RParen@173..174 ")" + Newline@174..175 "\n" + WhiteSpace@175..179 " " + RBrace@179..180 "}" + Newline@180..181 "\n" + RBrace@181..182 "}" + From 212796928c57e0602519a8647d3a27320b46e28b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 24 Jan 2023 23:37:22 +0100 Subject: [PATCH 037/678] Implement parser for `extern` --- crates/parser2/src/parser/func.rs | 23 +++- crates/parser2/src/parser/item.rs | 56 ++++++++- crates/parser2/src/syntax_kind.rs | 2 + .../test_files/syntax_node/items/extern_.fe | 9 ++ .../test_files/syntax_node/items/extern_.snap | 116 ++++++++++++++++++ 5 files changed, 199 insertions(+), 7 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/extern_.fe create mode 100644 crates/parser2/test_files/syntax_node/items/extern_.snap diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 847e26b3e6..3106db0fbe 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -10,10 +10,20 @@ use super::{ }; define_scope! { - pub(crate) FnScope, + pub(crate) FnScope { + disallow_def: bool + }, Fn, Inheritance } +impl FnScope { + pub(crate) fn disallow_def() -> Self { + Self { + disallow_def: true, + ..Self::default() + } + } +} impl super::Parse for FnScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::FnKw); @@ -30,7 +40,9 @@ impl super::Parse for FnScope { } parser.remove_recovery_token(SyntaxKind::LParen); - parser.add_recovery_token(SyntaxKind::LBrace); + if !self.disallow_def { + parser.add_recovery_token(SyntaxKind::LBrace); + } parser.add_recovery_token(SyntaxKind::Arrow); if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(FnArgListScope::default(), None); @@ -42,9 +54,14 @@ impl super::Parse for FnScope { if parser.bump_if(SyntaxKind::Arrow) { parse_type(parser, None); } - parser.remove_recovery_token(SyntaxKind::LBrace); + if !self.disallow_def { + parser.remove_recovery_token(SyntaxKind::LBrace); + } if parser.current_kind() == Some(SyntaxKind::LBrace) { + if self.disallow_def { + parser.error_and_recover("function definition is not allowed", None); + } parser.parse(BlockExprScope::default(), None); } } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index ceeef0ddff..25aea1d3d8 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -171,14 +171,15 @@ impl super::Parse for TraitScope { } if parser.current_kind() != Some(SyntaxKind::LBrace) { - parser.error_and_recover("expected trait body", None) + parser.error_and_recover("expected trait body", None); + return; } parser.parse(TraitItemListScope::default(), None); } } -define_scope! { TraitItemListScope, TraitItemList, Override(RBrace, Newline) } +define_scope! 
{ TraitItemListScope, TraitItemList, Override(RBrace, Newline, FnKw) } impl super::Parse for TraitItemListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); @@ -260,8 +261,55 @@ impl super::Parse for ConstScope { define_scope! { ExternScope, Extern, Inheritance } impl super::Parse for ExternScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ExternKw); + + if parser.current_kind() != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected extern block", None) + } + + parser.parse(ExternItemListScope::default(), None); + } +} + +define_scope! { ExternItemListScope, ExternItemList, Override(RBrace, Newline, FnKw) } +impl super::Parse for ExternItemListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + + loop { + parser.set_newline_as_trivia(true); + if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { + break; + } + + let mut checkpoint = attr::parse_attr_list(parser); + let modifier_scope = ItemModifierScope::default(); + let (_, modifier_checkpoint) = parser.parse(modifier_scope.clone(), None); + checkpoint.get_or_insert(modifier_checkpoint); + + match parser.current_kind() { + Some(SyntaxKind::FnKw) => { + parser.parse(FnScope::disallow_def(), checkpoint); + } + _ => { + parser.error_and_recover("extern item is restricted to `fn`", checkpoint); + } + } + + parser.set_newline_as_trivia(false); + if !matches!( + parser.current_kind(), + Some(SyntaxKind::RBrace | SyntaxKind::Newline) + ) { + parser + .error_and_recover("expected newline after extern item definition", checkpoint) + } + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_recover("expected `}` to close the extern body", None) + } } } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index e7ee8f000a..33f3521063 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -365,6 +365,8 @@ pub enum SyntaxKind { UseTreeRename, /// `extern { .. }` Extern, + /// `extern { .. 
}` + ExternItemList, ItemList, /// `pub unsafe ` diff --git a/crates/parser2/test_files/syntax_node/items/extern_.fe b/crates/parser2/test_files/syntax_node/items/extern_.fe new file mode 100644 index 0000000000..000de9c818 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/extern_.fe @@ -0,0 +1,9 @@ +extern { + +} + +extern { + pub unsafe fn write(loc: *u32, value: u32) -> bool + pub unsafe fn read(loc: *u32, len: usize) -> usize + fn foo() +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/extern_.snap b/crates/parser2/test_files/syntax_node/items/extern_.snap new file mode 100644 index 0000000000..fb0c8694de --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/extern_.snap @@ -0,0 +1,116 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..146 + ItemList@0..146 + Extern@0..11 + ItemModifier@0..0 + ExternKw@0..6 "extern" + WhiteSpace@6..7 " " + ExternItemList@7..11 + LBrace@7..8 "{" + Newline@8..10 "\n\n" + RBrace@10..11 "}" + Newline@11..13 "\n\n" + Extern@13..146 + ItemModifier@13..13 + ExternKw@13..19 "extern" + WhiteSpace@19..20 " " + ExternItemList@20..146 + LBrace@20..21 "{" + Newline@21..22 "\n" + WhiteSpace@22..26 " " + Fn@26..76 + ItemModifier@26..36 + PubKw@26..29 "pub" + WhiteSpace@29..30 " " + UnsafeKw@30..36 "unsafe" + WhiteSpace@36..37 " " + FnKw@37..39 "fn" + WhiteSpace@39..40 " " + Ident@40..45 "write" + FnArgList@45..68 + LParen@45..46 "(" + FnArg@46..55 + Ident@46..49 "loc" + Colon@49..50 ":" + WhiteSpace@50..51 " " + PtrType@51..55 + Star@51..52 "*" + PathType@52..55 + Path@52..55 + PathSegment@52..55 + Ident@52..55 "u32" + Comma@55..56 "," + WhiteSpace@56..57 " " + FnArg@57..67 + Ident@57..62 "value" + Colon@62..63 ":" + WhiteSpace@63..64 " " + PathType@64..67 + Path@64..67 + PathSegment@64..67 + Ident@64..67 "u32" + RParen@67..68 ")" + WhiteSpace@68..69 " " + Arrow@69..71 "->" + WhiteSpace@71..72 " " + PathType@72..76 + Path@72..76 + PathSegment@72..76 + Ident@72..76 "bool" + Newline@76..77 "\n" + WhiteSpace@77..81 " " + Fn@81..131 + ItemModifier@81..91 + PubKw@81..84 "pub" + WhiteSpace@84..85 " " + UnsafeKw@85..91 "unsafe" + WhiteSpace@91..92 " " + FnKw@92..94 "fn" + WhiteSpace@94..95 " " + Ident@95..99 "read" + FnArgList@99..122 + LParen@99..100 "(" + FnArg@100..109 + Ident@100..103 "loc" + Colon@103..104 ":" + WhiteSpace@104..105 " " + PtrType@105..109 + Star@105..106 "*" + PathType@106..109 + Path@106..109 + PathSegment@106..109 + Ident@106..109 "u32" + Comma@109..110 "," + WhiteSpace@110..111 " " + FnArg@111..121 + Ident@111..114 "len" + Colon@114..115 ":" + WhiteSpace@115..116 " " + PathType@116..121 + Path@116..121 + PathSegment@116..121 + Ident@116..121 "usize" + RParen@121..122 ")" + WhiteSpace@122..123 " " + Arrow@123..125 "->" + WhiteSpace@125..126 " " + PathType@126..131 + Path@126..131 + PathSegment@126..131 + Ident@126..131 "usize" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + Fn@136..144 + ItemModifier@136..136 + FnKw@136..138 "fn" + WhiteSpace@138..139 " " + Ident@139..142 "foo" + FnArgList@142..144 + LParen@142..143 "(" + RParen@143..144 ")" + Newline@144..145 "\n" + RBrace@145..146 "}" + From a136c93042e104a5ccf8451fa315b060c703e1d2 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 25 Jan 2023 12:35:36 +0100 Subject: [PATCH 038/678] Implement parser for `enum` --- crates/parser2/src/parser/item.rs | 78 ++++++++- crates/parser2/src/parser/struct_.rs | 1 + crates/parser2/src/parser/type_.rs | 2 +- .../test_files/syntax_node/items/enums.fe 
| 16 ++ .../test_files/syntax_node/items/enums.snap | 150 ++++++++++++++++++ 5 files changed, 241 insertions(+), 6 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/enums.fe create mode 100644 crates/parser2/test_files/syntax_node/items/enums.snap diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 25aea1d3d8..bd10e69252 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -3,9 +3,14 @@ use std::cell::Cell; use crate::{parser::func::FnScope, SyntaxKind}; use super::{ - attr, define_scope, expr::parse_expr, param::GenericParamListScope, - struct_::RecordFieldDefListScope, token_stream::TokenStream, type_::parse_type, - use_tree::UseTreeScope, Parser, + attr, define_scope, + expr::parse_expr, + param::GenericParamListScope, + struct_::RecordFieldDefListScope, + token_stream::TokenStream, + type_::{parse_type, TupleTypeScope}, + use_tree::UseTreeScope, + Parser, }; define_scope! { @@ -156,8 +161,71 @@ impl super::Parse for ContractScope { define_scope! { EnumScope, Enum, Inheritance } impl super::Parse for EnumScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::EnumKw); + + parser.add_recovery_token(SyntaxKind::Lt); + parser.add_recovery_token(SyntaxKind::LBrace); + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the enum name", None) + } + parser.remove_recovery_token(SyntaxKind::Lt); + + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + parser.remove_recovery_token(SyntaxKind::LBrace); + + if parser.current_kind() != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected enum body", None); + return; + } + + parser.parse(VariantDefListScope::default(), None); + } +} + +define_scope! { VariantDefListScope, VariantDefList, Override(RBrace, Newline) } +impl super::Parse for VariantDefListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::LBrace); + + loop { + parser.set_newline_as_trivia(true); + if parser.current_kind() == Some(SyntaxKind::RBrace) || parser.current_kind().is_none() + { + break; + } + parser.parse(VariantDefScope::default(), None); + parser.set_newline_as_trivia(false); + if !parser.bump_if(SyntaxKind::Newline) + && parser.current_kind() != Some(SyntaxKind::RBrace) + { + parser.error_and_recover("expected newline after variant definition", None); + } + } + + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_recover( + "expected the closing brace of the enum variants definition", + None, + ); + parser.bump_if(SyntaxKind::RBrace); + } + } +} + +define_scope! 
{ VariantDefScope, VariantDef, Override(RBrace, Newline) } +impl super::Parse for VariantDefScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the variant name", None); + return; + } + + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(TupleTypeScope::default(), None); + } } } diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 2bfb416b1f..7ded9c4bbd 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -66,6 +66,7 @@ impl super::Parse for RecordFieldDefListScope { "expected the closing brace of the struct field definition", None, ); + parser.bump_if(SyntaxKind::RBrace); } } } diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index 3c5176e741..f99bdc47af 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -49,7 +49,7 @@ impl super::Parse for SelfTypeScope { } } define_scope! { - TupleTypeScope, + pub(crate) TupleTypeScope, TupleType, Override( RParen, diff --git a/crates/parser2/test_files/syntax_node/items/enums.fe b/crates/parser2/test_files/syntax_node/items/enums.fe new file mode 100644 index 0000000000..ed5f30df74 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/enums.fe @@ -0,0 +1,16 @@ +enum Empty {} + +enum Basic { + Unit + Tup(i32, u32) +} + +enum Option { + Some(T) + None +} + +enum BoundEnum { + AddMul(T) + SubDiv(U) +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap new file mode 100644 index 0000000000..068d2131f6 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -0,0 +1,150 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..174 + ItemList@0..174 + Enum@0..13 + ItemModifier@0..0 + EnumKw@0..4 "enum" + WhiteSpace@4..5 " " + Ident@5..10 "Empty" + WhiteSpace@10..11 " " + VariantDefList@11..13 + LBrace@11..12 "{" + RBrace@12..13 "}" + Newline@13..15 "\n\n" + Enum@15..56 + ItemModifier@15..15 + EnumKw@15..19 "enum" + WhiteSpace@19..20 " " + Ident@20..25 "Basic" + WhiteSpace@25..26 " " + VariantDefList@26..56 + LBrace@26..27 "{" + Newline@27..28 "\n" + WhiteSpace@28..32 " " + VariantDef@32..36 + Ident@32..36 "Unit" + Newline@36..37 "\n" + WhiteSpace@37..41 " " + VariantDef@41..54 + Ident@41..44 "Tup" + TupleType@44..54 + LParen@44..45 "(" + PathType@45..48 + Path@45..48 + PathSegment@45..48 + Ident@45..48 "i32" + Comma@48..49 "," + WhiteSpace@49..50 " " + PathType@50..53 + Path@50..53 + PathSegment@50..53 + Ident@50..53 "u32" + RParen@53..54 ")" + Newline@54..55 "\n" + RBrace@55..56 "}" + Newline@56..58 "\n\n" + Enum@58..97 + ItemModifier@58..58 + EnumKw@58..62 "enum" + WhiteSpace@62..63 " " + Ident@63..69 "Option" + GenericParamList@69..72 + Lt@69..70 "<" + GenericParam@70..71 + Ident@70..71 "T" + Gt@71..72 ">" + WhiteSpace@72..73 " " + VariantDefList@73..97 + LBrace@73..74 "{" + Newline@74..75 "\n" + WhiteSpace@75..79 " " + VariantDef@79..86 + Ident@79..83 "Some" + TupleType@83..86 + LParen@83..84 "(" + PathType@84..85 + Path@84..85 + PathSegment@84..85 + Ident@84..85 "T" + RParen@85..86 ")" + Newline@86..87 "\n" + WhiteSpace@87..91 " " + VariantDef@91..95 + Ident@91..95 "None" + Newline@95..96 "\n" + RBrace@96..97 "}" + Newline@97..99 "\n\n" + Enum@99..174 + ItemModifier@99..99 + EnumKw@99..103 "enum" + WhiteSpace@103..104 " " + 
Ident@104..113 "BoundEnum" + GenericParamList@113..142 + Lt@113..114 "<" + GenericParam@114..126 + Ident@114..115 "T" + Colon@115..116 ":" + WhiteSpace@116..117 " " + TraitBoundList@117..126 + TraitBound@117..120 + Path@117..120 + PathSegment@117..120 + Ident@117..120 "Add" + WhiteSpace@120..121 " " + Plus@121..122 "+" + WhiteSpace@122..123 " " + TraitBound@123..126 + Path@123..126 + PathSegment@123..126 + Ident@123..126 "Mul" + WhiteSpace@126..127 " " + Comma@127..128 "," + WhiteSpace@128..129 " " + GenericParam@129..141 + Ident@129..130 "U" + Colon@130..131 ":" + WhiteSpace@131..132 " " + TraitBoundList@132..141 + TraitBound@132..135 + Path@132..135 + PathSegment@132..135 + Ident@132..135 "Sub" + WhiteSpace@135..136 " " + Plus@136..137 "+" + WhiteSpace@137..138 " " + TraitBound@138..141 + Path@138..141 + PathSegment@138..141 + Ident@138..141 "Div" + Gt@141..142 ">" + WhiteSpace@142..143 " " + VariantDefList@143..174 + LBrace@143..144 "{" + Newline@144..145 "\n" + WhiteSpace@145..149 " " + VariantDef@149..158 + Ident@149..155 "AddMul" + TupleType@155..158 + LParen@155..156 "(" + PathType@156..157 + Path@156..157 + PathSegment@156..157 + Ident@156..157 "T" + RParen@157..158 ")" + Newline@158..159 "\n" + WhiteSpace@159..163 " " + VariantDef@163..172 + Ident@163..169 "SubDiv" + TupleType@169..172 + LParen@169..170 "(" + PathType@170..171 + Path@170..171 + PathSegment@170..171 + Ident@170..171 "U" + RParen@171..172 ")" + Newline@172..173 "\n" + RBrace@173..174 "}" + From 5d1cabcc68c7f4eb6977ba43c200eec979e7cf41 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 25 Jan 2023 21:19:51 +0100 Subject: [PATCH 039/678] Implement parser for `impl` --- crates/parser2/src/parser/expr.rs | 140 ++++++++- crates/parser2/src/parser/expr_atom.rs | 15 +- crates/parser2/src/parser/func.rs | 48 ++-- crates/parser2/src/parser/item.rs | 221 ++++++++++----- crates/parser2/src/parser/mod.rs | 62 ++-- crates/parser2/src/parser/param.rs | 78 ++--- crates/parser2/src/parser/pat.rs | 1 - crates/parser2/src/parser/path.rs | 2 +- crates/parser2/src/parser/stmt.rs | 58 ++-- crates/parser2/src/parser/struct_.rs | 22 +- crates/parser2/src/parser/type_.rs | 33 +-- crates/parser2/src/syntax_kind.rs | 25 +- .../test_files/syntax_node/exprs/binop.fe | 4 +- .../test_files/syntax_node/exprs/binop.snap | 267 ++++++++++-------- .../test_files/syntax_node/exprs/call.snap | 32 ++- .../test_files/syntax_node/exprs/method.snap | 13 +- .../test_files/syntax_node/items/const_.snap | 1 - .../syntax_node/items/contract.snap | 1 - .../test_files/syntax_node/items/enums.snap | 16 +- .../test_files/syntax_node/items/extern_.snap | 3 - .../test_files/syntax_node/items/func.snap | 61 ++-- .../test_files/syntax_node/items/impl_.fe | 13 + .../test_files/syntax_node/items/impl_.snap | 192 +++++++++++++ .../syntax_node/items/impl_trait.fe | 11 + .../syntax_node/items/impl_trait.snap | 172 +++++++++++ .../test_files/syntax_node/items/trait_.snap | 19 +- .../test_files/syntax_node/items/type_.snap | 39 +-- .../test_files/syntax_node/items/use_.snap | 10 - .../test_files/syntax_node/stmts/let_.snap | 8 +- .../syntax_node/structs/generics.snap | 18 +- .../syntax_node/structs/tupel_field.snap | 1 - crates/parser2/tests/syntax_node.rs | 6 +- 32 files changed, 1109 insertions(+), 483 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/impl_.fe create mode 100644 crates/parser2/test_files/syntax_node/items/impl_.snap create mode 100644 crates/parser2/test_files/syntax_node/items/impl_trait.fe create mode 100644 
crates/parser2/test_files/syntax_node/items/impl_trait.snap diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index 75c26cad76..d347de6b58 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -87,7 +87,8 @@ fn parse_expr_with_min_bp( } None => {} } - if let Some((lbp, _)) = infix_binding_power(kind) { + + if let Some((lbp, _)) = infix_binding_power(parser) { if lbp < min_bp { break; } @@ -145,26 +146,40 @@ fn postfix_binding_power(kind: SyntaxKind) -> Option { /// Specifies how tightly does an infix operator bind to its left and right /// operands. -fn infix_binding_power(kind: SyntaxKind) -> Option<(u8, u8)> { +fn infix_binding_power(parser: &mut Parser) -> Option<(u8, u8)> { use SyntaxKind::*; - let bp = match kind { + let bp = match parser.current_kind()? { Pipe2 => (50, 51), Amp2 => (60, 61), - - // all comparisons are the same - Lt | LtEq | Gt | GtEq | NotEq | Eq2 => (70, 71), - + NotEq | Eq2 => (70, 71), + Lt => { + if is_lshift(parser) { + (110, 111) + } else { + // `LT` and `LtEq` has the same binding power. + (70, 71) + } + } + Gt => { + if is_rshift(parser) { + (110, 111) + } else { + // `Gt` and `GtEq` has the same binding power. + (70, 71) + } + } Pipe => (80, 81), Hat => (90, 91), Amp => (100, 101), - Lt2 | Gt2 => (110, 111), + LShift | RShift => (110, 111), Plus | Minus => (120, 121), Star | Slash | Percent => (130, 131), Star2 => (141, 140), Dot => (151, 150), _ => return None, }; + Some(bp) } @@ -183,9 +198,10 @@ define_scope! { BinExprScope, BinExpr, Inheritance } impl super::Parse for BinExprScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - let kind = parser.current_kind().unwrap(); - let (_, rbp) = infix_binding_power(kind).unwrap(); - parser.bump(); + + let (_, rbp) = infix_binding_power(parser).unwrap(); + bump_bin_op(parser); + parse_expr_with_min_bp(parser, rbp, true); } } @@ -264,3 +280,105 @@ impl super::Parse for FieldExprScope { } } } + +define_scope! { pub(super) LShiftScope, LShift, Inheritance } +impl super::Parse for LShiftScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Lt) { + parser.error_and_recover("expected `<<`", None); + } + if !parser.bump_if(SyntaxKind::Lt) { + parser.error_and_recover("expected `<<`", None); + } + } +} + +define_scope! { pub(super) RShiftScope, RShift, Inheritance } +impl super::Parse for RShiftScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Gt) { + parser.error_and_recover("expected `>>`", None); + } + if !parser.bump_if(SyntaxKind::Gt) { + parser.error_and_recover("expected `>>`", None); + } + } +} + +define_scope! { pub(super) LtEqScope, LtEq, Inheritance } +impl super::Parse for LtEqScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Lt) { + parser.error_and_recover("expected `<=`", None); + } + if !parser.bump_if(SyntaxKind::Eq) { + parser.error_and_recover("expected `<=`", None); + } + } +} + +define_scope! 
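// Like `LShift`, `RShift`, and `LtEq` above, `GtEq` is no longer emitted by the lexer
// as a single token; these scopes glue adjacent `<` / `>` / `=` tokens back together.
// The `is_lshift` / `is_rshift` / `is_lt_eq` / `is_gt_eq` helpers below dry-run these
// scopes so that `infix_binding_power` and `bump_bin_op` can decide how to treat a
// lone `<` or `>` without consuming any input.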
{ pub(super) GtEqScope, GtEq, Inheritance } +impl super::Parse for GtEqScope { + fn parse(&mut self, parser: &mut Parser) { + if !parser.bump_if(SyntaxKind::Gt) { + parser.error_and_recover("expected `>=`", None); + } + if !parser.bump_if(SyntaxKind::Eq) { + parser.error_and_recover("expected `>=`", None); + } + } +} + +pub(crate) fn is_lshift(parser: &mut Parser) -> bool { + parser.start_dry_run(); + let is_lshift = parser.parse(LShiftScope::default(), None).0; + parser.end_dry_run(); + is_lshift +} + +pub(crate) fn is_rshift(parser: &mut Parser) -> bool { + parser.start_dry_run(); + let is_rshift = parser.parse(RShiftScope::default(), None).0; + parser.end_dry_run(); + is_rshift +} + +fn is_lt_eq(parser: &mut Parser) -> bool { + parser.start_dry_run(); + let is_lt_eq = parser.parse(LtEqScope::default(), None).0; + parser.end_dry_run(); + is_lt_eq +} + +fn is_gt_eq(parser: &mut Parser) -> bool { + parser.start_dry_run(); + let is_gt_eq = parser.parse(GtEqScope::default(), None).0; + parser.end_dry_run(); + is_gt_eq +} + +fn bump_bin_op(parser: &mut Parser) { + match parser.current_kind() { + Some(SyntaxKind::Lt) => { + if is_lshift(parser) { + parser.parse(LShiftScope::default(), None); + } else if is_lt_eq(parser) { + parser.parse(LtEqScope::default(), None); + } else { + parser.bump(); + } + } + Some(SyntaxKind::Gt) => { + if is_rshift(parser) { + parser.parse(RShiftScope::default(), None); + } else if is_gt_eq(parser) { + parser.parse(GtEqScope::default(), None); + } else { + parser.bump(); + } + } + _ => { + parser.bump(); + } + } +} diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index aac3353576..7aad25a788 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -58,7 +58,6 @@ define_scope! 
{ impl super::Parse for BlockExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); - parser.set_newline_as_trivia(false); loop { parser.set_newline_as_trivia(true); @@ -76,11 +75,13 @@ impl super::Parse for BlockExprScope { && parser.current_kind() != Some(SyntaxKind::RBrace) { parser.error_and_recover("expected newline after statement", None); + parser.bump_if(SyntaxKind::Newline); } } if !parser.bump_if(SyntaxKind::RBrace) { parser.error_and_bump_until("expected `}`", None, SyntaxKind::RBrace); + parser.bump_if(SyntaxKind::RBrace); } } } @@ -90,9 +91,7 @@ impl super::Parse for IfExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::IfKw); - parser.add_recovery_token(SyntaxKind::LBrace); - parse_expr_no_struct(parser); - parser.remove_recovery_token(SyntaxKind::LBrace); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); @@ -120,9 +119,7 @@ impl super::Parse for MatchExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::MatchKw); - parser.add_recovery_token(SyntaxKind::LBrace); - parse_expr_no_struct(parser); - parser.remove_recovery_token(SyntaxKind::LBrace); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); @@ -165,9 +162,7 @@ impl super::Parse for MatchArmScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - parser.add_recovery_token(SyntaxKind::FatArrow); - parse_pat(parser); - parser.remove_recovery_token(SyntaxKind::FatArrow); + parser.with_recovery_tokens(&[SyntaxKind::FatArrow], parse_pat); if !parser.bump_if(SyntaxKind::FatArrow) { parser.error_and_recover("expected `=>`", None); diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 3106db0fbe..4516826672 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -28,35 +28,31 @@ impl super::Parse for FnScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::FnKw); - parser.add_recovery_token(SyntaxKind::Lt); - parser.add_recovery_token(SyntaxKind::LParen); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the function name", None) - } - parser.remove_recovery_token(SyntaxKind::Lt); + parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::LParen], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the function name", None) + } + }); - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - parser.remove_recovery_token(SyntaxKind::LParen); + parser.with_recovery_tokens(&[SyntaxKind::LParen], |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }); - if !self.disallow_def { - parser.add_recovery_token(SyntaxKind::LBrace); - } - parser.add_recovery_token(SyntaxKind::Arrow); - if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnArgListScope::default(), None); - } else { - parser.error_and_recover("expected `(` for the function arguments", None); - } - parser.remove_recovery_token(SyntaxKind::Arrow); + parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::Arrow], |parser| { + if parser.current_kind() == 
Some(SyntaxKind::LParen) { + parser.parse(FnArgListScope::default(), None); + } else { + parser.error_and_recover("expected `(` for the function arguments", None); + } + }); - if parser.bump_if(SyntaxKind::Arrow) { - parse_type(parser, None); - } - if !self.disallow_def { - parser.remove_recovery_token(SyntaxKind::LBrace); - } + parser.with_recovery_tokens(&[SyntaxKind::LBrace], |parser| { + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None, false); + } + }); if parser.current_kind() == Some(SyntaxKind::LBrace) { if self.disallow_def { diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index bd10e69252..2df20db2fc 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -1,4 +1,4 @@ -use std::cell::Cell; +use std::{cell::Cell, rc::Rc}; use crate::{parser::func::FnScope, SyntaxKind}; @@ -46,9 +46,14 @@ impl super::Parse for ItemListScope { let mut checkpoint = attr::parse_attr_list(parser); let modifier_scope = ItemModifierScope::default(); - let (_, modifier_checkpoint) = parser.parse(modifier_scope.clone(), None); - checkpoint.get_or_insert(modifier_checkpoint); - let modifier = modifier_scope.kind.get(); + let modifier = match parser.current_kind() { + Some(kind) if is_modifier_head(kind) => { + let (_, modifier_checkpoint) = parser.parse(modifier_scope.clone(), None); + checkpoint.get_or_insert(modifier_checkpoint); + modifier_scope.kind.get() + } + _ => ModifierKind::None, + }; if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { parser.error("expected `fn` after `unsafe` keyword"); @@ -100,7 +105,7 @@ impl super::Parse for ItemListScope { } define_scope! { - ItemModifierScope {kind: Cell}, + ItemModifierScope {kind: Rc>}, ItemModifier, Inheritance } @@ -164,17 +169,17 @@ impl super::Parse for EnumScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::EnumKw); - parser.add_recovery_token(SyntaxKind::Lt); - parser.add_recovery_token(SyntaxKind::LBrace); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the enum name", None) - } - parser.remove_recovery_token(SyntaxKind::Lt); + parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::LBrace], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the enum name", None) + } + }); - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - parser.remove_recovery_token(SyntaxKind::LBrace); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected enum body", None); @@ -285,8 +290,46 @@ impl super::Parse for TraitItemListScope { define_scope! 
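// `ImplScope` covers both `impl Type { .. }` and `impl Trait for Type { .. }`.
// It first dry-runs `parse_type` followed by `ForKw` to detect the trait-impl form,
// then parses for real, switching its node kind to `ImplTrait` and choosing
// `ImplTraitItemListScope` or `ImplItemListScope` for the body accordingly.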
{ ImplScope, Impl, Inheritance } impl super::Parse for ImplScope { - fn parse(&mut self, _parser: &mut Parser) { - todo!() + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ImplKw); + + parser.start_dry_run(); + let is_trait_impl = parse_type(parser, None, true) && parser.bump_if(SyntaxKind::ForKw); + parser.end_dry_run(); + + if is_trait_impl { + self.set_kind(SyntaxKind::ImplTrait); + parse_type(parser, None, false); + parser.bump_expected(SyntaxKind::ForKw); + parse_type(parser, None, false); + } else { + parse_type(parser, None, true); + } + + if parser.current_kind() != Some(SyntaxKind::LBrace) { + parser.error_and_recover("expected impl body", None); + return; + } + + if is_trait_impl { + parser.parse(ImplTraitItemListScope::default(), None); + } else { + parser.parse(ImplItemListScope::default(), None); + } + } +} + +define_scope! { ImplTraitItemListScope, ImplTraitItemList, Override(RBrace, FnKw) } +impl super::Parse for ImplTraitItemListScope { + fn parse(&mut self, parser: &mut Parser) { + parse_fn_item_block(parser, false, true) + } +} + +define_scope! { ImplItemListScope, ImplItemList, Override(RBrace, FnKw) } +impl super::Parse for ImplItemListScope { + fn parse(&mut self, parser: &mut Parser) { + parse_fn_item_block(parser, true, true) } } @@ -305,18 +348,18 @@ impl super::Parse for ConstScope { parser.set_newline_as_trivia(false); - parser.add_recovery_token(SyntaxKind::Colon); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected identifier", None); - } - parser.remove_recovery_token(SyntaxKind::Colon); + parser.with_recovery_tokens(&[SyntaxKind::Eq], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected identifier", None); + } + }); - parser.add_recovery_token(SyntaxKind::Eq); - if !parser.bump_if(SyntaxKind::Colon) { - parser.error_and_recover("expected type annotation for `const`", None); - } - parse_type(parser, None); - parser.remove_recovery_token(SyntaxKind::Eq); + parser.with_recovery_tokens(&[SyntaxKind::Eq], |parser| { + if !parser.bump_if(SyntaxKind::Colon) { + parser.error_and_recover("expected type annotation for `const`", None); + } + parse_type(parser, None, false); + }); if !parser.bump_if(SyntaxKind::Eq) { parser.error_and_recover("expected `=` for const value definition", None); @@ -340,70 +383,100 @@ impl super::Parse for ExternScope { } } -define_scope! { ExternItemListScope, ExternItemList, Override(RBrace, Newline, FnKw) } +define_scope! { ExternItemListScope, ExternItemList, Override(RBrace, FnKw) } impl super::Parse for ExternItemListScope { fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::LBrace); - - loop { - parser.set_newline_as_trivia(true); - if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { - break; - } + parse_fn_item_block(parser, true, false); + } +} - let mut checkpoint = attr::parse_attr_list(parser); - let modifier_scope = ItemModifierScope::default(); - let (_, modifier_checkpoint) = parser.parse(modifier_scope.clone(), None); - checkpoint.get_or_insert(modifier_checkpoint); +define_scope! 
{ TypeAliasScope, TypeAlias, Inheritance } +impl super::Parse for TypeAliasScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::TypeKw); - match parser.current_kind() { - Some(SyntaxKind::FnKw) => { - parser.parse(FnScope::disallow_def(), checkpoint); - } - _ => { - parser.error_and_recover("extern item is restricted to `fn`", checkpoint); - } + parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::Eq], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected identifier for type alias name", None) } + }); - parser.set_newline_as_trivia(false); - if !matches!( - parser.current_kind(), - Some(SyntaxKind::RBrace | SyntaxKind::Newline) - ) { - parser - .error_and_recover("expected newline after extern item definition", checkpoint) + parser.with_recovery_tokens(&[SyntaxKind::Eq], |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); } - } + }); - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_recover("expected `}` to close the extern body", None) + if !parser.bump_if(SyntaxKind::Eq) { + parser.error_and_recover("expected `=` for type alias definition", None); + return; } + + parse_type(parser, None, false); } } -define_scope! { TypeAliasScope, TypeAlias, Inheritance } -impl super::Parse for TypeAliasScope { - fn parse(&mut self, parser: &mut Parser) { - parser.set_newline_as_trivia(false); - parser.bump_expected(SyntaxKind::TypeKw); +/// Currently, `impl` block, `impl trait` block, `trait` block and `extern` +/// block only allow `fn` as their items. This function is used to parse the +/// `fn` item in these blocks. NOTE: This function will be invalidated when +/// these block have their own allowed items, eg. `trait` block will allow +/// `type` item. 
+fn parse_fn_item_block( + parser: &mut Parser, + allow_modifier: bool, + allow_fn_def: bool, +) { + parser.bump_expected(SyntaxKind::LBrace); + loop { + parser.set_newline_as_trivia(true); + if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { + break; + } - parser.add_recovery_token(SyntaxKind::Lt); - parser.add_recovery_token(SyntaxKind::Eq); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected identifier for type alias name", None) + let mut checkpoint = attr::parse_attr_list(parser); + let modifier_scope = ItemModifierScope::default(); + match parser.current_kind() { + Some(kind) if is_modifier_head(kind) && allow_modifier => { + if allow_modifier { + let (_, modifier_checkpoint) = parser.parse(modifier_scope, None); + checkpoint.get_or_insert(modifier_checkpoint); + } else { + parser.error_and_recover("modifier is not allowed in the block", checkpoint); + } + } + _ => {} } - parser.remove_recovery_token(SyntaxKind::Lt); - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); + match parser.current_kind() { + Some(SyntaxKind::FnKw) => { + let scope = if allow_fn_def { + FnScope::default() + } else { + FnScope::disallow_def() + }; + parser.parse(scope, checkpoint); + } + _ => { + parser.error_and_recover("only `fn` is allowed in the block", checkpoint); + } } - parser.remove_recovery_token(SyntaxKind::Eq); - if !parser.bump_if(SyntaxKind::Eq) { - parser.error_and_recover("expected `=` for type alias definition", None); - return; + parser.set_newline_as_trivia(false); + if !matches!( + parser.current_kind(), + Some(SyntaxKind::RBrace | SyntaxKind::Newline) + ) { + parser.error_and_recover("expected newline after item definition", checkpoint) } + } - parse_type(parser, None); + if !parser.bump_if(SyntaxKind::RBrace) { + parser.error_and_recover("expected `}` to close the block", None); + parser.bump_if(SyntaxKind::RBrace); } } + +fn is_modifier_head(kind: SyntaxKind) -> bool { + matches!(kind, SyntaxKind::PubKw | SyntaxKind::UnsafeKw) +} diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index e7a66549eb..77f8ed3f8b 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -2,7 +2,7 @@ use std::collections::VecDeque; pub(crate) use item::ItemListScope; -use fxhash::FxHashSet; +use fxhash::{FxHashMap, FxHashSet}; use crate::{syntax_node::SyntaxNode, ParseError, SyntaxKind, TextRange}; @@ -48,7 +48,7 @@ pub struct Parser { /// enters dry run mode. dry_run_states: Vec>, - auxiliary_recovery_set: FxHashSet, + auxiliary_recovery_set: FxHashMap, } impl Parser { @@ -63,18 +63,13 @@ impl Parser { is_newline_trivia: true, next_trivias: VecDeque::new(), dry_run_states: Vec::new(), - auxiliary_recovery_set: FxHashSet::default(), + auxiliary_recovery_set: FxHashMap::default(), } } /// Returns the current token of the parser. pub fn current_token(&mut self) -> Option { self.peek_non_trivia() - // if !self.next_trivias.is_empty() { - // Some(&self.next_trivias[0]) - // } else { - // self.stream.peek() - // } } /// Returns the current non-trivia token kind of the parser. 
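The hunks around this point also turn `auxiliary_recovery_set` from a set (`FxHashSet`) into a count map (`FxHashMap`), so a recovery token registered by nested `with_recovery_tokens` calls stays active until every registration has been removed. A minimal standalone sketch of that counting behaviour, using plain `std` types and illustrative names (`RecoverySet`, `add`, `remove`, `contains`) that are not taken from the patch:

use std::collections::HashMap;

/// A recovery-token set that tolerates nested registrations of the same token.
#[derive(Default)]
struct RecoverySet {
    counts: HashMap<&'static str, usize>,
}

impl RecoverySet {
    fn add(&mut self, token: &'static str) {
        *self.counts.entry(token).or_insert(0) += 1;
    }

    fn remove(&mut self, token: &'static str) {
        if let Some(n) = self.counts.get_mut(token) {
            *n -= 1;
        }
        if self.counts.get(token) == Some(&0) {
            self.counts.remove(token);
        }
    }

    fn contains(&self, token: &str) -> bool {
        self.counts.contains_key(token)
    }
}

fn main() {
    let mut set = RecoverySet::default();
    set.add("RBrace");
    set.add("RBrace"); // a nested scope registers the same token again
    set.remove("RBrace"); // the inner scope is done ...
    assert!(set.contains("RBrace")); // ... but the outer registration still holds
    set.remove("RBrace");
    assert!(!set.contains("RBrace"));
}

With a plain set, the first `remove` in the example would already clear the token and the outer scope would lose its recovery point.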
@@ -98,6 +93,23 @@ impl Parser { (SyntaxNode::new_root(self.builder.finish()), self.errors) } + pub fn with_recovery_tokens(&mut self, recovery_tokens: &[SyntaxKind], f: F) -> R + where + F: FnOnce(&mut Self) -> R, + { + for token in recovery_tokens { + self.add_recovery_token(*token); + } + + let r = f(self); + + for token in recovery_tokens { + self.remove_recovery_token(*token); + } + + r + } + /// Invoke the scope to parse. The scope is wrapped up by the node specified /// by the scope. /// @@ -142,14 +154,6 @@ impl Parser { checkpoint.unwrap_or_else(|| self.checkpoint()) } - pub fn add_recovery_token(&mut self, token: SyntaxKind) { - self.auxiliary_recovery_set.insert(token); - } - - pub fn remove_recovery_token(&mut self, token: SyntaxKind) { - self.auxiliary_recovery_set.remove(&token); - } - #[doc(hidden)] /// Leave the scope and wrap up the checkpoint by the scope's node. // NOTE: This method is limited to testing and internal usage. @@ -163,7 +167,6 @@ impl Parser { self.bump_trivias() } - self.auxiliary_recovery_set.clear(); if !self.is_dry_run() { self.builder .start_node_at(checkpoint, scope.syntax_kind().into()); @@ -289,7 +292,7 @@ impl Parser { } while let Some(kind) = self.current_kind() { - if recovery_set.contains(&kind) | self.auxiliary_recovery_set.contains(&kind) { + if recovery_set.contains(&kind) || self.auxiliary_recovery_set.contains_key(&kind) { break; } else { self.bump(); @@ -381,6 +384,19 @@ impl Parser { fn is_trivia(&self, kind: SyntaxKind) -> bool { kind.is_trivia() || (self.is_newline_trivia && kind == SyntaxKind::Newline) } + + fn add_recovery_token(&mut self, token: SyntaxKind) { + *self.auxiliary_recovery_set.entry(token).or_insert(0) += 1; + } + + fn remove_recovery_token(&mut self, token: SyntaxKind) { + if let Some(num) = self.auxiliary_recovery_set.get_mut(&token) { + *num -= 1; + } + if self.auxiliary_recovery_set.get(&token) == Some(&0) { + self.auxiliary_recovery_set.remove(&token); + } + } } pub trait ParsingScope { @@ -401,7 +417,7 @@ struct DryRunState { err_num: usize, /// The stored trivias when the dry run started. next_trivias: VecDeque, - auxiliary_recovery_set: FxHashSet, + auxiliary_recovery_set: FxHashMap, } /// Represents the recovery method of the current scope. @@ -518,6 +534,14 @@ macro_rules! 
define_scope_struct { $($field: $ty),* } impl $scope_name { + #[allow(unused)] + $visibility fn new($($field: $ty),*) -> Self { + use crate::SyntaxKind::*; + Self { + $($field,)* + __inner: std::cell::Cell::new($kind).into(), + } + } #[allow(unused)] fn set_kind(&mut self, kind: crate::SyntaxKind) { self.__inner.set(kind); diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 6bdce5b23d..634f889b68 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -28,7 +28,7 @@ impl super::Parse for FnArgListScope { } if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_bump_until("expected closing `)`", None, SyntaxKind::RParen); + parser.error_and_recover("expected closing `)`", None); parser.bump_if(SyntaxKind::LParen); } } @@ -43,29 +43,34 @@ impl super::Parse for FnArgScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_if(SyntaxKind::MutKw); - parser.add_recovery_token(SyntaxKind::Colon); - match parser.current_kind() { - Some(SyntaxKind::SelfKw) => { - parser.bump_expected(SyntaxKind::SelfKw); - return; - } - Some(SyntaxKind::Ident | SyntaxKind::Underscore) => { - parser.bump(); - if !parser.bump_if(SyntaxKind::Ident) { - parser.bump_if(SyntaxKind::Underscore); + let is_self = parser.with_recovery_tokens(&[SyntaxKind::Colon], |parser| { + match parser.current_kind() { + Some(SyntaxKind::SelfKw) => { + parser.bump_expected(SyntaxKind::SelfKw); + true + } + Some(SyntaxKind::Ident | SyntaxKind::Underscore) => { + parser.bump(); + if !parser.bump_if(SyntaxKind::Ident) { + parser.bump_if(SyntaxKind::Underscore); + } + false + } + _ => { + parser.error_and_recover("expected identifier for argument name", None); + false } } - _ => { - parser.error_and_recover("expected identifier for argument name", None); - } + }); + if is_self { + return; } - parser.remove_recovery_token(SyntaxKind::Colon); if !parser.bump_if(SyntaxKind::Colon) { parser.error_and_recover("expected `:` after argument name", None); } - parse_type(parser, None); + parse_type(parser, None, false); } } @@ -87,7 +92,7 @@ impl super::Parse for GenericParamListScope { } if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_bump_until("expected closing `>`", None, SyntaxKind::Gt); + parser.error_and_recover("expected closing `>`", None); parser.bump_if(SyntaxKind::Gt); } } @@ -107,7 +112,6 @@ impl super::Parse for GenericParamScope { } if parser.current_kind() == Some(SyntaxKind::Colon) { - parser.bump_expected(SyntaxKind::Colon); parser.parse(TraitBoundListScope::default(), None); } } @@ -120,6 +124,8 @@ define_scope! { } impl super::Parse for TraitBoundListScope { fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Colon); + parser.parse(TraitBoundScope::default(), None); while parser.current_kind() == Some(SyntaxKind::Plus) { parser.bump_expected(SyntaxKind::Plus); @@ -136,14 +142,15 @@ define_scope! { impl super::Parse for TraitBoundScope { fn parse(&mut self, parser: &mut Parser) { parser.parse(PathScope::default(), None); - // TODO: Allow trait bound with associated type bound. - // `Trait`. + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::new(false), None); + } } } define_scope! 
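// The `allow_bounds` flag added below is threaded down from `parse_type`: generic
// arguments may carry `: Trait` bounds only where the caller permits them (for
// example the target type of a plain `impl` block); otherwise `GenericArgScope`
// reports "type bounds are not allowed here".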
{ - pub(crate) GenericArgListScope, - GenericParamList, + pub(crate) GenericArgListScope{ allow_bounds: bool }, + GenericArgList, Override(Gt, Comma) } impl super::Parse for GenericArgListScope { @@ -154,19 +161,23 @@ impl super::Parse for GenericArgListScope { return; } - parser.parse(GenericArgScope::default(), None); + parser.parse(GenericArgScope::new(self.allow_bounds), None); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(GenericArgScope::default(), None); + parser.parse(GenericArgScope::new(self.allow_bounds), None); } if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_bump_until("expected closing `>`", None, SyntaxKind::Gt); + parser.error_and_recover("expected closing `>`", None); parser.bump_if(SyntaxKind::Gt); } } } -define_scope! { GenericArgScope, GenericParam, Inheritance} +define_scope! { + GenericArgScope{ allow_bounds: bool }, + GenericArg, + Inheritance +} impl super::Parse for GenericArgScope { fn parse(&mut self, parser: &mut Parser) { match parser.current_kind() { @@ -174,16 +185,19 @@ impl super::Parse for GenericArgScope { parser.parse(BlockExprScope::default(), None); } - Some(SyntaxKind::Star | SyntaxKind::LBracket | SyntaxKind::LParen) => { - parse_type(parser, None); - } - Some(kind) if kind.is_literal_leaf() => { parser.parse(LitExprScope::default(), None); } _ => { - parser.parse(PathScope::default(), None); + parse_type(parser, None, self.allow_bounds); + if parser.current_kind() == Some(SyntaxKind::Colon) { + if !self.allow_bounds { + parser.error_and_recover("type bounds are not allowed here", None); + } else { + parser.parse(TraitBoundListScope::default(), None); + } + } } } } @@ -204,7 +218,7 @@ impl super::Parse for CallArgListScope { } if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_bump_until("expected closing `)`", None, SyntaxKind::RParen); + parser.error_and_recover("expected closing `)`", None); parser.bump_if(SyntaxKind::RParen); } } diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index a3834064d4..044ce1bad7 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -110,7 +110,6 @@ impl super::Parse for RecordPatFieldListScope { while parser.bump_if(SyntaxKind::Comma) { parser.parse(RecordPatFieldScope::default(), None); } - parser.remove_recovery_token(SyntaxKind::Comma); if !parser.bump_if(SyntaxKind::RBrace) { parser.error_and_recover("expected `}`", None); diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 3d6361e9d7..11551183f5 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -38,6 +38,6 @@ impl super::Parse for PathSegmentScope { pub(super) fn is_path_segment(kind: SyntaxKind) -> bool { matches!( kind, - SyntaxKind::SelfType | SyntaxKind::SelfKw | SyntaxKind::Ident + SyntaxKind::SelfTypeKw | SyntaxKind::SelfKw | SyntaxKind::Ident ) } diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index c152631903..5a4256b378 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -1,4 +1,4 @@ -use crate::SyntaxKind; +use crate::{parser::expr, SyntaxKind}; use super::{ define_scope, @@ -46,9 +46,7 @@ impl super::Parse for LetStmtScope { } if parser.current_kind() == Some(SyntaxKind::Colon) { parser.bump_expected(SyntaxKind::Colon); - parser.add_recovery_token(SyntaxKind::Eq); - parse_type(parser, None); - parser.remove_recovery_token(SyntaxKind::Eq); + parse_type(parser, None, false); } if parser.bump_if(SyntaxKind::Eq) { @@ 
-62,18 +60,14 @@ impl super::Parse for ForStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ForKw); - parser.add_recovery_token(SyntaxKind::InKw); - parse_pat(parser); - parser.remove_recovery_token(SyntaxKind::InKw); + parser.with_recovery_tokens(&[SyntaxKind::InKw], parse_pat); if !parser.bump_if(SyntaxKind::InKw) { parser.error_and_recover("expected `in` keyword", None); return; } - parser.add_recovery_token(SyntaxKind::LBrace); - parse_expr_no_struct(parser); - parser.remove_recovery_token(SyntaxKind::LBrace); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); @@ -88,9 +82,7 @@ impl super::Parse for WhileStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::WhileKw); - parser.add_recovery_token(SyntaxKind::LBrace); - parse_expr_no_struct(parser); - parser.remove_recovery_token(SyntaxKind::LBrace); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); @@ -142,17 +134,10 @@ impl super::Parse for ReturnStmtScope { define_scope! { AssignStmtScope, AssignStmt, Inheritance } impl super::Parse for AssignStmtScope { fn parse(&mut self, parser: &mut Parser) { - parser.add_recovery_token(SyntaxKind::Eq); - parse_pat(parser); - parser.remove_recovery_token(SyntaxKind::Eq); + parser.with_recovery_tokens(&[SyntaxKind::Eq], parse_pat); parser.set_newline_as_trivia(false); - if parser - .current_kind() - .map(is_aug_assign_kind) - .unwrap_or_default() - { - parser.bump(); + if bump_aug_assign_op_opt(parser) { self.set_kind(SyntaxKind::AugAssignStmt); } @@ -172,10 +157,29 @@ impl super::Parse for ExprStmtScope { } } -fn is_aug_assign_kind(kind: SyntaxKind) -> bool { +fn bump_aug_assign_op_opt(parser: &mut Parser) -> bool { use SyntaxKind::*; - matches!( - kind, - Pipe | Hat | Amp | Lt2 | Gt2 | Plus | Minus | Star | Slash | Percent | Star2 - ) + match parser.current_kind() { + Some(Pipe | Hat | Amp | Plus | Minus | Star | Slash | Percent | Star2) => { + parser.bump(); + true + } + Some(Lt) => { + if expr::is_lshift(parser) { + parser.parse(expr::LShiftScope::default(), None); + true + } else { + false + } + } + Some(Gt) => { + if expr::is_rshift(parser) { + parser.parse(expr::RShiftScope::default(), None); + true + } else { + false + } + } + _ => false, + } } diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 7ded9c4bbd..ba3a9d2e77 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -14,17 +14,17 @@ impl super::Parse for StructScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::StructKw); - parser.add_recovery_token(SyntaxKind::Lt); - parser.add_recovery_token(SyntaxKind::LBrace); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the struct name", None) - } - parser.remove_recovery_token(SyntaxKind::Lt); + parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::LBrace], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the struct name", None) + } + }); - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - parser.remove_recovery_token(SyntaxKind::LBrace); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], 
|parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(RecordFieldDefListScope::default(), None); @@ -87,6 +87,6 @@ impl super::Parse for RecordFieldDefScope { if !parser.bump_if(SyntaxKind::Colon) { parser.error_and_recover("expected `name: type` for the field definition", None); } - parse_type(parser, None); + parse_type(parser, None, false); } } diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index f99bdc47af..ed47da81ec 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -8,27 +8,28 @@ use super::{ pub(super) fn parse_type( parser: &mut Parser, checkpoint: Option, + allow_bounds: bool, ) -> bool { match parser.current_kind() { - Some(SyntaxKind::Star) => parser.parse(PtrTypeScope::default(), checkpoint), - Some(SyntaxKind::SelfTypeKw) => parser.parse(SelfTypeScope::default(), checkpoint), - Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::default(), checkpoint), - Some(SyntaxKind::LBracket) => parser.parse(ArrayTypeScope::default(), checkpoint), - _ => parser.parse(PathTypeScope::default(), checkpoint), + Some(SyntaxKind::Star) => parser.parse(PtrTypeScope::new(allow_bounds), checkpoint), + Some(SyntaxKind::SelfTypeKw) => parser.parse(SelfTypeScope::new(), checkpoint), + Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::new(allow_bounds), checkpoint), + Some(SyntaxKind::LBracket) => parser.parse(ArrayTypeScope::new(allow_bounds), checkpoint), + _ => parser.parse(PathTypeScope::new(allow_bounds), checkpoint), } .0 } -define_scope!(PtrTypeScope, PtrType, Inheritance); +define_scope!(PtrTypeScope { allow_bounds: bool }, PtrType, Inheritance); impl super::Parse for PtrTypeScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::Star); parser.set_newline_as_trivia(false); - parse_type(parser, None); + parse_type(parser, None, self.allow_bounds); } } -define_scope!(PathTypeScope, PathType, Inheritance); +define_scope!(PathTypeScope { allow_bounds: bool }, PathType, Inheritance); impl super::Parse for PathTypeScope { fn parse(&mut self, parser: &mut Parser) { if !parser.parse(PathScope::default(), None).0 { @@ -37,7 +38,7 @@ impl super::Parse for PathTypeScope { parser.set_newline_as_trivia(false); if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericArgListScope::default(), None); + parser.parse(GenericArgListScope::new(self.allow_bounds), None); } } } @@ -49,7 +50,7 @@ impl super::Parse for SelfTypeScope { } } define_scope! { - pub(crate) TupleTypeScope, + pub(crate) TupleTypeScope{ allow_bounds: bool }, TupleType, Override( RParen, @@ -63,9 +64,9 @@ impl super::Parse for TupleTypeScope { return; } - parse_type(parser, None); + parse_type(parser, None, self.allow_bounds); while parser.bump_if(SyntaxKind::Comma) { - parse_type(parser, None); + parse_type(parser, None, self.allow_bounds); } if !parser.bump_if(SyntaxKind::RParen) { @@ -76,7 +77,7 @@ impl super::Parse for TupleTypeScope { } define_scope! 
{ - ArrayTypeScope, + ArrayTypeScope{ allow_bounds: bool }, ArrayType, Override(RBracket) } @@ -84,9 +85,9 @@ impl super::Parse for ArrayTypeScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBracket); - parser.add_recovery_token(SyntaxKind::SemiColon); - parse_type(parser, None); - parser.remove_recovery_token(SyntaxKind::SemiColon); + parser.with_recovery_tokens(&[SyntaxKind::SemiColon], |parser| { + parse_type(parser, None, self.allow_bounds) + }); if !parser.bump_if(SyntaxKind::SemiColon) { parser.error_and_recover("expected `;`", None); diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 33f3521063..a8c79a521c 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -118,21 +118,9 @@ pub enum SyntaxKind { /// `<` #[token("<")] Lt, - /// `<<` - #[token("<<")] - Lt2, - /// `<=` - #[token("<=")] - LtEq, /// `>` #[token(">")] Gt, - /// `>>` - #[token(">>")] - Gt2, - /// `>=` - #[token(">=")] - GtEq, /// `=` #[token("=")] Eq, @@ -230,6 +218,15 @@ pub enum SyntaxKind { #[token("unsafe")] UnsafeKw, + /// `<<` + LShift, + /// `>>` + RShift, + /// `<=` + LtEq, + /// `>=` + GtEq, + // Expressions. These are non-leaf nodes. /// { statement-list } BlockExpr, @@ -343,12 +340,16 @@ pub enum SyntaxKind { TypeAlias, /// `impl Foo { .. }` Impl, + /// `{ fn ... }` + ImplItemList, /// `trait Foo {..}` Trait, /// `{ fn foo() {..} }` TraitItemList, /// `impl Trait for Foo { .. }` ImplTrait, + /// `{ fn foo() {..} }` + ImplTraitItemList, /// `const FOO: i32 = 1` Const, /// `use foo::{Foo as Foo1, bar::Baz}` diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.fe b/crates/parser2/test_files/syntax_node/exprs/binop.fe index e83e6a9ad9..c3c4c31831 100644 --- a/crates/parser2/test_files/syntax_node/exprs/binop.fe +++ b/crates/parser2/test_files/syntax_node/exprs/binop.fe @@ -2,10 +2,12 @@ 1 * 2 + 3 1 < 2 1 < (2 + 3) +1 <= 2 +1 >= 2 true || false && 1 < 2 true || false && (1 < 2) > 3 ^ 2 a ** 2 ** 3 1 - 2 - 3 1 << 3 >> 2 a.b.c -a.0.c +a.0.c \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.snap b/crates/parser2/test_files/syntax_node/exprs/binop.snap index 126f588e59..185668907b 100644 --- a/crates/parser2/test_files/syntax_node/exprs/binop.snap +++ b/crates/parser2/test_files/syntax_node/exprs/binop.snap @@ -2,7 +2,7 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..140 +Root@0..153 BinExpr@0..9 LitExpr@0..1 Int@0..1 "1" @@ -60,127 +60,152 @@ Root@0..140 Int@35..36 "3" RParen@36..37 ")" Newline@37..38 "\n" - BinExpr@38..60 - LitExpr@38..42 - TrueKw@38..42 "true" + BinExpr@38..44 + LitExpr@38..39 + Int@38..39 "1" + WhiteSpace@39..40 " " + LtEq@40..42 + Lt@40..41 "<" + Eq@41..42 "=" WhiteSpace@42..43 " " - Pipe2@43..45 "||" - WhiteSpace@45..46 " " - BinExpr@46..60 - LitExpr@46..51 - FalseKw@46..51 "false" - WhiteSpace@51..52 " " - Amp2@52..54 "&&" - WhiteSpace@54..55 " " - BinExpr@55..60 - LitExpr@55..56 - Int@55..56 "1" - WhiteSpace@56..57 " " - Lt@57..58 "<" - WhiteSpace@58..59 " " - LitExpr@59..60 - Int@59..60 "2" - Newline@60..61 "\n" - BinExpr@61..93 - LitExpr@61..65 - TrueKw@61..65 "true" - WhiteSpace@65..66 " " - Pipe2@66..68 "||" - WhiteSpace@68..69 " " - BinExpr@69..93 - LitExpr@69..74 - FalseKw@69..74 "false" - WhiteSpace@74..75 " " - Amp2@75..77 "&&" - WhiteSpace@77..78 " " - BinExpr@78..93 - ParenExpr@78..85 - LParen@78..79 "(" - BinExpr@79..84 - LitExpr@79..80 - Int@79..80 "1" - WhiteSpace@80..81 " " - Lt@81..82 "<" 
- WhiteSpace@82..83 " " - LitExpr@83..84 - Int@83..84 "2" - RParen@84..85 ")" - WhiteSpace@85..86 " " - Gt@86..87 ">" - WhiteSpace@87..88 " " - BinExpr@88..93 - LitExpr@88..89 - Int@88..89 "3" - WhiteSpace@89..90 " " - Hat@90..91 "^" - WhiteSpace@91..92 " " - LitExpr@92..93 - Int@92..93 "2" - Newline@93..94 "\n" - BinExpr@94..105 - Path@94..95 - PathSegment@94..95 - Ident@94..95 "a" - WhiteSpace@95..96 " " - Star2@96..98 "**" - WhiteSpace@98..99 " " - BinExpr@99..105 - LitExpr@99..100 - Int@99..100 "2" - WhiteSpace@100..101 " " - Star2@101..103 "**" - WhiteSpace@103..104 " " - LitExpr@104..105 - Int@104..105 "3" - Newline@105..106 "\n" - BinExpr@106..115 - BinExpr@106..111 - LitExpr@106..107 - Int@106..107 "1" - WhiteSpace@107..108 " " - Minus@108..109 "-" - WhiteSpace@109..110 " " - LitExpr@110..111 - Int@110..111 "2" - WhiteSpace@111..112 " " - Minus@112..113 "-" - WhiteSpace@113..114 " " - LitExpr@114..115 - Int@114..115 "3" - Newline@115..116 "\n" - BinExpr@116..127 - BinExpr@116..122 - LitExpr@116..117 - Int@116..117 "1" + LitExpr@43..44 + Int@43..44 "2" + Newline@44..45 "\n" + BinExpr@45..51 + LitExpr@45..46 + Int@45..46 "1" + WhiteSpace@46..47 " " + GtEq@47..49 + Gt@47..48 ">" + Eq@48..49 "=" + WhiteSpace@49..50 " " + LitExpr@50..51 + Int@50..51 "2" + Newline@51..52 "\n" + BinExpr@52..74 + LitExpr@52..56 + TrueKw@52..56 "true" + WhiteSpace@56..57 " " + Pipe2@57..59 "||" + WhiteSpace@59..60 " " + BinExpr@60..74 + LitExpr@60..65 + FalseKw@60..65 "false" + WhiteSpace@65..66 " " + Amp2@66..68 "&&" + WhiteSpace@68..69 " " + BinExpr@69..74 + LitExpr@69..70 + Int@69..70 "1" + WhiteSpace@70..71 " " + Lt@71..72 "<" + WhiteSpace@72..73 " " + LitExpr@73..74 + Int@73..74 "2" + Newline@74..75 "\n" + BinExpr@75..107 + LitExpr@75..79 + TrueKw@75..79 "true" + WhiteSpace@79..80 " " + Pipe2@80..82 "||" + WhiteSpace@82..83 " " + BinExpr@83..107 + LitExpr@83..88 + FalseKw@83..88 "false" + WhiteSpace@88..89 " " + Amp2@89..91 "&&" + WhiteSpace@91..92 " " + BinExpr@92..107 + ParenExpr@92..99 + LParen@92..93 "(" + BinExpr@93..98 + LitExpr@93..94 + Int@93..94 "1" + WhiteSpace@94..95 " " + Lt@95..96 "<" + WhiteSpace@96..97 " " + LitExpr@97..98 + Int@97..98 "2" + RParen@98..99 ")" + WhiteSpace@99..100 " " + Gt@100..101 ">" + WhiteSpace@101..102 " " + BinExpr@102..107 + LitExpr@102..103 + Int@102..103 "3" + WhiteSpace@103..104 " " + Hat@104..105 "^" + WhiteSpace@105..106 " " + LitExpr@106..107 + Int@106..107 "2" + Newline@107..108 "\n" + BinExpr@108..119 + Path@108..109 + PathSegment@108..109 + Ident@108..109 "a" + WhiteSpace@109..110 " " + Star2@110..112 "**" + WhiteSpace@112..113 " " + BinExpr@113..119 + LitExpr@113..114 + Int@113..114 "2" + WhiteSpace@114..115 " " + Star2@115..117 "**" WhiteSpace@117..118 " " - Lt2@118..120 "<<" - WhiteSpace@120..121 " " - LitExpr@121..122 - Int@121..122 "3" - WhiteSpace@122..123 " " - Gt2@123..125 ">>" + LitExpr@118..119 + Int@118..119 "3" + Newline@119..120 "\n" + BinExpr@120..129 + BinExpr@120..125 + LitExpr@120..121 + Int@120..121 "1" + WhiteSpace@121..122 " " + Minus@122..123 "-" + WhiteSpace@123..124 " " + LitExpr@124..125 + Int@124..125 "2" WhiteSpace@125..126 " " - LitExpr@126..127 - Int@126..127 "2" - Newline@127..128 "\n" - FieldExpr@128..133 - FieldExpr@128..131 - Path@128..129 - PathSegment@128..129 - Ident@128..129 "a" - Dot@129..130 "." - Ident@130..131 "b" - Dot@131..132 "." - Ident@132..133 "c" - Newline@133..134 "\n" - FieldExpr@134..139 - FieldExpr@134..137 - Path@134..135 - PathSegment@134..135 - Ident@134..135 "a" - Dot@135..136 "." 
- Int@136..137 "0" - Dot@137..138 "." - Ident@138..139 "c" - Newline@139..140 "\n" + Minus@126..127 "-" + WhiteSpace@127..128 " " + LitExpr@128..129 + Int@128..129 "3" + Newline@129..130 "\n" + BinExpr@130..141 + BinExpr@130..136 + LitExpr@130..131 + Int@130..131 "1" + WhiteSpace@131..132 " " + LShift@132..134 + Lt@132..133 "<" + Lt@133..134 "<" + WhiteSpace@134..135 " " + LitExpr@135..136 + Int@135..136 "3" + WhiteSpace@136..137 " " + RShift@137..139 + Gt@137..138 ">" + Gt@138..139 ">" + WhiteSpace@139..140 " " + LitExpr@140..141 + Int@140..141 "2" + Newline@141..142 "\n" + FieldExpr@142..147 + FieldExpr@142..145 + Path@142..143 + PathSegment@142..143 + Ident@142..143 "a" + Dot@143..144 "." + Ident@144..145 "b" + Dot@145..146 "." + Ident@146..147 "c" + Newline@147..148 "\n" + FieldExpr@148..153 + FieldExpr@148..151 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "a" + Dot@149..150 "." + Int@150..151 "0" + Dot@151..152 "." + Ident@152..153 "c" diff --git a/crates/parser2/test_files/syntax_node/exprs/call.snap b/crates/parser2/test_files/syntax_node/exprs/call.snap index 948017628c..0fcbe962b9 100644 --- a/crates/parser2/test_files/syntax_node/exprs/call.snap +++ b/crates/parser2/test_files/syntax_node/exprs/call.snap @@ -124,21 +124,23 @@ Root@0..169 Path@88..91 PathSegment@88..91 Ident@88..91 "foo" - GenericParamList@91..109 + GenericArgList@91..109 Lt@91..92 "<" - GenericParam@92..95 - Path@92..95 - PathSegment@92..95 - Ident@92..95 "i32" + GenericArg@92..95 + PathType@92..95 + Path@92..95 + PathSegment@92..95 + Ident@92..95 "i32" Comma@95..96 "," WhiteSpace@96..97 " " - GenericParam@97..108 - Path@97..108 - PathSegment@97..100 - Ident@97..100 "foo" - Colon2@100..102 "::" - PathSegment@102..108 - Ident@102..108 "MyType" + GenericArg@97..108 + PathType@97..108 + Path@97..108 + PathSegment@97..100 + Ident@97..100 "foo" + Colon2@100..102 "::" + PathSegment@102..108 + Ident@102..108 "MyType" Gt@108..109 ">" CallArgList@109..134 LParen@109..110 "(" @@ -162,9 +164,9 @@ Root@0..169 Path@135..138 PathSegment@135..138 Ident@135..138 "foo" - GenericParamList@138..157 + GenericArgList@138..157 Lt@138..139 "<" - GenericParam@139..147 + GenericArg@139..147 ArrayType@139..147 LBracket@139..140 "[" PathType@140..143 @@ -178,7 +180,7 @@ Root@0..169 RBracket@146..147 "]" Comma@147..148 "," WhiteSpace@148..149 " " - GenericParam@149..156 + GenericArg@149..156 BlockExpr@149..156 LBrace@149..150 "{" ExprStmt@150..155 diff --git a/crates/parser2/test_files/syntax_node/exprs/method.snap b/crates/parser2/test_files/syntax_node/exprs/method.snap index 0f7f7118ac..b76eb79e16 100644 --- a/crates/parser2/test_files/syntax_node/exprs/method.snap +++ b/crates/parser2/test_files/syntax_node/exprs/method.snap @@ -85,15 +85,16 @@ Root@0..75 Ident@49..50 "x" Dot@50..51 "." 
Ident@51..52 "y" - GenericParamList@52..66 + GenericArgList@52..66 Lt@52..53 "<" - GenericParam@53..56 - Path@53..56 - PathSegment@53..56 - Ident@53..56 "i32" + GenericArg@53..56 + PathType@53..56 + Path@53..56 + PathSegment@53..56 + Ident@53..56 "i32" Comma@56..57 "," WhiteSpace@57..58 " " - GenericParam@58..65 + GenericArg@58..65 BlockExpr@58..65 LBrace@58..59 "{" ExprStmt@59..64 diff --git a/crates/parser2/test_files/syntax_node/items/const_.snap b/crates/parser2/test_files/syntax_node/items/const_.snap index c0068ff052..c6eceb2337 100644 --- a/crates/parser2/test_files/syntax_node/items/const_.snap +++ b/crates/parser2/test_files/syntax_node/items/const_.snap @@ -24,7 +24,6 @@ Root@0..160 Int@21..22 "1" Newline@22..24 "\n\n" Const@24..159 - ItemModifier@24..24 ConstKw@24..29 "const" WhiteSpace@29..30 " " Ident@30..33 "BAR" diff --git a/crates/parser2/test_files/syntax_node/items/contract.snap b/crates/parser2/test_files/syntax_node/items/contract.snap index e32cada52a..e214038046 100644 --- a/crates/parser2/test_files/syntax_node/items/contract.snap +++ b/crates/parser2/test_files/syntax_node/items/contract.snap @@ -5,7 +5,6 @@ expression: snapshot Root@0..87 ItemList@0..87 Contract@0..17 - ItemModifier@0..0 ContractKw@0..8 "contract" WhiteSpace@8..9 " " Ident@9..14 "Empty" diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 068d2131f6..5d7a0ff512 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -5,7 +5,6 @@ expression: snapshot Root@0..174 ItemList@0..174 Enum@0..13 - ItemModifier@0..0 EnumKw@0..4 "enum" WhiteSpace@4..5 " " Ident@5..10 "Empty" @@ -15,7 +14,6 @@ Root@0..174 RBrace@12..13 "}" Newline@13..15 "\n\n" Enum@15..56 - ItemModifier@15..15 EnumKw@15..19 "enum" WhiteSpace@19..20 " " Ident@20..25 "Basic" @@ -47,7 +45,6 @@ Root@0..174 RBrace@55..56 "}" Newline@56..58 "\n\n" Enum@58..97 - ItemModifier@58..58 EnumKw@58..62 "enum" WhiteSpace@62..63 " " Ident@63..69 "Option" @@ -78,7 +75,6 @@ Root@0..174 RBrace@96..97 "}" Newline@97..99 "\n\n" Enum@99..174 - ItemModifier@99..99 EnumKw@99..103 "enum" WhiteSpace@103..104 " " Ident@104..113 "BoundEnum" @@ -86,9 +82,9 @@ Root@0..174 Lt@113..114 "<" GenericParam@114..126 Ident@114..115 "T" - Colon@115..116 ":" - WhiteSpace@116..117 " " - TraitBoundList@117..126 + TraitBoundList@115..126 + Colon@115..116 ":" + WhiteSpace@116..117 " " TraitBound@117..120 Path@117..120 PathSegment@117..120 @@ -105,9 +101,9 @@ Root@0..174 WhiteSpace@128..129 " " GenericParam@129..141 Ident@129..130 "U" - Colon@130..131 ":" - WhiteSpace@131..132 " " - TraitBoundList@132..141 + TraitBoundList@130..141 + Colon@130..131 ":" + WhiteSpace@131..132 " " TraitBound@132..135 Path@132..135 PathSegment@132..135 diff --git a/crates/parser2/test_files/syntax_node/items/extern_.snap b/crates/parser2/test_files/syntax_node/items/extern_.snap index fb0c8694de..8c2df581f4 100644 --- a/crates/parser2/test_files/syntax_node/items/extern_.snap +++ b/crates/parser2/test_files/syntax_node/items/extern_.snap @@ -5,7 +5,6 @@ expression: snapshot Root@0..146 ItemList@0..146 Extern@0..11 - ItemModifier@0..0 ExternKw@0..6 "extern" WhiteSpace@6..7 " " ExternItemList@7..11 @@ -14,7 +13,6 @@ Root@0..146 RBrace@10..11 "}" Newline@11..13 "\n\n" Extern@13..146 - ItemModifier@13..13 ExternKw@13..19 "extern" WhiteSpace@19..20 " " ExternItemList@20..146 @@ -104,7 +102,6 @@ Root@0..146 Newline@131..132 "\n" WhiteSpace@132..136 " " 
Fn@136..144 - ItemModifier@136..136 FnKw@136..138 "fn" WhiteSpace@138..139 " " Ident@139..142 "foo" diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 14998b1d21..32533e5d98 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -35,7 +35,6 @@ Root@0..292 RBrace@29..30 "}" Newline@30..32 "\n\n" Fn@32..80 - ItemModifier@32..32 FnKw@32..34 "fn" WhiteSpace@34..35 " " Ident@35..38 "bar" @@ -81,7 +80,6 @@ Root@0..292 RBrace@79..80 "}" Newline@80..82 "\n\n" Fn@82..178 - ItemModifier@82..82 FnKw@82..84 "fn" WhiteSpace@84..85 " " Ident@85..88 "baz" @@ -155,7 +153,6 @@ Root@0..292 RBrace@177..178 "}" Newline@178..180 "\n\n" Fn@180..240 - ItemModifier@180..180 FnKw@180..182 "fn" WhiteSpace@182..183 " " Ident@183..192 "generics1" @@ -163,9 +160,9 @@ Root@0..292 Lt@192..193 "<" GenericParam@193..201 Ident@193..194 "T" - Colon@194..195 ":" - WhiteSpace@195..196 " " - TraitBoundList@196..201 + TraitBoundList@194..201 + Colon@194..195 ":" + WhiteSpace@195..196 " " TraitBound@196..201 Path@196..201 PathSegment@196..201 @@ -195,12 +192,13 @@ Root@0..292 Path@215..221 PathSegment@215..221 Ident@215..221 "Option" - GenericParamList@221..224 + GenericArgList@221..224 Lt@221..222 "<" - GenericParam@222..223 - Path@222..223 - PathSegment@222..223 - Ident@222..223 "U" + GenericArg@222..223 + PathType@222..223 + Path@222..223 + PathSegment@222..223 + Ident@222..223 "U" Gt@223..224 ">" RParen@224..225 ")" WhiteSpace@225..226 " " @@ -223,7 +221,6 @@ Root@0..292 RBrace@239..240 "}" Newline@240..242 "\n\n" Fn@242..292 - ItemModifier@242..242 FnKw@242..244 "fn" WhiteSpace@244..245 " " Ident@245..249 "decl" @@ -246,18 +243,20 @@ Root@0..292 Path@259..267 PathSegment@259..267 Ident@259..267 "MyStruct" - GenericParamList@267..273 + GenericArgList@267..273 Lt@267..268 "<" - GenericParam@268..269 - Path@268..269 - PathSegment@268..269 - Ident@268..269 "T" + GenericArg@268..269 + PathType@268..269 + Path@268..269 + PathSegment@268..269 + Ident@268..269 "T" Comma@269..270 "," WhiteSpace@270..271 " " - GenericParam@271..272 - Path@271..272 - PathSegment@271..272 - Ident@271..272 "U" + GenericArg@271..272 + PathType@271..272 + Path@271..272 + PathSegment@271..272 + Ident@271..272 "U" Gt@272..273 ">" RParen@273..274 ")" WhiteSpace@274..275 " " @@ -267,17 +266,19 @@ Root@0..292 Path@278..284 PathSegment@278..284 Ident@278..284 "Result" - GenericParamList@284..292 + GenericArgList@284..292 Lt@284..285 "<" - GenericParam@285..286 - Path@285..286 - PathSegment@285..286 - Ident@285..286 "T" + GenericArg@285..286 + PathType@285..286 + Path@285..286 + PathSegment@285..286 + Ident@285..286 "T" Comma@286..287 "," WhiteSpace@287..288 " " - GenericParam@288..291 - Path@288..291 - PathSegment@288..291 - Ident@288..291 "Err" + GenericArg@288..291 + PathType@288..291 + Path@288..291 + PathSegment@288..291 + Ident@288..291 "Err" Gt@291..292 ">" diff --git a/crates/parser2/test_files/syntax_node/items/impl_.fe b/crates/parser2/test_files/syntax_node/items/impl_.fe new file mode 100644 index 0000000000..8942868db2 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl_.fe @@ -0,0 +1,13 @@ +impl Foo::Bar { + pub fn add(self, rhs: Self) -> Self { + Self { + val: self.val + rhs.val + } + } +} + +impl Foo { + fn add>(self, rhs: U) { + (rhs - self.t) + } +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/impl_.snap 
b/crates/parser2/test_files/syntax_node/items/impl_.snap new file mode 100644 index 0000000000..c8ed9691d8 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl_.snap @@ -0,0 +1,192 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..218 + ItemList@0..218 + Impl@0..134 + ImplKw@0..4 "impl" + WhiteSpace@4..5 " " + PathType@5..21 + Path@5..13 + PathSegment@5..8 + Ident@5..8 "Foo" + Colon2@8..10 "::" + PathSegment@10..13 + Ident@10..13 "Bar" + GenericArgList@13..21 + Lt@13..14 "<" + GenericArg@14..20 + PathType@14..15 + Path@14..15 + PathSegment@14..15 + Ident@14..15 "T" + TraitBoundList@15..20 + Colon@15..16 ":" + WhiteSpace@16..17 " " + TraitBound@17..20 + Path@17..20 + PathSegment@17..20 + Ident@17..20 "Add" + Gt@20..21 ">" + WhiteSpace@21..22 " " + ImplItemList@22..134 + LBrace@22..23 "{" + Newline@23..24 "\n" + WhiteSpace@24..28 " " + Fn@28..132 + ItemModifier@28..31 + PubKw@28..31 "pub" + WhiteSpace@31..32 " " + FnKw@32..34 "fn" + WhiteSpace@34..35 " " + Ident@35..38 "add" + FnArgList@38..55 + LParen@38..39 "(" + FnArg@39..43 + SelfKw@39..43 "self" + Comma@43..44 "," + WhiteSpace@44..45 " " + FnArg@45..54 + Ident@45..48 "rhs" + Colon@48..49 ":" + WhiteSpace@49..50 " " + SelfType@50..54 + SelfTypeKw@50..54 "Self" + RParen@54..55 ")" + WhiteSpace@55..56 " " + Arrow@56..58 "->" + WhiteSpace@58..59 " " + SelfType@59..63 + SelfTypeKw@59..63 "Self" + WhiteSpace@63..64 " " + BlockExpr@64..132 + LBrace@64..65 "{" + Newline@65..66 "\n" + WhiteSpace@66..74 " " + ExprStmt@74..126 + RecordInitExpr@74..126 + Path@74..78 + PathSegment@74..78 + SelfTypeKw@74..78 "Self" + WhiteSpace@78..79 " " + RecordFieldList@79..126 + LBrace@79..80 "{" + Newline@80..81 "\n" + WhiteSpace@81..93 " " + RecordField@93..116 + Ident@93..96 "val" + Colon@96..97 ":" + WhiteSpace@97..98 " " + BinExpr@98..116 + FieldExpr@98..106 + Path@98..102 + PathSegment@98..102 + SelfKw@98..102 "self" + Dot@102..103 "." + Ident@103..106 "val" + WhiteSpace@106..107 " " + Plus@107..108 "+" + WhiteSpace@108..109 " " + FieldExpr@109..116 + Path@109..112 + PathSegment@109..112 + Ident@109..112 "rhs" + Dot@112..113 "." 
+ Ident@113..116 "val" + Newline@116..117 "\n" + WhiteSpace@117..125 " " + RBrace@125..126 "}" + Newline@126..127 "\n" + WhiteSpace@127..131 " " + RBrace@131..132 "}" + Newline@132..133 "\n" + RBrace@133..134 "}" + Newline@134..136 "\n\n" + Impl@136..218 + ImplKw@136..140 "impl" + WhiteSpace@140..141 " " + PathType@141..147 + Path@141..144 + PathSegment@141..144 + Ident@141..144 "Foo" + GenericArgList@144..147 + Lt@144..145 "<" + GenericArg@145..146 + PathType@145..146 + Path@145..146 + PathSegment@145..146 + Ident@145..146 "T" + Gt@146..147 ">" + WhiteSpace@147..148 " " + ImplItemList@148..218 + LBrace@148..149 "{" + Newline@149..150 "\n" + WhiteSpace@150..154 " " + Fn@154..216 + FnKw@154..156 "fn" + WhiteSpace@156..157 " " + Ident@157..160 "add" + GenericParamList@160..171 + Lt@160..161 "<" + GenericParam@161..170 + Ident@161..162 "U" + TraitBoundList@162..170 + Colon@162..163 ":" + WhiteSpace@163..164 " " + TraitBound@164..170 + Path@164..167 + PathSegment@164..167 + Ident@164..167 "Add" + GenericArgList@167..170 + Lt@167..168 "<" + GenericArg@168..169 + PathType@168..169 + Path@168..169 + PathSegment@168..169 + Ident@168..169 "T" + Gt@169..170 ">" + Gt@170..171 ">" + FnArgList@171..185 + LParen@171..172 "(" + FnArg@172..176 + SelfKw@172..176 "self" + Comma@176..177 "," + WhiteSpace@177..178 " " + FnArg@178..184 + Ident@178..181 "rhs" + Colon@181..182 ":" + WhiteSpace@182..183 " " + PathType@183..184 + Path@183..184 + PathSegment@183..184 + Ident@183..184 "U" + RParen@184..185 ")" + WhiteSpace@185..186 " " + BlockExpr@186..216 + LBrace@186..187 "{" + Newline@187..188 "\n" + WhiteSpace@188..196 " " + ExprStmt@196..210 + ParenExpr@196..210 + LParen@196..197 "(" + BinExpr@197..209 + Path@197..200 + PathSegment@197..200 + Ident@197..200 "rhs" + WhiteSpace@200..201 " " + Minus@201..202 "-" + WhiteSpace@202..203 " " + FieldExpr@203..209 + Path@203..207 + PathSegment@203..207 + SelfKw@203..207 "self" + Dot@207..208 "." 
+ Ident@208..209 "t" + RParen@209..210 ")" + Newline@210..211 "\n" + WhiteSpace@211..215 " " + RBrace@215..216 "}" + Newline@216..217 "\n" + RBrace@217..218 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.fe b/crates/parser2/test_files/syntax_node/items/impl_trait.fe new file mode 100644 index 0000000000..f45fff9fb0 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.fe @@ -0,0 +1,11 @@ +impl Trait for F { + fn foo() { + return 1 + } +} + +impl Trait for F { + fn foo>(t: T) { + do_something(t) + } +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap new file mode 100644 index 0000000000..d4b47c2fd8 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -0,0 +1,172 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..167 + ItemList@0..167 + ImplTrait@0..64 + ImplKw@0..4 "impl" + WhiteSpace@4..5 " " + PathType@5..13 + Path@5..10 + PathSegment@5..10 + Ident@5..10 "Trait" + GenericArgList@10..13 + Lt@10..11 "<" + GenericArg@11..12 + PathType@11..12 + Path@11..12 + PathSegment@11..12 + Ident@11..12 "T" + Gt@12..13 ">" + WhiteSpace@13..14 " " + ForKw@14..17 "for" + WhiteSpace@17..18 " " + PathType@18..22 + Path@18..19 + PathSegment@18..19 + Ident@18..19 "F" + GenericArgList@19..22 + Lt@19..20 "<" + GenericArg@20..21 + PathType@20..21 + Path@20..21 + PathSegment@20..21 + Ident@20..21 "T" + Gt@21..22 ">" + WhiteSpace@22..23 " " + ImplTraitItemList@23..64 + LBrace@23..24 "{" + Newline@24..25 "\n" + WhiteSpace@25..29 " " + Fn@29..62 + FnKw@29..31 "fn" + WhiteSpace@31..32 " " + Ident@32..35 "foo" + FnArgList@35..37 + LParen@35..36 "(" + RParen@36..37 ")" + WhiteSpace@37..38 " " + BlockExpr@38..62 + LBrace@38..39 "{" + Newline@39..40 "\n" + WhiteSpace@40..48 " " + ReturnStmt@48..56 + ReturnKw@48..54 "return" + WhiteSpace@54..55 " " + LitExpr@55..56 + Int@55..56 "1" + Newline@56..57 "\n" + WhiteSpace@57..61 " " + RBrace@61..62 "}" + Newline@62..63 "\n" + RBrace@63..64 "}" + Newline@64..66 "\n\n" + ImplTrait@66..167 + ImplKw@66..70 "impl" + WhiteSpace@70..71 " " + PathType@71..82 + Path@71..76 + PathSegment@71..76 + Ident@71..76 "Trait" + GenericArgList@76..82 + Lt@76..77 "<" + GenericArg@77..78 + PathType@77..78 + Path@77..78 + PathSegment@77..78 + Ident@77..78 "T" + Comma@78..79 "," + WhiteSpace@79..80 " " + GenericArg@80..81 + PathType@80..81 + Path@80..81 + PathSegment@80..81 + Ident@80..81 "U" + Gt@81..82 ">" + WhiteSpace@82..83 " " + ForKw@83..86 "for" + WhiteSpace@86..87 " " + PathType@87..91 + Path@87..88 + PathSegment@87..88 + Ident@87..88 "F" + GenericArgList@88..91 + Lt@88..89 "<" + GenericArg@89..90 + PathType@89..90 + Path@89..90 + PathSegment@89..90 + Ident@89..90 "T" + Gt@90..91 ">" + WhiteSpace@91..92 " " + ImplTraitItemList@92..167 + LBrace@92..93 "{" + Newline@93..94 "\n" + WhiteSpace@94..98 " " + Fn@98..165 + FnKw@98..100 "fn" + WhiteSpace@100..101 " " + Ident@101..104 "foo" + GenericParamList@104..122 + Lt@104..105 "<" + GenericParam@105..121 + Ident@105..106 "T" + TraitBoundList@106..121 + Colon@106..107 ":" + WhiteSpace@107..108 " " + TraitBound@108..121 + Path@108..118 + PathSegment@108..118 + Ident@108..118 "OtherTrait" + GenericArgList@118..121 + Lt@118..119 "<" + GenericArg@119..120 + PathType@119..120 + Path@119..120 + PathSegment@119..120 + Ident@119..120 "U" + Gt@120..121 ">" + Gt@121..122 ">" + FnArgList@122..128 + LParen@122..123 "(" + FnArg@123..127 
+ Ident@123..124 "t" + Colon@124..125 ":" + WhiteSpace@125..126 " " + PathType@126..127 + Path@126..127 + PathSegment@126..127 + Ident@126..127 "T" + RParen@127..128 ")" + WhiteSpace@128..129 " " + BlockExpr@129..165 + LBrace@129..130 "{" + Newline@130..131 "\n" + WhiteSpace@131..139 " " + ExprStmt@139..159 + CallExpr@139..159 + Path@139..151 + PathSegment@139..151 + Ident@139..151 "do_something" + GenericArgList@151..156 + Lt@151..152 "<" + GenericArg@152..155 + PathType@152..155 + Path@152..155 + PathSegment@152..155 + Ident@152..155 "i32" + Gt@155..156 ">" + CallArgList@156..159 + LParen@156..157 "(" + CallArg@157..158 + Path@157..158 + PathSegment@157..158 + Ident@157..158 "t" + RParen@158..159 ")" + Newline@159..160 "\n" + WhiteSpace@160..164 " " + RBrace@164..165 "}" + Newline@165..166 "\n" + RBrace@166..167 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/trait_.snap b/crates/parser2/test_files/syntax_node/items/trait_.snap index 76e847efbf..338d9020e9 100644 --- a/crates/parser2/test_files/syntax_node/items/trait_.snap +++ b/crates/parser2/test_files/syntax_node/items/trait_.snap @@ -5,7 +5,6 @@ expression: snapshot Root@0..182 ItemList@0..182 Trait@0..15 - ItemModifier@0..0 TraitKw@0..5 "trait" WhiteSpace@5..6 " " Ident@6..12 "Marker" @@ -34,9 +33,9 @@ Root@0..182 Lt@43..44 "<" GenericParam@44..52 Ident@44..45 "T" - Colon@45..46 ":" - WhiteSpace@46..47 " " - TraitBoundList@47..52 + TraitBoundList@45..52 + Colon@45..46 ":" + WhiteSpace@46..47 " " TraitBound@47..52 Path@47..52 PathSegment@47..52 @@ -47,9 +46,9 @@ Root@0..182 ConstKw@54..59 "const" WhiteSpace@59..60 " " Ident@60..61 "U" - Colon@61..62 ":" - WhiteSpace@62..63 " " - TraitBoundList@63..66 + TraitBoundList@61..66 + Colon@61..62 ":" + WhiteSpace@62..63 " " TraitBound@63..66 Path@63..66 PathSegment@63..66 @@ -86,9 +85,9 @@ Root@0..182 Lt@102..103 "<" GenericParam@103..115 Ident@103..104 "T" - Colon@104..105 ":" - WhiteSpace@105..106 " " - TraitBoundList@106..115 + TraitBoundList@104..115 + Colon@104..105 ":" + WhiteSpace@105..106 " " TraitBound@106..109 Path@106..109 PathSegment@106..109 diff --git a/crates/parser2/test_files/syntax_node/items/type_.snap b/crates/parser2/test_files/syntax_node/items/type_.snap index 770c868935..0ca6e1b1f8 100644 --- a/crates/parser2/test_files/syntax_node/items/type_.snap +++ b/crates/parser2/test_files/syntax_node/items/type_.snap @@ -21,7 +21,6 @@ Root@0..98 WhiteSpace@18..19 " " Newline@19..21 "\n\n" TypeAlias@21..54 - ItemModifier@21..21 TypeKw@21..25 "type" WhiteSpace@25..26 " " Ident@26..32 "Result" @@ -37,22 +36,23 @@ Root@0..98 Path@38..44 PathSegment@38..44 Ident@38..44 "Result" - GenericParamList@44..54 + GenericArgList@44..54 Lt@44..45 "<" - GenericParam@45..46 - Path@45..46 - PathSegment@45..46 - Ident@45..46 "T" + GenericArg@45..46 + PathType@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "T" Comma@46..47 "," WhiteSpace@47..48 " " - GenericParam@48..53 - Path@48..53 - PathSegment@48..53 - Ident@48..53 "Error" + GenericArg@48..53 + PathType@48..53 + Path@48..53 + PathSegment@48..53 + Ident@48..53 "Error" Gt@53..54 ">" Newline@54..56 "\n\n" TypeAlias@56..98 - ItemModifier@56..56 TypeKw@56..60 "type" WhiteSpace@60..61 " " Ident@61..70 "WithBound" @@ -60,9 +60,9 @@ Root@0..98 Lt@70..71 "<" GenericParam@71..84 Ident@71..72 "T" - Colon@72..73 ":" - WhiteSpace@73..74 " " - TraitBoundList@74..84 + TraitBoundList@72..84 + Colon@72..73 ":" + WhiteSpace@73..74 " " TraitBound@74..84 Path@74..84 PathSegment@74..84 @@ -75,11 +75,12 @@ Root@0..98 Path@88..95 
PathSegment@88..95 Ident@88..95 "NoBound" - GenericParamList@95..98 + GenericArgList@95..98 Lt@95..96 "<" - GenericParam@96..97 - Path@96..97 - PathSegment@96..97 - Ident@96..97 "T" + GenericArg@96..97 + PathType@96..97 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "T" Gt@97..98 ">" diff --git a/crates/parser2/test_files/syntax_node/items/use_.snap b/crates/parser2/test_files/syntax_node/items/use_.snap index 367ef8d7f4..26517d5ba7 100644 --- a/crates/parser2/test_files/syntax_node/items/use_.snap +++ b/crates/parser2/test_files/syntax_node/items/use_.snap @@ -5,7 +5,6 @@ expression: snapshot Root@0..278 ItemList@0..278 Use@0..12 - ItemModifier@0..0 UseKw@0..3 "use" WhiteSpace@3..4 " " UseTree@4..12 @@ -31,7 +30,6 @@ Root@0..278 Ident@26..29 "Bar" Newline@29..30 "\n" Use@30..40 - ItemModifier@30..30 UseKw@30..33 "use" WhiteSpace@33..34 " " UseTree@34..40 @@ -42,7 +40,6 @@ Root@0..278 Star@39..40 "*" Newline@40..41 "\n" Use@41..61 - ItemModifier@41..41 UseKw@41..44 "use" WhiteSpace@44..45 " " UseTree@45..61 @@ -59,7 +56,6 @@ Root@0..278 Ident@57..61 "Bar1" Newline@61..62 "\n" Use@62..81 - ItemModifier@62..62 UseKw@62..65 "use" WhiteSpace@65..66 " " UseTree@66..81 @@ -76,7 +72,6 @@ Root@0..278 Underscore@80..81 "_" Newline@81..83 "\n\n" Use@83..102 - ItemModifier@83..83 UseKw@83..86 "use" WhiteSpace@86..87 " " UseTree@87..102 @@ -99,7 +94,6 @@ Root@0..278 RBrace@101..102 "}" Newline@102..103 "\n" Use@103..123 - ItemModifier@103..103 UseKw@103..106 "use" WhiteSpace@106..107 " " UseTree@107..123 @@ -122,7 +116,6 @@ Root@0..278 RBrace@122..123 "}" Newline@123..124 "\n" Use@124..152 - ItemModifier@124..124 UseKw@124..127 "use" WhiteSpace@127..128 " " UseTree@128..152 @@ -150,7 +143,6 @@ Root@0..278 RBrace@151..152 "}" Newline@152..153 "\n" Use@153..202 - ItemModifier@153..153 UseKw@153..156 "use" WhiteSpace@156..157 " " UseTree@157..202 @@ -201,7 +193,6 @@ Root@0..278 RBrace@201..202 "}" Newline@202..204 "\n\n" Use@204..272 - ItemModifier@204..204 UseKw@204..207 "use" WhiteSpace@207..208 " " UseTree@208..272 @@ -259,7 +250,6 @@ Root@0..278 RBrace@271..272 "}" Newline@272..273 "\n" Use@273..278 - ItemModifier@273..273 UseKw@273..276 "use" WhiteSpace@276..277 " " UseTree@277..278 diff --git a/crates/parser2/test_files/syntax_node/stmts/let_.snap b/crates/parser2/test_files/syntax_node/stmts/let_.snap index dcbb3d017e..6c6aac9ea3 100644 --- a/crates/parser2/test_files/syntax_node/stmts/let_.snap +++ b/crates/parser2/test_files/syntax_node/stmts/let_.snap @@ -88,14 +88,18 @@ Root@0..231 PathSegment@63..64 Ident@63..64 "y" WhiteSpace@64..65 " " - Lt2@65..67 "<<" + LShift@65..67 + Lt@65..66 "<" + Lt@66..67 "<" Eq@67..68 "=" WhiteSpace@68..69 " " BinExpr@69..75 LitExpr@69..70 Int@69..70 "1" WhiteSpace@70..71 " " - Gt2@71..73 ">>" + RShift@71..73 + Gt@71..72 ">" + Gt@72..73 ">" WhiteSpace@73..74 " " LitExpr@74..75 Int@74..75 "2" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index d32c6b0d76..5aaf5722b9 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -80,9 +80,9 @@ Root@0..312 WhiteSpace@119..123 " " GenericParam@123..136 Ident@123..124 "T" - Colon@124..125 ":" - WhiteSpace@125..126 " " - TraitBoundList@126..136 + TraitBoundList@124..136 + Colon@124..125 ":" + WhiteSpace@125..126 " " TraitBound@126..136 Path@126..136 PathSegment@126..129 @@ -159,9 +159,9 @@ Root@0..312 WhiteSpace@223..227 " " GenericParam@227..253 
Ident@227..228 "S" - Colon@228..229 ":" - WhiteSpace@229..230 " " - TraitBoundList@230..253 + TraitBoundList@228..253 + Colon@228..229 ":" + WhiteSpace@229..230 " " TraitBound@230..240 Path@230..240 PathSegment@230..233 @@ -189,9 +189,9 @@ Root@0..312 WhiteSpace@262..266 " " GenericParam@266..279 Ident@266..267 "U" - Colon@267..268 ":" - WhiteSpace@268..269 " " - TraitBoundList@269..279 + TraitBoundList@267..279 + Colon@267..268 ":" + WhiteSpace@268..269 " " TraitBound@269..279 Path@269..279 PathSegment@269..272 diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap index 7ceb8105a5..469d8ebdb2 100644 --- a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap @@ -5,7 +5,6 @@ expression: snapshot Root@0..117 ItemList@0..117 Struct@0..117 - ItemModifier@0..0 StructKw@0..6 "struct" WhiteSpace@6..7 " " Ident@7..27 "StructWithTupleField" diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 032e96eff4..489c43bf9e 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -14,9 +14,7 @@ fe_compiler_test_utils::build_debug_snap_tests! { } fn test_item_list(input: &str) -> SyntaxNode { let runner = TestRunner::new(|parser| { - while parser.current_kind().is_some() { - parser.parse(ItemListScope::default(), None); - } + parser.parse(ItemListScope::default(), None); }); runner.run(input) } @@ -106,7 +104,7 @@ where let (cst, errors) = parser.finish(); for error in &errors { - println!("{}", error.msg); + println!("{}@{:?}", error.msg, error.range); } assert! {errors.is_empty()} assert!(input == cst.to_string()); From 83746d5ad6c5a8b0c0be3113f01a0000a3fd033f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 26 Jan 2023 00:15:46 +0100 Subject: [PATCH 040/678] Allow using generics in `trait` definition --- crates/parser2/src/parser/item.rs | 6 + .../test_files/syntax_node/items/trait_.fe | 15 ++ .../test_files/syntax_node/items/trait_.snap | 251 +++++++++++++++++- 3 files changed, 270 insertions(+), 2 deletions(-) diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 2df20db2fc..c4e3d7323a 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -243,6 +243,12 @@ impl super::Parse for TraitScope { parser.error_and_recover("expected ident for the trait name", None) } + parser.with_recovery_tokens(&[SyntaxKind::LBrace], |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }); + if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected trait body", None); return; diff --git a/crates/parser2/test_files/syntax_node/items/trait_.fe b/crates/parser2/test_files/syntax_node/items/trait_.fe index 3202f6c16b..e98f633817 100644 --- a/crates/parser2/test_files/syntax_node/items/trait_.fe +++ b/crates/parser2/test_files/syntax_node/items/trait_.fe @@ -6,4 +6,19 @@ pub trait Foo { fn default_method(lhs: T, rhs: T) -> i32 { lhs + lhs - (rhs + rhs) } +} + +pub trait Add { + fn add(self, rhs: Rhs) -> Self +} + + +pub trait Parse { + fn parse(mut self, mut parser: Parser) +} + +impl Parser { + pub fn parse(mut self, mut scope: T, checkpoint: Option) -> (bool, Checkpoint) { + (SyntaxNode::new_root(self.builder.finish()), self.errors) + } } \ No newline at end of file diff --git 
a/crates/parser2/test_files/syntax_node/items/trait_.snap b/crates/parser2/test_files/syntax_node/items/trait_.snap index 338d9020e9..459d6044bd 100644 --- a/crates/parser2/test_files/syntax_node/items/trait_.snap +++ b/crates/parser2/test_files/syntax_node/items/trait_.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..182 - ItemList@0..182 +Root@0..538 + ItemList@0..538 Trait@0..15 TraitKw@0..5 "trait" WhiteSpace@5..6 " " @@ -166,4 +166,251 @@ Root@0..182 RBrace@179..180 "}" Newline@180..181 "\n" RBrace@181..182 "}" + Newline@182..184 "\n\n" + Trait@184..241 + ItemModifier@184..187 + PubKw@184..187 "pub" + WhiteSpace@187..188 " " + TraitKw@188..193 "trait" + WhiteSpace@193..194 " " + Ident@194..197 "Add" + GenericParamList@197..202 + Lt@197..198 "<" + GenericParam@198..201 + Ident@198..201 "RHS" + Gt@201..202 ">" + WhiteSpace@202..203 " " + TraitItemList@203..241 + LBrace@203..204 "{" + Newline@204..205 "\n" + WhiteSpace@205..209 " " + Fn@209..239 + FnKw@209..211 "fn" + WhiteSpace@211..212 " " + Ident@212..215 "add" + FnArgList@215..231 + LParen@215..216 "(" + FnArg@216..220 + SelfKw@216..220 "self" + Comma@220..221 "," + WhiteSpace@221..222 " " + FnArg@222..230 + Ident@222..225 "rhs" + Colon@225..226 ":" + WhiteSpace@226..227 " " + PathType@227..230 + Path@227..230 + PathSegment@227..230 + Ident@227..230 "Rhs" + RParen@230..231 ")" + WhiteSpace@231..232 " " + Arrow@232..234 "->" + WhiteSpace@234..235 " " + SelfType@235..239 + SelfTypeKw@235..239 "Self" + Newline@239..240 "\n" + RBrace@240..241 "}" + Newline@241..244 "\n\n\n" + Trait@244..325 + ItemModifier@244..247 + PubKw@244..247 "pub" + WhiteSpace@247..248 " " + TraitKw@248..253 "trait" + WhiteSpace@253..254 " " + Ident@254..259 "Parse" + WhiteSpace@259..260 " " + TraitItemList@260..325 + LBrace@260..261 "{" + Newline@261..262 "\n" + WhiteSpace@262..266 " " + Fn@266..323 + FnKw@266..268 "fn" + WhiteSpace@268..269 " " + Ident@269..274 "parse" + GenericParamList@274..290 + Lt@274..275 "<" + GenericParam@275..289 + Ident@275..276 "S" + TraitBoundList@276..289 + Colon@276..277 ":" + WhiteSpace@277..278 " " + TraitBound@278..289 + Path@278..289 + PathSegment@278..289 + Ident@278..289 "TokenStream" + Gt@289..290 ">" + FnArgList@290..323 + LParen@290..291 "(" + FnArg@291..299 + MutKw@291..294 "mut" + WhiteSpace@294..295 " " + SelfKw@295..299 "self" + Comma@299..300 "," + WhiteSpace@300..301 " " + FnArg@301..322 + MutKw@301..304 "mut" + WhiteSpace@304..305 " " + Ident@305..311 "parser" + Colon@311..312 ":" + WhiteSpace@312..313 " " + PathType@313..322 + Path@313..319 + PathSegment@313..319 + Ident@313..319 "Parser" + GenericArgList@319..322 + Lt@319..320 "<" + GenericArg@320..321 + PathType@320..321 + Path@320..321 + PathSegment@320..321 + Ident@320..321 "S" + Gt@321..322 ">" + RParen@322..323 ")" + Newline@323..324 "\n" + RBrace@324..325 "}" + Newline@325..327 "\n\n" + Impl@327..538 + ImplKw@327..331 "impl" + WhiteSpace@331..332 " " + PathType@332..354 + Path@332..338 + PathSegment@332..338 + Ident@332..338 "Parser" + GenericArgList@338..354 + Lt@338..339 "<" + GenericArg@339..353 + PathType@339..340 + Path@339..340 + PathSegment@339..340 + Ident@339..340 "S" + TraitBoundList@340..353 + Colon@340..341 ":" + WhiteSpace@341..342 " " + TraitBound@342..353 + Path@342..353 + PathSegment@342..353 + Ident@342..353 "TokenStream" + Gt@353..354 ">" + WhiteSpace@354..355 " " + ImplItemList@355..538 + LBrace@355..356 "{" + Newline@356..357 "\n" + WhiteSpace@357..361 " " + Fn@361..536 + 
ItemModifier@361..364 + PubKw@361..364 "pub" + WhiteSpace@364..365 " " + FnKw@365..367 "fn" + WhiteSpace@367..368 " " + Ident@368..373 "parse" + GenericParamList@373..383 + Lt@373..374 "<" + GenericParam@374..382 + Ident@374..375 "T" + TraitBoundList@375..382 + Colon@375..376 ":" + WhiteSpace@376..377 " " + TraitBound@377..382 + Path@377..382 + PathSegment@377..382 + Ident@377..382 "Parse" + Gt@382..383 ">" + FnArgList@383..439 + LParen@383..384 "(" + FnArg@384..392 + MutKw@384..387 "mut" + WhiteSpace@387..388 " " + SelfKw@388..392 "self" + Comma@392..393 "," + WhiteSpace@393..394 " " + FnArg@394..406 + MutKw@394..397 "mut" + WhiteSpace@397..398 " " + Ident@398..403 "scope" + Colon@403..404 ":" + WhiteSpace@404..405 " " + PathType@405..406 + Path@405..406 + PathSegment@405..406 + Ident@405..406 "T" + Comma@406..407 "," + WhiteSpace@407..408 " " + FnArg@408..438 + Ident@408..418 "checkpoint" + Colon@418..419 ":" + WhiteSpace@419..420 " " + PathType@420..438 + Path@420..426 + PathSegment@420..426 + Ident@420..426 "Option" + GenericArgList@426..438 + Lt@426..427 "<" + GenericArg@427..437 + PathType@427..437 + Path@427..437 + PathSegment@427..437 + Ident@427..437 "Checkpoint" + Gt@437..438 ">" + RParen@438..439 ")" + WhiteSpace@439..440 " " + Arrow@440..442 "->" + WhiteSpace@442..443 " " + TupleType@443..461 + LParen@443..444 "(" + PathType@444..448 + Path@444..448 + PathSegment@444..448 + Ident@444..448 "bool" + Comma@448..449 "," + WhiteSpace@449..450 " " + PathType@450..460 + Path@450..460 + PathSegment@450..460 + Ident@450..460 "Checkpoint" + RParen@460..461 ")" + WhiteSpace@461..462 " " + BlockExpr@462..536 + LBrace@462..463 "{" + Newline@463..464 "\n" + WhiteSpace@464..472 " " + ExprStmt@472..530 + TupleExpr@472..530 + LParen@472..473 "(" + CallExpr@473..516 + Path@473..493 + PathSegment@473..483 + Ident@473..483 "SyntaxNode" + Colon2@483..485 "::" + PathSegment@485..493 + Ident@485..493 "new_root" + CallArgList@493..516 + LParen@493..494 "(" + CallArg@494..515 + MethodCallExpr@494..515 + FieldExpr@494..506 + Path@494..498 + PathSegment@494..498 + SelfKw@494..498 "self" + Dot@498..499 "." + Ident@499..506 "builder" + Dot@506..507 "." + Ident@507..513 "finish" + CallArgList@513..515 + LParen@513..514 "(" + RParen@514..515 ")" + RParen@515..516 ")" + Comma@516..517 "," + WhiteSpace@517..518 " " + FieldExpr@518..529 + Path@518..522 + PathSegment@518..522 + SelfKw@518..522 "self" + Dot@522..523 "." + Ident@523..529 "errors" + RParen@529..530 ")" + Newline@530..531 "\n" + WhiteSpace@531..535 " " + RBrace@535..536 "}" + Newline@536..537 "\n" + RBrace@537..538 "}" From 7b5358457b91bc8dfcd6db27af0e8e101e8fa1d6 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 26 Jan 2023 13:19:50 +0100 Subject: [PATCH 041/678] Refactor some methods of `Parser` and add some comments --- crates/parser2/src/parser/expr.rs | 36 ++++++++------------------- crates/parser2/src/parser/item.rs | 5 ++-- crates/parser2/src/parser/mod.rs | 28 +++++++++++++-------- crates/parser2/src/parser/param.rs | 6 ++--- crates/parser2/src/parser/pat.rs | 6 ++--- crates/parser2/src/parser/stmt.rs | 14 ++++------- crates/parser2/src/parser/use_tree.rs | 8 +++--- 7 files changed, 44 insertions(+), 59 deletions(-) diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index d347de6b58..05f40f3ebc 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -61,25 +61,21 @@ fn parse_expr_with_min_bp( // `expr()`. 
SyntaxKind::Lt => { - parser.start_dry_run(); - if parser.parse(CallExprScope::default(), Some(checkpoint)).0 { - parser.end_dry_run(); + let is_call_expr = + parser.dry_run(|parser| parser.parse(CallExprScope::default(), None).0); + if is_call_expr { parser.parse(CallExprScope::default(), Some(checkpoint)); continue; - } else { - parser.end_dry_run(); } } // `expr.method()` SyntaxKind::Dot => { - parser.start_dry_run(); - if parser.parse(MethodExprScope::default(), Some(checkpoint)).0 { - parser.end_dry_run(); + let is_method_call = parser + .dry_run(|parser| parser.parse(MethodExprScope::default(), None).0); + if is_method_call { parser.parse(MethodExprScope::default(), Some(checkpoint)); continue; - } else { - parser.end_dry_run(); } } _ => unreachable!(), @@ -330,31 +326,19 @@ impl super::Parse for GtEqScope { } pub(crate) fn is_lshift(parser: &mut Parser) -> bool { - parser.start_dry_run(); - let is_lshift = parser.parse(LShiftScope::default(), None).0; - parser.end_dry_run(); - is_lshift + parser.dry_run(|parser| parser.parse(LShiftScope::default(), None).0) } pub(crate) fn is_rshift(parser: &mut Parser) -> bool { - parser.start_dry_run(); - let is_rshift = parser.parse(RShiftScope::default(), None).0; - parser.end_dry_run(); - is_rshift + parser.dry_run(|parser| parser.parse(RShiftScope::default(), None).0) } fn is_lt_eq(parser: &mut Parser) -> bool { - parser.start_dry_run(); - let is_lt_eq = parser.parse(LtEqScope::default(), None).0; - parser.end_dry_run(); - is_lt_eq + parser.dry_run(|parser| parser.parse(LtEqScope::default(), None).0) } fn is_gt_eq(parser: &mut Parser) -> bool { - parser.start_dry_run(); - let is_gt_eq = parser.parse(GtEqScope::default(), None).0; - parser.end_dry_run(); - is_gt_eq + parser.dry_run(|parser| parser.parse(GtEqScope::default(), None).0) } fn bump_bin_op(parser: &mut Parser) { diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index c4e3d7323a..76c3d95ac7 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -299,9 +299,8 @@ impl super::Parse for ImplScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ImplKw); - parser.start_dry_run(); - let is_trait_impl = parse_type(parser, None, true) && parser.bump_if(SyntaxKind::ForKw); - parser.end_dry_run(); + let is_trait_impl = parser + .dry_run(|parser| parse_type(parser, None, true) && parser.bump_if(SyntaxKind::ForKw)); if is_trait_impl { self.set_kind(SyntaxKind::ImplTrait); diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 77f8ed3f8b..3eea476266 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -93,6 +93,11 @@ impl Parser { (SyntaxNode::new_root(self.builder.finish()), self.errors) } + /// Passes the `recovery_tokens` to the parser temporarily. + /// The passed recovery tokens are removed when the closure returns. + /// + /// This is useful when you want to specify auxiliary recovery tokens which + /// are valid only in a limited part of the scope. pub fn with_recovery_tokens(&mut self, recovery_tokens: &[SyntaxKind], f: F) -> R where F: FnOnce(&mut Self) -> R, @@ -212,14 +217,14 @@ impl Parser { self.leave(checkpoint); } - /// Starts the dry run mode. - /// When the parser is in the dry run mode, the parser does not build the - /// syntax tree. + /// Runs the parser in the dry run mode. 
/// - /// When the [`end_dry_run`] is called, all errors occurred in the dry - /// run mode are discarded, and all tokens which are consumed in the - /// dry run mode are backtracked. - pub fn start_dry_run(&mut self) { + /// Any changes to the parser state will be reverted. + pub fn dry_run(&mut self, f: F) -> R + where + F: FnOnce(&mut Self) -> R, + { + // Enters the dry run mode. self.stream.set_bt_point(); self.dry_run_states.push(DryRunState { pos: self.current_pos, @@ -227,17 +232,18 @@ impl Parser { next_trivias: self.next_trivias.clone(), auxiliary_recovery_set: self.auxiliary_recovery_set.clone(), }); - } - /// Ends the dry run mode. - /// See `[start_dry_run]` for more details. - pub fn end_dry_run(&mut self) { + let r = f(self); + + // Leaves the dry run mode. self.stream.backtrack(); let state = self.dry_run_states.pop().unwrap(); self.errors.truncate(state.err_num); self.current_pos = state.pos; self.next_trivias = state.next_trivias; self.auxiliary_recovery_set = state.auxiliary_recovery_set; + + r } /// Bumps the current token and its leading trivias. diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 634f889b68..dd6d7157c5 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -229,9 +229,9 @@ impl super::Parse for CallArgScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - parser.start_dry_run(); - let has_label = parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon); - parser.end_dry_run(); + let has_label = parser.dry_run(|parser| { + parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) + }); if has_label { parser.bump_expected(SyntaxKind::Ident); diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index 044ce1bad7..57c3e1e10b 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -121,9 +121,9 @@ impl super::Parse for RecordPatFieldListScope { define_scope! 
{ RecordPatFieldScope, RecordPatField, Override(Comma, RBrace) } impl super::Parse for RecordPatFieldScope { fn parse(&mut self, parser: &mut Parser) { - parser.start_dry_run(); - let has_label = parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon); - parser.end_dry_run(); + let has_label = parser.dry_run(|parser| { + parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) + }); if has_label { parser.bump_expected(SyntaxKind::Ident); parser.bump_expected(SyntaxKind::Colon); diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index 5a4256b378..e184669288 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -21,12 +21,11 @@ pub fn parse_stmt(parser: &mut Parser, checkpoint: Option parser.parse(AssertStmtScope::default(), checkpoint), Some(ReturnKw) => parser.parse(ReturnStmtScope::default(), checkpoint), _ => { - parser.start_dry_run(); - if parser.parse(AssignStmtScope::default(), checkpoint).0 { - parser.end_dry_run(); + let is_assign_stmt = + parser.dry_run(|parser| parser.parse(AssignStmtScope::default(), None).0); + if is_assign_stmt { parser.parse(AssignStmtScope::default(), checkpoint) } else { - parser.end_dry_run(); parser.parse(ExprStmtScope::default(), checkpoint) } } @@ -121,12 +120,9 @@ impl super::Parse for ReturnStmtScope { parser.bump_expected(SyntaxKind::ReturnKw); parser.set_newline_as_trivia(false); - parser.start_dry_run(); - if parse_expr(parser) { - parser.end_dry_run(); + let has_val = parser.dry_run(parse_expr); + if has_val { parse_expr(parser); - } else { - parser.end_dry_run(); } } } diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs index 4b7f3b0a82..06aef60f5d 100644 --- a/crates/parser2/src/parser/use_tree.rs +++ b/crates/parser2/src/parser/use_tree.rs @@ -76,10 +76,10 @@ impl super::Parse for UsePathScope { parser.parse(UsePathSegmentScope::default(), None); loop { - parser.start_dry_run(); - let is_path_segment = parser.bump_if(SyntaxKind::Colon2) - && parser.parse(UsePathSegmentScope::default(), None).0; - parser.end_dry_run(); + let is_path_segment = parser.dry_run(|parser| { + parser.bump_if(SyntaxKind::Colon2) + && parser.parse(UsePathSegmentScope::default(), None).0 + }); if is_path_segment { parser.bump_expected(SyntaxKind::Colon2); parser.parse(UsePathSegmentScope::default(), None); From 19bd8efbda56ffba2c000d289582e580548a9014 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 26 Jan 2023 17:19:15 +0100 Subject: [PATCH 042/678] Implement parser for `where` clause --- crates/parser2/src/parser/func.rs | 26 +- crates/parser2/src/parser/item.rs | 43 +- crates/parser2/src/parser/param.rs | 56 +- crates/parser2/src/parser/struct_.rs | 25 +- crates/parser2/src/syntax_kind.rs | 9 +- .../test_files/syntax_node/items/enums.fe | 8 +- .../test_files/syntax_node/items/enums.snap | 228 +++++--- .../test_files/syntax_node/items/func.fe | 6 +- .../test_files/syntax_node/items/func.snap | 204 ++++--- .../test_files/syntax_node/items/impl_.fe | 8 +- .../test_files/syntax_node/items/impl_.snap | 192 ++++--- .../syntax_node/items/impl_trait.fe | 5 +- .../syntax_node/items/impl_trait.snap | 171 +++--- .../test_files/syntax_node/items/trait_.fe | 10 +- .../test_files/syntax_node/items/trait_.snap | 536 ++++++++++-------- .../test_files/syntax_node/items/type_.snap | 4 +- .../syntax_node/structs/generics.fe | 9 +- .../syntax_node/structs/generics.snap | 508 ++++++++++------- 18 files changed, 1212 insertions(+), 836 
deletions(-) diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 4516826672..344fa6f970 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -3,7 +3,7 @@ use crate::SyntaxKind; use super::{ define_scope, expr_atom::BlockExprScope, - param::{FnArgListScope, GenericParamListScope}, + param::{parse_where_clause_opt, FnArgListScope, GenericParamListScope}, token_stream::TokenStream, type_::parse_type, Parser, @@ -40,19 +40,27 @@ impl super::Parse for FnScope { } }); - parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::Arrow], |parser| { - if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnArgListScope::default(), None); - } else { - parser.error_and_recover("expected `(` for the function arguments", None); - } - }); + parser.with_recovery_tokens( + &[ + SyntaxKind::LBrace, + SyntaxKind::Arrow, + SyntaxKind::WhereClause, + ], + |parser| { + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(FnArgListScope::default(), None); + } else { + parser.error_and_recover("expected `(` for the function arguments", None); + } + }, + ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], |parser| { + parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { if parser.bump_if(SyntaxKind::Arrow) { parse_type(parser, None, false); } }); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); if parser.current_kind() == Some(SyntaxKind::LBrace) { if self.disallow_def { diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 76c3d95ac7..2a83ea45b2 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -5,7 +5,7 @@ use crate::{parser::func::FnScope, SyntaxKind}; use super::{ attr, define_scope, expr::parse_expr, - param::GenericParamListScope, + param::{parse_where_clause_opt, GenericParamListScope}, struct_::RecordFieldDefListScope, token_stream::TokenStream, type_::{parse_type, TupleTypeScope}, @@ -169,18 +169,23 @@ impl super::Parse for EnumScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::EnumKw); - parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::LBrace], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the enum name", None) - } - }); + parser.with_recovery_tokens( + &[SyntaxKind::Lt, SyntaxKind::LBrace, SyntaxKind::WhereKw], + |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the enum name", None) + } + }, + ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], |parser| { + parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericParamListScope::default(), None); } }); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected enum body", None); return; @@ -243,12 +248,14 @@ impl super::Parse for TraitScope { parser.error_and_recover("expected ident for the trait name", None) } - parser.with_recovery_tokens(&[SyntaxKind::LBrace], |parser| { + parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericParamListScope::default(), None); } }); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + if 
parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected trait body", None); return; @@ -302,14 +309,18 @@ impl super::Parse for ImplScope { let is_trait_impl = parser .dry_run(|parser| parse_type(parser, None, true) && parser.bump_if(SyntaxKind::ForKw)); - if is_trait_impl { - self.set_kind(SyntaxKind::ImplTrait); - parse_type(parser, None, false); - parser.bump_expected(SyntaxKind::ForKw); - parse_type(parser, None, false); - } else { - parse_type(parser, None, true); - } + parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { + if is_trait_impl { + self.set_kind(SyntaxKind::ImplTrait); + parse_type(parser, None, false); + parser.bump_expected(SyntaxKind::ForKw); + parse_type(parser, None, false); + } else { + parse_type(parser, None, true); + } + }); + + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected impl body", None); diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index dd6d7157c5..98bb5fdd32 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -4,7 +4,7 @@ use super::{ define_scope, expr::parse_expr, expr_atom::{BlockExprScope, LitExprScope}, - path::PathScope, + path::{is_path_segment, PathScope}, token_stream::TokenStream, type_::parse_type, Parser, @@ -112,34 +112,34 @@ impl super::Parse for GenericParamScope { } if parser.current_kind() == Some(SyntaxKind::Colon) { - parser.parse(TraitBoundListScope::default(), None); + parser.parse(TypeBoundListScope::default(), None); } } } define_scope! { - TraitBoundListScope, - TraitBoundList, + TypeBoundListScope, + TypeBoundList, Inheritance(Plus) } -impl super::Parse for TraitBoundListScope { +impl super::Parse for TypeBoundListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::Colon); - parser.parse(TraitBoundScope::default(), None); + parser.parse(TypeBoundScope::default(), None); while parser.current_kind() == Some(SyntaxKind::Plus) { parser.bump_expected(SyntaxKind::Plus); - parser.parse(TraitBoundScope::default(), None); + parser.parse(TypeBoundScope::default(), None); } } } define_scope! { - TraitBoundScope, - TraitBound, + TypeBoundScope, + TypeBound, Inheritance } -impl super::Parse for TraitBoundScope { +impl super::Parse for TypeBoundScope { fn parse(&mut self, parser: &mut Parser) { parser.parse(PathScope::default(), None); if parser.current_kind() == Some(SyntaxKind::Lt) { @@ -195,7 +195,7 @@ impl super::Parse for GenericArgScope { if !self.allow_bounds { parser.error_and_recover("type bounds are not allowed here", None); } else { - parser.parse(TraitBoundListScope::default(), None); + parser.parse(TypeBoundListScope::default(), None); } } } @@ -240,3 +240,37 @@ impl super::Parse for CallArgScope { parse_expr(parser); } } + +define_scope! 
{ WhereClauseScope, WhereClause, Inheritance(Newline) } +impl super::Parse for WhereClauseScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::WhereKw); + + loop { + parser.set_newline_as_trivia(true); + match parser.current_kind() { + Some(kind) if is_path_segment(kind) => { + parse_type(parser, None, false); + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(TypeBoundListScope::default(), None); + parser.set_newline_as_trivia(false); + if !parser.bump_if(SyntaxKind::Newline) { + parser.error_and_recover("expected newline after type bounds", None); + } + } else { + parser.error_and_recover("expected `:` for type bounds", None); + } + } + _ => break, + } + } + } +} + +pub(crate) fn parse_where_clause_opt(parser: &mut Parser) { + let newline_as_trivia = parser.set_newline_as_trivia(true); + if parser.current_kind() == Some(SyntaxKind::WhereKw) { + parser.parse(WhereClauseScope::default(), None); + } + parser.set_newline_as_trivia(newline_as_trivia); +} diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index ba3a9d2e77..fbf6b0209f 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -1,8 +1,12 @@ use crate::SyntaxKind; use super::{ - attr::parse_attr_list, define_scope, param::GenericParamListScope, token_stream::TokenStream, - type_::parse_type, Parser, + attr::parse_attr_list, + define_scope, + param::{parse_where_clause_opt, GenericParamListScope}, + token_stream::TokenStream, + type_::parse_type, + Parser, }; define_scope! { @@ -14,18 +18,23 @@ impl super::Parse for StructScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::StructKw); - parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::LBrace], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the struct name", None) - } - }); + parser.with_recovery_tokens( + &[SyntaxKind::Lt, SyntaxKind::LBrace, SyntaxKind::WhereKw], + |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the struct name", None) + } + }, + ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], |parser| { + parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericParamListScope::default(), None); } }); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(RecordFieldDefListScope::default(), None); } else { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index a8c79a521c..d481f02d35 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -170,6 +170,9 @@ pub enum SyntaxKind { /// `in` #[token("in")] InKw, + /// `where` + #[token("where")] + WhereKw, /// `while` #[token("while")] WhileKw, @@ -425,9 +428,11 @@ pub enum SyntaxKind { FnArg, /// `foo::Trait1 + Trait2` - TraitBoundList, + TypeBoundList, /// `Trait1` - TraitBound, + TypeBound, + /// `where Option: Trait1 + Trait2` + WhereClause, /// Root node of the input source. 
Root, diff --git a/crates/parser2/test_files/syntax_node/items/enums.fe b/crates/parser2/test_files/syntax_node/items/enums.fe index ed5f30df74..32e563010c 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.fe +++ b/crates/parser2/test_files/syntax_node/items/enums.fe @@ -5,12 +5,16 @@ enum Basic { Tup(i32, u32) } -enum Option { +enum Option + where T: Clone +{ Some(T) None } -enum BoundEnum { +enum BoundEnum +where Foo::Bar: Trait +{ AddMul(T) SubDiv(U) } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 5d7a0ff512..30b91daec5 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..174 - ItemList@0..174 +Root@0..220 + ItemList@0..220 Enum@0..13 EnumKw@0..4 "enum" WhiteSpace@4..5 " " @@ -44,7 +44,7 @@ Root@0..174 Newline@54..55 "\n" RBrace@55..56 "}" Newline@56..58 "\n\n" - Enum@58..97 + Enum@58..117 EnumKw@58..62 "enum" WhiteSpace@62..63 " " Ident@63..69 "Option" @@ -54,93 +54,137 @@ Root@0..174 Ident@70..71 "T" Gt@71..72 ">" WhiteSpace@72..73 " " - VariantDefList@73..97 - LBrace@73..74 "{" - Newline@74..75 "\n" - WhiteSpace@75..79 " " - VariantDef@79..86 - Ident@79..83 "Some" - TupleType@83..86 - LParen@83..84 "(" - PathType@84..85 - Path@84..85 - PathSegment@84..85 - Ident@84..85 "T" - RParen@85..86 ")" - Newline@86..87 "\n" - WhiteSpace@87..91 " " - VariantDef@91..95 - Ident@91..95 "None" - Newline@95..96 "\n" - RBrace@96..97 "}" - Newline@97..99 "\n\n" - Enum@99..174 - EnumKw@99..103 "enum" - WhiteSpace@103..104 " " - Ident@104..113 "BoundEnum" - GenericParamList@113..142 - Lt@113..114 "<" - GenericParam@114..126 - Ident@114..115 "T" - TraitBoundList@115..126 - Colon@115..116 ":" - WhiteSpace@116..117 " " - TraitBound@117..120 - Path@117..120 - PathSegment@117..120 - Ident@117..120 "Add" - WhiteSpace@120..121 " " - Plus@121..122 "+" - WhiteSpace@122..123 " " - TraitBound@123..126 - Path@123..126 - PathSegment@123..126 - Ident@123..126 "Mul" - WhiteSpace@126..127 " " - Comma@127..128 "," - WhiteSpace@128..129 " " - GenericParam@129..141 - Ident@129..130 "U" - TraitBoundList@130..141 - Colon@130..131 ":" - WhiteSpace@131..132 " " - TraitBound@132..135 - Path@132..135 - PathSegment@132..135 - Ident@132..135 "Sub" - WhiteSpace@135..136 " " - Plus@136..137 "+" - WhiteSpace@137..138 " " - TraitBound@138..141 - Path@138..141 - PathSegment@138..141 - Ident@138..141 "Div" - Gt@141..142 ">" - WhiteSpace@142..143 " " - VariantDefList@143..174 - LBrace@143..144 "{" - Newline@144..145 "\n" - WhiteSpace@145..149 " " - VariantDef@149..158 - Ident@149..155 "AddMul" - TupleType@155..158 - LParen@155..156 "(" - PathType@156..157 - Path@156..157 - PathSegment@156..157 - Ident@156..157 "T" - RParen@157..158 ")" - Newline@158..159 "\n" - WhiteSpace@159..163 " " - VariantDef@163..172 - Ident@163..169 "SubDiv" - TupleType@169..172 - LParen@169..170 "(" - PathType@170..171 - Path@170..171 - PathSegment@170..171 - Ident@170..171 "U" - RParen@171..172 ")" - Newline@172..173 "\n" - RBrace@173..174 "}" + Newline@73..74 "\n" + WhiteSpace@74..78 " " + WhereClause@78..93 + WhereKw@78..83 "where" + WhiteSpace@83..84 " " + PathType@84..85 + Path@84..85 + PathSegment@84..85 + Ident@84..85 "T" + TypeBoundList@85..92 + Colon@85..86 ":" + WhiteSpace@86..87 " " + TypeBound@87..92 + Path@87..92 + PathSegment@87..92 + Ident@87..92 "Clone" + 
Newline@92..93 "\n" + VariantDefList@93..117 + LBrace@93..94 "{" + Newline@94..95 "\n" + WhiteSpace@95..99 " " + VariantDef@99..106 + Ident@99..103 "Some" + TupleType@103..106 + LParen@103..104 "(" + PathType@104..105 + Path@104..105 + PathSegment@104..105 + Ident@104..105 "T" + RParen@105..106 ")" + Newline@106..107 "\n" + WhiteSpace@107..111 " " + VariantDef@111..115 + Ident@111..115 "None" + Newline@115..116 "\n" + RBrace@116..117 "}" + Newline@117..119 "\n\n" + Enum@119..220 + EnumKw@119..123 "enum" + WhiteSpace@123..124 " " + Ident@124..133 "BoundEnum" + GenericParamList@133..162 + Lt@133..134 "<" + GenericParam@134..146 + Ident@134..135 "T" + TypeBoundList@135..146 + Colon@135..136 ":" + WhiteSpace@136..137 " " + TypeBound@137..140 + Path@137..140 + PathSegment@137..140 + Ident@137..140 "Add" + WhiteSpace@140..141 " " + Plus@141..142 "+" + WhiteSpace@142..143 " " + TypeBound@143..146 + Path@143..146 + PathSegment@143..146 + Ident@143..146 "Mul" + WhiteSpace@146..147 " " + Comma@147..148 "," + WhiteSpace@148..149 " " + GenericParam@149..161 + Ident@149..150 "U" + TypeBoundList@150..161 + Colon@150..151 ":" + WhiteSpace@151..152 " " + TypeBound@152..155 + Path@152..155 + PathSegment@152..155 + Ident@152..155 "Sub" + WhiteSpace@155..156 " " + Plus@156..157 "+" + WhiteSpace@157..158 " " + TypeBound@158..161 + Path@158..161 + PathSegment@158..161 + Ident@158..161 "Div" + Gt@161..162 ">" + WhiteSpace@162..163 " " + Newline@163..164 "\n" + WhereClause@164..189 + WhereKw@164..169 "where" + WhiteSpace@169..170 " " + PathType@170..181 + Path@170..178 + PathSegment@170..173 + Ident@170..173 "Foo" + Colon2@173..175 "::" + PathSegment@175..178 + Ident@175..178 "Bar" + GenericArgList@178..181 + Lt@178..179 "<" + GenericArg@179..180 + PathType@179..180 + Path@179..180 + PathSegment@179..180 + Ident@179..180 "T" + Gt@180..181 ">" + TypeBoundList@181..188 + Colon@181..182 ":" + WhiteSpace@182..183 " " + TypeBound@183..188 + Path@183..188 + PathSegment@183..188 + Ident@183..188 "Trait" + Newline@188..189 "\n" + VariantDefList@189..220 + LBrace@189..190 "{" + Newline@190..191 "\n" + WhiteSpace@191..195 " " + VariantDef@195..204 + Ident@195..201 "AddMul" + TupleType@201..204 + LParen@201..202 "(" + PathType@202..203 + Path@202..203 + PathSegment@202..203 + Ident@202..203 "T" + RParen@203..204 ")" + Newline@204..205 "\n" + WhiteSpace@205..209 " " + VariantDef@209..218 + Ident@209..215 "SubDiv" + TupleType@215..218 + LParen@215..216 "(" + PathType@216..217 + Path@216..217 + PathSegment@216..217 + Ident@216..217 "U" + RParen@217..218 ")" + Newline@218..219 "\n" + RBrace@219..220 "}" diff --git a/crates/parser2/test_files/syntax_node/items/func.fe b/crates/parser2/test_files/syntax_node/items/func.fe index c1072cd66f..9e61b76925 100644 --- a/crates/parser2/test_files/syntax_node/items/func.fe +++ b/crates/parser2/test_files/syntax_node/items/func.fe @@ -10,7 +10,11 @@ fn baz(from sender: address, mut to recipient: address, _ val: u256, _ _: u256) 1 } -fn generics1(t: T, u: Option) -> T { +fn generics1(t: T, u: Option) -> T + where Result: Trait + Option: Clone + +{ t } diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 32533e5d98..77a12ff489 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..292 - ItemList@0..292 +Root@0..358 + ItemList@0..358 Fn@0..30 
ItemModifier@0..3 PubKw@0..3 "pub" @@ -152,7 +152,7 @@ Root@0..292 Newline@176..177 "\n" RBrace@177..178 "}" Newline@178..180 "\n\n" - Fn@180..240 + Fn@180..306 FnKw@180..182 "fn" WhiteSpace@182..183 " " Ident@183..192 "generics1" @@ -160,10 +160,10 @@ Root@0..292 Lt@192..193 "<" GenericParam@193..201 Ident@193..194 "T" - TraitBoundList@194..201 + TypeBoundList@194..201 Colon@194..195 ":" WhiteSpace@195..196 " " - TraitBound@196..201 + TypeBound@196..201 Path@196..201 PathSegment@196..201 Ident@196..201 "Trait" @@ -208,77 +208,125 @@ Root@0..292 Path@229..230 PathSegment@229..230 Ident@229..230 "T" - WhiteSpace@230..231 " " - BlockExpr@231..240 - LBrace@231..232 "{" - Newline@232..233 "\n" - WhiteSpace@233..237 " " - ExprStmt@237..238 - Path@237..238 - PathSegment@237..238 - Ident@237..238 "t" - Newline@238..239 "\n" - RBrace@239..240 "}" - Newline@240..242 "\n\n" - Fn@242..292 - FnKw@242..244 "fn" - WhiteSpace@244..245 " " - Ident@245..249 "decl" - GenericParamList@249..255 - Lt@249..250 "<" - GenericParam@250..251 - Ident@250..251 "T" - Comma@251..252 "," - WhiteSpace@252..253 " " - GenericParam@253..254 - Ident@253..254 "U" - Gt@254..255 ">" - FnArgList@255..274 - LParen@255..256 "(" - FnArg@256..273 - Ident@256..257 "t" - Colon@257..258 ":" - WhiteSpace@258..259 " " - PathType@259..273 - Path@259..267 - PathSegment@259..267 - Ident@259..267 "MyStruct" - GenericArgList@267..273 - Lt@267..268 "<" - GenericArg@268..269 - PathType@268..269 - Path@268..269 - PathSegment@268..269 - Ident@268..269 "T" - Comma@269..270 "," - WhiteSpace@270..271 " " - GenericArg@271..272 - PathType@271..272 - Path@271..272 - PathSegment@271..272 - Ident@271..272 "U" - Gt@272..273 ">" - RParen@273..274 ")" - WhiteSpace@274..275 " " - Arrow@275..277 "->" - WhiteSpace@277..278 " " - PathType@278..292 - Path@278..284 - PathSegment@278..284 - Ident@278..284 "Result" - GenericArgList@284..292 - Lt@284..285 "<" - GenericArg@285..286 - PathType@285..286 - Path@285..286 - PathSegment@285..286 - Ident@285..286 "T" - Comma@286..287 "," - WhiteSpace@287..288 " " - GenericArg@288..291 - PathType@288..291 - Path@288..291 - PathSegment@288..291 - Ident@288..291 "Err" - Gt@291..292 ">" + Newline@230..231 "\n" + WhiteSpace@231..235 " " + WhereClause@235..286 + WhereKw@235..240 "where" + WhiteSpace@240..241 " " + PathType@241..250 + Path@241..247 + PathSegment@241..247 + Ident@241..247 "Result" + GenericArgList@247..250 + Lt@247..248 "<" + GenericArg@248..249 + PathType@248..249 + Path@248..249 + PathSegment@248..249 + Ident@248..249 "T" + Gt@249..250 ">" + TypeBoundList@250..257 + Colon@250..251 ":" + WhiteSpace@251..252 " " + TypeBound@252..257 + Path@252..257 + PathSegment@252..257 + Ident@252..257 "Trait" + WhiteSpace@257..258 " " + Newline@258..259 "\n" + WhiteSpace@259..269 " " + PathType@269..278 + Path@269..275 + PathSegment@269..275 + Ident@269..275 "Option" + GenericArgList@275..278 + Lt@275..276 "<" + GenericArg@276..277 + PathType@276..277 + Path@276..277 + PathSegment@276..277 + Ident@276..277 "U" + Gt@277..278 ">" + TypeBoundList@278..285 + Colon@278..279 ":" + WhiteSpace@279..280 " " + TypeBound@280..285 + Path@280..285 + PathSegment@280..285 + Ident@280..285 "Clone" + Newline@285..286 "\n" + WhiteSpace@286..296 " " + Newline@296..297 "\n" + BlockExpr@297..306 + LBrace@297..298 "{" + Newline@298..299 "\n" + WhiteSpace@299..303 " " + ExprStmt@303..304 + Path@303..304 + PathSegment@303..304 + Ident@303..304 "t" + Newline@304..305 "\n" + RBrace@305..306 "}" + Newline@306..308 "\n\n" + Fn@308..358 + FnKw@308..310 "fn" 
+ WhiteSpace@310..311 " " + Ident@311..315 "decl" + GenericParamList@315..321 + Lt@315..316 "<" + GenericParam@316..317 + Ident@316..317 "T" + Comma@317..318 "," + WhiteSpace@318..319 " " + GenericParam@319..320 + Ident@319..320 "U" + Gt@320..321 ">" + FnArgList@321..340 + LParen@321..322 "(" + FnArg@322..339 + Ident@322..323 "t" + Colon@323..324 ":" + WhiteSpace@324..325 " " + PathType@325..339 + Path@325..333 + PathSegment@325..333 + Ident@325..333 "MyStruct" + GenericArgList@333..339 + Lt@333..334 "<" + GenericArg@334..335 + PathType@334..335 + Path@334..335 + PathSegment@334..335 + Ident@334..335 "T" + Comma@335..336 "," + WhiteSpace@336..337 " " + GenericArg@337..338 + PathType@337..338 + Path@337..338 + PathSegment@337..338 + Ident@337..338 "U" + Gt@338..339 ">" + RParen@339..340 ")" + WhiteSpace@340..341 " " + Arrow@341..343 "->" + WhiteSpace@343..344 " " + PathType@344..358 + Path@344..350 + PathSegment@344..350 + Ident@344..350 "Result" + GenericArgList@350..358 + Lt@350..351 "<" + GenericArg@351..352 + PathType@351..352 + Path@351..352 + PathSegment@351..352 + Ident@351..352 "T" + Comma@352..353 "," + WhiteSpace@353..354 " " + GenericArg@354..357 + PathType@354..357 + Path@354..357 + PathSegment@354..357 + Ident@354..357 "Err" + Gt@357..358 ">" diff --git a/crates/parser2/test_files/syntax_node/items/impl_.fe b/crates/parser2/test_files/syntax_node/items/impl_.fe index 8942868db2..67c8e2b58c 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_.fe +++ b/crates/parser2/test_files/syntax_node/items/impl_.fe @@ -6,8 +6,12 @@ impl Foo::Bar { } } -impl Foo { - fn add>(self, rhs: U) { +impl Foo +where Foo: Clone +{ + fn add>(self, rhs: U) + where T: Copy + { (rhs - self.t) } } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/impl_.snap b/crates/parser2/test_files/syntax_node/items/impl_.snap index c8ed9691d8..0bbd47f2c7 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..218 - ItemList@0..218 +Root@0..266 + ItemList@0..266 Impl@0..134 ImplKw@0..4 "impl" WhiteSpace@4..5 " " @@ -21,10 +21,10 @@ Root@0..218 Path@14..15 PathSegment@14..15 Ident@14..15 "T" - TraitBoundList@15..20 + TypeBoundList@15..20 Colon@15..16 ":" WhiteSpace@16..17 " " - TraitBound@17..20 + TypeBound@17..20 Path@17..20 PathSegment@17..20 Ident@17..20 "Add" @@ -103,7 +103,7 @@ Root@0..218 Newline@132..133 "\n" RBrace@133..134 "}" Newline@134..136 "\n\n" - Impl@136..218 + Impl@136..266 ImplKw@136..140 "impl" WhiteSpace@140..141 " " PathType@141..147 @@ -119,74 +119,116 @@ Root@0..218 Ident@145..146 "T" Gt@146..147 ">" WhiteSpace@147..148 " " - ImplItemList@148..218 - LBrace@148..149 "{" - Newline@149..150 "\n" - WhiteSpace@150..154 " " - Fn@154..216 - FnKw@154..156 "fn" - WhiteSpace@156..157 " " - Ident@157..160 "add" - GenericParamList@160..171 - Lt@160..161 "<" - GenericParam@161..170 - Ident@161..162 "U" - TraitBoundList@162..170 - Colon@162..163 ":" - WhiteSpace@163..164 " " - TraitBound@164..170 - Path@164..167 - PathSegment@164..167 - Ident@164..167 "Add" - GenericArgList@167..170 - Lt@167..168 "<" - GenericArg@168..169 - PathType@168..169 - Path@168..169 - PathSegment@168..169 - Ident@168..169 "T" - Gt@169..170 ">" - Gt@170..171 ">" - FnArgList@171..185 - LParen@171..172 "(" - FnArg@172..176 - SelfKw@172..176 "self" - Comma@176..177 "," - WhiteSpace@177..178 " " - FnArg@178..184 - Ident@178..181 "rhs" 
- Colon@181..182 ":" - WhiteSpace@182..183 " " - PathType@183..184 - Path@183..184 - PathSegment@183..184 - Ident@183..184 "U" - RParen@184..185 ")" - WhiteSpace@185..186 " " - BlockExpr@186..216 - LBrace@186..187 "{" - Newline@187..188 "\n" - WhiteSpace@188..196 " " - ExprStmt@196..210 - ParenExpr@196..210 - LParen@196..197 "(" - BinExpr@197..209 - Path@197..200 - PathSegment@197..200 - Ident@197..200 "rhs" - WhiteSpace@200..201 " " - Minus@201..202 "-" - WhiteSpace@202..203 " " - FieldExpr@203..209 - Path@203..207 - PathSegment@203..207 - SelfKw@203..207 "self" - Dot@207..208 "." - Ident@208..209 "t" - RParen@209..210 ")" - Newline@210..211 "\n" - WhiteSpace@211..215 " " - RBrace@215..216 "}" - Newline@216..217 "\n" - RBrace@217..218 "}" + Newline@148..149 "\n" + WhereClause@149..169 + WhereKw@149..154 "where" + WhiteSpace@154..155 " " + PathType@155..161 + Path@155..158 + PathSegment@155..158 + Ident@155..158 "Foo" + GenericArgList@158..161 + Lt@158..159 "<" + GenericArg@159..160 + PathType@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "T" + Gt@160..161 ">" + TypeBoundList@161..168 + Colon@161..162 ":" + WhiteSpace@162..163 " " + TypeBound@163..168 + Path@163..168 + PathSegment@163..168 + Ident@163..168 "Clone" + Newline@168..169 "\n" + ImplItemList@169..266 + LBrace@169..170 "{" + Newline@170..171 "\n" + WhiteSpace@171..175 " " + Fn@175..264 + FnKw@175..177 "fn" + WhiteSpace@177..178 " " + Ident@178..181 "add" + GenericParamList@181..192 + Lt@181..182 "<" + GenericParam@182..191 + Ident@182..183 "U" + TypeBoundList@183..191 + Colon@183..184 ":" + WhiteSpace@184..185 " " + TypeBound@185..191 + Path@185..188 + PathSegment@185..188 + Ident@185..188 "Add" + GenericArgList@188..191 + Lt@188..189 "<" + GenericArg@189..190 + PathType@189..190 + Path@189..190 + PathSegment@189..190 + Ident@189..190 "T" + Gt@190..191 ">" + Gt@191..192 ">" + FnArgList@192..206 + LParen@192..193 "(" + FnArg@193..197 + SelfKw@193..197 "self" + Comma@197..198 "," + WhiteSpace@198..199 " " + FnArg@199..205 + Ident@199..202 "rhs" + Colon@202..203 ":" + WhiteSpace@203..204 " " + PathType@204..205 + Path@204..205 + PathSegment@204..205 + Ident@204..205 "U" + RParen@205..206 ")" + WhiteSpace@206..207 " " + Newline@207..208 "\n" + WhiteSpace@208..216 " " + WhereClause@216..230 + WhereKw@216..221 "where" + WhiteSpace@221..222 " " + PathType@222..223 + Path@222..223 + PathSegment@222..223 + Ident@222..223 "T" + TypeBoundList@223..229 + Colon@223..224 ":" + WhiteSpace@224..225 " " + TypeBound@225..229 + Path@225..229 + PathSegment@225..229 + Ident@225..229 "Copy" + Newline@229..230 "\n" + WhiteSpace@230..234 " " + BlockExpr@234..264 + LBrace@234..235 "{" + Newline@235..236 "\n" + WhiteSpace@236..244 " " + ExprStmt@244..258 + ParenExpr@244..258 + LParen@244..245 "(" + BinExpr@245..257 + Path@245..248 + PathSegment@245..248 + Ident@245..248 "rhs" + WhiteSpace@248..249 " " + Minus@249..250 "-" + WhiteSpace@250..251 " " + FieldExpr@251..257 + Path@251..255 + PathSegment@251..255 + SelfKw@251..255 "self" + Dot@255..256 "." 
+ Ident@256..257 "t" + RParen@257..258 ")" + Newline@258..259 "\n" + WhiteSpace@259..263 " " + RBrace@263..264 "}" + Newline@264..265 "\n" + RBrace@265..266 "}" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.fe b/crates/parser2/test_files/syntax_node/items/impl_trait.fe index f45fff9fb0..e3352c6cc7 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.fe +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.fe @@ -4,7 +4,10 @@ impl Trait for F { } } -impl Trait for F { +impl Trait for F +where T: Clone + U: Bar +{ fn foo>(t: T) { do_something(t) } diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index d4b47c2fd8..e307be7404 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..167 - ItemList@0..167 +Root@0..196 + ItemList@0..196 ImplTrait@0..64 ImplKw@0..4 "impl" WhiteSpace@4..5 " " @@ -62,7 +62,7 @@ Root@0..167 Newline@62..63 "\n" RBrace@63..64 "}" Newline@64..66 "\n\n" - ImplTrait@66..167 + ImplTrait@66..196 ImplKw@66..70 "impl" WhiteSpace@70..71 " " PathType@71..82 @@ -100,73 +100,102 @@ Root@0..167 Ident@89..90 "T" Gt@90..91 ">" WhiteSpace@91..92 " " - ImplTraitItemList@92..167 - LBrace@92..93 "{" - Newline@93..94 "\n" - WhiteSpace@94..98 " " - Fn@98..165 - FnKw@98..100 "fn" - WhiteSpace@100..101 " " - Ident@101..104 "foo" - GenericParamList@104..122 - Lt@104..105 "<" - GenericParam@105..121 - Ident@105..106 "T" - TraitBoundList@106..121 - Colon@106..107 ":" - WhiteSpace@107..108 " " - TraitBound@108..121 - Path@108..118 - PathSegment@108..118 - Ident@108..118 "OtherTrait" - GenericArgList@118..121 - Lt@118..119 "<" - GenericArg@119..120 - PathType@119..120 - Path@119..120 - PathSegment@119..120 - Ident@119..120 "U" - Gt@120..121 ">" - Gt@121..122 ">" - FnArgList@122..128 - LParen@122..123 "(" - FnArg@123..127 - Ident@123..124 "t" - Colon@124..125 ":" - WhiteSpace@125..126 " " - PathType@126..127 - Path@126..127 - PathSegment@126..127 - Ident@126..127 "T" - RParen@127..128 ")" - WhiteSpace@128..129 " " - BlockExpr@129..165 - LBrace@129..130 "{" - Newline@130..131 "\n" - WhiteSpace@131..139 " " - ExprStmt@139..159 - CallExpr@139..159 - Path@139..151 - PathSegment@139..151 - Ident@139..151 "do_something" - GenericArgList@151..156 - Lt@151..152 "<" - GenericArg@152..155 - PathType@152..155 - Path@152..155 - PathSegment@152..155 - Ident@152..155 "i32" - Gt@155..156 ">" - CallArgList@156..159 - LParen@156..157 "(" - CallArg@157..158 - Path@157..158 - PathSegment@157..158 - Ident@157..158 "t" - RParen@158..159 ")" + Newline@92..93 "\n" + WhereClause@93..121 + WhereKw@93..98 "where" + WhiteSpace@98..99 " " + PathType@99..100 + Path@99..100 + PathSegment@99..100 + Ident@99..100 "T" + TypeBoundList@100..107 + Colon@100..101 ":" + WhiteSpace@101..102 " " + TypeBound@102..107 + Path@102..107 + PathSegment@102..107 + Ident@102..107 "Clone" + Newline@107..108 "\n" + WhiteSpace@108..114 " " + PathType@114..115 + Path@114..115 + PathSegment@114..115 + Ident@114..115 "U" + TypeBoundList@115..120 + Colon@115..116 ":" + WhiteSpace@116..117 " " + TypeBound@117..120 + Path@117..120 + PathSegment@117..120 + Ident@117..120 "Bar" + Newline@120..121 "\n" + ImplTraitItemList@121..196 + LBrace@121..122 "{" + Newline@122..123 "\n" + WhiteSpace@123..127 " " + Fn@127..194 + FnKw@127..129 "fn" + 
WhiteSpace@129..130 " " + Ident@130..133 "foo" + GenericParamList@133..151 + Lt@133..134 "<" + GenericParam@134..150 + Ident@134..135 "T" + TypeBoundList@135..150 + Colon@135..136 ":" + WhiteSpace@136..137 " " + TypeBound@137..150 + Path@137..147 + PathSegment@137..147 + Ident@137..147 "OtherTrait" + GenericArgList@147..150 + Lt@147..148 "<" + GenericArg@148..149 + PathType@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "U" + Gt@149..150 ">" + Gt@150..151 ">" + FnArgList@151..157 + LParen@151..152 "(" + FnArg@152..156 + Ident@152..153 "t" + Colon@153..154 ":" + WhiteSpace@154..155 " " + PathType@155..156 + Path@155..156 + PathSegment@155..156 + Ident@155..156 "T" + RParen@156..157 ")" + WhiteSpace@157..158 " " + BlockExpr@158..194 + LBrace@158..159 "{" Newline@159..160 "\n" - WhiteSpace@160..164 " " - RBrace@164..165 "}" - Newline@165..166 "\n" - RBrace@166..167 "}" + WhiteSpace@160..168 " " + ExprStmt@168..188 + CallExpr@168..188 + Path@168..180 + PathSegment@168..180 + Ident@168..180 "do_something" + GenericArgList@180..185 + Lt@180..181 "<" + GenericArg@181..184 + PathType@181..184 + Path@181..184 + PathSegment@181..184 + Ident@181..184 "i32" + Gt@184..185 ">" + CallArgList@185..188 + LParen@185..186 "(" + CallArg@186..187 + Path@186..187 + PathSegment@186..187 + Ident@186..187 "t" + RParen@187..188 ")" + Newline@188..189 "\n" + WhiteSpace@189..193 " " + RBrace@193..194 "}" + Newline@194..195 "\n" + RBrace@195..196 "}" diff --git a/crates/parser2/test_files/syntax_node/items/trait_.fe b/crates/parser2/test_files/syntax_node/items/trait_.fe index e98f633817..df77c21a11 100644 --- a/crates/parser2/test_files/syntax_node/items/trait_.fe +++ b/crates/parser2/test_files/syntax_node/items/trait_.fe @@ -8,8 +8,10 @@ pub trait Foo { } } -pub trait Add { - fn add(self, rhs: Rhs) -> Self +pub trait Add +{ + fn add(self, rhs: Rhs) -> Self + where RHS: Sub } @@ -17,7 +19,9 @@ pub trait Parse { fn parse(mut self, mut parser: Parser) } -impl Parser { +impl Parser + where S: Clone +{ pub fn parse(mut self, mut scope: T, checkpoint: Option) -> (bool, Checkpoint) { (SyntaxNode::new_root(self.builder.finish()), self.errors) } diff --git a/crates/parser2/test_files/syntax_node/items/trait_.snap b/crates/parser2/test_files/syntax_node/items/trait_.snap index 459d6044bd..ca105c9cd2 100644 --- a/crates/parser2/test_files/syntax_node/items/trait_.snap +++ b/crates/parser2/test_files/syntax_node/items/trait_.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..538 - ItemList@0..538 +Root@0..588 + ItemList@0..588 Trait@0..15 TraitKw@0..5 "trait" WhiteSpace@5..6 " " @@ -33,10 +33,10 @@ Root@0..538 Lt@43..44 "<" GenericParam@44..52 Ident@44..45 "T" - TraitBoundList@45..52 + TypeBoundList@45..52 Colon@45..46 ":" WhiteSpace@46..47 " " - TraitBound@47..52 + TypeBound@47..52 Path@47..52 PathSegment@47..52 Ident@47..52 "Trait" @@ -46,10 +46,10 @@ Root@0..538 ConstKw@54..59 "const" WhiteSpace@59..60 " " Ident@60..61 "U" - TraitBoundList@61..66 + TypeBoundList@61..66 Colon@61..62 ":" WhiteSpace@62..63 " " - TraitBound@63..66 + TypeBound@63..66 Path@63..66 PathSegment@63..66 Ident@63..66 "i32" @@ -85,17 +85,17 @@ Root@0..538 Lt@102..103 "<" GenericParam@103..115 Ident@103..104 "T" - TraitBoundList@104..115 + TypeBoundList@104..115 Colon@104..105 ":" WhiteSpace@105..106 " " - TraitBound@106..109 + TypeBound@106..109 Path@106..109 PathSegment@106..109 Ident@106..109 "Add" WhiteSpace@109..110 " " Plus@110..111 "+" WhiteSpace@111..112 " " - TraitBound@112..115 + 
TypeBound@112..115 Path@112..115 PathSegment@112..115 Ident@112..115 "Sub" @@ -167,250 +167,292 @@ Root@0..538 Newline@180..181 "\n" RBrace@181..182 "}" Newline@182..184 "\n\n" - Trait@184..241 + Trait@184..271 ItemModifier@184..187 PubKw@184..187 "pub" WhiteSpace@187..188 " " TraitKw@188..193 "trait" WhiteSpace@193..194 " " Ident@194..197 "Add" - GenericParamList@197..202 + GenericParamList@197..207 Lt@197..198 "<" - GenericParam@198..201 + GenericParam@198..206 Ident@198..201 "RHS" - Gt@201..202 ">" - WhiteSpace@202..203 " " - TraitItemList@203..241 - LBrace@203..204 "{" - Newline@204..205 "\n" - WhiteSpace@205..209 " " - Fn@209..239 - FnKw@209..211 "fn" - WhiteSpace@211..212 " " - Ident@212..215 "add" - FnArgList@215..231 - LParen@215..216 "(" - FnArg@216..220 - SelfKw@216..220 "self" - Comma@220..221 "," - WhiteSpace@221..222 " " - FnArg@222..230 - Ident@222..225 "rhs" - Colon@225..226 ":" - WhiteSpace@226..227 " " - PathType@227..230 - Path@227..230 - PathSegment@227..230 - Ident@227..230 "Rhs" - RParen@230..231 ")" - WhiteSpace@231..232 " " - Arrow@232..234 "->" - WhiteSpace@234..235 " " - SelfType@235..239 - SelfTypeKw@235..239 "Self" - Newline@239..240 "\n" - RBrace@240..241 "}" - Newline@241..244 "\n\n\n" - Trait@244..325 - ItemModifier@244..247 - PubKw@244..247 "pub" - WhiteSpace@247..248 " " - TraitKw@248..253 "trait" - WhiteSpace@253..254 " " - Ident@254..259 "Parse" - WhiteSpace@259..260 " " - TraitItemList@260..325 - LBrace@260..261 "{" - Newline@261..262 "\n" - WhiteSpace@262..266 " " - Fn@266..323 - FnKw@266..268 "fn" - WhiteSpace@268..269 " " - Ident@269..274 "parse" - GenericParamList@274..290 - Lt@274..275 "<" - GenericParam@275..289 - Ident@275..276 "S" - TraitBoundList@276..289 - Colon@276..277 ":" - WhiteSpace@277..278 " " - TraitBound@278..289 - Path@278..289 - PathSegment@278..289 - Ident@278..289 "TokenStream" - Gt@289..290 ">" - FnArgList@290..323 - LParen@290..291 "(" - FnArg@291..299 - MutKw@291..294 "mut" - WhiteSpace@294..295 " " - SelfKw@295..299 "self" - Comma@299..300 "," - WhiteSpace@300..301 " " - FnArg@301..322 - MutKw@301..304 "mut" - WhiteSpace@304..305 " " - Ident@305..311 "parser" - Colon@311..312 ":" - WhiteSpace@312..313 " " - PathType@313..322 - Path@313..319 - PathSegment@313..319 - Ident@313..319 "Parser" - GenericArgList@319..322 - Lt@319..320 "<" - GenericArg@320..321 - PathType@320..321 - Path@320..321 - PathSegment@320..321 - Ident@320..321 "S" - Gt@321..322 ">" - RParen@322..323 ")" - Newline@323..324 "\n" - RBrace@324..325 "}" - Newline@325..327 "\n\n" - Impl@327..538 - ImplKw@327..331 "impl" - WhiteSpace@331..332 " " - PathType@332..354 - Path@332..338 - PathSegment@332..338 - Ident@332..338 "Parser" - GenericArgList@338..354 - Lt@338..339 "<" - GenericArg@339..353 - PathType@339..340 - Path@339..340 - PathSegment@339..340 - Ident@339..340 "S" - TraitBoundList@340..353 - Colon@340..341 ":" - WhiteSpace@341..342 " " - TraitBound@342..353 - Path@342..353 - PathSegment@342..353 - Ident@342..353 "TokenStream" - Gt@353..354 ">" - WhiteSpace@354..355 " " - ImplItemList@355..538 - LBrace@355..356 "{" - Newline@356..357 "\n" - WhiteSpace@357..361 " " - Fn@361..536 - ItemModifier@361..364 - PubKw@361..364 "pub" - WhiteSpace@364..365 " " - FnKw@365..367 "fn" - WhiteSpace@367..368 " " - Ident@368..373 "parse" - GenericParamList@373..383 - Lt@373..374 "<" - GenericParam@374..382 - Ident@374..375 "T" - TraitBoundList@375..382 - Colon@375..376 ":" - WhiteSpace@376..377 " " - TraitBound@377..382 - Path@377..382 - PathSegment@377..382 - Ident@377..382 
"Parse" - Gt@382..383 ">" - FnArgList@383..439 - LParen@383..384 "(" - FnArg@384..392 - MutKw@384..387 "mut" - WhiteSpace@387..388 " " - SelfKw@388..392 "self" - Comma@392..393 "," - WhiteSpace@393..394 " " - FnArg@394..406 - MutKw@394..397 "mut" - WhiteSpace@397..398 " " - Ident@398..403 "scope" - Colon@403..404 ":" - WhiteSpace@404..405 " " - PathType@405..406 - Path@405..406 - PathSegment@405..406 - Ident@405..406 "T" - Comma@406..407 "," - WhiteSpace@407..408 " " - FnArg@408..438 - Ident@408..418 "checkpoint" - Colon@418..419 ":" - WhiteSpace@419..420 " " - PathType@420..438 - Path@420..426 - PathSegment@420..426 - Ident@420..426 "Option" - GenericArgList@426..438 - Lt@426..427 "<" - GenericArg@427..437 - PathType@427..437 - Path@427..437 - PathSegment@427..437 - Ident@427..437 "Checkpoint" - Gt@437..438 ">" - RParen@438..439 ")" - WhiteSpace@439..440 " " - Arrow@440..442 "->" - WhiteSpace@442..443 " " - TupleType@443..461 - LParen@443..444 "(" - PathType@444..448 - Path@444..448 - PathSegment@444..448 - Ident@444..448 "bool" - Comma@448..449 "," - WhiteSpace@449..450 " " - PathType@450..460 - Path@450..460 - PathSegment@450..460 - Ident@450..460 "Checkpoint" - RParen@460..461 ")" - WhiteSpace@461..462 " " - BlockExpr@462..536 - LBrace@462..463 "{" - Newline@463..464 "\n" - WhiteSpace@464..472 " " - ExprStmt@472..530 - TupleExpr@472..530 - LParen@472..473 "(" - CallExpr@473..516 - Path@473..493 - PathSegment@473..483 - Ident@473..483 "SyntaxNode" - Colon2@483..485 "::" - PathSegment@485..493 - Ident@485..493 "new_root" - CallArgList@493..516 - LParen@493..494 "(" - CallArg@494..515 - MethodCallExpr@494..515 - FieldExpr@494..506 - Path@494..498 - PathSegment@494..498 - SelfKw@494..498 "self" - Dot@498..499 "." - Ident@499..506 "builder" - Dot@506..507 "." - Ident@507..513 "finish" - CallArgList@513..515 - LParen@513..514 "(" - RParen@514..515 ")" - RParen@515..516 ")" - Comma@516..517 "," - WhiteSpace@517..518 " " - FieldExpr@518..529 - Path@518..522 - PathSegment@518..522 - SelfKw@518..522 "self" - Dot@522..523 "." 
- Ident@523..529 "errors" - RParen@529..530 ")" - Newline@530..531 "\n" - WhiteSpace@531..535 " " - RBrace@535..536 "}" - Newline@536..537 "\n" - RBrace@537..538 "}" + TypeBoundList@201..206 + Colon@201..202 ":" + WhiteSpace@202..203 " " + TypeBound@203..206 + Path@203..206 + PathSegment@203..206 + Ident@203..206 "Add" + Gt@206..207 ">" + WhiteSpace@207..208 " " + Newline@208..209 "\n" + TraitItemList@209..271 + LBrace@209..210 "{" + Newline@210..211 "\n" + WhiteSpace@211..215 " " + Fn@215..270 + FnKw@215..217 "fn" + WhiteSpace@217..218 " " + Ident@218..221 "add" + FnArgList@221..237 + LParen@221..222 "(" + FnArg@222..226 + SelfKw@222..226 "self" + Comma@226..227 "," + WhiteSpace@227..228 " " + FnArg@228..236 + Ident@228..231 "rhs" + Colon@231..232 ":" + WhiteSpace@232..233 " " + PathType@233..236 + Path@233..236 + PathSegment@233..236 + Ident@233..236 "Rhs" + RParen@236..237 ")" + WhiteSpace@237..238 " " + Arrow@238..240 "->" + WhiteSpace@240..241 " " + SelfType@241..245 + SelfTypeKw@241..245 "Self" + WhiteSpace@245..246 " " + Newline@246..247 "\n" + WhiteSpace@247..255 " " + WhereClause@255..270 + WhereKw@255..260 "where" + WhiteSpace@260..261 " " + PathType@261..264 + Path@261..264 + PathSegment@261..264 + Ident@261..264 "RHS" + TypeBoundList@264..269 + Colon@264..265 ":" + WhiteSpace@265..266 " " + TypeBound@266..269 + Path@266..269 + PathSegment@266..269 + Ident@266..269 "Sub" + Newline@269..270 "\n" + RBrace@270..271 "}" + Newline@271..274 "\n\n\n" + Trait@274..355 + ItemModifier@274..277 + PubKw@274..277 "pub" + WhiteSpace@277..278 " " + TraitKw@278..283 "trait" + WhiteSpace@283..284 " " + Ident@284..289 "Parse" + WhiteSpace@289..290 " " + TraitItemList@290..355 + LBrace@290..291 "{" + Newline@291..292 "\n" + WhiteSpace@292..296 " " + Fn@296..353 + FnKw@296..298 "fn" + WhiteSpace@298..299 " " + Ident@299..304 "parse" + GenericParamList@304..320 + Lt@304..305 "<" + GenericParam@305..319 + Ident@305..306 "S" + TypeBoundList@306..319 + Colon@306..307 ":" + WhiteSpace@307..308 " " + TypeBound@308..319 + Path@308..319 + PathSegment@308..319 + Ident@308..319 "TokenStream" + Gt@319..320 ">" + FnArgList@320..353 + LParen@320..321 "(" + FnArg@321..329 + MutKw@321..324 "mut" + WhiteSpace@324..325 " " + SelfKw@325..329 "self" + Comma@329..330 "," + WhiteSpace@330..331 " " + FnArg@331..352 + MutKw@331..334 "mut" + WhiteSpace@334..335 " " + Ident@335..341 "parser" + Colon@341..342 ":" + WhiteSpace@342..343 " " + PathType@343..352 + Path@343..349 + PathSegment@343..349 + Ident@343..349 "Parser" + GenericArgList@349..352 + Lt@349..350 "<" + GenericArg@350..351 + PathType@350..351 + Path@350..351 + PathSegment@350..351 + Ident@350..351 "S" + Gt@351..352 ">" + RParen@352..353 ")" + Newline@353..354 "\n" + RBrace@354..355 "}" + Newline@355..357 "\n\n" + Impl@357..588 + ImplKw@357..361 "impl" + WhiteSpace@361..362 " " + PathType@362..384 + Path@362..368 + PathSegment@362..368 + Ident@362..368 "Parser" + GenericArgList@368..384 + Lt@368..369 "<" + GenericArg@369..383 + PathType@369..370 + Path@369..370 + PathSegment@369..370 + Ident@369..370 "S" + TypeBoundList@370..383 + Colon@370..371 ":" + WhiteSpace@371..372 " " + TypeBound@372..383 + Path@372..383 + PathSegment@372..383 + Ident@372..383 "TokenStream" + Gt@383..384 ">" + WhiteSpace@384..385 " " + Newline@385..386 "\n" + WhiteSpace@386..390 " " + WhereClause@390..405 + WhereKw@390..395 "where" + WhiteSpace@395..396 " " + PathType@396..397 + Path@396..397 + PathSegment@396..397 + Ident@396..397 "S" + TypeBoundList@397..404 + Colon@397..398 ":" + 
WhiteSpace@398..399 " " + TypeBound@399..404 + Path@399..404 + PathSegment@399..404 + Ident@399..404 "Clone" + Newline@404..405 "\n" + ImplItemList@405..588 + LBrace@405..406 "{" + Newline@406..407 "\n" + WhiteSpace@407..411 " " + Fn@411..586 + ItemModifier@411..414 + PubKw@411..414 "pub" + WhiteSpace@414..415 " " + FnKw@415..417 "fn" + WhiteSpace@417..418 " " + Ident@418..423 "parse" + GenericParamList@423..433 + Lt@423..424 "<" + GenericParam@424..432 + Ident@424..425 "T" + TypeBoundList@425..432 + Colon@425..426 ":" + WhiteSpace@426..427 " " + TypeBound@427..432 + Path@427..432 + PathSegment@427..432 + Ident@427..432 "Parse" + Gt@432..433 ">" + FnArgList@433..489 + LParen@433..434 "(" + FnArg@434..442 + MutKw@434..437 "mut" + WhiteSpace@437..438 " " + SelfKw@438..442 "self" + Comma@442..443 "," + WhiteSpace@443..444 " " + FnArg@444..456 + MutKw@444..447 "mut" + WhiteSpace@447..448 " " + Ident@448..453 "scope" + Colon@453..454 ":" + WhiteSpace@454..455 " " + PathType@455..456 + Path@455..456 + PathSegment@455..456 + Ident@455..456 "T" + Comma@456..457 "," + WhiteSpace@457..458 " " + FnArg@458..488 + Ident@458..468 "checkpoint" + Colon@468..469 ":" + WhiteSpace@469..470 " " + PathType@470..488 + Path@470..476 + PathSegment@470..476 + Ident@470..476 "Option" + GenericArgList@476..488 + Lt@476..477 "<" + GenericArg@477..487 + PathType@477..487 + Path@477..487 + PathSegment@477..487 + Ident@477..487 "Checkpoint" + Gt@487..488 ">" + RParen@488..489 ")" + WhiteSpace@489..490 " " + Arrow@490..492 "->" + WhiteSpace@492..493 " " + TupleType@493..511 + LParen@493..494 "(" + PathType@494..498 + Path@494..498 + PathSegment@494..498 + Ident@494..498 "bool" + Comma@498..499 "," + WhiteSpace@499..500 " " + PathType@500..510 + Path@500..510 + PathSegment@500..510 + Ident@500..510 "Checkpoint" + RParen@510..511 ")" + WhiteSpace@511..512 " " + BlockExpr@512..586 + LBrace@512..513 "{" + Newline@513..514 "\n" + WhiteSpace@514..522 " " + ExprStmt@522..580 + TupleExpr@522..580 + LParen@522..523 "(" + CallExpr@523..566 + Path@523..543 + PathSegment@523..533 + Ident@523..533 "SyntaxNode" + Colon2@533..535 "::" + PathSegment@535..543 + Ident@535..543 "new_root" + CallArgList@543..566 + LParen@543..544 "(" + CallArg@544..565 + MethodCallExpr@544..565 + FieldExpr@544..556 + Path@544..548 + PathSegment@544..548 + SelfKw@544..548 "self" + Dot@548..549 "." + Ident@549..556 "builder" + Dot@556..557 "." + Ident@557..563 "finish" + CallArgList@563..565 + LParen@563..564 "(" + RParen@564..565 ")" + RParen@565..566 ")" + Comma@566..567 "," + WhiteSpace@567..568 " " + FieldExpr@568..579 + Path@568..572 + PathSegment@568..572 + SelfKw@568..572 "self" + Dot@572..573 "." 
+ Ident@573..579 "errors" + RParen@579..580 ")" + Newline@580..581 "\n" + WhiteSpace@581..585 " " + RBrace@585..586 "}" + Newline@586..587 "\n" + RBrace@587..588 "}" diff --git a/crates/parser2/test_files/syntax_node/items/type_.snap b/crates/parser2/test_files/syntax_node/items/type_.snap index 0ca6e1b1f8..01facad8e9 100644 --- a/crates/parser2/test_files/syntax_node/items/type_.snap +++ b/crates/parser2/test_files/syntax_node/items/type_.snap @@ -60,10 +60,10 @@ Root@0..98 Lt@70..71 "<" GenericParam@71..84 Ident@71..72 "T" - TraitBoundList@72..84 + TypeBoundList@72..84 Colon@72..73 ":" WhiteSpace@73..74 " " - TraitBound@74..84 + TypeBound@74..84 Path@74..84 PathSegment@74..84 Ident@74..84 "TraitBound" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.fe b/crates/parser2/test_files/syntax_node/structs/generics.fe index 80106a0984..b4bf28f6b0 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.fe +++ b/crates/parser2/test_files/syntax_node/structs/generics.fe @@ -1,4 +1,5 @@ -pub struct StructWithGenericParam { +pub struct StructWithGenericParam +{ x: S y: T z: U @@ -18,7 +19,11 @@ pub struct StructWithGenericParam3< S: foo::Trait + bar::Trait, T, U: bar::Trait -> { +> where + T: Trait1 + Trait2 + Option: Trait1 + Trait2 + Result: Trait2 + Trait3 +{ x: S y: T z: U diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index 5aaf5722b9..e731c5090e 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -2,9 +2,9 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..312 - ItemList@0..312 - Struct@0..73 +Root@0..404 + ItemList@0..404 + Struct@0..74 ItemModifier@0..3 PubKw@0..3 "pub" WhiteSpace@3..4 " " @@ -25,215 +25,295 @@ Root@0..312 Ident@40..41 "U" Gt@41..42 ">" WhiteSpace@42..43 " " - RecordFieldDefList@43..73 - LBrace@43..44 "{" - Newline@44..45 "\n" - WhiteSpace@45..49 " " - RecordFieldDef@49..53 - Ident@49..50 "x" - Colon@50..51 ":" - WhiteSpace@51..52 " " - PathType@52..53 - Path@52..53 - PathSegment@52..53 - Ident@52..53 "S" - Newline@53..54 "\n" - WhiteSpace@54..58 " " - RecordFieldDef@58..62 - Ident@58..59 "y" - Colon@59..60 ":" - WhiteSpace@60..61 " " - PathType@61..62 - Path@61..62 - PathSegment@61..62 - Ident@61..62 "T" - Newline@62..63 "\n" - WhiteSpace@63..67 " " - RecordFieldDef@67..71 - Ident@67..68 "z" - Colon@68..69 ":" - WhiteSpace@69..70 " " - PathType@70..71 - Path@70..71 - PathSegment@70..71 - Ident@70..71 "U" - Newline@71..72 "\n" - RBrace@72..73 "}" - Newline@73..74 "\n" - WhiteSpace@74..75 " " - Newline@75..76 "\n" - Struct@76..185 - ItemModifier@76..79 - PubKw@76..79 "pub" - WhiteSpace@79..80 " " - StructKw@80..86 "struct" - WhiteSpace@86..87 " " - Ident@87..110 "StructWithGenericParam2" - GenericParamList@110..145 - Lt@110..111 "<" - Newline@111..112 "\n" - WhiteSpace@112..116 " " - GenericParam@116..117 - Ident@116..117 "S" - Comma@117..118 "," - Newline@118..119 "\n" - WhiteSpace@119..123 " " - GenericParam@123..136 - Ident@123..124 "T" - TraitBoundList@124..136 - Colon@124..125 ":" - WhiteSpace@125..126 " " - TraitBound@126..136 - Path@126..136 - PathSegment@126..129 - Ident@126..129 "foo" - Colon2@129..131 "::" - PathSegment@131..136 - Ident@131..136 "Trait" - Comma@136..137 "," - Newline@137..138 "\n" - WhiteSpace@138..142 " " - GenericParam@142..143 - Ident@142..143 "U" - Newline@143..144 "\n" - Gt@144..145 ">" - WhiteSpace@145..146 " 
" - RecordFieldDefList@146..185 - LBrace@146..147 "{" - Newline@147..148 "\n" - WhiteSpace@148..152 " " - RecordFieldDef@152..165 - Ident@152..153 "x" - Colon@153..154 ":" - WhiteSpace@154..155 " " - PtrType@155..165 - Star@155..156 "*" - TupleType@156..165 - LParen@156..157 "(" - PathType@157..158 - Path@157..158 - PathSegment@157..158 - Ident@157..158 "S" - Comma@158..159 "," - WhiteSpace@159..160 " " - PtrType@160..164 - Star@160..161 "*" - PathType@161..164 - Path@161..164 - PathSegment@161..164 - Ident@161..164 "i32" - RParen@164..165 ")" - Newline@165..166 "\n" - WhiteSpace@166..170 " " - RecordFieldDef@170..174 - Ident@170..171 "y" - Colon@171..172 ":" - WhiteSpace@172..173 " " - PathType@173..174 - Path@173..174 - PathSegment@173..174 - Ident@173..174 "T" - Newline@174..175 "\n" - WhiteSpace@175..179 " " - RecordFieldDef@179..183 - Ident@179..180 "z" - Colon@180..181 ":" - WhiteSpace@181..182 " " - PathType@182..183 - Path@182..183 - PathSegment@182..183 - Ident@182..183 "U" - Newline@183..184 "\n" - RBrace@184..185 "}" - Newline@185..187 "\n\n" - Struct@187..312 - ItemModifier@187..190 - PubKw@187..190 "pub" - WhiteSpace@190..191 " " - StructKw@191..197 "struct" - WhiteSpace@197..198 " " - Ident@198..221 "StructWithGenericParam3" - GenericParamList@221..281 - Lt@221..222 "<" - Newline@222..223 "\n" - WhiteSpace@223..227 " " - GenericParam@227..253 - Ident@227..228 "S" - TraitBoundList@228..253 - Colon@228..229 ":" - WhiteSpace@229..230 " " - TraitBound@230..240 - Path@230..240 - PathSegment@230..233 - Ident@230..233 "foo" - Colon2@233..235 "::" - PathSegment@235..240 - Ident@235..240 "Trait" - WhiteSpace@240..241 " " - Plus@241..242 "+" - WhiteSpace@242..243 " " - TraitBound@243..253 - Path@243..253 - PathSegment@243..246 - Ident@243..246 "bar" - Colon2@246..248 "::" - PathSegment@248..253 - Ident@248..253 "Trait" - Comma@253..254 "," - Newline@254..255 "\n" - WhiteSpace@255..259 " " - GenericParam@259..260 - Ident@259..260 "T" - Comma@260..261 "," - Newline@261..262 "\n" - WhiteSpace@262..266 " " - GenericParam@266..279 - Ident@266..267 "U" - TraitBoundList@267..279 - Colon@267..268 ":" - WhiteSpace@268..269 " " - TraitBound@269..279 - Path@269..279 - PathSegment@269..272 - Ident@269..272 "bar" - Colon2@272..274 "::" - PathSegment@274..279 - Ident@274..279 "Trait" - Newline@279..280 "\n" - Gt@280..281 ">" - WhiteSpace@281..282 " " - RecordFieldDefList@282..312 - LBrace@282..283 "{" - Newline@283..284 "\n" - WhiteSpace@284..288 " " - RecordFieldDef@288..292 - Ident@288..289 "x" - Colon@289..290 ":" - WhiteSpace@290..291 " " - PathType@291..292 - Path@291..292 - PathSegment@291..292 - Ident@291..292 "S" - Newline@292..293 "\n" - WhiteSpace@293..297 " " - RecordFieldDef@297..301 - Ident@297..298 "y" - Colon@298..299 ":" - WhiteSpace@299..300 " " - PathType@300..301 - Path@300..301 - PathSegment@300..301 - Ident@300..301 "T" - Newline@301..302 "\n" - WhiteSpace@302..306 " " - RecordFieldDef@306..310 - Ident@306..307 "z" - Colon@307..308 ":" - WhiteSpace@308..309 " " - PathType@309..310 - Path@309..310 - PathSegment@309..310 - Ident@309..310 "U" - Newline@310..311 "\n" - RBrace@311..312 "}" + Newline@43..44 "\n" + RecordFieldDefList@44..74 + LBrace@44..45 "{" + Newline@45..46 "\n" + WhiteSpace@46..50 " " + RecordFieldDef@50..54 + Ident@50..51 "x" + Colon@51..52 ":" + WhiteSpace@52..53 " " + PathType@53..54 + Path@53..54 + PathSegment@53..54 + Ident@53..54 "S" + Newline@54..55 "\n" + WhiteSpace@55..59 " " + RecordFieldDef@59..63 + Ident@59..60 "y" + Colon@60..61 ":" + WhiteSpace@61..62 
" " + PathType@62..63 + Path@62..63 + PathSegment@62..63 + Ident@62..63 "T" + Newline@63..64 "\n" + WhiteSpace@64..68 " " + RecordFieldDef@68..72 + Ident@68..69 "z" + Colon@69..70 ":" + WhiteSpace@70..71 " " + PathType@71..72 + Path@71..72 + PathSegment@71..72 + Ident@71..72 "U" + Newline@72..73 "\n" + RBrace@73..74 "}" + Newline@74..75 "\n" + WhiteSpace@75..76 " " + Newline@76..77 "\n" + Struct@77..186 + ItemModifier@77..80 + PubKw@77..80 "pub" + WhiteSpace@80..81 " " + StructKw@81..87 "struct" + WhiteSpace@87..88 " " + Ident@88..111 "StructWithGenericParam2" + GenericParamList@111..146 + Lt@111..112 "<" + Newline@112..113 "\n" + WhiteSpace@113..117 " " + GenericParam@117..118 + Ident@117..118 "S" + Comma@118..119 "," + Newline@119..120 "\n" + WhiteSpace@120..124 " " + GenericParam@124..137 + Ident@124..125 "T" + TypeBoundList@125..137 + Colon@125..126 ":" + WhiteSpace@126..127 " " + TypeBound@127..137 + Path@127..137 + PathSegment@127..130 + Ident@127..130 "foo" + Colon2@130..132 "::" + PathSegment@132..137 + Ident@132..137 "Trait" + Comma@137..138 "," + Newline@138..139 "\n" + WhiteSpace@139..143 " " + GenericParam@143..144 + Ident@143..144 "U" + Newline@144..145 "\n" + Gt@145..146 ">" + WhiteSpace@146..147 " " + RecordFieldDefList@147..186 + LBrace@147..148 "{" + Newline@148..149 "\n" + WhiteSpace@149..153 " " + RecordFieldDef@153..166 + Ident@153..154 "x" + Colon@154..155 ":" + WhiteSpace@155..156 " " + PtrType@156..166 + Star@156..157 "*" + TupleType@157..166 + LParen@157..158 "(" + PathType@158..159 + Path@158..159 + PathSegment@158..159 + Ident@158..159 "S" + Comma@159..160 "," + WhiteSpace@160..161 " " + PtrType@161..165 + Star@161..162 "*" + PathType@162..165 + Path@162..165 + PathSegment@162..165 + Ident@162..165 "i32" + RParen@165..166 ")" + Newline@166..167 "\n" + WhiteSpace@167..171 " " + RecordFieldDef@171..175 + Ident@171..172 "y" + Colon@172..173 ":" + WhiteSpace@173..174 " " + PathType@174..175 + Path@174..175 + PathSegment@174..175 + Ident@174..175 "T" + Newline@175..176 "\n" + WhiteSpace@176..180 " " + RecordFieldDef@180..184 + Ident@180..181 "z" + Colon@181..182 ":" + WhiteSpace@182..183 " " + PathType@183..184 + Path@183..184 + PathSegment@183..184 + Ident@183..184 "U" + Newline@184..185 "\n" + RBrace@185..186 "}" + Newline@186..188 "\n\n" + Struct@188..404 + ItemModifier@188..191 + PubKw@188..191 "pub" + WhiteSpace@191..192 " " + StructKw@192..198 "struct" + WhiteSpace@198..199 " " + Ident@199..222 "StructWithGenericParam3" + GenericParamList@222..282 + Lt@222..223 "<" + Newline@223..224 "\n" + WhiteSpace@224..228 " " + GenericParam@228..254 + Ident@228..229 "S" + TypeBoundList@229..254 + Colon@229..230 ":" + WhiteSpace@230..231 " " + TypeBound@231..241 + Path@231..241 + PathSegment@231..234 + Ident@231..234 "foo" + Colon2@234..236 "::" + PathSegment@236..241 + Ident@236..241 "Trait" + WhiteSpace@241..242 " " + Plus@242..243 "+" + WhiteSpace@243..244 " " + TypeBound@244..254 + Path@244..254 + PathSegment@244..247 + Ident@244..247 "bar" + Colon2@247..249 "::" + PathSegment@249..254 + Ident@249..254 "Trait" + Comma@254..255 "," + Newline@255..256 "\n" + WhiteSpace@256..260 " " + GenericParam@260..261 + Ident@260..261 "T" + Comma@261..262 "," + Newline@262..263 "\n" + WhiteSpace@263..267 " " + GenericParam@267..280 + Ident@267..268 "U" + TypeBoundList@268..280 + Colon@268..269 ":" + WhiteSpace@269..270 " " + TypeBound@270..280 + Path@270..280 + PathSegment@270..273 + Ident@270..273 "bar" + Colon2@273..275 "::" + PathSegment@275..280 + Ident@275..280 "Trait" + 
Newline@280..281 "\n" + Gt@281..282 ">" + WhiteSpace@282..283 " " + WhereClause@283..374 + WhereKw@283..288 "where" + Newline@288..289 "\n" + WhiteSpace@289..293 " " + PathType@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "T" + TypeBoundList@294..311 + Colon@294..295 ":" + WhiteSpace@295..296 " " + TypeBound@296..302 + Path@296..302 + PathSegment@296..302 + Ident@296..302 "Trait1" + WhiteSpace@302..303 " " + Plus@303..304 "+" + WhiteSpace@304..305 " " + TypeBound@305..311 + Path@305..311 + PathSegment@305..311 + Ident@305..311 "Trait2" + Newline@311..312 "\n" + WhiteSpace@312..316 " " + PathType@316..325 + Path@316..322 + PathSegment@316..322 + Ident@316..322 "Option" + GenericArgList@322..325 + Lt@322..323 "<" + GenericArg@323..324 + PathType@323..324 + Path@323..324 + PathSegment@323..324 + Ident@323..324 "T" + Gt@324..325 ">" + TypeBoundList@325..342 + Colon@325..326 ":" + WhiteSpace@326..327 " " + TypeBound@327..333 + Path@327..333 + PathSegment@327..333 + Ident@327..333 "Trait1" + WhiteSpace@333..334 " " + Plus@334..335 "+" + WhiteSpace@335..336 " " + TypeBound@336..342 + Path@336..342 + PathSegment@336..342 + Ident@336..342 "Trait2" + Newline@342..343 "\n" + WhiteSpace@343..347 " " + PathType@347..356 + Path@347..353 + PathSegment@347..353 + Ident@347..353 "Result" + GenericArgList@353..356 + Lt@353..354 "<" + GenericArg@354..355 + PathType@354..355 + Path@354..355 + PathSegment@354..355 + Ident@354..355 "U" + Gt@355..356 ">" + TypeBoundList@356..373 + Colon@356..357 ":" + WhiteSpace@357..358 " " + TypeBound@358..364 + Path@358..364 + PathSegment@358..364 + Ident@358..364 "Trait2" + WhiteSpace@364..365 " " + Plus@365..366 "+" + WhiteSpace@366..367 " " + TypeBound@367..373 + Path@367..373 + PathSegment@367..373 + Ident@367..373 "Trait3" + Newline@373..374 "\n" + RecordFieldDefList@374..404 + LBrace@374..375 "{" + Newline@375..376 "\n" + WhiteSpace@376..380 " " + RecordFieldDef@380..384 + Ident@380..381 "x" + Colon@381..382 ":" + WhiteSpace@382..383 " " + PathType@383..384 + Path@383..384 + PathSegment@383..384 + Ident@383..384 "S" + Newline@384..385 "\n" + WhiteSpace@385..389 " " + RecordFieldDef@389..393 + Ident@389..390 "y" + Colon@390..391 ":" + WhiteSpace@391..392 " " + PathType@392..393 + Path@392..393 + PathSegment@392..393 + Ident@392..393 "T" + Newline@393..394 "\n" + WhiteSpace@394..398 " " + RecordFieldDef@398..402 + Ident@398..399 "z" + Colon@399..400 ":" + WhiteSpace@400..401 " " + PathType@401..402 + Path@401..402 + PathSegment@401..402 + Ident@401..402 "U" + Newline@402..403 "\n" + RBrace@403..404 "}" From b3cd0a6af3c90e8a0bbcf629aa545a36e82e5c9e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 26 Jan 2023 19:14:40 +0100 Subject: [PATCH 043/678] Allow type bounds in trait implementation --- crates/parser2/src/parser/item.rs | 4 +- crates/parser2/tests/syntax_node.rs | 110 ++++++++++------------------ 2 files changed, 42 insertions(+), 72 deletions(-) diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 2a83ea45b2..922a4a3e9b 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -312,9 +312,9 @@ impl super::Parse for ImplScope { parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { if is_trait_impl { self.set_kind(SyntaxKind::ImplTrait); - parse_type(parser, None, false); + parse_type(parser, None, true); parser.bump_expected(SyntaxKind::ForKw); - parse_type(parser, None, false); + parse_type(parser, None, true); } else { parse_type(parser, 
None, true); } diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 489c43bf9e..7555d19c65 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -1,21 +1,23 @@ use fe_parser2::{ - lexer, - parser::{ - expr::parse_expr, item::ItemListScope, parse_pat, stmt::parse_stmt, Parser, RootScope, - }, + parser::{expr::parse_expr, item::ItemListScope, parse_pat, stmt::parse_stmt}, syntax_node::SyntaxNode, - SyntaxKind, }; +mod test_runner; +use test_runner::*; + fe_compiler_test_utils::build_debug_snap_tests! { "parser2/test_files/syntax_node/structs", "parser2/test_files/syntax_node/structs", test_item_list } fn test_item_list(input: &str) -> SyntaxNode { - let runner = TestRunner::new(|parser| { - parser.parse(ItemListScope::default(), None); - }); + let runner = TestRunner::new( + |parser| { + parser.parse(ItemListScope::default(), None); + }, + true, + ); runner.run(input) } @@ -25,11 +27,14 @@ fe_compiler_test_utils::build_debug_snap_tests! { test_pat } fn test_pat(input: &str) -> SyntaxNode { - let runner = TestRunner::new(|parser| { - while parser.current_kind().is_some() { - parse_pat(parser); - } - }); + let runner = TestRunner::new( + |parser| { + while parser.current_kind().is_some() { + parse_pat(parser); + } + }, + true, + ); runner.run(input) } @@ -39,16 +44,19 @@ fe_compiler_test_utils::build_debug_snap_tests! { test_expr } fn test_expr(input: &str) -> SyntaxNode { - let runner = TestRunner::new(|parser| { - parser.set_newline_as_trivia(false); + let runner = TestRunner::new( + |parser| { + parser.set_newline_as_trivia(false); - bump_newlines(parser); - while parser.current_kind().is_some() { - bump_newlines(parser); - parse_expr(parser); bump_newlines(parser); - } - }); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_expr(parser); + bump_newlines(parser); + } + }, + true, + ); runner.run(input) } @@ -59,16 +67,19 @@ fe_compiler_test_utils::build_debug_snap_tests! { } fn test_stmt(input: &str) -> SyntaxNode { - let runner = TestRunner::new(|parser| { - parser.set_newline_as_trivia(false); + let runner = TestRunner::new( + |parser| { + parser.set_newline_as_trivia(false); - bump_newlines(parser); - while parser.current_kind().is_some() { bump_newlines(parser); - parse_stmt(parser, None); - bump_newlines(parser); - } - }); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_stmt(parser, None); + bump_newlines(parser); + } + }, + true, + ); runner.run(input) } @@ -77,44 +88,3 @@ fe_compiler_test_utils::build_debug_snap_tests!( "parser2/test_files/syntax_node/items", test_item_list ); - -struct TestRunner -where - F: Fn(&mut Parser), -{ - f: F, -} - -impl TestRunner -where - F: Fn(&mut Parser), -{ - fn new(f: F) -> Self { - Self { f } - } - - fn run(&self, input: &str) -> SyntaxNode { - let lexer = lexer::Lexer::new(input); - let mut parser = Parser::new(lexer); - - let checkpoint = parser.enter(RootScope::default(), None); - (self.f)(&mut parser); - parser.leave(checkpoint); - - let (cst, errors) = parser.finish(); - - for error in &errors { - println!("{}@{:?}", error.msg, error.range); - } - assert! 
{errors.is_empty()} - assert!(input == cst.to_string()); - - cst - } -} - -fn bump_newlines(parser: &mut Parser) { - while parser.current_kind() == Some(SyntaxKind::Newline) { - parser.bump(); - } -} From 7369ea9e23998f6c40bcd25f2251356e4a58602c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 26 Jan 2023 19:15:32 +0100 Subject: [PATCH 044/678] Add error recovery tests --- crates/parser2/src/parser/expr.rs | 74 ++++- crates/parser2/src/parser/expr_atom.rs | 44 ++- crates/parser2/src/parser/func.rs | 148 ++++++--- crates/parser2/src/parser/item.rs | 97 ++---- crates/parser2/src/parser/mod.rs | 70 +++- crates/parser2/src/parser/param.rs | 75 +++-- crates/parser2/src/parser/stmt.rs | 7 +- crates/parser2/src/parser/struct_.rs | 6 +- crates/parser2/src/parser/token_stream.rs | 1 + crates/parser2/src/syntax_kind.rs | 19 ++ .../test_files/error_recovery/exprs/array.fe | 2 + .../error_recovery/exprs/array.snap | 34 ++ .../test_files/error_recovery/exprs/block.fe | 5 + .../error_recovery/exprs/block.snap | 46 +++ .../test_files/error_recovery/exprs/call.fe | 3 + .../test_files/error_recovery/exprs/call.snap | 79 +++++ .../test_files/error_recovery/exprs/if_.fe | 14 + .../test_files/error_recovery/exprs/if_.snap | 107 +++++++ .../test_files/error_recovery/exprs/match_.fe | 9 + .../error_recovery/exprs/match_.snap | 109 +++++++ .../test_files/error_recovery/exprs/method.fe | 5 + .../error_recovery/exprs/method.snap | 105 ++++++ .../test_files/error_recovery/items/const_.fe | 5 + .../error_recovery/items/const_.snap | 53 ++++ .../test_files/error_recovery/items/enum_.fe | 6 + .../error_recovery/items/enum_.snap | 68 ++++ .../error_recovery/items/extern_.fe | 7 + .../error_recovery/items/extern_.snap | 62 ++++ .../test_files/error_recovery/items/func.fe | 10 + .../test_files/error_recovery/items/func.snap | 146 +++++++++ .../test_files/error_recovery/items/impl_.fe | 6 + .../error_recovery/items/impl_.snap | 79 +++++ .../error_recovery/items/impl_trait.fe | 5 + .../error_recovery/items/impl_trait.snap | 150 +++++++++ .../error_recovery/items/struct_.fe | 8 + .../error_recovery/items/struct_.snap | 71 +++++ .../test_files/error_recovery/items/trait_.fe | 13 + .../error_recovery/items/trait_.snap | 145 +++++++++ .../test_files/error_recovery/items/type_.fe | 1 + .../error_recovery/items/type_.snap | 42 +++ .../test_files/error_recovery/stmts/for_.fe | 7 + .../test_files/error_recovery/stmts/for_.snap | 76 +++++ .../test_files/error_recovery/stmts/while_.fe | 7 + .../error_recovery/stmts/while_.snap | 53 ++++ .../test_files/syntax_node/exprs/binop.fe | 1 + .../test_files/syntax_node/exprs/binop.snap | 300 ++++++++++-------- .../test_files/syntax_node/exprs/call.fe | 5 +- .../test_files/syntax_node/exprs/call.snap | 40 ++- .../syntax_node/exprs/struct_init.fe | 2 + .../syntax_node/exprs/struct_init.snap | 41 +++ .../test_files/syntax_node/items/func.fe | 2 +- .../test_files/syntax_node/items/func.snap | 10 +- .../syntax_node/items/impl_trait.fe | 7 + .../syntax_node/items/impl_trait.snap | 134 +++++++- crates/parser2/tests/error_recovery.rs | 64 ++++ crates/parser2/tests/main.rs | 1 - crates/parser2/tests/syntax_node.rs | 1 - crates/parser2/tests/test_runner.rs | 54 ++++ 58 files changed, 2411 insertions(+), 330 deletions(-) create mode 100644 crates/parser2/test_files/error_recovery/exprs/array.fe create mode 100644 crates/parser2/test_files/error_recovery/exprs/array.snap create mode 100644 crates/parser2/test_files/error_recovery/exprs/block.fe create mode 100644 
crates/parser2/test_files/error_recovery/exprs/block.snap create mode 100644 crates/parser2/test_files/error_recovery/exprs/call.fe create mode 100644 crates/parser2/test_files/error_recovery/exprs/call.snap create mode 100644 crates/parser2/test_files/error_recovery/exprs/if_.fe create mode 100644 crates/parser2/test_files/error_recovery/exprs/if_.snap create mode 100644 crates/parser2/test_files/error_recovery/exprs/match_.fe create mode 100644 crates/parser2/test_files/error_recovery/exprs/match_.snap create mode 100644 crates/parser2/test_files/error_recovery/exprs/method.fe create mode 100644 crates/parser2/test_files/error_recovery/exprs/method.snap create mode 100644 crates/parser2/test_files/error_recovery/items/const_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/const_.snap create mode 100644 crates/parser2/test_files/error_recovery/items/enum_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/enum_.snap create mode 100644 crates/parser2/test_files/error_recovery/items/extern_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/extern_.snap create mode 100644 crates/parser2/test_files/error_recovery/items/func.fe create mode 100644 crates/parser2/test_files/error_recovery/items/func.snap create mode 100644 crates/parser2/test_files/error_recovery/items/impl_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/impl_.snap create mode 100644 crates/parser2/test_files/error_recovery/items/impl_trait.fe create mode 100644 crates/parser2/test_files/error_recovery/items/impl_trait.snap create mode 100644 crates/parser2/test_files/error_recovery/items/struct_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/struct_.snap create mode 100644 crates/parser2/test_files/error_recovery/items/trait_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/trait_.snap create mode 100644 crates/parser2/test_files/error_recovery/items/type_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/type_.snap create mode 100644 crates/parser2/test_files/error_recovery/stmts/for_.fe create mode 100644 crates/parser2/test_files/error_recovery/stmts/for_.snap create mode 100644 crates/parser2/test_files/error_recovery/stmts/while_.fe create mode 100644 crates/parser2/test_files/error_recovery/stmts/while_.snap create mode 100644 crates/parser2/test_files/syntax_node/exprs/struct_init.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/struct_init.snap create mode 100644 crates/parser2/tests/error_recovery.rs delete mode 100644 crates/parser2/tests/main.rs create mode 100644 crates/parser2/tests/test_runner.rs diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index 05f40f3ebc..7bbf62c3ac 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -44,26 +44,22 @@ fn parse_expr_with_min_bp( Some(_) => { match kind { SyntaxKind::LBracket => { - if parser.parse(IndexExprScope::default(), Some(checkpoint)).0 { - continue; - } else { - return false; - } + parser.parse(IndexExprScope::default(), Some(checkpoint)); + continue; } SyntaxKind::LParen => { if parser.parse(CallExprScope::default(), Some(checkpoint)).0 { continue; - } else { - return false; } } // `expr()`. 
SyntaxKind::Lt => { - let is_call_expr = - parser.dry_run(|parser| parser.parse(CallExprScope::default(), None).0); - if is_call_expr { + //let is_call_expr = + // parser.dry_run(|parser| parser.parse(CallExprScope::default(), + // None).0); + if is_call_expr(parser) { parser.parse(CallExprScope::default(), Some(checkpoint)); continue; } @@ -71,9 +67,7 @@ fn parse_expr_with_min_bp( // `expr.method()` SyntaxKind::Dot => { - let is_method_call = parser - .dry_run(|parser| parser.parse(MethodExprScope::default(), None).0); - if is_method_call { + if is_method_call(parser) { parser.parse(MethodExprScope::default(), Some(checkpoint)); continue; } @@ -218,7 +212,9 @@ impl super::Parse for CallExprScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericArgListScope::default(), None); + parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { + parser.parse(GenericArgListScope::default(), None); + }); } if parser.current_kind() != Some(SyntaxKind::LParen) { @@ -239,9 +235,11 @@ impl super::Parse for MethodExprScope { parser.error_and_recover("expected identifier", None); } - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericArgListScope::default(), None); - } + parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default(), None); + } + }); if parser.current_kind() != Some(SyntaxKind::LParen) { parser.error_and_recover("expected `(`", None); @@ -366,3 +364,45 @@ fn bump_bin_op(parser: &mut Parser) { } } } + +fn is_call_expr(parser: &mut Parser) -> bool { + parser.dry_run(|parser| { + parser.set_newline_as_trivia(false); + + let mut is_call = true; + if parser.current_kind() == Some(SyntaxKind::Lt) { + is_call &= parser.parse(GenericArgListScope::default(), None).0; + } + + if parser.current_kind() != Some(SyntaxKind::LParen) { + false + } else { + is_call && parser.parse(CallArgListScope::default(), None).0 + } + }) +} + +fn is_method_call(parser: &mut Parser) -> bool { + parser.dry_run(|parser| { + parser.set_newline_as_trivia(false); + if !parser.bump_if(SyntaxKind::Dot) { + return false; + } + + if !parser.bump_if(SyntaxKind::Ident) { + return false; + } + + if parser.current_kind() == Some(SyntaxKind::Lt) { + if !parser.parse(GenericArgListScope::default(), None).0 { + return false; + } + } + + if parser.current_kind() != Some(SyntaxKind::LParen) { + false + } else { + parser.parse(CallArgListScope::default(), None).0 + } + }) +} diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 7aad25a788..0a7c5f692d 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -91,7 +91,7 @@ impl super::Parse for IfExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::IfKw); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); + parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); @@ -100,7 +100,9 @@ impl super::Parse for IfExprScope { parser.parse(BlockExprScope::default(), None); if parser.current_kind() == Some(SyntaxKind::ElseKw) { - parser.bump_expected(SyntaxKind::ElseKw); + parser.with_next_expected_tokens(&[SyntaxKind::LBrace, SyntaxKind::IfKw], |parser| { + 
parser.bump_expected(SyntaxKind::ElseKw); + }); if !matches!( parser.current_kind(), @@ -119,7 +121,9 @@ impl super::Parse for MatchExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::MatchKw); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); + parser.with_next_expected_tokens(&[SyntaxKind::LBrace], |parser| { + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct) + }); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); @@ -162,7 +166,7 @@ impl super::Parse for MatchArmScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - parser.with_recovery_tokens(&[SyntaxKind::FatArrow], parse_pat); + parser.with_next_expected_tokens(&[SyntaxKind::FatArrow], parse_pat); if !parser.bump_if(SyntaxKind::FatArrow) { parser.error_and_recover("expected `=>`", None); @@ -201,9 +205,14 @@ impl super::Parse for RecordFieldListScope { return; } - parser.parse(RecordFieldScope::default(), None); + parser.with_next_expected_tokens(&[SyntaxKind::RBrace, SyntaxKind::Comma], |parser| { + parser.parse(RecordFieldScope::default(), None) + }); + while parser.bump_if(SyntaxKind::Comma) { - parser.parse(RecordFieldScope::default(), None); + parser.with_next_expected_tokens(&[SyntaxKind::RBrace, SyntaxKind::Comma], |parser| { + parser.parse(RecordFieldScope::default(), None); + }) } if !parser.bump_if(SyntaxKind::RBrace) { @@ -215,15 +224,14 @@ impl super::Parse for RecordFieldListScope { define_scope! { RecordFieldScope, RecordField, Inheritance } impl super::Parse for RecordFieldScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected identifier", None); } - if !parser.bump_if(SyntaxKind::Colon) { - parser.error_and_recover("expected `:`", None); + if parser.bump_if(SyntaxKind::Colon) { + parse_expr(parser); } - - parse_expr(parser); } } @@ -265,14 +273,24 @@ impl super::Parse for ArrayScope { return; } - parse_expr(parser); + parser.with_next_expected_tokens( + &[ + SyntaxKind::SemiColon, + SyntaxKind::Comma, + SyntaxKind::RBracket, + ], + parse_expr, + ); if parser.bump_if(SyntaxKind::SemiColon) { self.set_kind(SyntaxKind::ArrayRepExpr); - parse_expr(parser); + parser.with_next_expected_tokens(&[SyntaxKind::RBracket], parse_expr); } else { while parser.bump_if(SyntaxKind::Comma) { - parse_expr(parser); + parser.with_next_expected_tokens( + &[SyntaxKind::Comma, SyntaxKind::RBracket], + parse_expr, + ); } } diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 344fa6f970..8da62d575d 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -11,62 +11,126 @@ use super::{ define_scope! 
{ pub(crate) FnScope { - disallow_def: bool + fn_def_scope: FnDefScope }, Fn, Inheritance } -impl FnScope { - pub(crate) fn disallow_def() -> Self { - Self { - disallow_def: true, - ..Self::default() - } + +#[derive(Clone, Copy, Debug)] +pub(crate) enum FnDefScope { + Normal, + TraitDef, + Extern, +} +impl Default for FnDefScope { + fn default() -> Self { + Self::Normal } } + impl super::Parse for FnScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::FnKw); - parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::LParen], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the function name", None) - } - }); + match self.fn_def_scope { + FnDefScope::Normal => parse_normal_fn_def_impl(parser), + FnDefScope::TraitDef => parse_trait_fn_def_impl(parser), + FnDefScope::Extern => parse_extern_fn_def_impl(parser), + } + } +} - parser.with_recovery_tokens(&[SyntaxKind::LParen], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }); - - parser.with_recovery_tokens( - &[ - SyntaxKind::LBrace, - SyntaxKind::Arrow, - SyntaxKind::WhereClause, - ], - |parser| { - if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnArgListScope::default(), None); - } else { - parser.error_and_recover("expected `(` for the function arguments", None); - } - }, - ); - - parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { - if parser.bump_if(SyntaxKind::Arrow) { - parse_type(parser, None, false); +fn parse_normal_fn_def_impl(parser: &mut Parser) { + parser.with_next_expected_tokens(&[SyntaxKind::Lt, SyntaxKind::LParen], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the function name", None) + } + }); + + parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }); + + parser.with_next_expected_tokens( + &[SyntaxKind::LBrace, SyntaxKind::Arrow, SyntaxKind::WhereKw], + |parser| { + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(FnArgListScope::default(), None); + } else { + parser.error_and_recover("expected `(` for the function arguments", None); } - }); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + }, + ); + + parser.with_next_expected_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None, false); + } + }); + parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); - if parser.current_kind() == Some(SyntaxKind::LBrace) { - if self.disallow_def { - parser.error_and_recover("function definition is not allowed", None); + if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(BlockExprScope::default(), None); + } else { + parser.error_and_recover("function body is required", None) + } +} + +fn parse_trait_fn_def_impl(parser: &mut Parser) { + parser.with_next_expected_tokens(&[SyntaxKind::Lt, SyntaxKind::LParen], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the function name", None) + } + }); + + parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }); + + parser.with_recovery_tokens( + 
&[SyntaxKind::LBrace, SyntaxKind::Arrow, SyntaxKind::WhereKw], + |parser| { + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(FnArgListScope::default(), None); + } else { + parser.error_and_recover("expected `(` for the function arguments", None); } - parser.parse(BlockExprScope::default(), None); + }, + ); + + parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None, false); } + }); + parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + + if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(BlockExprScope::default(), None); + } +} + +fn parse_extern_fn_def_impl(parser: &mut Parser) { + parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the function name", None) + } + }); + + parser.with_recovery_tokens(&[SyntaxKind::Arrow], |parser| { + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(FnArgListScope::default(), None); + } else { + parser.error_and_recover("expected `(` for the function arguments", None); + } + }); + + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None, false); } } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 922a4a3e9b..d182d588c8 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -5,6 +5,7 @@ use crate::{parser::func::FnScope, SyntaxKind}; use super::{ attr, define_scope, expr::parse_expr, + func::FnDefScope, param::{parse_where_clause_opt, GenericParamListScope}, struct_::RecordFieldDefListScope, token_stream::TokenStream, @@ -225,7 +226,7 @@ impl super::Parse for VariantDefListScope { } } -define_scope! { VariantDefScope, VariantDef, Override(RBrace, Newline) } +define_scope! { VariantDefScope, VariantDef, Inheritance } impl super::Parse for VariantDefScope { fn parse(&mut self, parser: &mut Parser) { if !parser.bump_if(SyntaxKind::Ident) { @@ -248,13 +249,13 @@ impl super::Parse for TraitScope { parser.error_and_recover("expected ident for the trait name", None) } - parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { + parser.with_next_expected_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericParamListScope::default(), None); } }); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected trait body", None); @@ -268,36 +269,7 @@ impl super::Parse for TraitScope { define_scope! 
{ TraitItemListScope, TraitItemList, Override(RBrace, Newline, FnKw) } impl super::Parse for TraitItemListScope { fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::LBrace); - loop { - parser.set_newline_as_trivia(true); - if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { - break; - } - - let checkpoint = attr::parse_attr_list(parser); - - match parser.current_kind() { - Some(SyntaxKind::FnKw) => { - parser.parse(FnScope::default(), checkpoint); - } - _ => { - parser.error_and_recover("trait item is restricted to `fn`", checkpoint); - } - } - - parser.set_newline_as_trivia(false); - if !matches!( - parser.current_kind(), - Some(SyntaxKind::RBrace | SyntaxKind::Newline) - ) { - parser.error_and_recover("expected newline after trait item definition", checkpoint) - } - } - - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_recover("expected `}` to close the trait body", None) - } + parse_fn_item_block(parser, false, FnDefScope::TraitDef) } } @@ -306,28 +278,34 @@ impl super::Parse for ImplScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ImplKw); - let is_trait_impl = parser - .dry_run(|parser| parse_type(parser, None, true) && parser.bump_if(SyntaxKind::ForKw)); - - parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { - if is_trait_impl { - self.set_kind(SyntaxKind::ImplTrait); - parse_type(parser, None, true); - parser.bump_expected(SyntaxKind::ForKw); - parse_type(parser, None, true); - } else { + parser.with_next_expected_tokens( + &[SyntaxKind::LBrace, SyntaxKind::WhereKw, SyntaxKind::ForKw], + |parser| { parse_type(parser, None, true); - } - }); + }, + ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + let is_impl_trait = parser.with_next_expected_tokens( + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + |parser| { + if parser.bump_if(SyntaxKind::ForKw) { + self.set_kind(SyntaxKind::ImplTrait); + parse_type(parser, None, true); + true + } else { + false + } + }, + ); + + parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected impl body", None); return; } - if is_trait_impl { + if is_impl_trait { parser.parse(ImplTraitItemListScope::default(), None); } else { parser.parse(ImplItemListScope::default(), None); @@ -338,14 +316,14 @@ impl super::Parse for ImplScope { define_scope! { ImplTraitItemListScope, ImplTraitItemList, Override(RBrace, FnKw) } impl super::Parse for ImplTraitItemListScope { fn parse(&mut self, parser: &mut Parser) { - parse_fn_item_block(parser, false, true) + parse_fn_item_block(parser, false, FnDefScope::Normal) } } define_scope! 
{ ImplItemListScope, ImplItemList, Override(RBrace, FnKw) } impl super::Parse for ImplItemListScope { fn parse(&mut self, parser: &mut Parser) { - parse_fn_item_block(parser, true, true) + parse_fn_item_block(parser, true, FnDefScope::Normal) } } @@ -364,13 +342,13 @@ impl super::Parse for ConstScope { parser.set_newline_as_trivia(false); - parser.with_recovery_tokens(&[SyntaxKind::Eq], |parser| { + parser.with_next_expected_tokens(&[SyntaxKind::Colon, SyntaxKind::Eq], |parser| { if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected identifier", None); } }); - parser.with_recovery_tokens(&[SyntaxKind::Eq], |parser| { + parser.with_next_expected_tokens(&[SyntaxKind::Eq], |parser| { if !parser.bump_if(SyntaxKind::Colon) { parser.error_and_recover("expected type annotation for `const`", None); } @@ -402,7 +380,7 @@ impl super::Parse for ExternScope { define_scope! { ExternItemListScope, ExternItemList, Override(RBrace, FnKw) } impl super::Parse for ExternItemListScope { fn parse(&mut self, parser: &mut Parser) { - parse_fn_item_block(parser, true, false); + parse_fn_item_block(parser, true, FnDefScope::Extern); } } @@ -412,13 +390,13 @@ impl super::Parse for TypeAliasScope { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::TypeKw); - parser.with_recovery_tokens(&[SyntaxKind::Lt, SyntaxKind::Eq], |parser| { + parser.with_next_expected_tokens(&[SyntaxKind::Lt, SyntaxKind::Eq], |parser| { if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected identifier for type alias name", None) } }); - parser.with_recovery_tokens(&[SyntaxKind::Eq], |parser| { + parser.with_next_expected_tokens(&[SyntaxKind::Eq], |parser| { if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericParamListScope::default(), None); } @@ -441,7 +419,7 @@ impl super::Parse for TypeAliasScope { fn parse_fn_item_block( parser: &mut Parser, allow_modifier: bool, - allow_fn_def: bool, + fn_def_scope: FnDefScope, ) { parser.bump_expected(SyntaxKind::LBrace); loop { @@ -466,12 +444,7 @@ fn parse_fn_item_block( match parser.current_kind() { Some(SyntaxKind::FnKw) => { - let scope = if allow_fn_def { - FnScope::default() - } else { - FnScope::disallow_def() - }; - parser.parse(scope, checkpoint); + parser.parse(FnScope::new(fn_def_scope), checkpoint); } _ => { parser.error_and_recover("only `fn` is allowed in the block", checkpoint); @@ -483,7 +456,7 @@ fn parse_fn_item_block( parser.current_kind(), Some(SyntaxKind::RBrace | SyntaxKind::Newline) ) { - parser.error_and_recover("expected newline after item definition", checkpoint) + parser.error_and_recover("expected newline after item definition", None) } } diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 3eea476266..f93041a78d 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -37,6 +37,7 @@ pub struct Parser { /// The second element holds `is_newline_trivia` of the parent. parents: Vec<(Box, bool)>, errors: Vec, + is_err: bool, next_trivias: VecDeque, /// if `is_newline_trivia` is `true`, `Newline` is also regarded as a trivia @@ -59,6 +60,7 @@ impl Parser { builder: rowan::GreenNodeBuilder::new(), parents: Vec::new(), errors: Vec::new(), + is_err: false, current_pos: rowan::TextSize::from(0), is_newline_trivia: true, next_trivias: VecDeque::new(), @@ -93,8 +95,9 @@ impl Parser { (SyntaxNode::new_root(self.builder.finish()), self.errors) } - /// Passes the `recovery_tokens` to the parser temporarily. 
- /// The passed recovery tokens are removed when the closure returns. + /// Adds the `recovery_tokens` as a temporary recovery token set. + /// These tokens are used as a recovery token set in addition to scope's + /// recovery token set. /// /// This is useful when you want to specify auxiliary recovery tokens which /// are valid only in a limited part of the scope. @@ -115,6 +118,38 @@ impl Parser { r } + /// Adds `expected_tokens` as a temporary recovery token set, the invokes + /// the `f` closure. If the `f` closure fails to parse, + /// `expected_tokens` are also used as a recovery token set in addition to + /// scope's recovery token set. + /// + /// If `current_token()` is not in `expected_tokens` after `f` returns, an + /// error is reported and try to recover with `expected_tokens` and scope's + /// recovery token set. + pub fn with_next_expected_tokens(&mut self, expected_tokens: &[SyntaxKind], f: F) -> R + where + F: FnOnce(&mut Self) -> R, + { + for token in expected_tokens { + self.add_recovery_token(*token); + } + + let r = f(self); + + if self.current_kind().is_some() + && expected_tokens + .iter() + .all(|token| *token != self.current_kind().unwrap()) + { + self.error_and_recover("unexpected token", None); + } + + for token in expected_tokens { + self.remove_recovery_token(*token); + } + + r + } /// Invoke the scope to parse. The scope is wrapped up by the node specified /// by the scope. /// @@ -133,12 +168,13 @@ impl Parser { where T: Parse + 'static, { + let mut is_err = std::mem::take(&mut self.is_err); let checkpoint = self.enter(scope.clone(), checkpoint); - let error_len = self.errors.len(); let start_checkpoint = self.checkpoint(); scope.parse(self); self.leave(checkpoint); - (error_len == self.errors.len(), start_checkpoint) + std::mem::swap(&mut self.is_err, &mut is_err); + (!is_err, start_checkpoint) } #[doc(hidden)] @@ -231,6 +267,7 @@ impl Parser { err_num: self.errors.len(), next_trivias: self.next_trivias.clone(), auxiliary_recovery_set: self.auxiliary_recovery_set.clone(), + is_err: self.is_err, }); let r = f(self); @@ -242,6 +279,7 @@ impl Parser { self.current_pos = state.pos; self.next_trivias = state.next_trivias; self.auxiliary_recovery_set = state.auxiliary_recovery_set; + self.is_err = state.is_err; r } @@ -297,13 +335,29 @@ impl Parser { } } + let is_newline_trivia = self.set_newline_as_trivia(false); + self.auxiliary_recovery_set.insert(SyntaxKind::Newline, 1); + let mut open_brackets_in_error = FxHashMap::default(); while let Some(kind) = self.current_kind() { + if kind.is_open_bracket_kind() { + *open_brackets_in_error.entry(kind).or_insert(0) += 1; + } if recovery_set.contains(&kind) || self.auxiliary_recovery_set.contains_key(&kind) { - break; - } else { - self.bump(); + if let Some(open_bracket) = kind.corresponding_open_bracket_kind() { + if open_brackets_in_error.get(&open_bracket).unwrap_or(&0) != &0 { + *open_brackets_in_error.get_mut(&open_bracket).unwrap() -= 1; + } else { + break; + } + } else { + break; + } } + + self.bump(); } + + self.set_newline_as_trivia(is_newline_trivia); } fn checkpoint(&mut self) -> Checkpoint { @@ -367,6 +421,7 @@ impl Parser { /// Add the `msg` to the error list. fn error(&mut self, msg: &str) -> ErrorScope { + self.is_err = true; let start = self.current_pos; let end = if let Some(current_token) = self.current_token() { start + current_token.text_size() @@ -424,6 +479,7 @@ struct DryRunState { /// The stored trivias when the dry run started. 
next_trivias: VecDeque, auxiliary_recovery_set: FxHashMap, + is_err: bool, } /// Represents the recovery method of the current scope. diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 98bb5fdd32..b3218632ef 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -105,15 +105,20 @@ define_scope! { } impl super::Parse for GenericParamScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_if(SyntaxKind::ConstKw); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected type parameter", None); - } + parser.with_next_expected_tokens(&[SyntaxKind::Comma, SyntaxKind::Gt], |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected type parameter", None); + } - if parser.current_kind() == Some(SyntaxKind::Colon) { - parser.parse(TypeBoundListScope::default(), None); - } + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(TypeBoundListScope::default(), None); + } + + parser.set_newline_as_trivia(true); + }); } } @@ -180,26 +185,30 @@ define_scope! { } impl super::Parse for GenericArgScope { fn parse(&mut self, parser: &mut Parser) { - match parser.current_kind() { - Some(SyntaxKind::LBrace) => { - parser.parse(BlockExprScope::default(), None); - } + parser.set_newline_as_trivia(false); + parser.with_next_expected_tokens(&[SyntaxKind::Comma, SyntaxKind::Gt], |parser| { + match parser.current_kind() { + Some(SyntaxKind::LBrace) => { + parser.parse(BlockExprScope::default(), None); + } - Some(kind) if kind.is_literal_leaf() => { - parser.parse(LitExprScope::default(), None); - } + Some(kind) if kind.is_literal_leaf() => { + parser.parse(LitExprScope::default(), None); + } - _ => { - parse_type(parser, None, self.allow_bounds); - if parser.current_kind() == Some(SyntaxKind::Colon) { - if !self.allow_bounds { - parser.error_and_recover("type bounds are not allowed here", None); - } else { - parser.parse(TypeBoundListScope::default(), None); + _ => { + parse_type(parser, None, self.allow_bounds); + if parser.current_kind() == Some(SyntaxKind::Colon) { + if !self.allow_bounds { + parser.error_and_recover("type bounds are not allowed here", None); + } else { + parser.parse(TypeBoundListScope::default(), None); + } } } } - } + parser.set_newline_as_trivia(true); + }); } } @@ -227,17 +236,19 @@ impl super::Parse for CallArgListScope { define_scope! 
{ CallArgScope, CallArg, Inheritance } impl super::Parse for CallArgScope { fn parse(&mut self, parser: &mut Parser) { - parser.set_newline_as_trivia(false); - - let has_label = parser.dry_run(|parser| { - parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) + parser.with_next_expected_tokens(&[SyntaxKind::Comma, SyntaxKind::RParen], |parser| { + parser.set_newline_as_trivia(false); + let has_label = parser.dry_run(|parser| { + parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) + }); + + if has_label { + parser.bump_expected(SyntaxKind::Ident); + parser.bump_expected(SyntaxKind::Colon); + } + parse_expr(parser); + parser.set_newline_as_trivia(true); }); - - if has_label { - parser.bump_expected(SyntaxKind::Ident); - parser.bump_expected(SyntaxKind::Colon); - } - parse_expr(parser); } } @@ -251,9 +262,9 @@ impl super::Parse for WhereClauseScope { match parser.current_kind() { Some(kind) if is_path_segment(kind) => { parse_type(parser, None, false); + parser.set_newline_as_trivia(false); if parser.current_kind() == Some(SyntaxKind::Colon) { parser.parse(TypeBoundListScope::default(), None); - parser.set_newline_as_trivia(false); if !parser.bump_if(SyntaxKind::Newline) { parser.error_and_recover("expected newline after type bounds", None); } diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index e184669288..6bbe8299f9 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -59,14 +59,13 @@ impl super::Parse for ForStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ForKw); - parser.with_recovery_tokens(&[SyntaxKind::InKw], parse_pat); + parser.with_next_expected_tokens(&[SyntaxKind::InKw, SyntaxKind::LBrace], parse_pat); if !parser.bump_if(SyntaxKind::InKw) { parser.error_and_recover("expected `in` keyword", None); - return; } - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); + parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); @@ -81,7 +80,7 @@ impl super::Parse for WhileStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::WhileKw); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); + parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index fbf6b0209f..4a8eeeced6 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -87,15 +87,17 @@ define_scope! 
{ } impl super::Parse for RecordFieldDefScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parse_attr_list(parser); parser.bump_if(SyntaxKind::PubKw); if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected ident for the field name", None); } - if !parser.bump_if(SyntaxKind::Colon) { + if parser.bump_if(SyntaxKind::Colon) { + parse_type(parser, None, false); + } else { parser.error_and_recover("expected `name: type` for the field definition", None); } - parse_type(parser, None, false); } } diff --git a/crates/parser2/src/parser/token_stream.rs b/crates/parser2/src/parser/token_stream.rs index aa123ba84b..7043491a85 100644 --- a/crates/parser2/src/parser/token_stream.rs +++ b/crates/parser2/src/parser/token_stream.rs @@ -94,6 +94,7 @@ impl BackTrackableTokenStream { self.bt_points.push(self.bt_cursor.unwrap()); } else { self.bt_points.push(0); + self.bt_cursor = Some(0); } } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index d481f02d35..ab5b6fc0b5 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -454,6 +454,25 @@ impl SyntaxKind { SyntaxKind::Int | SyntaxKind::String | SyntaxKind::TrueKw | SyntaxKind::FalseKw ) } + + pub fn is_open_bracket_kind(self) -> bool { + matches!( + self, + SyntaxKind::LBrace | SyntaxKind::LParen | SyntaxKind::LBracket | SyntaxKind::Lt + ) + } + + /// Returns its corresponding open bracket kind if it is a close bracket + /// kind. + pub fn corresponding_open_bracket_kind(self) -> Option { + match self { + SyntaxKind::RBrace => Some(SyntaxKind::LBrace), + SyntaxKind::RParen => Some(SyntaxKind::LParen), + SyntaxKind::RBracket => Some(SyntaxKind::LBracket), + SyntaxKind::Gt => Some(SyntaxKind::Lt), + _ => None, + } + } } impl From for rowan::SyntaxKind { diff --git a/crates/parser2/test_files/error_recovery/exprs/array.fe b/crates/parser2/test_files/error_recovery/exprs/array.fe new file mode 100644 index 0000000000..603646659f --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/array.fe @@ -0,0 +1,2 @@ +[1, 2 a, 3] +[1, 2,] \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/array.snap b/crates/parser2/test_files/error_recovery/exprs/array.snap new file mode 100644 index 0000000000..49038a9db8 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/array.snap @@ -0,0 +1,34 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..19 + ArrayExpr@0..11 + LBracket@0..1 "[" + LitExpr@1..2 + Int@1..2 "1" + Comma@2..3 "," + WhiteSpace@3..4 " " + LitExpr@4..5 + Int@4..5 "2" + WhiteSpace@5..6 " " + Error@6..7 + Ident@6..7 "a" + Comma@7..8 "," + WhiteSpace@8..9 " " + LitExpr@9..10 + Int@9..10 "3" + RBracket@10..11 "]" + Newline@11..12 "\n" + ArrayExpr@12..19 + LBracket@12..13 "[" + LitExpr@13..14 + Int@13..14 "1" + Comma@14..15 "," + WhiteSpace@15..16 " " + LitExpr@16..17 + Int@16..17 "2" + Comma@17..18 "," + Error@18..18 + RBracket@18..19 "]" + diff --git a/crates/parser2/test_files/error_recovery/exprs/block.fe b/crates/parser2/test_files/error_recovery/exprs/block.fe new file mode 100644 index 0000000000..f1e1ac3703 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/block.fe @@ -0,0 +1,5 @@ +{ + let x: i32 u32 = 10 + let y = 10 + +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/block.snap b/crates/parser2/test_files/error_recovery/exprs/block.snap new file mode 100644 index 
0000000000..95d15e556e --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/block.snap @@ -0,0 +1,46 @@ +--- +source: crates/parser2/tests/errro_recovery.rs +expression: snapshot +--- +Root@0..43 + BlockExpr@0..43 + LBrace@0..1 "{" + Newline@1..2 "\n" + WhiteSpace@2..6 " " + LetStmt@6..16 + LetKw@6..9 "let" + WhiteSpace@9..10 " " + PathPat@10..11 + Path@10..11 + PathSegment@10..11 + Ident@10..11 "x" + Colon@11..12 ":" + WhiteSpace@12..13 " " + PathType@13..16 + Path@13..16 + PathSegment@13..16 + Ident@13..16 "i32" + WhiteSpace@16..17 " " + Error@17..25 + Ident@17..20 "u32" + WhiteSpace@20..21 " " + Eq@21..22 "=" + WhiteSpace@22..23 " " + Int@23..25 "10" + Newline@25..26 "\n" + WhiteSpace@26..30 " " + LetStmt@30..40 + LetKw@30..33 "let" + WhiteSpace@33..34 " " + PathPat@34..35 + Path@34..35 + PathSegment@34..35 + Ident@34..35 "y" + WhiteSpace@35..36 " " + Eq@36..37 "=" + WhiteSpace@37..38 " " + LitExpr@38..40 + Int@38..40 "10" + Newline@40..42 "\n\n" + RBrace@42..43 "}" + diff --git a/crates/parser2/test_files/error_recovery/exprs/call.fe b/crates/parser2/test_files/error_recovery/exprs/call.fe new file mode 100644 index 0000000000..ebd351f6f0 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/call.fe @@ -0,0 +1,3 @@ +foo(x, y a, z ;) + +foo(x, y) diff --git a/crates/parser2/test_files/error_recovery/exprs/call.snap b/crates/parser2/test_files/error_recovery/exprs/call.snap new file mode 100644 index 0000000000..91e5142937 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/call.snap @@ -0,0 +1,79 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..40 + CallExpr@0..16 + Path@0..3 + PathSegment@0..3 + Ident@0..3 "foo" + CallArgList@3..16 + LParen@3..4 "(" + CallArg@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "x" + Comma@5..6 "," + WhiteSpace@6..7 " " + CallArg@7..10 + Path@7..8 + PathSegment@7..8 + Ident@7..8 "y" + WhiteSpace@8..9 " " + Error@9..10 + Ident@9..10 "a" + Comma@10..11 "," + WhiteSpace@11..12 " " + CallArg@12..15 + Path@12..13 + PathSegment@12..13 + Ident@12..13 "z" + WhiteSpace@13..14 " " + Error@14..15 + SemiColon@14..15 ";" + RParen@15..16 ")" + Newline@16..18 "\n\n" + CallExpr@18..39 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "foo" + GenericArgList@21..33 + Lt@21..22 "<" + GenericArg@22..25 + PathType@22..25 + Path@22..25 + PathSegment@22..25 + Ident@22..25 "i32" + Comma@25..26 "," + WhiteSpace@26..27 " " + GenericArg@27..30 + PathType@27..28 + Path@27..28 + PathSegment@27..28 + Ident@27..28 "T" + WhiteSpace@28..29 " " + Error@29..30 + Ident@29..30 "E" + Comma@30..31 "," + WhiteSpace@31..32 " " + GenericArg@32..32 + PathType@32..32 + Path@32..32 + PathSegment@32..32 + Error@32..32 + Gt@32..33 ">" + CallArgList@33..39 + LParen@33..34 "(" + CallArg@34..35 + Path@34..35 + PathSegment@34..35 + Ident@34..35 "x" + Comma@35..36 "," + WhiteSpace@36..37 " " + CallArg@37..38 + Path@37..38 + PathSegment@37..38 + Ident@37..38 "y" + RParen@38..39 ")" + Newline@39..40 "\n" + diff --git a/crates/parser2/test_files/error_recovery/exprs/if_.fe b/crates/parser2/test_files/error_recovery/exprs/if_.fe new file mode 100644 index 0000000000..cb68659d3d --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/if_.fe @@ -0,0 +1,14 @@ +if x y { +} + +if x { + +} else x {} + +if x { } else x if x { } else { } + +if x { + 10 +else { + 1 +} diff --git a/crates/parser2/test_files/error_recovery/exprs/if_.snap b/crates/parser2/test_files/error_recovery/exprs/if_.snap new file mode 100644 index 
0000000000..9e36f6254d --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/if_.snap @@ -0,0 +1,107 @@ +--- +source: crates/parser2/tests/errro_recovery.rs +expression: snapshot +--- +Root@0..101 + IfExpr@0..10 + IfKw@0..2 "if" + WhiteSpace@2..3 " " + Path@3..4 + PathSegment@3..4 + Ident@3..4 "x" + WhiteSpace@4..5 " " + Error@5..6 + Ident@5..6 "y" + WhiteSpace@6..7 " " + BlockExpr@7..10 + LBrace@7..8 "{" + Newline@8..9 "\n" + RBrace@9..10 "}" + Newline@10..12 "\n\n" + IfExpr@12..31 + IfKw@12..14 "if" + WhiteSpace@14..15 " " + Path@15..16 + PathSegment@15..16 + Ident@15..16 "x" + WhiteSpace@16..17 " " + BlockExpr@17..21 + LBrace@17..18 "{" + Newline@18..20 "\n\n" + RBrace@20..21 "}" + WhiteSpace@21..22 " " + ElseKw@22..26 "else" + WhiteSpace@26..27 " " + Error@27..28 + Ident@27..28 "x" + WhiteSpace@28..29 " " + BlockExpr@29..31 + LBrace@29..30 "{" + RBrace@30..31 "}" + Newline@31..33 "\n\n" + IfExpr@33..66 + IfKw@33..35 "if" + WhiteSpace@35..36 " " + Path@36..37 + PathSegment@36..37 + Ident@36..37 "x" + WhiteSpace@37..38 " " + BlockExpr@38..41 + LBrace@38..39 "{" + WhiteSpace@39..40 " " + RBrace@40..41 "}" + WhiteSpace@41..42 " " + ElseKw@42..46 "else" + WhiteSpace@46..47 " " + Error@47..48 + Ident@47..48 "x" + WhiteSpace@48..49 " " + IfExpr@49..66 + IfKw@49..51 "if" + WhiteSpace@51..52 " " + Path@52..53 + PathSegment@52..53 + Ident@52..53 "x" + WhiteSpace@53..54 " " + BlockExpr@54..57 + LBrace@54..55 "{" + WhiteSpace@55..56 " " + RBrace@56..57 "}" + WhiteSpace@57..58 " " + ElseKw@58..62 "else" + WhiteSpace@62..63 " " + BlockExpr@63..66 + LBrace@63..64 "{" + WhiteSpace@64..65 " " + RBrace@65..66 "}" + Newline@66..68 "\n\n" + IfExpr@68..100 + IfKw@68..70 "if" + WhiteSpace@70..71 " " + Path@71..72 + PathSegment@71..72 + Ident@71..72 "x" + WhiteSpace@72..73 " " + BlockExpr@73..100 + LBrace@73..74 "{" + Newline@74..75 "\n" + WhiteSpace@75..79 " " + ExprStmt@79..81 + LitExpr@79..81 + Int@79..81 "10" + WhiteSpace@81..85 " " + Newline@85..86 "\n" + ExprStmt@86..92 + Error@86..92 + ElseKw@86..90 "else" + WhiteSpace@90..91 " " + LBrace@91..92 "{" + Newline@92..93 "\n" + WhiteSpace@93..97 " " + ExprStmt@97..98 + LitExpr@97..98 + Int@97..98 "1" + Newline@98..99 "\n" + RBrace@99..100 "}" + Newline@100..101 "\n" + diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.fe b/crates/parser2/test_files/error_recovery/exprs/match_.fe new file mode 100644 index 0000000000..43c649187c --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/match_.fe @@ -0,0 +1,9 @@ +match X => { + Foo() => true + Bar +} + +match X { + Foo(i, j, => true x + Bar => x +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.snap b/crates/parser2/test_files/error_recovery/exprs/match_.snap new file mode 100644 index 0000000000..746a0452a1 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/match_.snap @@ -0,0 +1,109 @@ +--- +source: crates/parser2/tests/errro_recovery.rs +expression: snapshot +--- +Root@0..94 + MatchExpr@0..40 + MatchKw@0..5 "match" + WhiteSpace@5..6 " " + Path@6..7 + PathSegment@6..7 + Ident@6..7 "X" + WhiteSpace@7..9 " " + Error@9..11 + FatArrow@9..11 "=>" + WhiteSpace@11..12 " " + MatchArmList@12..40 + LBrace@12..13 "{" + WhiteSpace@13..14 " " + Newline@14..15 "\n" + WhiteSpace@15..18 " " + MatchArm@18..31 + PathTuplePat@18..23 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "Foo" + TuplePatElemList@21..23 + LParen@21..22 "(" + RParen@22..23 ")" + WhiteSpace@23..24 " " + FatArrow@24..26 "=>" + WhiteSpace@26..27 " " + 
LitExpr@27..31 + TrueKw@27..31 "true" + Newline@31..32 "\n" + WhiteSpace@32..35 " " + MatchArm@35..38 + PathPat@35..38 + Path@35..38 + PathSegment@35..38 + Ident@35..38 "Bar" + Error@38..38 + Error@38..38 + Newline@38..39 "\n" + RBrace@39..40 "}" + WhiteSpace@40..41 " " + Newline@41..43 "\n\n" + MatchExpr@43..93 + MatchKw@43..48 "match" + WhiteSpace@48..49 " " + Path@49..50 + PathSegment@49..50 + Ident@49..50 "X" + WhiteSpace@50..52 " " + MatchArmList@52..93 + LBrace@52..53 "{" + WhiteSpace@53..54 " " + Newline@54..55 "\n" + WhiteSpace@55..58 " " + MatchArm@58..77 + PathTuplePat@58..70 + Path@58..61 + PathSegment@58..61 + Ident@58..61 "Foo" + TuplePatElemList@61..70 + LParen@61..62 "(" + TuplePatElem@62..63 + PathPat@62..63 + Path@62..63 + PathSegment@62..63 + Ident@62..63 "i" + Comma@63..64 "," + WhiteSpace@64..65 " " + TuplePatElem@65..66 + PathPat@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "j" + Comma@66..67 "," + WhiteSpace@67..70 " " + TuplePatElem@70..70 + PathPat@70..70 + Path@70..70 + PathSegment@70..70 + Error@70..70 + Error@70..70 + FatArrow@70..72 "=>" + WhiteSpace@72..73 " " + LitExpr@73..77 + TrueKw@73..77 "true" + WhiteSpace@77..78 " " + Error@78..79 + Ident@78..79 "x" + Newline@79..80 "\n" + WhiteSpace@80..83 " " + MatchArm@83..91 + PathPat@83..86 + Path@83..86 + PathSegment@83..86 + Ident@83..86 "Bar" + WhiteSpace@86..87 " " + FatArrow@87..89 "=>" + WhiteSpace@89..90 " " + Path@90..91 + PathSegment@90..91 + Ident@90..91 "x" + Newline@91..92 "\n" + RBrace@92..93 "}" + WhiteSpace@93..94 " " + diff --git a/crates/parser2/test_files/error_recovery/exprs/method.fe b/crates/parser2/test_files/error_recovery/exprs/method.fe new file mode 100644 index 0000000000..577b390824 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/method.fe @@ -0,0 +1,5 @@ +foo::bar.baz(1, 2) + +foo::bar.x(1, 2 E,) + +foo::bar.baz() \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/method.snap b/crates/parser2/test_files/error_recovery/exprs/method.snap new file mode 100644 index 0000000000..7546669e3f --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/method.snap @@ -0,0 +1,105 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..78 + MethodCallExpr@0..31 + Path@0..8 + PathSegment@0..3 + Ident@0..3 "foo" + Colon2@3..5 "::" + PathSegment@5..8 + Ident@5..8 "bar" + Dot@8..9 "." + Ident@9..12 "baz" + GenericArgList@12..25 + Lt@12..13 "<" + GenericArg@13..16 + PathType@13..16 + Path@13..16 + PathSegment@13..16 + Ident@13..16 "i32" + Comma@16..17 "," + WhiteSpace@17..18 " " + GenericArg@18..23 + PathType@18..21 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "u32" + WhiteSpace@21..22 " " + Error@22..23 + Ident@22..23 "T" + Comma@23..24 "," + GenericArg@24..24 + PathType@24..24 + Path@24..24 + PathSegment@24..24 + Error@24..24 + Gt@24..25 ">" + CallArgList@25..31 + LParen@25..26 "(" + CallArg@26..27 + LitExpr@26..27 + Int@26..27 "1" + Comma@27..28 "," + WhiteSpace@28..29 " " + CallArg@29..30 + LitExpr@29..30 + Int@29..30 "2" + RParen@30..31 ")" + Newline@31..33 "\n\n" + MethodCallExpr@33..52 + Path@33..41 + PathSegment@33..36 + Ident@33..36 "foo" + Colon2@36..38 "::" + PathSegment@38..41 + Ident@38..41 "bar" + Dot@41..42 "." 
+ Ident@42..43 "x" + CallArgList@43..52 + LParen@43..44 "(" + CallArg@44..45 + LitExpr@44..45 + Int@44..45 "1" + Comma@45..46 "," + WhiteSpace@46..47 " " + CallArg@47..50 + LitExpr@47..48 + Int@47..48 "2" + WhiteSpace@48..49 " " + Error@49..50 + Ident@49..50 "E" + Comma@50..51 "," + CallArg@51..51 + Error@51..51 + RParen@51..52 ")" + Newline@52..54 "\n\n" + MethodCallExpr@54..78 + Path@54..62 + PathSegment@54..57 + Ident@54..57 "foo" + Colon2@57..59 "::" + PathSegment@59..62 + Ident@59..62 "bar" + Dot@62..63 "." + Ident@63..66 "baz" + GenericArgList@66..76 + Lt@66..67 "<" + GenericArg@67..70 + PathType@67..70 + Path@67..70 + PathSegment@67..70 + Ident@67..70 "i32" + Comma@70..71 "," + WhiteSpace@71..72 " " + GenericArg@72..75 + PathType@72..75 + Path@72..75 + PathSegment@72..75 + Ident@72..75 "u32" + Gt@75..76 ">" + CallArgList@76..78 + LParen@76..77 "(" + RParen@77..78 ")" + diff --git a/crates/parser2/test_files/error_recovery/items/const_.fe b/crates/parser2/test_files/error_recovery/items/const_.fe new file mode 100644 index 0000000000..a9ab53676f --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/const_.fe @@ -0,0 +1,5 @@ +const X = 10 + +const X: i32 + +const X: ]@ = 1 \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/const_.snap b/crates/parser2/test_files/error_recovery/items/const_.snap new file mode 100644 index 0000000000..ebd87c8439 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/const_.snap @@ -0,0 +1,53 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..44 + ItemList@0..44 + Const@0..12 + ConstKw@0..5 "const" + WhiteSpace@5..6 " " + Ident@6..7 "X" + WhiteSpace@7..8 " " + Error@8..8 + PathType@8..8 + Path@8..8 + PathSegment@8..8 + Error@8..8 + Eq@8..9 "=" + WhiteSpace@9..10 " " + LitExpr@10..12 + Int@10..12 "10" + Newline@12..14 "\n\n" + Const@14..27 + ConstKw@14..19 "const" + WhiteSpace@19..20 " " + Ident@20..21 "X" + Colon@21..22 ":" + WhiteSpace@22..23 " " + PathType@23..26 + Path@23..26 + PathSegment@23..26 + Ident@23..26 "i32" + WhiteSpace@26..27 " " + Error@27..27 + Error@27..27 + Newline@27..29 "\n\n" + Const@29..44 + ConstKw@29..34 "const" + WhiteSpace@34..35 " " + Ident@35..36 "X" + Colon@36..37 ":" + WhiteSpace@37..38 " " + PathType@38..40 + Path@38..40 + PathSegment@38..40 + Error@38..40 + RBracket@38..39 "]" + InvalidToken@39..40 "@" + WhiteSpace@40..41 " " + Eq@41..42 "=" + WhiteSpace@42..43 " " + LitExpr@43..44 + Int@43..44 "1" + diff --git a/crates/parser2/test_files/error_recovery/items/enum_.fe b/crates/parser2/test_files/error_recovery/items/enum_.fe new file mode 100644 index 0000000000..753c314d28 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/enum_.fe @@ -0,0 +1,6 @@ +pub enum MyEnum { + X(u32, T + A + Y(T, u32) A + Z +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/enum_.snap b/crates/parser2/test_files/error_recovery/items/enum_.snap new file mode 100644 index 0000000000..1d34535f84 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/enum_.snap @@ -0,0 +1,68 @@ +--- +source: crates/parser2/tests/errro_recovery.rs +expression: snapshot +--- +Root@0..63 + ItemList@0..63 + Enum@0..63 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + EnumKw@4..8 "enum" + WhiteSpace@8..9 " " + Ident@9..15 "MyEnum" + GenericParamList@15..18 + Lt@15..16 "<" + GenericParam@16..17 + Ident@16..17 "T" + Gt@17..18 ">" + WhiteSpace@18..19 " " + VariantDefList@19..63 + LBrace@19..20 
"{" + Newline@20..21 "\n" + WhiteSpace@21..25 " " + VariantDef@25..39 + Ident@25..26 "X" + TupleType@26..39 + LParen@26..27 "(" + PathType@27..30 + Path@27..30 + PathSegment@27..30 + Ident@27..30 "u32" + Comma@30..31 "," + WhiteSpace@31..32 " " + PathType@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "T" + Newline@33..34 "\n" + WhiteSpace@34..38 " " + Error@38..39 + Ident@38..39 "A" + Newline@39..40 "\n" + WhiteSpace@40..44 " " + VariantDef@44..53 + Ident@44..45 "Y" + TupleType@45..53 + LParen@45..46 "(" + PathType@46..47 + Path@46..47 + PathSegment@46..47 + Ident@46..47 "T" + Comma@47..48 "," + WhiteSpace@48..49 " " + PathType@49..52 + Path@49..52 + PathSegment@49..52 + Ident@49..52 "u32" + RParen@52..53 ")" + WhiteSpace@53..54 " " + Error@54..55 + Ident@54..55 "A" + Newline@55..56 "\n" + WhiteSpace@56..60 " " + VariantDef@60..61 + Ident@60..61 "Z" + Newline@61..62 "\n" + RBrace@62..63 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/extern_.fe b/crates/parser2/test_files/error_recovery/items/extern_.fe new file mode 100644 index 0000000000..12b50f7186 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/extern_.fe @@ -0,0 +1,7 @@ +extern { + pub unsafe fn Foo(x: *usize) + + struct Foo { + + pub unsafe fn foo() +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/extern_.snap b/crates/parser2/test_files/error_recovery/items/extern_.snap new file mode 100644 index 0000000000..8c017a1e0f --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/extern_.snap @@ -0,0 +1,62 @@ +--- +source: crates/parser2/tests/errro_recovery.rs +expression: snapshot +--- +Root@0..90 + ItemList@0..90 + Extern@0..90 + ExternKw@0..6 "extern" + WhiteSpace@6..7 " " + ExternItemList@7..90 + LBrace@7..8 "{" + Newline@8..9 "\n" + WhiteSpace@9..13 " " + Fn@13..41 + ItemModifier@13..23 + PubKw@13..16 "pub" + WhiteSpace@16..17 " " + UnsafeKw@17..23 "unsafe" + WhiteSpace@23..24 " " + FnKw@24..26 "fn" + WhiteSpace@26..27 " " + Ident@27..30 "Foo" + FnArgList@30..41 + LParen@30..31 "(" + FnArg@31..40 + Ident@31..32 "x" + Colon@32..33 ":" + WhiteSpace@33..34 " " + PtrType@34..40 + Star@34..35 "*" + PathType@35..40 + Path@35..40 + PathSegment@35..40 + Ident@35..40 "usize" + RParen@40..41 ")" + Newline@41..42 "\n" + WhiteSpace@42..46 " " + Newline@46..47 "\n" + WhiteSpace@47..51 " " + Error@51..63 + StructKw@51..57 "struct" + WhiteSpace@57..58 " " + Ident@58..61 "Foo" + WhiteSpace@61..62 " " + LBrace@62..63 "{" + Newline@63..65 "\n\n" + WhiteSpace@65..69 " " + Fn@69..88 + ItemModifier@69..79 + PubKw@69..72 "pub" + WhiteSpace@72..73 " " + UnsafeKw@73..79 "unsafe" + WhiteSpace@79..80 " " + FnKw@80..82 "fn" + WhiteSpace@82..83 " " + Ident@83..86 "foo" + FnArgList@86..88 + LParen@86..87 "(" + RParen@87..88 ")" + Newline@88..89 "\n" + RBrace@89..90 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/func.fe b/crates/parser2/test_files/error_recovery/items/func.fe new file mode 100644 index 0000000000..481ba1e975 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/func.fe @@ -0,0 +1,10 @@ +fn foo>(x: i32, _ mut y: u32, z: u32) -> T, u where T: Trait2 +{ + +} + +fn foo<<(x: i32) + where T: Trait2 +{ + +} diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap new file mode 100644 index 0000000000..c85d4aef31 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -0,0 +1,146 @@ +--- +source: 
crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..133 + ItemList@0..133 + Fn@0..78 + FnKw@0..2 "fn" + WhiteSpace@2..3 " " + Ident@3..6 "foo" + GenericParamList@6..16 + Lt@6..7 "<" + GenericParam@7..15 + Ident@7..8 "T" + TypeBoundList@8..15 + Colon@8..9 ":" + WhiteSpace@9..10 " " + TypeBound@10..15 + Path@10..15 + PathSegment@10..15 + Ident@10..15 "Trait" + Gt@15..16 ">" + Error@16..17 + Gt@16..17 ">" + FnArgList@17..47 + LParen@17..18 "(" + FnArg@18..24 + Ident@18..19 "x" + Colon@19..20 ":" + WhiteSpace@20..21 " " + PathType@21..24 + Path@21..24 + PathSegment@21..24 + Ident@21..24 "i32" + Comma@24..25 "," + WhiteSpace@25..26 " " + FnArg@26..38 + Underscore@26..27 "_" + WhiteSpace@27..28 " " + Error@28..38 + MutKw@28..31 "mut" + WhiteSpace@31..32 " " + Ident@32..33 "y" + Colon@33..34 ":" + WhiteSpace@34..35 " " + Ident@35..38 "u32" + PathType@38..38 + Path@38..38 + PathSegment@38..38 + Error@38..38 + Comma@38..39 "," + WhiteSpace@39..40 " " + FnArg@40..46 + Ident@40..41 "z" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "u32" + RParen@46..47 ")" + WhiteSpace@47..48 " " + Arrow@48..50 "->" + WhiteSpace@50..52 " " + PathType@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "T" + Error@53..56 + Comma@53..54 "," + WhiteSpace@54..55 " " + Ident@55..56 "u" + WhiteSpace@56..57 " " + WhereClause@57..74 + WhereKw@57..62 "where" + WhiteSpace@62..63 " " + PathType@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "T" + TypeBoundList@64..72 + Colon@64..65 ":" + WhiteSpace@65..66 " " + TypeBound@66..72 + Path@66..72 + PathSegment@66..72 + Ident@66..72 "Trait2" + WhiteSpace@72..73 " " + Newline@73..74 "\n" + BlockExpr@74..78 + LBrace@74..75 "{" + Newline@75..77 "\n\n" + RBrace@77..78 "}" + Newline@78..80 "\n\n" + Fn@80..132 + FnKw@80..82 "fn" + WhiteSpace@82..83 " " + Ident@83..86 "foo" + GenericParamList@86..98 + Lt@86..87 "<" + GenericParam@87..98 + Error@87..98 + Lt@87..88 "<" + Lt@88..89 "<" + Ident@89..90 "T" + Colon@90..91 ":" + WhiteSpace@91..92 " " + Ident@92..97 "Trait" + Gt@97..98 ">" + Error@98..98 + Error@98..98 + FnArgList@98..106 + LParen@98..99 "(" + FnArg@99..105 + Ident@99..100 "x" + Colon@100..101 ":" + WhiteSpace@101..102 " " + PathType@102..105 + Path@102..105 + PathSegment@102..105 + Ident@102..105 "i32" + RParen@105..106 ")" + Newline@106..107 "\n" + WhiteSpace@107..111 " " + WhereClause@111..128 + WhereKw@111..116 "where" + WhiteSpace@116..117 " " + PathType@117..118 + Path@117..118 + PathSegment@117..118 + Ident@117..118 "T" + TypeBoundList@118..126 + Colon@118..119 ":" + WhiteSpace@119..120 " " + TypeBound@120..126 + Path@120..126 + PathSegment@120..126 + Ident@120..126 "Trait2" + WhiteSpace@126..127 " " + Newline@127..128 "\n" + BlockExpr@128..132 + LBrace@128..129 "{" + Newline@129..131 "\n\n" + RBrace@131..132 "}" + Newline@132..133 "\n" + diff --git a/crates/parser2/test_files/error_recovery/items/impl_.fe b/crates/parser2/test_files/error_recovery/items/impl_.fe new file mode 100644 index 0000000000..f7192a5d44 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/impl_.fe @@ -0,0 +1,6 @@ +impl Foo +{ } \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/impl_.snap b/crates/parser2/test_files/error_recovery/items/impl_.snap new file mode 100644 index 0000000000..e954244bd6 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/impl_.snap @@ -0,0 +1,79 @@ +--- +source: crates/parser2/tests/error_recovery.rs 
+expression: snapshot +--- +Root@0..56 + ItemList@0..56 + Impl@0..37 + ImplKw@0..4 "impl" + WhiteSpace@4..5 " " + PathType@5..17 + Path@5..8 + PathSegment@5..8 + Ident@5..8 "Foo" + GenericArgList@8..17 + Lt@8..9 "<" + GenericArg@9..10 + PathType@9..10 + Path@9..10 + PathSegment@9..10 + Ident@9..10 "T" + Comma@10..11 "," + WhiteSpace@11..12 " " + Newline@12..13 "\n" + WhiteSpace@13..17 " " + GenericArg@17..17 + PathType@17..17 + Path@17..17 + PathSegment@17..17 + Error@17..17 + Error@17..17 + Error@17..17 + WhereClause@17..34 + WhereKw@17..22 "where" + WhiteSpace@22..23 " " + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + TypeBoundList@24..33 + Colon@24..25 ":" + WhiteSpace@25..26 " " + TypeBound@26..33 + Path@26..33 + PathSegment@26..33 + Ident@26..33 "Integer" + Newline@33..34 "\n" + ImplItemList@34..37 + LBrace@34..35 "{" + WhiteSpace@35..36 " " + RBrace@36..37 "}" + Newline@37..39 "\n\n" + Impl@39..56 + ImplKw@39..43 "impl" + WhiteSpace@43..44 " " + PathType@44..52 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "Foo" + GenericArgList@47..52 + Lt@47..48 "<" + GenericArg@48..49 + PathType@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "T" + Comma@49..50 "," + WhiteSpace@50..51 " " + GenericArg@51..51 + PathType@51..51 + Path@51..51 + PathSegment@51..51 + Error@51..51 + Gt@51..52 ">" + Newline@52..53 "\n" + ImplItemList@53..56 + LBrace@53..54 "{" + WhiteSpace@54..55 " " + RBrace@55..56 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/impl_trait.fe b/crates/parser2/test_files/error_recovery/items/impl_trait.fe new file mode 100644 index 0000000000..19ceebbdb9 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/impl_trait.fe @@ -0,0 +1,5 @@ +impl X for Y" + WhiteSpace@14..15 " " + ForKw@15..18 "for" + WhiteSpace@18..19 " " + PathType@19..23 + Path@19..20 + PathSegment@19..20 + Ident@19..20 "Y" + GenericArgList@20..23 + Lt@20..21 "<" + GenericArg@21..23 + PathType@21..22 + Path@21..22 + PathSegment@21..22 + Ident@21..22 "T" + WhiteSpace@22..23 " " + Error@23..23 + Error@23..23 + WhereClause@23..34 + WhereKw@23..28 "where" + WhiteSpace@28..29 " " + PathType@29..30 + Path@29..30 + PathSegment@29..30 + Ident@29..30 "T" + TypeBoundList@30..33 + Colon@30..31 ":" + WhiteSpace@31..32 " " + TypeBound@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "X" + WhiteSpace@33..34 " " + Error@34..34 + ImplTraitItemList@34..36 + LBrace@34..35 "{" + RBrace@35..36 "}" + Newline@36..38 "\n\n" + ImplTrait@38..71 + ImplKw@38..42 "impl" + WhiteSpace@42..43 " " + PathType@43..50 + Path@43..44 + PathSegment@43..44 + Ident@43..44 "X" + GenericArgList@44..50 + Lt@44..45 "<" + GenericArg@45..46 + PathType@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "T" + Comma@46..47 "," + WhiteSpace@47..48 " " + GenericArg@48..50 + PathType@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "u" + WhiteSpace@49..50 " " + Error@50..50 + Error@50..50 + ForKw@50..53 "for" + WhiteSpace@53..54 " " + PathType@54..58 + Path@54..55 + PathSegment@54..55 + Ident@54..55 "Y" + GenericArgList@55..58 + Lt@55..56 "<" + GenericArg@56..58 + PathType@56..57 + Path@56..57 + PathSegment@56..57 + Ident@56..57 "T" + WhiteSpace@57..58 " " + Error@58..58 + Error@58..58 + WhereClause@58..69 + WhereKw@58..63 "where" + WhiteSpace@63..64 " " + PathType@64..65 + Path@64..65 + PathSegment@64..65 + Ident@64..65 "T" + TypeBoundList@65..68 + Colon@65..66 ":" + WhiteSpace@66..67 " " + TypeBound@67..68 + Path@67..68 + PathSegment@67..68 + Ident@67..68 "X" + WhiteSpace@68..69 " 
" + Error@69..69 + ImplTraitItemList@69..71 + LBrace@69..70 "{" + RBrace@70..71 "}" + Newline@71..73 "\n\n" + ImplTrait@73..90 + ImplKw@73..77 "impl" + WhiteSpace@77..78 " " + PathType@78..79 + Path@78..79 + PathSegment@78..79 + Ident@78..79 "X" + WhiteSpace@79..80 " " + Error@80..81 + InvalidToken@80..81 "@" + WhiteSpace@81..82 " " + ForKw@82..85 "for" + WhiteSpace@85..86 " " + PathType@86..87 + Path@86..87 + PathSegment@86..87 + Ident@86..87 "Y" + WhiteSpace@87..88 " " + ImplTraitItemList@88..90 + LBrace@88..89 "{" + RBrace@89..90 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/struct_.fe b/crates/parser2/test_files/error_recovery/items/struct_.fe new file mode 100644 index 0000000000..f9b312c988 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/struct_.fe @@ -0,0 +1,8 @@ +pub struct{} + +trait Bar + +trait Bar where T: Add {} + +trait Bar< + where T: Add +{ + +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/trait_.snap b/crates/parser2/test_files/error_recovery/items/trait_.snap new file mode 100644 index 0000000000..7b7a047ef7 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/trait_.snap @@ -0,0 +1,145 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..133 + ItemList@0..133 + Trait@0..18 + TraitKw@0..5 "trait" + WhiteSpace@5..6 " " + Ident@6..9 "Foo" + GenericParamList@9..16 + Lt@9..10 "<" + GenericParam@10..11 + Ident@10..11 "T" + Comma@11..12 "," + WhiteSpace@12..13 " " + GenericParam@13..14 + Ident@13..14 "Y" + Comma@14..15 "," + GenericParam@15..15 + Error@15..15 + Gt@15..16 ">" + TraitItemList@16..18 + LBrace@16..17 "{" + RBrace@17..18 "}" + Newline@18..20 "\n\n" + Trait@20..35 + TraitKw@20..25 "trait" + WhiteSpace@25..26 " " + Ident@26..29 "Bar" + GenericParamList@29..33 + Lt@29..30 "<" + GenericParam@30..31 + Ident@30..31 "Y" + Comma@31..32 "," + WhiteSpace@32..33 " " + GenericParam@33..33 + Error@33..33 + Error@33..33 + Error@33..33 + TraitItemList@33..35 + LBrace@33..34 "{" + RBrace@34..35 "}" + Newline@35..37 "\n\n" + Trait@37..53 + TraitKw@37..42 "trait" + WhiteSpace@42..43 " " + Ident@43..46 "Bar" + GenericParamList@46..51 + Lt@46..47 "<" + GenericParam@47..48 + Ident@47..48 "T" + Comma@48..49 "," + WhiteSpace@49..50 " " + GenericParam@50..50 + Error@50..50 + Gt@50..51 ">" + Newline@51..53 "\n\n" + Error@53..53 + Error@53..53 + Error@53..53 + Error@53..53 + Trait@53..85 + TraitKw@53..58 "trait" + WhiteSpace@58..59 " " + Ident@59..62 "Bar" + GenericParamList@62..69 + Lt@62..63 "<" + GenericParam@63..64 + Ident@63..64 "Y" + Comma@64..65 "," + WhiteSpace@65..66 " " + GenericParam@66..67 + Ident@66..67 "T" + Comma@67..68 "," + GenericParam@68..68 + Error@68..68 + Gt@68..69 ">" + WhiteSpace@69..70 " " + WhereClause@70..83 + WhereKw@70..75 "where" + WhiteSpace@75..76 " " + PathType@76..77 + Path@76..77 + PathSegment@76..77 + Ident@76..77 "T" + TypeBoundList@77..82 + Colon@77..78 ":" + WhiteSpace@78..79 " " + TypeBound@79..82 + Path@79..82 + PathSegment@79..82 + Ident@79..82 "Add" + WhiteSpace@82..83 " " + Error@83..83 + TraitItemList@83..85 + LBrace@83..84 "{" + RBrace@84..85 "}" + Newline@85..87 "\n\n" + Trait@87..133 + TraitKw@87..92 "trait" + WhiteSpace@92..93 " " + Ident@93..96 "Bar" + GenericParamList@96..108 + Lt@96..97 "<" + GenericParam@97..99 + Error@97..99 + Lt@97..98 "<" + Ident@98..99 "Y" + Comma@99..100 "," + WhiteSpace@100..101 " " + GenericParam@101..107 + Ident@101..102 "K" + TypeBoundList@102..107 + Colon@102..103 ":" + 
WhiteSpace@103..104 " " + TypeBound@104..107 + Path@104..107 + PathSegment@104..107 + Ident@104..107 "Sub" + Gt@107..108 ">" + WhiteSpace@108..110 " " + Newline@110..111 "\n" + WhiteSpace@111..115 " " + WhereClause@115..129 + WhereKw@115..120 "where" + WhiteSpace@120..121 " " + PathType@121..122 + Path@121..122 + PathSegment@121..122 + Ident@121..122 "T" + TypeBoundList@122..127 + Colon@122..123 ":" + WhiteSpace@123..124 " " + TypeBound@124..127 + Path@124..127 + PathSegment@124..127 + Ident@124..127 "Add" + WhiteSpace@127..128 " " + Newline@128..129 "\n" + TraitItemList@129..133 + LBrace@129..130 "{" + Newline@130..132 "\n\n" + RBrace@132..133 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/type_.fe b/crates/parser2/test_files/error_recovery/items/type_.fe new file mode 100644 index 0000000000..1de5391495 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/type_.fe @@ -0,0 +1 @@ +type Result \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/type_.snap b/crates/parser2/test_files/error_recovery/items/type_.snap new file mode 100644 index 0000000000..47a1798b44 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/type_.snap @@ -0,0 +1,42 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..29 + ItemList@0..29 + TypeAlias@0..29 + TypeKw@0..4 "type" + WhiteSpace@4..5 " " + Ident@5..11 "Result" + GenericParamList@11..15 + Lt@11..12 "<" + GenericParam@12..13 + Ident@12..13 "T" + Comma@13..14 "," + WhiteSpace@14..15 " " + GenericParam@15..15 + Error@15..15 + Error@15..15 + Error@15..15 + Eq@15..16 "=" + WhiteSpace@16..17 " " + PathType@17..29 + Path@17..23 + PathSegment@17..23 + Ident@17..23 "Result" + GenericArgList@23..29 + Lt@23..24 "<" + GenericArg@24..25 + PathType@24..25 + Path@24..25 + PathSegment@24..25 + Ident@24..25 "T" + Comma@25..26 "," + WhiteSpace@26..27 " " + GenericArg@27..28 + PathType@27..28 + Path@27..28 + PathSegment@27..28 + Ident@27..28 "E" + Gt@28..29 ">" + diff --git a/crates/parser2/test_files/error_recovery/stmts/for_.fe b/crates/parser2/test_files/error_recovery/stmts/for_.fe new file mode 100644 index 0000000000..764b7df3ee --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/for_.fe @@ -0,0 +1,7 @@ +for i arr { } + +for in arr { } + +for @ in arr {} + +for @ in arr x y {} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/stmts/for_.snap b/crates/parser2/test_files/error_recovery/stmts/for_.snap new file mode 100644 index 0000000000..739df48836 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/for_.snap @@ -0,0 +1,76 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..67 + ForStmt@0..29 + ForKw@0..3 "for" + WhiteSpace@3..4 " " + PathPat@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "i" + WhiteSpace@5..6 " " + Error@6..9 + Ident@6..9 "arr" + WhiteSpace@9..10 " " + Error@10..13 + LBrace@10..11 "{" + WhiteSpace@11..12 " " + RBrace@12..13 "}" + Newline@13..15 "\n\n" + Error@15..25 + ForKw@15..18 "for" + WhiteSpace@18..19 " " + InKw@19..21 "in" + WhiteSpace@21..22 " " + Ident@22..25 "arr" + WhiteSpace@25..26 " " + BlockExpr@26..29 + LBrace@26..27 "{" + WhiteSpace@27..28 " " + RBrace@28..29 "}" + Newline@29..31 "\n\n" + ForStmt@31..46 + ForKw@31..34 "for" + WhiteSpace@34..35 " " + PathPat@35..36 + Path@35..36 + PathSegment@35..36 + Error@35..36 + InvalidToken@35..36 "@" + WhiteSpace@36..37 " " + InKw@37..39 "in" + WhiteSpace@39..40 " " + 
Path@40..43 + PathSegment@40..43 + Ident@40..43 "arr" + WhiteSpace@43..44 " " + BlockExpr@44..46 + LBrace@44..45 "{" + RBrace@45..46 "}" + Newline@46..48 "\n\n" + ForStmt@48..67 + ForKw@48..51 "for" + WhiteSpace@51..52 " " + PathPat@52..53 + Path@52..53 + PathSegment@52..53 + Error@52..53 + InvalidToken@52..53 "@" + WhiteSpace@53..54 " " + InKw@54..56 "in" + WhiteSpace@56..57 " " + Path@57..60 + PathSegment@57..60 + Ident@57..60 "arr" + WhiteSpace@60..61 " " + Error@61..64 + Ident@61..62 "x" + WhiteSpace@62..63 " " + Ident@63..64 "y" + WhiteSpace@64..65 " " + BlockExpr@65..67 + LBrace@65..66 "{" + RBrace@66..67 "}" + diff --git a/crates/parser2/test_files/error_recovery/stmts/while_.fe b/crates/parser2/test_files/error_recovery/stmts/while_.fe new file mode 100644 index 0000000000..ef4ccc095c --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/while_.fe @@ -0,0 +1,7 @@ +while @ {} + +while true { + x + 1 +}} + +while true {} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/stmts/while_.snap b/crates/parser2/test_files/error_recovery/stmts/while_.snap new file mode 100644 index 0000000000..1f705ce8e5 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/while_.snap @@ -0,0 +1,53 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..56 + WhileStmt@0..10 + WhileKw@0..5 "while" + WhiteSpace@5..6 " " + Error@6..7 + InvalidToken@6..7 "@" + WhiteSpace@7..8 " " + BlockExpr@8..10 + LBrace@8..9 "{" + RBrace@9..10 "}" + Newline@10..12 "\n\n" + WhileStmt@12..36 + WhileKw@12..17 "while" + WhiteSpace@17..18 " " + LitExpr@18..22 + TrueKw@18..22 "true" + WhiteSpace@22..23 " " + BlockExpr@23..36 + LBrace@23..24 "{" + Newline@24..25 "\n" + WhiteSpace@25..29 " " + ExprStmt@29..34 + BinExpr@29..34 + Path@29..30 + PathSegment@29..30 + Ident@29..30 "x" + WhiteSpace@30..31 " " + Plus@31..32 "+" + WhiteSpace@32..33 " " + LitExpr@33..34 + Int@33..34 "1" + Newline@34..35 "\n" + RBrace@35..36 "}" + ExprStmt@36..37 + Error@36..37 + RBrace@36..37 "}" + Newline@37..38 "\n" + WhiteSpace@38..42 " " + Newline@42..43 "\n" + WhileStmt@43..56 + WhileKw@43..48 "while" + WhiteSpace@48..49 " " + LitExpr@49..53 + TrueKw@49..53 "true" + WhiteSpace@53..54 " " + BlockExpr@54..56 + LBrace@54..55 "{" + RBrace@55..56 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.fe b/crates/parser2/test_files/syntax_node/exprs/binop.fe index c3c4c31831..b364268cf4 100644 --- a/crates/parser2/test_files/syntax_node/exprs/binop.fe +++ b/crates/parser2/test_files/syntax_node/exprs/binop.fe @@ -2,6 +2,7 @@ 1 * 2 + 3 1 < 2 1 < (2 + 3) +1 < a(foo) 1 <= 2 1 >= 2 true || false && 1 < 2 diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.snap b/crates/parser2/test_files/syntax_node/exprs/binop.snap index 185668907b..34f7f736de 100644 --- a/crates/parser2/test_files/syntax_node/exprs/binop.snap +++ b/crates/parser2/test_files/syntax_node/exprs/binop.snap @@ -2,7 +2,7 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..153 +Root@0..164 BinExpr@0..9 LitExpr@0..1 Int@0..1 "1" @@ -60,152 +60,170 @@ Root@0..153 Int@35..36 "3" RParen@36..37 ")" Newline@37..38 "\n" - BinExpr@38..44 + BinExpr@38..48 LitExpr@38..39 Int@38..39 "1" WhiteSpace@39..40 " " - LtEq@40..42 - Lt@40..41 "<" - Eq@41..42 "=" - WhiteSpace@42..43 " " - LitExpr@43..44 - Int@43..44 "2" - Newline@44..45 "\n" - BinExpr@45..51 - LitExpr@45..46 - Int@45..46 "1" - WhiteSpace@46..47 " " - GtEq@47..49 - Gt@47..48 ">" - Eq@48..49 "=" - 
WhiteSpace@49..50 " " - LitExpr@50..51 - Int@50..51 "2" - Newline@51..52 "\n" - BinExpr@52..74 - LitExpr@52..56 - TrueKw@52..56 "true" - WhiteSpace@56..57 " " - Pipe2@57..59 "||" - WhiteSpace@59..60 " " - BinExpr@60..74 - LitExpr@60..65 - FalseKw@60..65 "false" - WhiteSpace@65..66 " " - Amp2@66..68 "&&" - WhiteSpace@68..69 " " - BinExpr@69..74 - LitExpr@69..70 - Int@69..70 "1" - WhiteSpace@70..71 " " - Lt@71..72 "<" - WhiteSpace@72..73 " " - LitExpr@73..74 - Int@73..74 "2" - Newline@74..75 "\n" - BinExpr@75..107 - LitExpr@75..79 - TrueKw@75..79 "true" - WhiteSpace@79..80 " " - Pipe2@80..82 "||" - WhiteSpace@82..83 " " - BinExpr@83..107 - LitExpr@83..88 - FalseKw@83..88 "false" - WhiteSpace@88..89 " " - Amp2@89..91 "&&" - WhiteSpace@91..92 " " - BinExpr@92..107 - ParenExpr@92..99 - LParen@92..93 "(" - BinExpr@93..98 - LitExpr@93..94 - Int@93..94 "1" - WhiteSpace@94..95 " " - Lt@95..96 "<" - WhiteSpace@96..97 " " - LitExpr@97..98 - Int@97..98 "2" - RParen@98..99 ")" - WhiteSpace@99..100 " " - Gt@100..101 ">" - WhiteSpace@101..102 " " - BinExpr@102..107 - LitExpr@102..103 - Int@102..103 "3" - WhiteSpace@103..104 " " - Hat@104..105 "^" - WhiteSpace@105..106 " " - LitExpr@106..107 - Int@106..107 "2" - Newline@107..108 "\n" - BinExpr@108..119 - Path@108..109 - PathSegment@108..109 - Ident@108..109 "a" - WhiteSpace@109..110 " " - Star2@110..112 "**" - WhiteSpace@112..113 " " - BinExpr@113..119 - LitExpr@113..114 - Int@113..114 "2" - WhiteSpace@114..115 " " - Star2@115..117 "**" - WhiteSpace@117..118 " " - LitExpr@118..119 - Int@118..119 "3" - Newline@119..120 "\n" - BinExpr@120..129 - BinExpr@120..125 - LitExpr@120..121 - Int@120..121 "1" - WhiteSpace@121..122 " " - Minus@122..123 "-" - WhiteSpace@123..124 " " + Lt@40..41 "<" + WhiteSpace@41..42 " " + CallExpr@42..48 + Path@42..43 + PathSegment@42..43 + Ident@42..43 "a" + CallArgList@43..48 + LParen@43..44 "(" + CallArg@44..47 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "foo" + RParen@47..48 ")" + Newline@48..49 "\n" + BinExpr@49..55 + LitExpr@49..50 + Int@49..50 "1" + WhiteSpace@50..51 " " + LtEq@51..53 + Lt@51..52 "<" + Eq@52..53 "=" + WhiteSpace@53..54 " " + LitExpr@54..55 + Int@54..55 "2" + Newline@55..56 "\n" + BinExpr@56..62 + LitExpr@56..57 + Int@56..57 "1" + WhiteSpace@57..58 " " + GtEq@58..60 + Gt@58..59 ">" + Eq@59..60 "=" + WhiteSpace@60..61 " " + LitExpr@61..62 + Int@61..62 "2" + Newline@62..63 "\n" + BinExpr@63..85 + LitExpr@63..67 + TrueKw@63..67 "true" + WhiteSpace@67..68 " " + Pipe2@68..70 "||" + WhiteSpace@70..71 " " + BinExpr@71..85 + LitExpr@71..76 + FalseKw@71..76 "false" + WhiteSpace@76..77 " " + Amp2@77..79 "&&" + WhiteSpace@79..80 " " + BinExpr@80..85 + LitExpr@80..81 + Int@80..81 "1" + WhiteSpace@81..82 " " + Lt@82..83 "<" + WhiteSpace@83..84 " " + LitExpr@84..85 + Int@84..85 "2" + Newline@85..86 "\n" + BinExpr@86..118 + LitExpr@86..90 + TrueKw@86..90 "true" + WhiteSpace@90..91 " " + Pipe2@91..93 "||" + WhiteSpace@93..94 " " + BinExpr@94..118 + LitExpr@94..99 + FalseKw@94..99 "false" + WhiteSpace@99..100 " " + Amp2@100..102 "&&" + WhiteSpace@102..103 " " + BinExpr@103..118 + ParenExpr@103..110 + LParen@103..104 "(" + BinExpr@104..109 + LitExpr@104..105 + Int@104..105 "1" + WhiteSpace@105..106 " " + Lt@106..107 "<" + WhiteSpace@107..108 " " + LitExpr@108..109 + Int@108..109 "2" + RParen@109..110 ")" + WhiteSpace@110..111 " " + Gt@111..112 ">" + WhiteSpace@112..113 " " + BinExpr@113..118 + LitExpr@113..114 + Int@113..114 "3" + WhiteSpace@114..115 " " + Hat@115..116 "^" + WhiteSpace@116..117 " " + LitExpr@117..118 + 
Int@117..118 "2" + Newline@118..119 "\n" + BinExpr@119..130 + Path@119..120 + PathSegment@119..120 + Ident@119..120 "a" + WhiteSpace@120..121 " " + Star2@121..123 "**" + WhiteSpace@123..124 " " + BinExpr@124..130 LitExpr@124..125 Int@124..125 "2" - WhiteSpace@125..126 " " - Minus@126..127 "-" - WhiteSpace@127..128 " " - LitExpr@128..129 - Int@128..129 "3" - Newline@129..130 "\n" - BinExpr@130..141 - BinExpr@130..136 - LitExpr@130..131 - Int@130..131 "1" - WhiteSpace@131..132 " " - LShift@132..134 - Lt@132..133 "<" - Lt@133..134 "<" + WhiteSpace@125..126 " " + Star2@126..128 "**" + WhiteSpace@128..129 " " + LitExpr@129..130 + Int@129..130 "3" + Newline@130..131 "\n" + BinExpr@131..140 + BinExpr@131..136 + LitExpr@131..132 + Int@131..132 "1" + WhiteSpace@132..133 " " + Minus@133..134 "-" WhiteSpace@134..135 " " LitExpr@135..136 - Int@135..136 "3" + Int@135..136 "2" WhiteSpace@136..137 " " - RShift@137..139 - Gt@137..138 ">" - Gt@138..139 ">" - WhiteSpace@139..140 " " - LitExpr@140..141 - Int@140..141 "2" - Newline@141..142 "\n" - FieldExpr@142..147 - FieldExpr@142..145 - Path@142..143 - PathSegment@142..143 - Ident@142..143 "a" - Dot@143..144 "." - Ident@144..145 "b" - Dot@145..146 "." - Ident@146..147 "c" - Newline@147..148 "\n" - FieldExpr@148..153 - FieldExpr@148..151 - Path@148..149 - PathSegment@148..149 - Ident@148..149 "a" - Dot@149..150 "." - Int@150..151 "0" - Dot@151..152 "." - Ident@152..153 "c" + Minus@137..138 "-" + WhiteSpace@138..139 " " + LitExpr@139..140 + Int@139..140 "3" + Newline@140..141 "\n" + BinExpr@141..152 + BinExpr@141..147 + LitExpr@141..142 + Int@141..142 "1" + WhiteSpace@142..143 " " + LShift@143..145 + Lt@143..144 "<" + Lt@144..145 "<" + WhiteSpace@145..146 " " + LitExpr@146..147 + Int@146..147 "3" + WhiteSpace@147..148 " " + RShift@148..150 + Gt@148..149 ">" + Gt@149..150 ">" + WhiteSpace@150..151 " " + LitExpr@151..152 + Int@151..152 "2" + Newline@152..153 "\n" + FieldExpr@153..158 + FieldExpr@153..156 + Path@153..154 + PathSegment@153..154 + Ident@153..154 "a" + Dot@154..155 "." + Ident@155..156 "b" + Dot@156..157 "." + Ident@157..158 "c" + Newline@158..159 "\n" + FieldExpr@159..164 + FieldExpr@159..162 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "a" + Dot@160..161 "." + Int@161..162 "0" + Dot@162..163 "." + Ident@163..164 "c" diff --git a/crates/parser2/test_files/syntax_node/exprs/call.fe b/crates/parser2/test_files/syntax_node/exprs/call.fe index 3b0fa29d55..c703e6fe57 100644 --- a/crates/parser2/test_files/syntax_node/exprs/call.fe +++ b/crates/parser2/test_files/syntax_node/exprs/call.fe @@ -6,4 +6,7 @@ foo(x: 1, 2, z: 3) foo(1, y: 2, z: 3) foo(val1: 2, val2: "String") -foo<[u32; 1], {3 + 4}>(x: 1, y: 2) \ No newline at end of file +foo<[u32; 1], {3 + 4}>(x: 1, y: 2) + +// Ths should be parsed as `(foo(1))`, not a tuple expression. +(foo < i32, (u32) > (1)) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/call.snap b/crates/parser2/test_files/syntax_node/exprs/call.snap index 0fcbe962b9..d01e37b579 100644 --- a/crates/parser2/test_files/syntax_node/exprs/call.snap +++ b/crates/parser2/test_files/syntax_node/exprs/call.snap @@ -2,7 +2,7 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..169 +Root@0..270 CallExpr@0..5 Path@0..3 PathSegment@0..3 @@ -211,4 +211,42 @@ Root@0..169 LitExpr@167..168 Int@167..168 "2" RParen@168..169 ")" + Newline@169..171 "\n\n" + Comment@171..245 "// Ths should be pars ..." 
+ Newline@245..246 "\n" + ParenExpr@246..270 + LParen@246..247 "(" + CallExpr@247..269 + Path@247..250 + PathSegment@247..250 + Ident@247..250 "foo" + WhiteSpace@250..251 " " + GenericArgList@251..265 + Lt@251..252 "<" + WhiteSpace@252..253 " " + GenericArg@253..256 + PathType@253..256 + Path@253..256 + PathSegment@253..256 + Ident@253..256 "i32" + Comma@256..257 "," + WhiteSpace@257..258 " " + GenericArg@258..263 + TupleType@258..263 + LParen@258..259 "(" + PathType@259..262 + Path@259..262 + PathSegment@259..262 + Ident@259..262 "u32" + RParen@262..263 ")" + WhiteSpace@263..264 " " + Gt@264..265 ">" + WhiteSpace@265..266 " " + CallArgList@266..269 + LParen@266..267 "(" + CallArg@267..268 + LitExpr@267..268 + Int@267..268 "1" + RParen@268..269 ")" + RParen@269..270 ")" diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.fe b/crates/parser2/test_files/syntax_node/exprs/struct_init.fe new file mode 100644 index 0000000000..75d6a3e907 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.fe @@ -0,0 +1,2 @@ +Struct {x, y} +Struct {x: 1 + 2} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap new file mode 100644 index 0000000000..ee44c822af --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap @@ -0,0 +1,41 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..31 + RecordInitExpr@0..13 + Path@0..6 + PathSegment@0..6 + Ident@0..6 "Struct" + WhiteSpace@6..7 " " + RecordFieldList@7..13 + LBrace@7..8 "{" + RecordField@8..9 + Ident@8..9 "x" + Comma@9..10 "," + WhiteSpace@10..11 " " + RecordField@11..12 + Ident@11..12 "y" + RBrace@12..13 "}" + Newline@13..14 "\n" + RecordInitExpr@14..31 + Path@14..20 + PathSegment@14..20 + Ident@14..20 "Struct" + WhiteSpace@20..21 " " + RecordFieldList@21..31 + LBrace@21..22 "{" + RecordField@22..30 + Ident@22..23 "x" + Colon@23..24 ":" + WhiteSpace@24..25 " " + BinExpr@25..30 + LitExpr@25..26 + Int@25..26 "1" + WhiteSpace@26..27 " " + Plus@27..28 "+" + WhiteSpace@28..29 " " + LitExpr@29..30 + Int@29..30 "2" + RBrace@30..31 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/func.fe b/crates/parser2/test_files/syntax_node/items/func.fe index 9e61b76925..c8d6f445f6 100644 --- a/crates/parser2/test_files/syntax_node/items/func.fe +++ b/crates/parser2/test_files/syntax_node/items/func.fe @@ -18,4 +18,4 @@ fn generics1(t: T, u: Option) -> T t } -fn decl(t: MyStruct) -> Result \ No newline at end of file +fn decl(t: MyStruct) -> Result {} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 77a12ff489..60a23426c5 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..358 - ItemList@0..358 +Root@0..361 + ItemList@0..361 Fn@0..30 ItemModifier@0..3 PubKw@0..3 "pub" @@ -268,7 +268,7 @@ Root@0..358 Newline@304..305 "\n" RBrace@305..306 "}" Newline@306..308 "\n\n" - Fn@308..358 + Fn@308..361 FnKw@308..310 "fn" WhiteSpace@310..311 " " Ident@311..315 "decl" @@ -329,4 +329,8 @@ Root@0..358 PathSegment@354..357 Ident@354..357 "Err" Gt@357..358 ">" + WhiteSpace@358..359 " " + BlockExpr@359..361 + LBrace@359..360 "{" + RBrace@360..361 "}" diff --git 
a/crates/parser2/test_files/syntax_node/items/impl_trait.fe b/crates/parser2/test_files/syntax_node/items/impl_trait.fe index e3352c6cc7..1010c81148 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.fe +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.fe @@ -7,6 +7,13 @@ impl Trait for F { impl Trait for F where T: Clone U: Bar +{ + fn foo>(t: T) { + do_something(t) + } +} + +impl Trait for F { fn foo>(t: T) { do_something(t) diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index e307be7404..c6210832ad 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -2,8 +2,8 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..196 - ItemList@0..196 +Root@0..317 + ItemList@0..317 ImplTrait@0..64 ImplKw@0..4 "impl" WhiteSpace@4..5 " " @@ -198,4 +198,134 @@ Root@0..196 RBrace@193..194 "}" Newline@194..195 "\n" RBrace@195..196 "}" + Newline@196..198 "\n\n" + ImplTrait@198..317 + ImplKw@198..202 "impl" + WhiteSpace@202..203 " " + PathType@203..226 + Path@203..208 + PathSegment@203..208 + Ident@203..208 "Trait" + GenericArgList@208..226 + Lt@208..209 "<" + GenericArg@209..217 + PathType@209..210 + Path@209..210 + PathSegment@209..210 + Ident@209..210 "T" + TypeBoundList@210..217 + Colon@210..211 ":" + WhiteSpace@211..212 " " + TypeBound@212..217 + Path@212..217 + PathSegment@212..217 + Ident@212..217 "Clone" + Comma@217..218 "," + WhiteSpace@218..219 " " + GenericArg@219..225 + PathType@219..220 + Path@219..220 + PathSegment@219..220 + Ident@219..220 "U" + TypeBoundList@220..225 + Colon@220..221 ":" + WhiteSpace@221..222 " " + TypeBound@222..225 + Path@222..225 + PathSegment@222..225 + Ident@222..225 "Bar" + Gt@225..226 ">" + WhiteSpace@226..227 " " + ForKw@227..230 "for" + WhiteSpace@230..231 " " + PathType@231..240 + Path@231..232 + PathSegment@231..232 + Ident@231..232 "F" + GenericArgList@232..240 + Lt@232..233 "<" + GenericArg@233..239 + PathType@233..234 + Path@233..234 + PathSegment@233..234 + Ident@233..234 "U" + TypeBoundList@234..239 + Colon@234..235 ":" + WhiteSpace@235..236 " " + TypeBound@236..239 + Path@236..239 + PathSegment@236..239 + Ident@236..239 "Bar" + Gt@239..240 ">" + WhiteSpace@240..241 " " + Newline@241..242 "\n" + ImplTraitItemList@242..317 + LBrace@242..243 "{" + Newline@243..244 "\n" + WhiteSpace@244..248 " " + Fn@248..315 + FnKw@248..250 "fn" + WhiteSpace@250..251 " " + Ident@251..254 "foo" + GenericParamList@254..272 + Lt@254..255 "<" + GenericParam@255..271 + Ident@255..256 "T" + TypeBoundList@256..271 + Colon@256..257 ":" + WhiteSpace@257..258 " " + TypeBound@258..271 + Path@258..268 + PathSegment@258..268 + Ident@258..268 "OtherTrait" + GenericArgList@268..271 + Lt@268..269 "<" + GenericArg@269..270 + PathType@269..270 + Path@269..270 + PathSegment@269..270 + Ident@269..270 "U" + Gt@270..271 ">" + Gt@271..272 ">" + FnArgList@272..278 + LParen@272..273 "(" + FnArg@273..277 + Ident@273..274 "t" + Colon@274..275 ":" + WhiteSpace@275..276 " " + PathType@276..277 + Path@276..277 + PathSegment@276..277 + Ident@276..277 "T" + RParen@277..278 ")" + WhiteSpace@278..279 " " + BlockExpr@279..315 + LBrace@279..280 "{" + Newline@280..281 "\n" + WhiteSpace@281..289 " " + ExprStmt@289..309 + CallExpr@289..309 + Path@289..301 + PathSegment@289..301 + Ident@289..301 "do_something" + GenericArgList@301..306 + Lt@301..302 "<" + GenericArg@302..305 + 
PathType@302..305 + Path@302..305 + PathSegment@302..305 + Ident@302..305 "i32" + Gt@305..306 ">" + CallArgList@306..309 + LParen@306..307 "(" + CallArg@307..308 + Path@307..308 + PathSegment@307..308 + Ident@307..308 "t" + RParen@308..309 ")" + Newline@309..310 "\n" + WhiteSpace@310..314 " " + RBrace@314..315 "}" + Newline@315..316 "\n" + RBrace@316..317 "}" diff --git a/crates/parser2/tests/error_recovery.rs b/crates/parser2/tests/error_recovery.rs new file mode 100644 index 0000000000..1154c0a2ad --- /dev/null +++ b/crates/parser2/tests/error_recovery.rs @@ -0,0 +1,64 @@ +use fe_parser2::{ + parser::{expr::parse_expr, item::ItemListScope, stmt::parse_stmt}, + syntax_node::SyntaxNode, +}; +mod test_runner; +use test_runner::*; +fe_compiler_test_utils::build_debug_snap_tests! { + "parser2/test_files/error_recovery/items", + "parser2/test_files/error_recovery/items", + test_item_list +} +fn test_item_list(input: &str) -> SyntaxNode { + let runner = TestRunner::new( + |parser| { + parser.parse(ItemListScope::default(), None); + }, + false, + ); + runner.run(input) +} + +fe_compiler_test_utils::build_debug_snap_tests! { + "parser2/test_files/error_recovery/exprs", + "parser2/test_files/error_recovery/exprs", + test_expr +} +fn test_expr(input: &str) -> SyntaxNode { + let runner = TestRunner::new( + |parser| { + parser.set_newline_as_trivia(false); + + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_expr(parser); + bump_newlines(parser); + } + }, + false, + ); + runner.run(input) +} + +fe_compiler_test_utils::build_debug_snap_tests! { + "parser2/test_files/error_recovery/stmts", + "parser2/test_files/error_recovery/stmts", + test_stmt +} +fn test_stmt(input: &str) -> SyntaxNode { + let runner = TestRunner::new( + |parser| { + parser.set_newline_as_trivia(false); + + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_stmt(parser, None); + bump_newlines(parser); + } + }, + false, + ); + runner.run(input) +} diff --git a/crates/parser2/tests/main.rs b/crates/parser2/tests/main.rs deleted file mode 100644 index f893f25815..0000000000 --- a/crates/parser2/tests/main.rs +++ /dev/null @@ -1 +0,0 @@ -mod syntax_node; diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 7555d19c65..8c645760b5 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -65,7 +65,6 @@ fe_compiler_test_utils::build_debug_snap_tests! 
{ "parser2/test_files/syntax_node/stmts", test_stmt } - fn test_stmt(input: &str) -> SyntaxNode { let runner = TestRunner::new( |parser| { diff --git a/crates/parser2/tests/test_runner.rs b/crates/parser2/tests/test_runner.rs new file mode 100644 index 0000000000..88cac2888c --- /dev/null +++ b/crates/parser2/tests/test_runner.rs @@ -0,0 +1,54 @@ +#![allow(unused)] + +use fe_parser2::{ + lexer, + parser::{Parser, RootScope}, + syntax_node::SyntaxNode, + SyntaxKind, +}; + +pub struct TestRunner +where + F: Fn(&mut Parser), +{ + f: F, + should_success: bool, +} + +impl TestRunner +where + F: Fn(&mut Parser), +{ + pub fn new(f: F, should_success: bool) -> Self { + Self { f, should_success } + } + + pub fn run(&self, input: &str) -> SyntaxNode { + let lexer = lexer::Lexer::new(input); + let mut parser = Parser::new(lexer); + + let checkpoint = parser.enter(RootScope::default(), None); + (self.f)(&mut parser); + parser.leave(checkpoint); + + let (cst, errors) = parser.finish(); + + for error in &errors { + println!("{}@{:?}", error.msg, error.range); + } + if self.should_success { + assert! {errors.is_empty()} + } else { + assert! {!errors.is_empty()} + } + assert!(input == cst.to_string()); + + cst + } +} + +pub fn bump_newlines(parser: &mut Parser) { + while parser.current_kind() == Some(SyntaxKind::Newline) { + parser.bump(); + } +} From 8f9698b4f1b28852b71113ec4562dfa56e5199cf Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 27 Jan 2023 20:02:06 +0100 Subject: [PATCH 045/678] Make clippy happy --- crates/analyzer/src/context.rs | 3 +- crates/analyzer/src/errors.rs | 43 +++++++++++--------- crates/analyzer/src/namespace/types.rs | 11 ++--- crates/analyzer/src/traversal/expressions.rs | 14 ++++--- crates/analyzer/src/traversal/types.rs | 5 ++- crates/codegen/src/yul/runtime/revert.rs | 3 +- crates/common/src/db.rs | 3 +- crates/common/src/numeric.rs | 3 +- crates/mir/src/db/queries/function.rs | 4 +- crates/mir/src/lower/function.rs | 4 +- crates/parser/src/grammar/types.rs | 3 +- crates/parser/src/lib.rs | 4 +- crates/parser2/src/parser/expr.rs | 8 ++-- crates/parser2/src/parser/item.rs | 2 +- crates/tests/src/differential.rs | 3 +- 15 files changed, 64 insertions(+), 49 deletions(-) diff --git a/crates/analyzer/src/context.rs b/crates/analyzer/src/context.rs index ef72c8d125..235b0db777 100644 --- a/crates/analyzer/src/context.rs +++ b/crates/analyzer/src/context.rs @@ -54,7 +54,8 @@ pub trait AnalyzerContext { fn resolve_name(&self, name: &str, span: Span) -> Result, IncompleteItem>; /// Resolves the given path and registers all errors fn resolve_path(&self, path: &ast::Path, span: Span) -> Result; - /// Resolves the given path only if it is visible. Does not register any errors + /// Resolves the given path only if it is visible. Does not register any + /// errors fn resolve_visible_path(&self, path: &ast::Path) -> Option; /// Resolves the given path. Does not register any errors fn resolve_any_path(&self, path: &ast::Path) -> Option; diff --git a/crates/analyzer/src/errors.rs b/crates/analyzer/src/errors.rs index 480b509da3..baf4712f7a 100644 --- a/crates/analyzer/src/errors.rs +++ b/crates/analyzer/src/errors.rs @@ -11,18 +11,21 @@ use std::fmt::Display; /// in [`crate::namespace::types`] is sometimes represented as a /// `Result`. /// -/// If, for example, a function parameter has an undefined type, we emit a [`Diagnostic`] message, -/// give that parameter a "type" of `Err(TypeError)`, and carry on. 
If/when that parameter is -/// used in the function body, we assume that a diagnostic message about the undefined type -/// has already been emitted, and halt the analysis of the function body. +/// If, for example, a function parameter has an undefined type, we emit a +/// [`Diagnostic`] message, give that parameter a "type" of `Err(TypeError)`, +/// and carry on. If/when that parameter is used in the function body, we assume +/// that a diagnostic message about the undefined type has already been emitted, +/// and halt the analysis of the function body. /// -/// To ensure that that assumption is sound, a diagnostic *must* be emitted before creating -/// a `TypeError`. So that the rust compiler can help us enforce this rule, a `TypeError` -/// cannot be constructed without providing a [`DiagnosticVoucher`]. A voucher can be obtained -/// by calling an error function on an [`AnalyzerContext`](crate::context::AnalyzerContext). +/// To ensure that that assumption is sound, a diagnostic *must* be emitted +/// before creating a `TypeError`. So that the rust compiler can help us enforce +/// this rule, a `TypeError` cannot be constructed without providing a +/// [`DiagnosticVoucher`]. A voucher can be obtained by calling an error +/// function on an [`AnalyzerContext`](crate::context::AnalyzerContext). /// Please don't try to work around this restriction. /// -/// Example: `TypeError::new(context.error("something is wrong", some_span, "this thing"))` +/// Example: `TypeError::new(context.error("something is wrong", some_span, +/// "this thing"))` #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeError(DiagnosticVoucher); impl TypeError { @@ -44,15 +47,16 @@ impl From for TypeError { } } -/// Error to be returned when otherwise no meaningful information can be returned. -/// Can't be created unless a diagnostic has been emitted, and thus a [`DiagnosticVoucher`] -/// has been obtained. (See comment on [`TypeError`]) +/// Error to be returned when otherwise no meaningful information can be +/// returned. Can't be created unless a diagnostic has been emitted, and thus a +/// [`DiagnosticVoucher`] has been obtained. (See comment on [`TypeError`]) #[derive(Debug)] pub struct FatalError(DiagnosticVoucher); impl FatalError { /// Create a `FatalError` instance, given a "voucher" - /// obtained by emitting an error via an [`AnalyzerContext`](crate::context::AnalyzerContext). + /// obtained by emitting an error via an + /// [`AnalyzerContext`](crate::context::AnalyzerContext). pub fn new(voucher: DiagnosticVoucher) -> Self { Self(voucher) } @@ -77,8 +81,8 @@ impl From for FatalError { /// 2. arithmetic overflow occurred during evaluation /// 3. zero division is detected during evaluation /// -/// Can't be created unless a diagnostic has been emitted, and thus a [`DiagnosticVoucher`] -/// has been obtained. (See comment on [`TypeError`]) +/// Can't be created unless a diagnostic has been emitted, and thus a +/// [`DiagnosticVoucher`] has been obtained. (See comment on [`TypeError`]) /// /// NOTE: `Clone` is required because these are stored in a salsa db. /// Please don't clone these manually. @@ -109,10 +113,11 @@ impl From for ConstEvalError { } } -/// Error returned by `ModuleId::resolve_name` if the name is not found, and parsing of the module -/// failed. In this case, emitting an error message about failure to resolve the name might be misleading, -/// because the file may in fact contain an item with the given name, somewhere after the syntax error that caused -/// parsing to fail. 
+/// Error returned by `ModuleId::resolve_name` if the name is not found, and +/// parsing of the module failed. In this case, emitting an error message about +/// failure to resolve the name might be misleading, because the file may in +/// fact contain an item with the given name, somewhere after the syntax error +/// that caused parsing to fail. #[derive(Debug)] pub struct IncompleteItem(DiagnosticVoucher); impl IncompleteItem { diff --git a/crates/analyzer/src/namespace/types.rs b/crates/analyzer/src/namespace/types.rs index e2a8808407..abdef2a3de 100644 --- a/crates/analyzer/src/namespace/types.rs +++ b/crates/analyzer/src/namespace/types.rs @@ -182,9 +182,10 @@ impl TypeId { db.impl_for(*self, trait_) } - /// Looks up all possible candidates of the given function name that are implemented via traits. - /// Groups results in two lists, the first contains all theoretical possible candidates and - /// the second contains only those that are actually callable because the trait is in scope. + /// Looks up all possible candidates of the given function name that are + /// implemented via traits. Groups results in two lists, the first + /// contains all theoretical possible candidates and the second contains + /// only those that are actually callable because the trait is in scope. pub fn trait_function_candidates( &self, context: &mut dyn AnalyzerContext, @@ -215,8 +216,8 @@ impl TypeId { (candidates, in_scope_candidates) } - /// Signature for the function with the given name defined directly on the type. - /// Does not consider trait impls. + /// Signature for the function with the given name defined directly on the + /// type. Does not consider trait impls. pub fn function_sig(&self, db: &dyn AnalyzerDb, name: &str) -> Option { match self.typ(db) { Type::SPtr(inner) => inner.function_sig(db, name), diff --git a/crates/analyzer/src/traversal/expressions.rs b/crates/analyzer/src/traversal/expressions.rs index 3199eb5a81..078e605505 100644 --- a/crates/analyzer/src/traversal/expressions.rs +++ b/crates/analyzer/src/traversal/expressions.rs @@ -981,8 +981,8 @@ fn expr_call_path( validate_has_no_conflicting_trait_in_scope(context, &named_thing, path, func)?; expr_call_named_thing(context, named_thing, func, generic_args, args) } - // If we we can't resolve a call to a path e.g. `foo::Bar::do_thing()` there is a chance that `do_thing` - // still exists as as a trait associated function for `foo::Bar`. + // If we we can't resolve a call to a path e.g. `foo::Bar::do_thing()` there is a chance + // that `do_thing` still exists as as a trait associated function for `foo::Bar`. 
None => expr_call_trait_associated_function(context, path, func, generic_args, args), } } @@ -1079,7 +1079,8 @@ fn expr_call_trait_associated_function( .into(), ], ); - // We arbitrarily carry on with the first candidate since the error doesn't need to be fatal + // We arbitrarily carry on with the first candidate since the error doesn't need + // to be fatal let (fun, _) = in_scope_candidates[0]; return expr_call_pure(context, fun, func.span, generic_args, args); } else if in_scope_candidates.is_empty() && !candidates.is_empty() { @@ -1094,7 +1095,8 @@ fn expr_call_trait_associated_function( }).collect(), vec!["Hint: Bring one of these candidates in scope via `use module_name::trait_name`".into()], ); - // We arbitrarily carry on with an applicable candidate since the error doesn't need to be fatal + // We arbitrarily carry on with an applicable candidate since the error doesn't + // need to be fatal let (fun, _) = candidates[0]; return expr_call_pure(context, fun, func.span, generic_args, args); } else if in_scope_candidates.len() == 1 { @@ -1103,8 +1105,8 @@ fn expr_call_trait_associated_function( } } - // At this point, we will have an error so we run `resolve_path` to register any errors that we - // did not report yet + // At this point, we will have an error so we run `resolve_path` to register any + // errors that we did not report yet context.resolve_path(path, func.span)?; Err(FatalError::new(context.error( diff --git a/crates/analyzer/src/traversal/types.rs b/crates/analyzer/src/traversal/types.rs index 0ef4172d39..23f8e0502c 100644 --- a/crates/analyzer/src/traversal/types.rs +++ b/crates/analyzer/src/traversal/types.rs @@ -18,8 +18,9 @@ use fe_parser::ast; use fe_parser::node::{Node, Span}; use std::cmp::Ordering; -/// Try to perform an explicit type cast, eg `u256(my_address)` or `address(my_contract)`. -/// Returns nothing. Emits an error if the cast fails; explicit cast failures are not fatal. +/// Try to perform an explicit type cast, eg `u256(my_address)` or +/// `address(my_contract)`. Returns nothing. Emits an error if the cast fails; +/// explicit cast failures are not fatal. pub fn try_cast_type( context: &mut dyn AnalyzerContext, from: TypeId, diff --git a/crates/codegen/src/yul/runtime/revert.rs b/crates/codegen/src/yul/runtime/revert.rs index e884755b8e..1f8fa9eb7b 100644 --- a/crates/codegen/src/yul/runtime/revert.rs +++ b/crates/codegen/src/yul/runtime/revert.rs @@ -76,7 +76,8 @@ fn type_signature_for_revert(db: &dyn CodegenDb, name: &str, ty: TypeId) -> yul: } }; - // selector and state mutability is independent we can set has_self and has_ctx any value. + // selector and state mutability is independent we can set has_self and has_ctx + // any value. 
let selector = AbiFunction::new( AbiFunctionType::Function, name.to_string(), diff --git a/crates/common/src/db.rs b/crates/common/src/db.rs index 79b6faf75e..0b06596423 100644 --- a/crates/common/src/db.rs +++ b/crates/common/src/db.rs @@ -17,7 +17,8 @@ pub trait SourceDb { #[salsa::interned] fn intern_file(&self, file: File) -> SourceFileId; - /// Set with `fn set_file_content(&mut self, file: SourceFileId, content: Rc) + /// Set with `fn set_file_content(&mut self, file: SourceFileId, content: + /// Rc) #[salsa::input] fn file_content(&self, file: SourceFileId) -> Rc; diff --git a/crates/common/src/numeric.rs b/crates/common/src/numeric.rs index 77e83a4801..9cded623fb 100644 --- a/crates/common/src/numeric.rs +++ b/crates/common/src/numeric.rs @@ -62,7 +62,8 @@ impl<'a> Literal<'a> { } } -// Converts any positive or negative `BigInt` into a hex str using 2s complement representation for negative values. +// Converts any positive or negative `BigInt` into a hex str using 2s complement +// representation for negative values. pub fn to_hex_str(val: &BigInt) -> String { format!( "0x{}", diff --git a/crates/mir/src/db/queries/function.rs b/crates/mir/src/db/queries/function.rs index b6e7461425..27d69e2dea 100644 --- a/crates/mir/src/db/queries/function.rs +++ b/crates/mir/src/db/queries/function.rs @@ -28,8 +28,8 @@ pub fn mir_lowered_monomorphized_func_signature( lower_monomorphized_func_signature(db, analyzer_func, resolved_generics) } -/// Generate MIR function and monomorphize generic parameters as if they were called with unit type -/// NOTE: THIS SHOULD ONLY BE USED IN TEST CODE +/// Generate MIR function and monomorphize generic parameters as if they were +/// called with unit type NOTE: THIS SHOULD ONLY BE USED IN TEST CODE pub fn mir_lowered_pseudo_monomorphized_func_signature( db: &dyn MirDb, analyzer_func: analyzer_items::FunctionId, diff --git a/crates/mir/src/lower/function.rs b/crates/mir/src/lower/function.rs index dacfe593f2..15dba949e0 100644 --- a/crates/mir/src/lower/function.rs +++ b/crates/mir/src/lower/function.rs @@ -264,8 +264,8 @@ impl<'db, 'a> BodyLowerHelper<'db, 'a> { let value = self.declare_var(name, ty, var.into()); if let Some(init) = init { let (init, _init_ty) = self.lower_expr(init); - // debug_assert_eq!(ty.deref(self.db), init_ty, "vardecl init type mismatch: {} != {}", - // ty.as_string(self.db), + // debug_assert_eq!(ty.deref(self.db), init_ty, "vardecl init type mismatch: {} + // != {}", ty.as_string(self.db), // init_ty.as_string(self.db)); self.builder.map_result(init, value.into()); } diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs index 8953c79fce..54adf78a28 100644 --- a/crates/parser/src/grammar/types.rs +++ b/crates/parser/src/grammar/types.rs @@ -34,7 +34,8 @@ pub fn parse_struct_def( let attributes = if let Some(attr) = par.optional(TokenKind::Hash) { let attr_name = par.expect_with_notes(TokenKind::Name, "failed to parse attribute definition", |_| vec!["Note: an attribute name must start with a letter or underscore, and contain letters, numbers, or underscores".into()])?; - // This hints to a future where we would support multiple attributes per field. For now we don't need it. + // This hints to a future where we would support multiple attributes per field. + // For now we don't need it. 
vec![Node::new(attr_name.text.into(), attr.span + attr_name.span)] } else { vec![] diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index fbd217b1b5..a11e77aed8 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -18,8 +18,8 @@ use fe_common::files::SourceFileId; /// /// If a fatal parse error occurred, the last element of the `Module::body` will /// be a `ModuleStmt::ParseError`. The parser currently has very limited ability -/// to recover from syntax errors; this is just a first meager attempt at returning a -/// useful AST when there are syntax errors. +/// to recover from syntax errors; this is just a first meager attempt at +/// returning a useful AST when there are syntax errors. /// /// A [`SourceFileId`] is required to associate any diagnostics with the /// underlying file. diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index 7bbf62c3ac..797f57fac2 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -393,10 +393,10 @@ fn is_method_call(parser: &mut Parser) -> bool { return false; } - if parser.current_kind() == Some(SyntaxKind::Lt) { - if !parser.parse(GenericArgListScope::default(), None).0 { - return false; - } + if parser.current_kind() == Some(SyntaxKind::Lt) + && !parser.parse(GenericArgListScope::default(), None).0 + { + return false; } if parser.current_kind() != Some(SyntaxKind::LParen) { diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index d182d588c8..d1506f25b3 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -94,7 +94,7 @@ impl super::Parse for ItemListScope { parser.parse(TypeAliasScope::default(), checkpoint); } tok => parser - .error_and_recover(&format! {"expected item: but got {:?}", tok}, checkpoint), + .error_and_recover(&format! {"expected item: but got {tok:?}"}, checkpoint), } parser.set_newline_as_trivia(false); diff --git a/crates/tests/src/differential.rs b/crates/tests/src/differential.rs index d26f963454..e7c3864fdc 100644 --- a/crates/tests/src/differential.rs +++ b/crates/tests/src/differential.rs @@ -1,4 +1,5 @@ -//! Tests that check for differences between Solidity and Fe implementations of similar contracts +//! Tests that check for differences between Solidity and Fe implementations of +//! 
similar contracts #![cfg(feature = "solc-backend")] use proptest::prelude::*; From 1be3243bf2ec71e92e20643cffdb99c83c4debe2 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 27 Jan 2023 20:42:38 +0100 Subject: [PATCH 046/678] Modify `assert_harness_gas_report` macro so it can work with `rstest` correctly --- Cargo.lock | 1 + .../fe_compiler_tests__features__associated_fns.snap | 1 - .../fe_compiler_tests__features__aug_assign-10.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-11.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-12.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-13.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-2.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-3.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-4.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-5.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-6.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-7.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-8.snap | 6 ------ .../fe_compiler_tests__features__aug_assign-9.snap | 6 ------ .../fe_compiler_tests__features__aug_assign.snap | 6 ------ .../snapshots/fe_compiler_tests__features__call_fn.snap | 1 - .../fe_compiler_tests__features__const_local.snap | 1 - .../fe_compiler_tests__features__constructor.snap | 1 - .../fe_compiler_tests__features__create2_contract.snap | 1 - .../fe_compiler_tests__features__create_contract.snap | 1 - ...piler_tests__features__create_contract_from_init.snap | 1 - ...er_tests__features__ctx_param_internal_func_call.snap | 1 - .../fe_compiler_tests__features__ctx_param_simple.snap | 1 - .../fe_compiler_tests__features__execution_tests-2.snap | 6 ------ .../fe_compiler_tests__features__execution_tests-3.snap | 6 ------ .../fe_compiler_tests__features__execution_tests-4.snap | 6 ------ .../fe_compiler_tests__features__execution_tests-5.snap | 6 ------ .../fe_compiler_tests__features__execution_tests.snap | 6 ------ .../fe_compiler_tests__features__if_statement_2.snap | 1 - .../fe_compiler_tests__features__if_statement_a.snap | 1 - .../fe_compiler_tests__features__if_statement_b.snap | 1 - ...iler_tests__features__if_statement_test_from_sto.snap | 1 - ...s__features__if_statement_with_block_declaration.snap | 1 - ...e_compiler_tests__features__int_literal_coercion.snap | 1 - .../snapshots/fe_compiler_tests__features__map-2.snap | 9 --------- .../snapshots/fe_compiler_tests__features__map-3.snap | 9 --------- .../snapshots/fe_compiler_tests__features__map-4.snap | 9 --------- .../snapshots/fe_compiler_tests__features__map-5.snap | 9 --------- .../snapshots/fe_compiler_tests__features__map-6.snap | 9 --------- .../src/snapshots/fe_compiler_tests__features__map.snap | 9 --------- .../fe_compiler_tests__features__map_tuple.snap | 1 - .../fe_compiler_tests__features__multi_param.snap | 1 - .../fe_compiler_tests__features__numeric_casts.snap | 1 - .../fe_compiler_tests__features__numeric_sizes.snap | 1 - .../snapshots/fe_compiler_tests__features__pure_fn.snap | 1 - ..._compiler_tests__features__pure_fn_internal_call.snap | 1 - .../fe_compiler_tests__features__radix_binary.snap | 1 - .../fe_compiler_tests__features__radix_hex.snap | 1 - .../fe_compiler_tests__features__radix_octal.snap | 1 - ...e_compiler_tests__features__return_addition_u256.snap | 1 - .../fe_compiler_tests__features__return_array.snap | 1 - ...compiler_tests__features__return_bitwiseand_u256.snap | 1 - ..._compiler_tests__features__return_bitwiseor_u256.snap 
| 1 - ...r_tests__features__return_bitwiseshl_i64_coerced.snap | 1 - ...mpiler_tests__features__return_bitwiseshl_u256_a.snap | 1 - ...mpiler_tests__features__return_bitwiseshl_u256_b.snap | 1 - ...mpiler_tests__features__return_bitwiseshr_i256_a.snap | 1 - ...mpiler_tests__features__return_bitwiseshr_i256_b.snap | 1 - ...mpiler_tests__features__return_bitwiseshr_u256_c.snap | 1 - ...mpiler_tests__features__return_bitwiseshr_u256_d.snap | 1 - ...compiler_tests__features__return_bitwisexor_u256.snap | 1 - .../fe_compiler_tests__features__return_bool_false.snap | 1 - .../fe_compiler_tests__features__return_bool_true.snap | 1 - ..._compiler_tests__features__return_complex_struct.snap | 1 - ...e_compiler_tests__features__return_division_u256.snap | 1 - .../fe_compiler_tests__features__return_eq_u256_a.snap | 1 - .../fe_compiler_tests__features__return_eq_u256_b.snap | 1 - .../fe_compiler_tests__features__return_gt_i256_a.snap | 1 - .../fe_compiler_tests__features__return_gt_i256_b.snap | 1 - .../fe_compiler_tests__features__return_gt_i256_c.snap | 1 - .../fe_compiler_tests__features__return_gt_i256_d.snap | 1 - .../fe_compiler_tests__features__return_gt_i256_e.snap | 1 - .../fe_compiler_tests__features__return_gt_i256_f.snap | 1 - .../fe_compiler_tests__features__return_gt_u256_a.snap | 1 - .../fe_compiler_tests__features__return_gt_u256_b.snap | 1 - .../fe_compiler_tests__features__return_gt_u256_c.snap | 1 - .../fe_compiler_tests__features__return_gte_i256_a.snap | 1 - .../fe_compiler_tests__features__return_gte_i256_b.snap | 1 - .../fe_compiler_tests__features__return_gte_i256_c.snap | 1 - .../fe_compiler_tests__features__return_gte_i256_d.snap | 1 - .../fe_compiler_tests__features__return_gte_i256_e.snap | 1 - .../fe_compiler_tests__features__return_gte_i256_f.snap | 1 - .../fe_compiler_tests__features__return_gte_u256_a.snap | 1 - .../fe_compiler_tests__features__return_gte_u256_b.snap | 1 - .../fe_compiler_tests__features__return_gte_u256_c.snap | 1 - .../fe_compiler_tests__features__return_i128_cast.snap | 1 - .../fe_compiler_tests__features__return_i256.snap | 1 - ...e_compiler_tests__features__return_identity_u256.snap | 1 - .../fe_compiler_tests__features__return_invert_i256.snap | 1 - .../fe_compiler_tests__features__return_invert_u256.snap | 1 - .../fe_compiler_tests__features__return_lt_i256_a.snap | 1 - .../fe_compiler_tests__features__return_lt_i256_b.snap | 1 - .../fe_compiler_tests__features__return_lt_i256_c.snap | 1 - .../fe_compiler_tests__features__return_lt_i256_d.snap | 1 - .../fe_compiler_tests__features__return_lt_i256_e.snap | 1 - .../fe_compiler_tests__features__return_lt_i256_f.snap | 1 - .../fe_compiler_tests__features__return_lt_u256_a.snap | 1 - .../fe_compiler_tests__features__return_lt_u256_b.snap | 1 - .../fe_compiler_tests__features__return_lt_u256_c.snap | 1 - .../fe_compiler_tests__features__return_lte_i256_a.snap | 1 - .../fe_compiler_tests__features__return_lte_i256_b.snap | 1 - .../fe_compiler_tests__features__return_lte_i256_c.snap | 1 - .../fe_compiler_tests__features__return_lte_i256_d.snap | 1 - .../fe_compiler_tests__features__return_lte_i256_e.snap | 1 - .../fe_compiler_tests__features__return_lte_i256_f.snap | 1 - .../fe_compiler_tests__features__return_lte_u256.snap | 1 - .../fe_compiler_tests__features__return_lte_u256_a.snap | 1 - .../fe_compiler_tests__features__return_lte_u256_b.snap | 1 - .../fe_compiler_tests__features__return_mod_i256_a.snap | 1 - .../fe_compiler_tests__features__return_mod_i256_b.snap | 1 - 
.../fe_compiler_tests__features__return_mod_i256_c.snap | 1 - .../fe_compiler_tests__features__return_mod_u256_a.snap | 1 - .../fe_compiler_tests__features__return_mod_u256_b.snap | 1 - .../fe_compiler_tests__features__return_mod_u256_c.snap | 1 - .../fe_compiler_tests__features__return_msg_sig.snap | 1 - ...iler_tests__features__return_multiplication_u256.snap | 1 - .../fe_compiler_tests__features__return_noteq_u256a.snap | 1 - .../fe_compiler_tests__features__return_noteq_u256b.snap | 1 - .../fe_compiler_tests__features__return_pow_i256.snap | 1 - .../fe_compiler_tests__features__return_pow_u256_a.snap | 1 - ...ompiler_tests__features__return_subtraction_u256.snap | 1 - .../fe_compiler_tests__features__return_u128_cast.snap | 1 - .../fe_compiler_tests__features__return_u256.snap | 1 - ...iler_tests__features__return_u256_from_called_fn.snap | 1 - ...__features__return_u256_from_called_fn_with_args.snap | 1 - ...e_compiler_tests__features__signext_int_array1-2.snap | 6 ------ .../fe_compiler_tests__features__signext_int_array1.snap | 6 ------ ...e_compiler_tests__features__signext_int_array2-2.snap | 6 ------ .../fe_compiler_tests__features__signext_int_array2.snap | 6 ------ ...e_compiler_tests__features__ternary_expression_a.snap | 1 - ...e_compiler_tests__features__ternary_expression_b.snap | 1 - .../fe_compiler_tests__features__value_semantics.snap | 1 - .../fe_compiler_tests__features__while_loop.snap | 1 - ...mpiler_tests__features__while_loop_test_from_sto.snap | 1 - ..._compiler_tests__features__while_loop_with_break.snap | 1 - ...ompiler_tests__features__while_loop_with_break_2.snap | 1 - ...mpiler_tests__features__while_loop_with_continue.snap | 1 - 137 files changed, 1 insertion(+), 294 deletions(-) delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap delete mode 100644 
crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__map.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap delete mode 100644 crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap diff --git a/Cargo.lock b/Cargo.lock index 28f92e70c3..1384dc0d0d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -745,6 +745,7 @@ dependencies = [ name = "fe-parser2" version = "0.20.0-alpha" dependencies = [ + "fe-compiler-test-utils", "fxhash", "lazy_static", "logos", diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__associated_fns.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__associated_fns.snap index eaf7fdb7c5..41153e6453 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__associated_fns.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__associated_fns.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(12)]) used 22698 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap deleted file mode 100644 index d4a3f5deae..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-10.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -bit_xor([Uint(26), Uint(42)]) used 496 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap deleted file mode 100644 index 1de8ef91c5..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-11.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -bit_and([Uint(26), Uint(42)]) used 512 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap deleted file mode 100644 index 8951446b54..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-12.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -add_from_sto([Uint(2), Uint(5)]) used 22618 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap deleted file mode 100644 index 2995cf5704..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-13.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -add_from_mem([Uint(2), Uint(5)]) used 807 gas - diff --git 
a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap deleted file mode 100644 index 6f7c291973..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-2.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -sub([Uint(42), Uint(26)]) used 288 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap deleted file mode 100644 index 03cc0eab67..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-3.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -mul([Uint(10), Uint(42)]) used 335 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap deleted file mode 100644 index 909c2eaced..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-4.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -div([Uint(43), Uint(5)]) used 331 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap deleted file mode 100644 index 2a113b358b..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-5.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -mod([Uint(43), Uint(5)]) used 353 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap deleted file mode 100644 index 077c7542cd..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-6.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -pow([Uint(3), Uint(5)]) used 618 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap deleted file mode 100644 index 75e0b47ec1..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-7.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -lshift([Uint(1), Uint(7)]) used 433 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap deleted file mode 100644 index 9299651af8..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-8.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -rshift([Uint(128), Uint(7)]) used 455 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap deleted file mode 100644 index 32352fa398..0000000000 --- 
a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign-9.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -bit_or([Uint(26), Uint(42)]) used 477 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap deleted file mode 100644 index b29f4fc6b9..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__aug_assign.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -add([Uint(2), Uint(5)]) used 269 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__call_fn.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__call_fn.snap index 715961b21e..9b6fed818e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__call_fn.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__call_fn.snap @@ -1,6 +1,5 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__const_local.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__const_local.snap index eedfa52178..266f1a5ebf 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__const_local.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__const_local.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 2323 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__constructor.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__constructor.snap index 419ba186fe..95ccc3663d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__constructor.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__constructor.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- read_bar([]) used 295 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__create2_contract.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__create2_contract.snap index 5e05ab81fe..e75cfe7b46 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__create2_contract.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__create2_contract.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", factory_harness.gas_reporter)" - --- create2_foo([]) used 40474 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract.snap index ddb1ff7d2b..095f493576 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", factory_harness.gas_reporter)" - --- create_foo([]) used 40459 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract_from_init.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract_from_init.snap index 91f78725dc..0499ec9aed 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract_from_init.snap +++ 
b/crates/tests/src/snapshots/fe_compiler_tests__features__create_contract_from_init.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", factory_harness.gas_reporter)" - --- get_foo_addr([]) used 211 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_internal_func_call.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_internal_func_call.snap index 432b5ee3ea..64762e4b98 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_internal_func_call.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_internal_func_call.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 2328 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_simple.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_simple.snap index 53b31b3c46..51edf0384c 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_simple.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__ctx_param_simple.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 101 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap deleted file mode 100644 index 2ae9708e79..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-2.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -run_test([]) used 32 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap deleted file mode 100644 index 2ae9708e79..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-3.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -run_test([]) used 32 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap deleted file mode 100644 index 2ae9708e79..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-4.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -run_test([]) used 32 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap deleted file mode 100644 index 47753d7dd7..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests-5.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -run_test([]) used 35 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap deleted file mode 100644 index 7bee56fc1a..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__execution_tests.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: 
crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -run_test([]) used 1361 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_2.snap index bb1c9742fe..4da4dc3fe8 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_2.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_2.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(6)]) used 225 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_a.snap index bb1c9742fe..4da4dc3fe8 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(6)]) used 225 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_b.snap index ad8cc426b7..1a5fbe84ce 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(4)]) used 239 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_test_from_sto.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_test_from_sto.snap index 1941cb3e88..de0bb82aec 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_test_from_sto.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_test_from_sto.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 22418 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_with_block_declaration.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_with_block_declaration.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_with_block_declaration.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__if_statement_with_block_declaration.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__int_literal_coercion.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__int_literal_coercion.snap index 461dfde510..96211f11df 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__int_literal_coercion.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__int_literal_coercion.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 311 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap deleted file mode 100644 index cd73de23d2..0000000000 --- 
a/crates/tests/src/snapshots/fe_compiler_tests__features__map-2.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -write_bar([Uint(4), Uint(42)]) used 22494 gas -write_bar([Uint(26), Uint(12)]) used 22494 gas -read_bar([Uint(4)]) used 468 gas -read_bar([Uint(26)]) used 468 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap deleted file mode 100644 index cd73de23d2..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__map-3.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -write_bar([Uint(4), Uint(42)]) used 22494 gas -write_bar([Uint(26), Uint(12)]) used 22494 gas -read_bar([Uint(4)]) used 468 gas -read_bar([Uint(26)]) used 468 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap deleted file mode 100644 index 845b4aff1c..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__map-4.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -write_bar([Uint(4), Uint(42)]) used 22482 gas -write_bar([Uint(26), Uint(12)]) used 22482 gas -read_bar([Uint(4)]) used 438 gas -read_bar([Uint(26)]) used 438 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap deleted file mode 100644 index 845b4aff1c..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__map-5.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -write_bar([Uint(4), Uint(42)]) used 22482 gas -write_bar([Uint(26), Uint(12)]) used 22482 gas -read_bar([Uint(4)]) used 438 gas -read_bar([Uint(26)]) used 438 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap deleted file mode 100644 index 845b4aff1c..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__map-6.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -write_bar([Uint(4), Uint(42)]) used 22482 gas -write_bar([Uint(26), Uint(12)]) used 22482 gas -read_bar([Uint(4)]) used 438 gas -read_bar([Uint(26)]) used 438 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map.snap deleted file mode 100644 index 38a4c5cec1..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__map.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -write_bar([Uint(4), Uint(42)]) used 22401 gas -write_bar([Uint(26), Uint(12)]) used 22401 gas -read_bar([Uint(4)]) used 383 gas -read_bar([Uint(26)]) used 383 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__map_tuple.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__map_tuple.snap index cc13957724..8348836dd4 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__map_tuple.snap +++ 
b/crates/tests/src/snapshots/fe_compiler_tests__features__map_tuple.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1234)]) used 44425 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__multi_param.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__multi_param.snap index 6d8f811d38..9dffe4cb89 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__multi_param.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__multi_param.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(4), Uint(42), Uint(420)]) used 976 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_casts.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_casts.snap index 3a0cedd9de..b617e87afe 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_casts.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_casts.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 32 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_sizes.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_sizes.snap index a63a18d23c..adec4e5272 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_sizes.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__numeric_sizes.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- assert_min_max([]) used 35 gas get_u8_min([]) used 145 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn.snap index 37b6559a96..78eb171f84 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42), Uint(26)]) used 202 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn_internal_call.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn_internal_call.snap index 37b6559a96..78eb171f84 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn_internal_call.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__pure_fn_internal_call.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42), Uint(26)]) used 202 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__radix_binary.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__radix_binary.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__radix_binary.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__radix_binary.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__radix_hex.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__radix_hex.snap index 963565f5bd..258d191e3d 100644 --- 
a/crates/tests/src/snapshots/fe_compiler_tests__features__radix_hex.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__radix_hex.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__radix_octal.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__radix_octal.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__radix_octal.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__radix_octal.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_addition_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_addition_u256.snap index 9d74add1b8..4b4928b04c 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_addition_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_addition_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42), Uint(42)]) used 202 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_array.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_array.snap index 23ed776d11..3181c79ed3 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_array.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_array.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42)]) used 1356 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseand_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseand_u256.snap index 9602984579..e83b582b0e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseand_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseand_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(12), Uint(25)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseor_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseor_u256.snap index 9602984579..e83b582b0e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseor_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseor_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(12), Uint(25)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_i64_coerced.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_i64_coerced.snap index 7cc6f39a22..56bd5e2cb8 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_i64_coerced.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_i64_coerced.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 105 gas diff --git 
a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_a.snap index f56ac8f4ae..35b1560c5e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(212), Uint(0)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_b.snap index dadca765c6..cf28919b55 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshl_u256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(212), Uint(1)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_a.snap index ea223619ce..79893cc29a 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(212), Uint(0)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_b.snap index ed5ae87851..5e7abb4d7c 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_i256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(212), Uint(1)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_c.snap index f56ac8f4ae..35b1560c5e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(212), Uint(0)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_d.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_d.snap index dadca765c6..cf28919b55 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_d.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwiseshr_u256_d.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(212), Uint(1)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwisexor_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwisexor_u256.snap index 
9602984579..e83b582b0e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwisexor_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bitwisexor_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(12), Uint(25)]) used 153 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_false.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_false.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_false.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_false.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_true.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_true.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_true.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_true.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_complex_struct.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_complex_struct.snap index 99feffbfa1..ae7aecb5a1 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_complex_struct.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_complex_struct.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- static_complex([]) used 948 gas string_complex([]) used 1724 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_division_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_division_u256.snap index 450cdedeb8..f493e92994 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_division_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_division_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42), Uint(42)]) used 201 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_a.snap index f5bd05192e..849f63a534 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_b.snap index ad9f0c3835..a43a037024 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_eq_u256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(2)]) 
used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_a.snap index 9f4238c5b5..bfbdb2b5c6 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(2), Int(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_b.snap index 003b4f3bf0..eb91722ee0 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_c.snap index 6d4809cbf4..c0c4ce4d4f 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(2)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_d.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_d.snap index 59d0ae645a..c510e5198e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_d.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_d.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639934)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_e.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_e.snap index 6358676e01..5f596a4e87 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_e.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_e.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_f.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_f.snap index eef6e8fdee..61e6b067c2 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_f.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_i256_f.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639934), 
Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_a.snap index 34d0886984..7d21463f00 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(2), Uint(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_b.snap index f5bd05192e..849f63a534 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_c.snap index ad9f0c3835..a43a037024 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gt_u256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(2)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_a.snap index de5be278eb..efd98718e5 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(2), Int(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_b.snap index 3f9a02838b..136f1518ca 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_c.snap index 72aeb7ac6a..86d0ff0021 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(2)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_d.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_d.snap index b7404c95d4..cba0533848 100644 --- 
a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_d.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_d.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639934)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_e.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_e.snap index f017373183..11fc590e26 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_e.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_e.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_f.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_f.snap index 2f9cd4658d..d588782850 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_f.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_i256_f.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639934), Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_a.snap index 287805cf5a..a9ca0164e5 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(2), Uint(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_b.snap index 3d2ab29400..eee961ec56 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_c.snap index c1d530678c..94c9941410 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_gte_u256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(2)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_i128_cast.snap 
b/crates/tests/src/snapshots/fe_compiler_tests__features__return_i128_cast.snap index 7cc6f39a22..56bd5e2cb8 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_i128_cast.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_i128_cast.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 105 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_i256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_i256.snap index 7cc6f39a22..56bd5e2cb8 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_i256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_i256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 105 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_identity_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_identity_u256.snap index db2f444418..4866238592 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_identity_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_identity_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42)]) used 144 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_i256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_i256.snap index 8b4d121034..9f209397c9 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_i256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_i256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1)]) used 147 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_u256.snap index a7abea0a1b..6cbd760f1c 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_invert_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1)]) used 147 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_a.snap index 6d4809cbf4..c0c4ce4d4f 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(2)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_b.snap index 003b4f3bf0..eb91722ee0 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(1)]) used 159 gas diff 
--git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_c.snap index 9f4238c5b5..bfbdb2b5c6 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(2), Int(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_d.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_d.snap index eef6e8fdee..61e6b067c2 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_d.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_d.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639934), Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_e.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_e.snap index 6358676e01..5f596a4e87 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_e.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_e.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_f.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_f.snap index 59d0ae645a..c510e5198e 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_f.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_i256_f.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639934)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_a.snap index ad9f0c3835..a43a037024 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(2)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_b.snap index f5bd05192e..849f63a534 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", 
harness.gas_reporter)" - --- bar([Uint(1), Uint(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_c.snap index 34d0886984..7d21463f00 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lt_u256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(2), Uint(1)]) used 159 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_a.snap index 72aeb7ac6a..86d0ff0021 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(2)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_b.snap index 3f9a02838b..136f1518ca 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(1), Int(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_c.snap index de5be278eb..efd98718e5 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(2), Int(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_d.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_d.snap index 2f9cd4658d..d588782850 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_d.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_d.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639934), Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_e.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_e.snap index f017373183..11fc590e26 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_e.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_e.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639935)]) used 162 gas diff 
--git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_f.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_f.snap index b7404c95d4..cba0533848 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_f.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_i256_f.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639935), Int(115792089237316195423570985008687907853269984665640564039457584007913129639934)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256.snap index 287805cf5a..a9ca0164e5 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(2), Uint(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_a.snap index c1d530678c..94c9941410 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(2)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_b.snap index 3d2ab29400..eee961ec56 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_lte_u256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(1)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_a.snap index 7c755c4d15..8cea4288f2 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(5), Int(2)]) used 201 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_b.snap index 92b5fb24db..df6d2fdbc7 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(5), Int(3)]) used 201 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_c.snap index 5eca984912..4caa50a883 100644 --- 
a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_i256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(5), Int(5)]) used 201 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_a.snap index 2a4d5286cb..b834335b40 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(5), Uint(2)]) used 201 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_b.snap index 095ef49d17..a5bd3b12fc 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(5), Uint(3)]) used 201 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_c.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_c.snap index 737a1b9d94..d67479f058 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_c.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_mod_u256_c.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(5), Uint(5)]) used 201 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_msg_sig.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_msg_sig.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_msg_sig.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_msg_sig.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_multiplication_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_multiplication_u256.snap index 956259ec52..7dbef6f882 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_multiplication_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_multiplication_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42), Uint(42)]) used 224 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256a.snap index 3d2ab29400..eee961ec56 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(1)]) used 162 gas diff --git 
a/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256b.snap index c1d530678c..94c9941410 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_noteq_u256b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(1), Uint(2)]) used 162 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_i256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_i256.snap index 4d1172543e..7634f5ff0b 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_i256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_i256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639934), Uint(3)]) used 653 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_u256_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_u256_a.snap index 6ce7c46219..e772e7319d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_u256_a.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_pow_u256_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(2), Uint(0)]) used 218 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_subtraction_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_subtraction_u256.snap index bdebc273ac..695902938a 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_subtraction_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_subtraction_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(42), Uint(42)]) used 199 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u128_cast.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u128_cast.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u128_cast.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u128_cast.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn.snap index 664bb8c16e..cc11350efe 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn.snap +++ 
b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 120 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn_with_args.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn_with_args.snap index 6706f0f9e1..a37a183336 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn_with_args.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256_from_called_fn_with_args.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 22307 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap deleted file mode 100644 index a5f9a97382..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1-2.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -bar([Int(100)]) used 22635 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap deleted file mode 100644 index 5b6c0372db..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array1.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -bar([Int(115792089237316195423570985008687907853269984665640564039457584007913129639926)]) used 22635 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap deleted file mode 100644 index 97511dc7aa..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2-2.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -i32_array([FixedArray([Int(115792089237316195423570985008687907853269984665640564039457584007913129639926), Int(100), Int(115792089237316195423570985008687907853269984665640564039457584007910982156288), Int(2147483647)])]) used 1380 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap deleted file mode 100644 index c48b5606a5..0000000000 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__signext_int_array2.snap +++ /dev/null @@ -1,6 +0,0 @@ ---- -source: crates/tests/src/features.rs -expression: "format!(\"{}\", harness.gas_reporter)" ---- -i8_array([FixedArray([Int(115792089237316195423570985008687907853269984665640564039457584007913129639926), Int(100), Int(115792089237316195423570985008687907853269984665640564039457584007913129639808), Int(127)])]) used 1312 gas - diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_a.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_a.snap index d13acaa896..fdf970d3b8 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_a.snap +++ 
b/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_a.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(6)]) used 210 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_b.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_b.snap index 08a2ba5cc0..354d8f9b77 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_b.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__ternary_expression_b.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([Uint(4)]) used 221 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__value_semantics.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__value_semantics.snap index 45d23e5732..30e8fd20aa 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__value_semantics.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__value_semantics.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 29962 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop.snap index 6fd076b2c1..98960cede2 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 423 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_test_from_sto.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_test_from_sto.snap index 6bc84f9235..dbbc053bbd 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_test_from_sto.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_test_from_sto.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 2329 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break.snap index 963565f5bd..258d191e3d 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 102 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break_2.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break_2.snap index 4c577f0733..29bec9b751 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break_2.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break_2.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 236 gas diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_continue.snap 
b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_continue.snap index 3f55331bf0..02dd65110b 100644 --- a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_continue.snap +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_continue.snap @@ -1,7 +1,6 @@ --- source: crates/tests/src/features.rs expression: "format!(\"{}\", harness.gas_reporter)" - --- bar([]) used 446 gas From 914d123522e9d19ae85504ea7d33a8378e4a807b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 28 Jan 2023 11:12:53 +0100 Subject: [PATCH 047/678] Reflect reviews --- crates/parser2/build.rs | 2 +- crates/parser2/src/parser/attr.rs | 56 +++--- crates/parser2/src/parser/expr.rs | 65 +++---- crates/parser2/src/parser/expr_atom.rs | 88 +++++----- crates/parser2/src/parser/func.rs | 122 +++++++++----- crates/parser2/src/parser/item.rs | 159 ++++++++++-------- crates/parser2/src/parser/mod.rs | 18 +- crates/parser2/src/parser/param.rs | 150 +++++++++-------- crates/parser2/src/parser/pat.rs | 14 +- crates/parser2/src/parser/path.rs | 3 +- crates/parser2/src/parser/stmt.rs | 23 +-- crates/parser2/src/parser/struct_.rs | 35 ++-- crates/parser2/src/parser/type_.rs | 15 +- crates/parser2/src/parser/use_tree.rs | 18 +- .../test_files/error_recovery/exprs/index.fe | 3 + .../error_recovery/exprs/index.snap | 42 +++++ .../error_recovery/exprs/match_.snap | 12 +- .../test_files/error_recovery/items/func.snap | 15 +- .../error_recovery/items/struct_.snap | 1 + .../test_files/syntax_node/exprs/index.fe | 2 + .../test_files/syntax_node/exprs/index.snap | 44 +++++ .../syntax_node/exprs/struct_init.fe | 3 +- .../syntax_node/exprs/struct_init.snap | 11 +- 23 files changed, 540 insertions(+), 361 deletions(-) create mode 100644 crates/parser2/test_files/error_recovery/exprs/index.fe create mode 100644 crates/parser2/test_files/error_recovery/exprs/index.snap create mode 100644 crates/parser2/test_files/syntax_node/exprs/index.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/index.snap diff --git a/crates/parser2/build.rs b/crates/parser2/build.rs index e041896d56..8e048f9218 100644 --- a/crates/parser2/build.rs +++ b/crates/parser2/build.rs @@ -1,4 +1,4 @@ fn main() { #[cfg(test)] - println!("cargo:rerun-if-changed=./std"); + println!("cargo:rerun-if-changed=./test_files"); } diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index 420c4234dd..e4426c9e36 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -27,9 +27,11 @@ impl super::Parse for AttrListScope { _ => break, }; parser.set_newline_as_trivia(false); - if !parser.bump_if(SyntaxKind::Newline) { - parser.error_and_recover("expected newline after Attribute", None) - } + parser.bump_or_recover( + SyntaxKind::Newline, + "expected newline after Attribute", + None, + ) } } } @@ -43,10 +45,12 @@ impl super::Parse for AttrScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::Pound); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected attribute name", None); - return; - } + parser.with_recovery_tokens( + |parser| { + parser.bump_or_recover(SyntaxKind::Ident, "expected attribute name", None); + }, + &[SyntaxKind::LParen], + ); if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(AttrParamListScope::default(), None); @@ -68,14 +72,18 @@ impl super::Parse for AttrParamListScope { return; } - 
parser.parse(AttrParam::default(), None); + parser.with_next_expected_tokens( + |parser| parser.parse(AttrParam::default(), None), + &[SyntaxKind::Comma, SyntaxKind::RParen], + ); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(AttrParam::default(), None); + parser.with_next_expected_tokens( + |parser| parser.parse(AttrParam::default(), None), + &[SyntaxKind::Comma, SyntaxKind::RParen], + ); } - if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_recover("expected `)`", None); - } + parser.bump_or_recover(SyntaxKind::RParen, "expected `)`", None); } } @@ -89,23 +97,17 @@ define_scope! { } impl super::Parse for AttrParam { fn parse(&mut self, parser: &mut Parser) { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected `key: value`", None); - } + parser.with_next_expected_tokens( + |parser| parser.bump_or_recover(SyntaxKind::Ident, "Expected `key: value`", None), + &[SyntaxKind::Colon], + ); - if !parser.bump_if(SyntaxKind::Colon) { - parser.error_and_recover("expected `key: value`", None); - } + parser.with_next_expected_tokens( + |parser| parser.bump_or_recover(SyntaxKind::Colon, "Expected `key: value`", None), + &[SyntaxKind::Ident], + ); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected `ident`", None) - } - - match parser.current_kind() { - Some(SyntaxKind::Comma) | Some(SyntaxKind::RParen) | None => {} - - _ => parser.error_and_recover("unexpected token", None), - } + parser.bump_or_recover(SyntaxKind::Ident, "Expected `key: value`", None); } } diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index 797f57fac2..e71743f822 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -199,11 +199,10 @@ impl super::Parse for BinExprScope { define_scope! 
{ IndexExprScope, IndexExpr, Override(RBracket) } impl super::Parse for IndexExprScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::LBracket); - parse_expr(parser); - if !parser.bump_if(SyntaxKind::RBracket) { - parser.error_and_recover("expected `]`", None); - } + parser.with_next_expected_tokens(parse_expr, &[SyntaxKind::RBracket]); + parser.bump_or_recover(SyntaxKind::RBracket, "expected `]`", None); } } @@ -212,9 +211,12 @@ impl super::Parse for CallExprScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { - parser.parse(GenericArgListScope::default(), None); - }); + parser.with_next_expected_tokens( + |parser| { + parser.parse(GenericArgListScope::default(), None); + }, + &[SyntaxKind::LParen], + ); } if parser.current_kind() != Some(SyntaxKind::LParen) { @@ -231,15 +233,16 @@ impl super::Parse for MethodExprScope { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::Dot); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected identifier", None); - } + parser.bump_or_recover(SyntaxKind::Ident, "expected identifier", None); - parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericArgListScope::default(), None); - } - }); + parser.with_next_expected_tokens( + |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default(), None); + } + }, + &[SyntaxKind::LParen], + ); if parser.current_kind() != Some(SyntaxKind::LParen) { parser.error_and_recover("expected `(`", None); @@ -278,48 +281,32 @@ impl super::Parse for FieldExprScope { define_scope! { pub(super) LShiftScope, LShift, Inheritance } impl super::Parse for LShiftScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.bump_if(SyntaxKind::Lt) { - parser.error_and_recover("expected `<<`", None); - } - if !parser.bump_if(SyntaxKind::Lt) { - parser.error_and_recover("expected `<<`", None); - } + parser.bump_or_recover(SyntaxKind::Lt, "expected `<<`", None); + parser.bump_or_recover(SyntaxKind::Lt, "expected `<<`", None); } } define_scope! { pub(super) RShiftScope, RShift, Inheritance } impl super::Parse for RShiftScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_recover("expected `>>`", None); - } - if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_recover("expected `>>`", None); - } + parser.bump_or_recover(SyntaxKind::Gt, "expected `>>`", None); + parser.bump_or_recover(SyntaxKind::Gt, "expected `>>`", None); } } define_scope! { pub(super) LtEqScope, LtEq, Inheritance } impl super::Parse for LtEqScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.bump_if(SyntaxKind::Lt) { - parser.error_and_recover("expected `<=`", None); - } - if !parser.bump_if(SyntaxKind::Eq) { - parser.error_and_recover("expected `<=`", None); - } + parser.bump_or_recover(SyntaxKind::Lt, "expected `<=`", None); + parser.bump_or_recover(SyntaxKind::Eq, "expected `<=`", None); } } define_scope! 
{ pub(super) GtEqScope, GtEq, Inheritance } impl super::Parse for GtEqScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_recover("expected `>=`", None); - } - if !parser.bump_if(SyntaxKind::Eq) { - parser.error_and_recover("expected `>=`", None); - } + parser.bump_or_recover(SyntaxKind::Gt, "expected `>=`", None); + parser.bump_or_recover(SyntaxKind::Eq, "expected `>=`", None); } } diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 0a7c5f692d..8b76c00c8e 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -91,7 +91,7 @@ impl super::Parse for IfExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::IfKw); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); + parser.with_next_expected_tokens(parse_expr_no_struct, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); @@ -100,9 +100,12 @@ impl super::Parse for IfExprScope { parser.parse(BlockExprScope::default(), None); if parser.current_kind() == Some(SyntaxKind::ElseKw) { - parser.with_next_expected_tokens(&[SyntaxKind::LBrace, SyntaxKind::IfKw], |parser| { - parser.bump_expected(SyntaxKind::ElseKw); - }); + parser.with_next_expected_tokens( + |parser| { + parser.bump_expected(SyntaxKind::ElseKw); + }, + &[SyntaxKind::LBrace, SyntaxKind::IfKw], + ); if !matches!( parser.current_kind(), @@ -121,9 +124,7 @@ impl super::Parse for MatchExprScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::MatchKw); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace], |parser| { - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct) - }); + parser.with_next_expected_tokens(parse_expr_no_struct, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected `{`", None); @@ -148,16 +149,16 @@ impl super::Parse for MatchArmListScope { parser.parse(MatchArmScope::default(), None); parser.set_newline_as_trivia(false); - if !parser.bump_if(SyntaxKind::Newline) - && parser.current_kind() != Some(SyntaxKind::RBrace) - { - parser.error_and_recover("expected newline after match arm", None); + if parser.current_kind() != Some(SyntaxKind::RBrace) { + parser.bump_or_recover( + SyntaxKind::Newline, + "expected newline after match arm", + None, + ); } } - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_bump_until("expected }", None, SyntaxKind::RBrace) - } + parser.bump_or_recover(SyntaxKind::RBrace, "expected `}`", None); } } @@ -166,14 +167,10 @@ impl super::Parse for MatchArmScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - parser.with_next_expected_tokens(&[SyntaxKind::FatArrow], parse_pat); + parser.with_next_expected_tokens(parse_pat, &[SyntaxKind::FatArrow]); + parser.bump_or_recover(SyntaxKind::FatArrow, "expected `=>`", None); - if !parser.bump_if(SyntaxKind::FatArrow) { - parser.error_and_recover("expected `=>`", None); - return; - } - - parse_expr(parser); + parser.with_next_expected_tokens(parse_expr, &[SyntaxKind::RBrace, SyntaxKind::Newline]); } } @@ -201,23 +198,25 @@ impl super::Parse for RecordFieldListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); - if parser.bump_if(SyntaxKind::LBrace) { + if parser.bump_if(SyntaxKind::RBrace) { return; } - 
parser.with_next_expected_tokens(&[SyntaxKind::RBrace, SyntaxKind::Comma], |parser| { - parser.parse(RecordFieldScope::default(), None) - }); + parser.with_next_expected_tokens( + |parser| parser.parse(RecordFieldScope::default(), None), + &[SyntaxKind::RBrace, SyntaxKind::Comma], + ); while parser.bump_if(SyntaxKind::Comma) { - parser.with_next_expected_tokens(&[SyntaxKind::RBrace, SyntaxKind::Comma], |parser| { - parser.parse(RecordFieldScope::default(), None); - }) + parser.with_next_expected_tokens( + |parser| { + parser.parse(RecordFieldScope::default(), None); + }, + &[SyntaxKind::RBrace, SyntaxKind::Comma], + ) } - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_bump_until("expected `}`", None, SyntaxKind::RBrace); - } + parser.bump_or_recover(SyntaxKind::RBrace, "expected `}`", None); } } @@ -225,9 +224,7 @@ define_scope! { RecordFieldScope, RecordField, Inheritance } impl super::Parse for RecordFieldScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected identifier", None); - } + parser.bump_or_recover(SyntaxKind::Ident, "expected identifier", None); if parser.bump_if(SyntaxKind::Colon) { parse_expr(parser); @@ -241,22 +238,17 @@ impl super::Parse for ParenScope { parser.bump_expected(SyntaxKind::LParen); if parser.bump_if(SyntaxKind::RParen) { - self.set_kind(SyntaxKind::TupleExpr); + self.set_kind(SyntaxKind::ParenExpr); return; } - parse_expr(parser); + parser.with_next_expected_tokens(parse_expr, &[SyntaxKind::RParen, SyntaxKind::Comma]); while parser.bump_if(SyntaxKind::Comma) { self.set_kind(SyntaxKind::TupleExpr); - if parser.current_kind() == Some(SyntaxKind::RParen) { - break; - } - parse_expr(parser); + parser.with_next_expected_tokens(parse_expr, &[SyntaxKind::RParen, SyntaxKind::Comma]); } - if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_bump_until("expected `)`", None, SyntaxKind::RParen); - } + parser.bump_or_recover(SyntaxKind::RParen, "expected `)`", None); } } @@ -274,28 +266,26 @@ impl super::Parse for ArrayScope { } parser.with_next_expected_tokens( + parse_expr, &[ SyntaxKind::SemiColon, SyntaxKind::Comma, SyntaxKind::RBracket, ], - parse_expr, ); if parser.bump_if(SyntaxKind::SemiColon) { self.set_kind(SyntaxKind::ArrayRepExpr); - parser.with_next_expected_tokens(&[SyntaxKind::RBracket], parse_expr); + parser.with_next_expected_tokens(parse_expr, &[SyntaxKind::RBracket]); } else { while parser.bump_if(SyntaxKind::Comma) { parser.with_next_expected_tokens( - &[SyntaxKind::Comma, SyntaxKind::RBracket], parse_expr, + &[SyntaxKind::Comma, SyntaxKind::RBracket], ); } } - if !parser.bump_if(SyntaxKind::RBracket) { - parser.error_and_bump_until("expected `]`", None, SyntaxKind::RBracket); - } + parser.bump_or_recover(SyntaxKind::RBracket, "expected `]`", None); } } diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 8da62d575d..b2491d882c 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -42,20 +42,27 @@ impl super::Parse for FnScope { } fn parse_normal_fn_def_impl(parser: &mut Parser) { - parser.with_next_expected_tokens(&[SyntaxKind::Lt, SyntaxKind::LParen], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the function name", None) - } - }); + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expected ident for the function name", + None, + ) + }, + 
&[SyntaxKind::Lt, SyntaxKind::LParen], + ); - parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }); + parser.with_next_expected_tokens( + |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }, + &[SyntaxKind::LParen], + ); parser.with_next_expected_tokens( - &[SyntaxKind::LBrace, SyntaxKind::Arrow, SyntaxKind::WhereKw], |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(FnArgListScope::default(), None); @@ -63,14 +70,18 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { parser.error_and_recover("expected `(` for the function arguments", None); } }, + &[SyntaxKind::LBrace, SyntaxKind::Arrow, SyntaxKind::WhereKw], ); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { - if parser.bump_if(SyntaxKind::Arrow) { - parse_type(parser, None, false); - } - }); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + parser.with_next_expected_tokens( + |parser| { + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None, false); + } + }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + ); + parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(BlockExprScope::default(), None); @@ -80,20 +91,27 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { } fn parse_trait_fn_def_impl(parser: &mut Parser) { - parser.with_next_expected_tokens(&[SyntaxKind::Lt, SyntaxKind::LParen], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the function name", None) - } - }); + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expected ident for the function name", + None, + ) + }, + &[SyntaxKind::Lt, SyntaxKind::LParen], + ); - parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }); + parser.with_next_expected_tokens( + |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }, + &[SyntaxKind::LParen], + ); parser.with_recovery_tokens( - &[SyntaxKind::LBrace, SyntaxKind::Arrow, SyntaxKind::WhereKw], |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(FnArgListScope::default(), None); @@ -101,14 +119,18 @@ fn parse_trait_fn_def_impl(parser: &mut Parser) { parser.error_and_recover("expected `(` for the function arguments", None); } }, + &[SyntaxKind::LBrace, SyntaxKind::Arrow, SyntaxKind::WhereKw], ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { - if parser.bump_if(SyntaxKind::Arrow) { - parse_type(parser, None, false); - } - }); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + parser.with_recovery_tokens( + |parser| { + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None, false); + } + }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + ); + parser.with_recovery_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(BlockExprScope::default(), None); @@ -116,19 +138,27 @@ fn parse_trait_fn_def_impl(parser: &mut Parser) { } fn parse_extern_fn_def_impl(parser: 
&mut Parser) { - parser.with_next_expected_tokens(&[SyntaxKind::LParen], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the function name", None) - } - }); + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expected identifier for the function name", + None, + ) + }, + &[SyntaxKind::LParen], + ); - parser.with_recovery_tokens(&[SyntaxKind::Arrow], |parser| { - if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnArgListScope::default(), None); - } else { - parser.error_and_recover("expected `(` for the function arguments", None); - } - }); + parser.with_recovery_tokens( + |parser| { + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(FnArgListScope::default(), None); + } else { + parser.error_and_recover("expected `(` for the function arguments", None); + } + }, + &[SyntaxKind::Arrow], + ); if parser.bump_if(SyntaxKind::Arrow) { parse_type(parser, None, false); diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index d1506f25b3..fd4ef86216 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -99,7 +99,11 @@ impl super::Parse for ItemListScope { parser.set_newline_as_trivia(false); if parser.current_kind().is_some() && !parser.bump_if(SyntaxKind::Newline) { - parser.error_and_recover("expected newline after item definition", checkpoint) + parser.bump_or_recover( + SyntaxKind::Newline, + "expected newline after item definition", + checkpoint, + ) } } } @@ -153,9 +157,16 @@ impl super::Parse for ContractScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ContractKw); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the struct name", None) - } + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expected identifier for the struct name", + None, + ) + }, + &[SyntaxKind::LBrace], + ); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(RecordFieldDefListScope::default(), None); @@ -170,22 +181,27 @@ impl super::Parse for EnumScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::EnumKw); - parser.with_recovery_tokens( + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expected identifier for the enum name", + None, + ); + }, &[SyntaxKind::Lt, SyntaxKind::LBrace, SyntaxKind::WhereKw], + ); + + parser.with_next_expected_tokens( |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the enum name", None) + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); } }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }); - - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected enum body", None); @@ -209,30 +225,27 @@ impl super::Parse for VariantDefListScope { } parser.parse(VariantDefScope::default(), None); parser.set_newline_as_trivia(false); - if !parser.bump_if(SyntaxKind::Newline) - && 
parser.current_kind() != Some(SyntaxKind::RBrace) - { - parser.error_and_recover("expected newline after variant definition", None); + if parser.current_kind() != Some(SyntaxKind::RBrace) { + parser.bump_or_recover( + SyntaxKind::Newline, + "expected newline after variant definition", + None, + ); } } - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_recover( - "expected the closing brace of the enum variants definition", - None, - ); - parser.bump_if(SyntaxKind::RBrace); - } + parser.bump_or_recover(SyntaxKind::RBrace, "expected `}`", None); } } define_scope! { VariantDefScope, VariantDef, Inheritance } impl super::Parse for VariantDefScope { fn parse(&mut self, parser: &mut Parser) { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the variant name", None); - return; - } + parser.bump_or_recover( + SyntaxKind::Ident, + "expected ident for the variant name", + None, + ); if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(TupleTypeScope::default(), None); @@ -245,17 +258,22 @@ impl super::Parse for TraitScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::TraitKw); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the trait name", None) - } + parser.bump_or_recover( + SyntaxKind::Ident, + "expected identifier for the trait name", + None, + ); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }); + parser.with_next_expected_tokens( + |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + ); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected trait body", None); @@ -279,14 +297,13 @@ impl super::Parse for ImplScope { parser.bump_expected(SyntaxKind::ImplKw); parser.with_next_expected_tokens( - &[SyntaxKind::LBrace, SyntaxKind::WhereKw, SyntaxKind::ForKw], |parser| { parse_type(parser, None, true); }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw, SyntaxKind::ForKw], ); let is_impl_trait = parser.with_next_expected_tokens( - &[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { if parser.bump_if(SyntaxKind::ForKw) { self.set_kind(SyntaxKind::ImplTrait); @@ -296,9 +313,10 @@ impl super::Parse for ImplScope { false } }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected impl body", None); @@ -342,18 +360,22 @@ impl super::Parse for ConstScope { parser.set_newline_as_trivia(false); - parser.with_next_expected_tokens(&[SyntaxKind::Colon, SyntaxKind::Eq], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected identifier", None); - } - }); + parser.with_next_expected_tokens( + |parser| parser.bump_or_recover(SyntaxKind::Ident, "expected identifier", None), + &[SyntaxKind::Colon, SyntaxKind::Eq], + ); - parser.with_next_expected_tokens(&[SyntaxKind::Eq], |parser| { - if !parser.bump_if(SyntaxKind::Colon) { - 
parser.error_and_recover("expected type annotation for `const`", None); - } - parse_type(parser, None, false); - }); + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Colon, + "expected type annotation for `const`", + None, + ); + parse_type(parser, None, false); + }, + &[SyntaxKind::Eq], + ); if !parser.bump_if(SyntaxKind::Eq) { parser.error_and_recover("expected `=` for const value definition", None); @@ -390,17 +412,25 @@ impl super::Parse for TypeAliasScope { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::TypeKw); - parser.with_next_expected_tokens(&[SyntaxKind::Lt, SyntaxKind::Eq], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected identifier for type alias name", None) - } - }); + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expected identifier for type alias name", + None, + ); + }, + &[SyntaxKind::Lt, SyntaxKind::Eq], + ); - parser.with_next_expected_tokens(&[SyntaxKind::Eq], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }); + parser.with_next_expected_tokens( + |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }, + &[SyntaxKind::Eq], + ); if !parser.bump_if(SyntaxKind::Eq) { parser.error_and_recover("expected `=` for type alias definition", None); @@ -460,10 +490,7 @@ fn parse_fn_item_block( } } - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_recover("expected `}` to close the block", None); - parser.bump_if(SyntaxKind::RBrace); - } + parser.bump_or_recover(SyntaxKind::RBrace, "expected `}` to close the block", None); } fn is_modifier_head(kind: SyntaxKind) -> bool { diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index f93041a78d..3d729e99e3 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -101,7 +101,7 @@ impl Parser { /// /// This is useful when you want to specify auxiliary recovery tokens which /// are valid only in a limited part of the scope. - pub fn with_recovery_tokens(&mut self, recovery_tokens: &[SyntaxKind], f: F) -> R + pub fn with_recovery_tokens(&mut self, f: F, recovery_tokens: &[SyntaxKind]) -> R where F: FnOnce(&mut Self) -> R, { @@ -126,7 +126,7 @@ impl Parser { /// If `current_token()` is not in `expected_tokens` after `f` returns, an /// error is reported and try to recover with `expected_tokens` and scope's /// recovery token set. - pub fn with_next_expected_tokens(&mut self, expected_tokens: &[SyntaxKind], f: F) -> R + pub fn with_next_expected_tokens(&mut self, f: F, expected_tokens: &[SyntaxKind]) -> R where F: FnOnce(&mut Self) -> R, { @@ -360,6 +360,20 @@ impl Parser { self.set_newline_as_trivia(is_newline_trivia); } + /// Bumps the current token if the current token is the `expected` kind. + /// Otherwise, reports an error and proceeds the parser to the recovery + /// tokens. 
+ pub fn bump_or_recover( + &mut self, + expected: SyntaxKind, + msg: &str, + checkpoint: Option, + ) { + if !self.bump_if(expected) { + self.error_and_recover(msg, checkpoint); + } + } + fn checkpoint(&mut self) -> Checkpoint { self.builder.checkpoint() } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index b3218632ef..4d52be64f6 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -22,15 +22,18 @@ impl super::Parse for FnArgListScope { return; } - parser.parse(FnArgScope::default(), None); + parser.with_next_expected_tokens( + |parser| parser.parse(FnArgScope::default(), None), + &[SyntaxKind::Comma, SyntaxKind::RParen], + ); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(FnArgScope::default(), None); + parser.with_next_expected_tokens( + |parser| parser.parse(FnArgScope::default(), None), + &[SyntaxKind::Comma, SyntaxKind::RParen], + ); } - if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_recover("expected closing `)`", None); - parser.bump_if(SyntaxKind::LParen); - } + parser.bump_or_recover(SyntaxKind::RParen, "expected closing `)`", None); } } @@ -43,32 +46,35 @@ impl super::Parse for FnArgScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_if(SyntaxKind::MutKw); - let is_self = parser.with_recovery_tokens(&[SyntaxKind::Colon], |parser| { - match parser.current_kind() { + let is_self = parser.with_recovery_tokens( + |parser| match parser.current_kind() { Some(SyntaxKind::SelfKw) => { parser.bump_expected(SyntaxKind::SelfKw); true } - Some(SyntaxKind::Ident | SyntaxKind::Underscore) => { - parser.bump(); - if !parser.bump_if(SyntaxKind::Ident) { - parser.bump_if(SyntaxKind::Underscore); - } - false - } + Some(SyntaxKind::Ident | SyntaxKind::Underscore) => parser + .with_next_expected_tokens( + |parser| { + parser.bump(); + if !parser.bump_if(SyntaxKind::Ident) { + parser.bump_if(SyntaxKind::Underscore); + } + false + }, + &[SyntaxKind::Colon], + ), _ => { parser.error_and_recover("expected identifier for argument name", None); false } - } - }); + }, + &[SyntaxKind::Colon], + ); if is_self { return; } - if !parser.bump_if(SyntaxKind::Colon) { - parser.error_and_recover("expected `:` after argument name", None); - } + parser.bump_or_recover(SyntaxKind::Colon, "expected `:` after argument name", None); parse_type(parser, None, false); } @@ -91,10 +97,7 @@ impl super::Parse for GenericParamListScope { parser.parse(GenericParamScope::default(), None); } - if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_recover("expected closing `>`", None); - parser.bump_if(SyntaxKind::Gt); - } + parser.bump_or_recover(SyntaxKind::Gt, "expected closing `>`", None); } } @@ -108,17 +111,20 @@ impl super::Parse for GenericParamScope { parser.set_newline_as_trivia(false); parser.bump_if(SyntaxKind::ConstKw); - parser.with_next_expected_tokens(&[SyntaxKind::Comma, SyntaxKind::Gt], |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected type parameter", None); - } + parser.with_next_expected_tokens( + |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected type parameter", None); + } - if parser.current_kind() == Some(SyntaxKind::Colon) { - parser.parse(TypeBoundListScope::default(), None); - } + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(TypeBoundListScope::default(), None); + } - parser.set_newline_as_trivia(true); - }); + parser.set_newline_as_trivia(true); + }, + &[SyntaxKind::Comma, SyntaxKind::Gt], 
+ ); } } @@ -171,10 +177,7 @@ impl super::Parse for GenericArgListScope { parser.parse(GenericArgScope::new(self.allow_bounds), None); } - if !parser.bump_if(SyntaxKind::Gt) { - parser.error_and_recover("expected closing `>`", None); - parser.bump_if(SyntaxKind::Gt); - } + parser.bump_or_recover(SyntaxKind::Gt, "expected closing `>`", None); } } @@ -186,29 +189,32 @@ define_scope! { impl super::Parse for GenericArgScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - parser.with_next_expected_tokens(&[SyntaxKind::Comma, SyntaxKind::Gt], |parser| { - match parser.current_kind() { - Some(SyntaxKind::LBrace) => { - parser.parse(BlockExprScope::default(), None); - } + parser.with_next_expected_tokens( + |parser| { + match parser.current_kind() { + Some(SyntaxKind::LBrace) => { + parser.parse(BlockExprScope::default(), None); + } - Some(kind) if kind.is_literal_leaf() => { - parser.parse(LitExprScope::default(), None); - } + Some(kind) if kind.is_literal_leaf() => { + parser.parse(LitExprScope::default(), None); + } - _ => { - parse_type(parser, None, self.allow_bounds); - if parser.current_kind() == Some(SyntaxKind::Colon) { - if !self.allow_bounds { - parser.error_and_recover("type bounds are not allowed here", None); - } else { - parser.parse(TypeBoundListScope::default(), None); + _ => { + parse_type(parser, None, self.allow_bounds); + if parser.current_kind() == Some(SyntaxKind::Colon) { + if !self.allow_bounds { + parser.error_and_recover("type bounds are not allowed here", None); + } else { + parser.parse(TypeBoundListScope::default(), None); + } } } } - } - parser.set_newline_as_trivia(true); - }); + parser.set_newline_as_trivia(true); + }, + &[SyntaxKind::Comma, SyntaxKind::Gt], + ); } } @@ -226,29 +232,29 @@ impl super::Parse for CallArgListScope { parser.parse(CallArgScope::default(), None); } - if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_recover("expected closing `)`", None); - parser.bump_if(SyntaxKind::RParen); - } + parser.bump_or_recover(SyntaxKind::RParen, "expected closing `)`", None); } } define_scope! 
{ CallArgScope, CallArg, Inheritance } impl super::Parse for CallArgScope { fn parse(&mut self, parser: &mut Parser) { - parser.with_next_expected_tokens(&[SyntaxKind::Comma, SyntaxKind::RParen], |parser| { - parser.set_newline_as_trivia(false); - let has_label = parser.dry_run(|parser| { - parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) - }); - - if has_label { - parser.bump_expected(SyntaxKind::Ident); - parser.bump_expected(SyntaxKind::Colon); - } - parse_expr(parser); - parser.set_newline_as_trivia(true); - }); + parser.with_next_expected_tokens( + |parser| { + parser.set_newline_as_trivia(false); + let has_label = parser.dry_run(|parser| { + parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) + }); + + if has_label { + parser.bump_expected(SyntaxKind::Ident); + parser.bump_expected(SyntaxKind::Colon); + } + parse_expr(parser); + parser.set_newline_as_trivia(true); + }, + &[SyntaxKind::Comma, SyntaxKind::RParen], + ); } } diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index 57c3e1e10b..05a57dcaea 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -66,17 +66,14 @@ impl super::Parse for TuplePatElemListScope { parser.parse(TuplePatElemScope::default(), None); } - if !parser.bump_if(SyntaxKind::RParen) { - parser.error_and_recover("expected `)`", None); - parser.bump_if(SyntaxKind::RParen); - } + parser.bump_or_recover(SyntaxKind::RParen, "expected `)`", None); } } define_scope! { TuplePatElemScope, TuplePatElem, Inheritance } impl super::Parse for TuplePatElemScope { fn parse(&mut self, parser: &mut Parser) { - parse_pat(parser); + parser.with_next_expected_tokens(parse_pat, &[SyntaxKind::RParen, SyntaxKind::Comma]); } } @@ -111,10 +108,7 @@ impl super::Parse for RecordPatFieldListScope { parser.parse(RecordPatFieldScope::default(), None); } - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_recover("expected `}`", None); - parser.bump_if(SyntaxKind::RBrace); - } + parser.bump_or_recover(SyntaxKind::RBrace, "expected `}`", None); } } @@ -128,7 +122,7 @@ impl super::Parse for RecordPatFieldScope { parser.bump_expected(SyntaxKind::Ident); parser.bump_expected(SyntaxKind::Colon); } - parse_pat(parser); + parser.with_next_expected_tokens(parse_pat, &[SyntaxKind::Comma, SyntaxKind::RBrace]); } } diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs index 11551183f5..278734966b 100644 --- a/crates/parser2/src/parser/path.rs +++ b/crates/parser2/src/parser/path.rs @@ -12,8 +12,7 @@ impl super::Parse for PathScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); parser.parse(PathSegmentScope::default(), None); - while parser.current_kind() == Some(SyntaxKind::Colon2) { - parser.bump_expected(SyntaxKind::Colon2); + while parser.bump_if(SyntaxKind::Colon2) { parser.parse(PathSegmentScope::default(), None); } } diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index 6bbe8299f9..f60668f28a 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -59,13 +59,11 @@ impl super::Parse for ForStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ForKw); - parser.with_next_expected_tokens(&[SyntaxKind::InKw, SyntaxKind::LBrace], parse_pat); + parser.with_next_expected_tokens(parse_pat, &[SyntaxKind::InKw, SyntaxKind::LBrace]); - if !parser.bump_if(SyntaxKind::InKw) { - parser.error_and_recover("expected `in` keyword", None); 
- } + parser.bump_or_recover(SyntaxKind::InKw, "expected `in` keyword", None); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); + parser.with_next_expected_tokens(parse_expr_no_struct, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); @@ -80,7 +78,7 @@ impl super::Parse for WhileStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::WhileKw); - parser.with_next_expected_tokens(&[SyntaxKind::LBrace], parse_expr_no_struct); + parser.with_next_expected_tokens(parse_expr_no_struct, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected block", None); @@ -129,12 +127,17 @@ impl super::Parse for ReturnStmtScope { define_scope! { AssignStmtScope, AssignStmt, Inheritance } impl super::Parse for AssignStmtScope { fn parse(&mut self, parser: &mut Parser) { - parser.with_recovery_tokens(&[SyntaxKind::Eq], parse_pat); + parser.with_recovery_tokens(parse_pat, &[SyntaxKind::Eq]); parser.set_newline_as_trivia(false); - if bump_aug_assign_op_opt(parser) { - self.set_kind(SyntaxKind::AugAssignStmt); - } + parser.with_next_expected_tokens( + |parser| { + if bump_aug_assign_op_opt(parser) { + self.set_kind(SyntaxKind::AugAssignStmt); + } + }, + &[SyntaxKind::Eq], + ); if !parser.bump_if(SyntaxKind::Eq) { parser.error_and_recover("expected `=`", None); diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 4a8eeeced6..54bf08c2e0 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -18,22 +18,25 @@ impl super::Parse for StructScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::StructKw); - parser.with_recovery_tokens( - &[SyntaxKind::Lt, SyntaxKind::LBrace, SyntaxKind::WhereKw], + parser.with_next_expected_tokens( |parser| { if !parser.bump_if(SyntaxKind::Ident) { parser.error_and_recover("expected ident for the struct name", None) } }, + &[SyntaxKind::Lt, SyntaxKind::LBrace, SyntaxKind::WhereKw], ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace, SyntaxKind::WhereKw], |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }); + parser.with_next_expected_tokens( + |parser| { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } + }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + ); - parser.with_recovery_tokens(&[SyntaxKind::LBrace], parse_where_clause_opt); + parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() == Some(SyntaxKind::LBrace) { parser.parse(RecordFieldDefListScope::default(), None); @@ -91,11 +94,19 @@ impl super::Parse for RecordFieldDefScope { parse_attr_list(parser); parser.bump_if(SyntaxKind::PubKw); - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected ident for the field name", None); - } + parser.with_next_expected_tokens( + |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected ident for the field name", None); + } + }, + &[SyntaxKind::Colon], + ); if parser.bump_if(SyntaxKind::Colon) { - parse_type(parser, None, false); + parser.with_next_expected_tokens( + |parser| parse_type(parser, None, false), + &[SyntaxKind::Newline, SyntaxKind::RBrace], + ); } else { parser.error_and_recover("expected `name: type` for the field definition", 
None); } diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index ed47da81ec..8c3296c1eb 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -23,8 +23,8 @@ pub(super) fn parse_type( define_scope!(PtrTypeScope { allow_bounds: bool }, PtrType, Inheritance); impl super::Parse for PtrTypeScope { fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::Star); parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::Star); parse_type(parser, None, self.allow_bounds); } } @@ -32,11 +32,11 @@ impl super::Parse for PtrTypeScope { define_scope!(PathTypeScope { allow_bounds: bool }, PathType, Inheritance); impl super::Parse for PathTypeScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); if !parser.parse(PathScope::default(), None).0 { return; } - parser.set_newline_as_trivia(false); if parser.current_kind() == Some(SyntaxKind::Lt) { parser.parse(GenericArgListScope::new(self.allow_bounds), None); } @@ -46,6 +46,7 @@ impl super::Parse for PathTypeScope { define_scope!(SelfTypeScope, SelfType, Inheritance); impl super::Parse for SelfTypeScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::SelfTypeKw); } } @@ -59,10 +60,12 @@ define_scope! { } impl super::Parse for TupleTypeScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::LParen); if parser.bump_if(SyntaxKind::RParen) { return; } + parser.set_newline_as_trivia(true); parse_type(parser, None, self.allow_bounds); while parser.bump_if(SyntaxKind::Comma) { @@ -83,11 +86,13 @@ define_scope! { } impl super::Parse for ArrayTypeScope { fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::LBracket); - parser.with_recovery_tokens(&[SyntaxKind::SemiColon], |parser| { - parse_type(parser, None, self.allow_bounds) - }); + parser.with_next_expected_tokens( + |parser| parse_type(parser, None, self.allow_bounds), + &[SyntaxKind::SemiColon], + ); if !parser.bump_if(SyntaxKind::SemiColon) { parser.error_and_recover("expected `;`", None); diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs index 06aef60f5d..6be45d11a7 100644 --- a/crates/parser2/src/parser/use_tree.rs +++ b/crates/parser2/src/parser/use_tree.rs @@ -53,15 +53,23 @@ define_scope! 
{ impl super::Parse for UseTreeListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); - parser.parse(UseTreeScope::default(), None); + parser.with_next_expected_tokens( + |parser| { + parser.parse(UseTreeScope::default(), None); + }, + &[SyntaxKind::RBrace, SyntaxKind::Comma], + ); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(UseTreeScope::default(), None); + parser.with_next_expected_tokens( + |parser| { + parser.parse(UseTreeScope::default(), None); + }, + &[SyntaxKind::RBrace, SyntaxKind::Comma], + ); } - if !parser.bump_if(SyntaxKind::RBrace) { - parser.error_and_recover("expected `}`", None); - } + parser.bump_or_recover(SyntaxKind::RBrace, "expected `}`", None); } } diff --git a/crates/parser2/test_files/error_recovery/exprs/index.fe b/crates/parser2/test_files/error_recovery/exprs/index.fe new file mode 100644 index 0000000000..afd62778b9 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/index.fe @@ -0,0 +1,3 @@ +x[1 a] +x[2 + 3 +x[41] \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/index.snap b/crates/parser2/test_files/error_recovery/exprs/index.snap new file mode 100644 index 0000000000..6b93380f86 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/index.snap @@ -0,0 +1,42 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: snapshot +--- +Root@0..20 + IndexExpr@0..6 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + LBracket@1..2 "[" + LitExpr@2..3 + Int@2..3 "1" + WhiteSpace@3..4 " " + Error@4..5 + Ident@4..5 "a" + RBracket@5..6 "]" + Newline@6..7 "\n" + IndexExpr@7..14 + Path@7..8 + PathSegment@7..8 + Ident@7..8 "x" + LBracket@8..9 "[" + BinExpr@9..14 + LitExpr@9..10 + Int@9..10 "2" + WhiteSpace@10..11 " " + Plus@11..12 "+" + WhiteSpace@12..13 " " + LitExpr@13..14 + Int@13..14 "3" + Error@14..14 + Error@14..14 + Newline@14..15 "\n" + IndexExpr@15..20 + Path@15..16 + PathSegment@15..16 + Ident@15..16 "x" + LBracket@16..17 "[" + LitExpr@17..19 + Int@17..19 "41" + RBracket@19..20 "]" + diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.snap b/crates/parser2/test_files/error_recovery/exprs/match_.snap index 746a0452a1..0b908cafc9 100644 --- a/crates/parser2/test_files/error_recovery/exprs/match_.snap +++ b/crates/parser2/test_files/error_recovery/exprs/match_.snap @@ -1,5 +1,5 @@ --- -source: crates/parser2/tests/errro_recovery.rs +source: crates/parser2/tests/error_recovery.rs expression: snapshot --- Root@0..94 @@ -40,6 +40,7 @@ Root@0..94 Ident@35..38 "Bar" Error@38..38 Error@38..38 + Error@38..38 Newline@38..39 "\n" RBrace@39..40 "}" WhiteSpace@40..41 " " @@ -56,7 +57,7 @@ Root@0..94 WhiteSpace@53..54 " " Newline@54..55 "\n" WhiteSpace@55..58 " " - MatchArm@58..77 + MatchArm@58..79 PathTuplePat@58..70 Path@58..61 PathSegment@58..61 @@ -82,14 +83,15 @@ Root@0..94 Path@70..70 PathSegment@70..70 Error@70..70 + Error@70..70 Error@70..70 FatArrow@70..72 "=>" WhiteSpace@72..73 " " LitExpr@73..77 TrueKw@73..77 "true" - WhiteSpace@77..78 " " - Error@78..79 - Ident@78..79 "x" + WhiteSpace@77..78 " " + Error@78..79 + Ident@78..79 "x" Newline@79..80 "\n" WhiteSpace@80..83 " " MatchArm@83..91 diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index c85d4aef31..5078a99599 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -37,17 +37,16 @@ Root@0..133 FnArg@26..38 
Underscore@26..27 "_" WhiteSpace@27..28 " " - Error@28..38 + Error@28..33 MutKw@28..31 "mut" WhiteSpace@31..32 " " Ident@32..33 "y" - Colon@33..34 ":" - WhiteSpace@34..35 " " - Ident@35..38 "u32" - PathType@38..38 - Path@38..38 - PathSegment@38..38 - Error@38..38 + Colon@33..34 ":" + WhiteSpace@34..35 " " + PathType@35..38 + Path@35..38 + PathSegment@35..38 + Ident@35..38 "u32" Comma@38..39 "," WhiteSpace@39..40 " " FnArg@40..46 diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index 27b63f18e5..093c9de961 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -53,6 +53,7 @@ Root@0..74 RecordFieldDef@51..54 Ident@51..54 "foo" Error@54..54 + Error@54..54 Newline@54..55 "\n" WhiteSpace@55..59 " " RecordFieldDef@59..72 diff --git a/crates/parser2/test_files/syntax_node/exprs/index.fe b/crates/parser2/test_files/syntax_node/exprs/index.fe new file mode 100644 index 0000000000..1545b60d77 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/index.fe @@ -0,0 +1,2 @@ +x[1 + 2] +x[foo.y(1, 2)] \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/index.snap b/crates/parser2/test_files/syntax_node/exprs/index.snap new file mode 100644 index 0000000000..b9dd501fce --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/index.snap @@ -0,0 +1,44 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..23 + IndexExpr@0..8 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + LBracket@1..2 "[" + BinExpr@2..7 + LitExpr@2..3 + Int@2..3 "1" + WhiteSpace@3..4 " " + Plus@4..5 "+" + WhiteSpace@5..6 " " + LitExpr@6..7 + Int@6..7 "2" + RBracket@7..8 "]" + Newline@8..9 "\n" + IndexExpr@9..23 + Path@9..10 + PathSegment@9..10 + Ident@9..10 "x" + LBracket@10..11 "[" + MethodCallExpr@11..22 + Path@11..14 + PathSegment@11..14 + Ident@11..14 "foo" + Dot@14..15 "." 
+ Ident@15..16 "y" + CallArgList@16..22 + LParen@16..17 "(" + CallArg@17..18 + LitExpr@17..18 + Int@17..18 "1" + Comma@18..19 "," + WhiteSpace@19..20 " " + CallArg@20..21 + LitExpr@20..21 + Int@20..21 "2" + RParen@21..22 ")" + RBracket@22..23 "]" + diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.fe b/crates/parser2/test_files/syntax_node/exprs/struct_init.fe index 75d6a3e907..4578b13dc2 100644 --- a/crates/parser2/test_files/syntax_node/exprs/struct_init.fe +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.fe @@ -1,2 +1,3 @@ Struct {x, y} -Struct {x: 1 + 2} \ No newline at end of file +Struct {x: 1 + 2} +Empty {} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap index ee44c822af..291a3e6872 100644 --- a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap @@ -2,7 +2,7 @@ source: crates/parser2/tests/syntax_node.rs expression: snapshot --- -Root@0..31 +Root@0..40 RecordInitExpr@0..13 Path@0..6 PathSegment@0..6 @@ -38,4 +38,13 @@ Root@0..31 LitExpr@29..30 Int@29..30 "2" RBrace@30..31 "}" + Newline@31..32 "\n" + RecordInitExpr@32..40 + Path@32..37 + PathSegment@32..37 + Ident@32..37 "Empty" + WhiteSpace@37..38 " " + RecordFieldList@38..40 + LBrace@38..39 "{" + RBrace@39..40 "}" From 8e22965cf0bb94a3959bfcdbeb1cab2a6f865a08 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 30 Jan 2023 19:14:48 +0100 Subject: [PATCH 048/678] Update solc for Apple silicon --- crates/test-utils/Cargo.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index 1dc56a0a0a..e9ecb879d3 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -14,8 +14,8 @@ fe-common = {path = "../common", version = "^0.20.0-alpha"} fe-driver = {path = "../driver", version = "^0.20.0-alpha"} fe-yulc = {path = "../yulc", version = "^0.20.0-alpha", optional = true, features = ["solc-backend"]} fe-analyzer = {path = "../analyzer", version = "^0.20.0-alpha"} -fe-test-utils-macros = { path = "macros", version = "0.20.0-alpha" } test-files = {path = "../test-files", package = "fe-test-files" } +fe-test-utils-macros = {path = "macros", version = "^0.20.0-alpha"} hex = "0.4" primitive-types = {version = "0.12", default-features = false, features = ["rlp"]} serde_json = "1.0.64" @@ -27,6 +27,5 @@ insta = { default-features = false, version = "1.26" } # used by ethabi, we need to force the js feature for wasm support getrandom = { version = "0.2.3", features = ["js"] } - [features] solc-backend = ["fe-yulc", "solc", "fe-driver/solc-backend"] From 82fee03c0b68319c374b34527b05a69a8cc7e80b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 5 Feb 2023 21:38:53 +0100 Subject: [PATCH 049/678] Add wasm test for new parser (#841) --- Cargo.lock | 58 +++--- crates/analyzer/src/namespace/items.rs | 4 +- crates/parser2/Cargo.toml | 7 +- .../syntax_node/exprs/{if_.fe => if.fe} | 0 .../syntax_node/exprs/{if_.snap => if.snap} | 3 +- .../syntax_node/exprs/{match_.fe => match.fe} | 0 .../exprs/{match_.snap => match.snap} | 3 +- .../syntax_node/items/{const_.fe => const.fe} | 0 .../items/{const_.snap => const.snap} | 0 .../items/{extern_.fe => extern.fe} | 0 .../items/{extern_.snap => extern.snap} | 0 .../syntax_node/items/{impl_.fe => impl.fe} | 0 .../items/{impl_.snap => impl.snap} | 0 .../syntax_node/items/{trait_.fe 
=> trait.fe} | 0 .../items/{trait_.snap => trait.snap} | 0 .../syntax_node/items/{type_.fe => type.fe} | 0 .../items/{type_.snap => type.snap} | 0 .../syntax_node/items/{use_.fe => use.fe} | 0 .../syntax_node/items/{use_.snap => use.snap} | 0 .../syntax_node/stmts/{for_.fe => for.fe} | 0 .../syntax_node/stmts/{for_.snap => for.snap} | 0 .../syntax_node/stmts/{let_.fe => let.fe} | 0 .../syntax_node/stmts/{let_.snap => let.snap} | 0 .../syntax_node/stmts/{while_.fe => while.fe} | 0 .../stmts/{while_.snap => while.snap} | 0 crates/parser2/tests/error_recovery.rs | 122 +++++++----- crates/parser2/tests/syntax_node.rs | 181 ++++++++++------- crates/parser2/tests/test_runner.rs | 80 ++++++-- crates/test-utils/Cargo.toml | 1 - crates/test-utils/macros/Cargo.toml | 12 -- crates/test-utils/macros/src/lib.rs | 185 ------------------ crates/test-utils/src/_macro_support.rs | 67 ++----- crates/test-utils/src/lib.rs | 3 - crates/tests/Cargo.toml | 2 +- 34 files changed, 302 insertions(+), 426 deletions(-) rename crates/parser2/test_files/syntax_node/exprs/{if_.fe => if.fe} (100%) rename crates/parser2/test_files/syntax_node/exprs/{if_.snap => if.snap} (98%) rename crates/parser2/test_files/syntax_node/exprs/{match_.fe => match.fe} (100%) rename crates/parser2/test_files/syntax_node/exprs/{match_.snap => match.snap} (99%) rename crates/parser2/test_files/syntax_node/items/{const_.fe => const.fe} (100%) rename crates/parser2/test_files/syntax_node/items/{const_.snap => const.snap} (100%) rename crates/parser2/test_files/syntax_node/items/{extern_.fe => extern.fe} (100%) rename crates/parser2/test_files/syntax_node/items/{extern_.snap => extern.snap} (100%) rename crates/parser2/test_files/syntax_node/items/{impl_.fe => impl.fe} (100%) rename crates/parser2/test_files/syntax_node/items/{impl_.snap => impl.snap} (100%) rename crates/parser2/test_files/syntax_node/items/{trait_.fe => trait.fe} (100%) rename crates/parser2/test_files/syntax_node/items/{trait_.snap => trait.snap} (100%) rename crates/parser2/test_files/syntax_node/items/{type_.fe => type.fe} (100%) rename crates/parser2/test_files/syntax_node/items/{type_.snap => type.snap} (100%) rename crates/parser2/test_files/syntax_node/items/{use_.fe => use.fe} (100%) rename crates/parser2/test_files/syntax_node/items/{use_.snap => use.snap} (100%) rename crates/parser2/test_files/syntax_node/stmts/{for_.fe => for.fe} (100%) rename crates/parser2/test_files/syntax_node/stmts/{for_.snap => for.snap} (100%) rename crates/parser2/test_files/syntax_node/stmts/{let_.fe => let.fe} (100%) rename crates/parser2/test_files/syntax_node/stmts/{let_.snap => let.snap} (100%) rename crates/parser2/test_files/syntax_node/stmts/{while_.fe => while.fe} (100%) rename crates/parser2/test_files/syntax_node/stmts/{while_.snap => while.snap} (100%) delete mode 100644 crates/test-utils/macros/Cargo.toml delete mode 100644 crates/test-utils/macros/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 1384dc0d0d..95b698ea52 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -417,6 +417,27 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "dir-test" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "035f15b4ae5cc91ca448fe9668799e3d9b09fd7381a7004e232769ff0efabb79" +dependencies = [ + "dir-test-macros", +] + +[[package]] +name = "dir-test-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15699f408d3f58a1259b624d3c127721a442555136933d33cf387b81f5c9c89c" +dependencies = [ + 
"glob", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "dot2" version = "0.1.0" @@ -638,7 +659,6 @@ dependencies = [ "fe-common", "fe-driver", "fe-test-files", - "fe-test-utils-macros", "fe-yulc", "getrandom 0.2.8", "hex", @@ -745,11 +765,14 @@ dependencies = [ name = "fe-parser2" version = "0.20.0-alpha" dependencies = [ + "dir-test", "fe-compiler-test-utils", "fxhash", "lazy_static", "logos", "rowan", + "wasm-bindgen", + "wasm-bindgen-test", ] [[package]] @@ -760,15 +783,6 @@ dependencies = [ "include_dir", ] -[[package]] -name = "fe-test-utils-macros" -version = "0.20.0-alpha" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "fe-yulc" version = "0.20.0-alpha" @@ -857,6 +871,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + [[package]] name = "half" version = "1.8.2" @@ -1080,12 +1100,6 @@ version = "0.2.139" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" -[[package]] -name = "libm" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" - [[package]] name = "linked-hash-map" version = "0.5.6" @@ -1195,7 +1209,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", - "libm", ] [[package]] @@ -1402,9 +1415,9 @@ dependencies = [ [[package]] name = "proptest" -version = "1.1.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29f1b898011ce9595050a68e60f90bad083ff2987a695a42357134c8381fba70" +checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5" dependencies = [ "bitflags", "byteorder", @@ -1415,7 +1428,6 @@ dependencies = [ "rand_chacha 0.3.1", "rand_xorshift", "regex-syntax", - "unarray", ] [[package]] @@ -2021,12 +2033,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "unarray" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" - [[package]] name = "unescape" version = "0.1.0" diff --git a/crates/analyzer/src/namespace/items.rs b/crates/analyzer/src/namespace/items.rs index 05a446ae3d..8179feb4bd 100644 --- a/crates/analyzer/src/namespace/items.rs +++ b/crates/analyzer/src/namespace/items.rs @@ -1665,8 +1665,8 @@ impl ImplId { || other == Type::SelfType(TraitOrType::TypeId(self.receiver(db))).id(db) } - /// Returns `true` if the `type_in_impl` can stand in for the `type_in_trait` as a type used - /// for a parameter or as a return type + /// Returns `true` if the `type_in_impl` can stand in for the + /// `type_in_trait` as a type used for a parameter or as a return type pub fn can_stand_in_for( &self, db: &dyn AnalyzerDb, diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index 42c872ba2d..5a1955b25a 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -16,4 +16,9 @@ fxhash = "0.2.1" lazy_static = "1.4.0" [dev-dependencies] -fe-compiler-test-utils = { path = "../test-utils" } \ No newline at end of file +fe-compiler-test-utils = { path = "../test-utils" } +dir-test = "0.1" +wasm-bindgen-test = "0.3" + 
+[target.'cfg(target_arch = "wasm32")'.dependencies] +wasm-bindgen = "0.2" \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/if_.fe b/crates/parser2/test_files/syntax_node/exprs/if.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/exprs/if_.fe rename to crates/parser2/test_files/syntax_node/exprs/if.fe diff --git a/crates/parser2/test_files/syntax_node/exprs/if_.snap b/crates/parser2/test_files/syntax_node/exprs/if.snap similarity index 98% rename from crates/parser2/test_files/syntax_node/exprs/if_.snap rename to crates/parser2/test_files/syntax_node/exprs/if.snap index 1513eb3dd8..7930ec57ee 100644 --- a/crates/parser2/test_files/syntax_node/exprs/if_.snap +++ b/crates/parser2/test_files/syntax_node/exprs/if.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/if.fe --- Root@0..279 IfExpr@0..15 diff --git a/crates/parser2/test_files/syntax_node/exprs/match_.fe b/crates/parser2/test_files/syntax_node/exprs/match.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/exprs/match_.fe rename to crates/parser2/test_files/syntax_node/exprs/match.fe diff --git a/crates/parser2/test_files/syntax_node/exprs/match_.snap b/crates/parser2/test_files/syntax_node/exprs/match.snap similarity index 99% rename from crates/parser2/test_files/syntax_node/exprs/match_.snap rename to crates/parser2/test_files/syntax_node/exprs/match.snap index 80a8790a4f..b5cbf36cdd 100644 --- a/crates/parser2/test_files/syntax_node/exprs/match_.snap +++ b/crates/parser2/test_files/syntax_node/exprs/match.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/match.fe --- Root@0..516 MatchExpr@0..10 diff --git a/crates/parser2/test_files/syntax_node/items/const_.fe b/crates/parser2/test_files/syntax_node/items/const.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/items/const_.fe rename to crates/parser2/test_files/syntax_node/items/const.fe diff --git a/crates/parser2/test_files/syntax_node/items/const_.snap b/crates/parser2/test_files/syntax_node/items/const.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/items/const_.snap rename to crates/parser2/test_files/syntax_node/items/const.snap diff --git a/crates/parser2/test_files/syntax_node/items/extern_.fe b/crates/parser2/test_files/syntax_node/items/extern.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/items/extern_.fe rename to crates/parser2/test_files/syntax_node/items/extern.fe diff --git a/crates/parser2/test_files/syntax_node/items/extern_.snap b/crates/parser2/test_files/syntax_node/items/extern.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/items/extern_.snap rename to crates/parser2/test_files/syntax_node/items/extern.snap diff --git a/crates/parser2/test_files/syntax_node/items/impl_.fe b/crates/parser2/test_files/syntax_node/items/impl.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/items/impl_.fe rename to crates/parser2/test_files/syntax_node/items/impl.fe diff --git a/crates/parser2/test_files/syntax_node/items/impl_.snap b/crates/parser2/test_files/syntax_node/items/impl.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/items/impl_.snap rename to 
crates/parser2/test_files/syntax_node/items/impl.snap diff --git a/crates/parser2/test_files/syntax_node/items/trait_.fe b/crates/parser2/test_files/syntax_node/items/trait.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/items/trait_.fe rename to crates/parser2/test_files/syntax_node/items/trait.fe diff --git a/crates/parser2/test_files/syntax_node/items/trait_.snap b/crates/parser2/test_files/syntax_node/items/trait.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/items/trait_.snap rename to crates/parser2/test_files/syntax_node/items/trait.snap diff --git a/crates/parser2/test_files/syntax_node/items/type_.fe b/crates/parser2/test_files/syntax_node/items/type.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/items/type_.fe rename to crates/parser2/test_files/syntax_node/items/type.fe diff --git a/crates/parser2/test_files/syntax_node/items/type_.snap b/crates/parser2/test_files/syntax_node/items/type.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/items/type_.snap rename to crates/parser2/test_files/syntax_node/items/type.snap diff --git a/crates/parser2/test_files/syntax_node/items/use_.fe b/crates/parser2/test_files/syntax_node/items/use.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/items/use_.fe rename to crates/parser2/test_files/syntax_node/items/use.fe diff --git a/crates/parser2/test_files/syntax_node/items/use_.snap b/crates/parser2/test_files/syntax_node/items/use.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/items/use_.snap rename to crates/parser2/test_files/syntax_node/items/use.snap diff --git a/crates/parser2/test_files/syntax_node/stmts/for_.fe b/crates/parser2/test_files/syntax_node/stmts/for.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/stmts/for_.fe rename to crates/parser2/test_files/syntax_node/stmts/for.fe diff --git a/crates/parser2/test_files/syntax_node/stmts/for_.snap b/crates/parser2/test_files/syntax_node/stmts/for.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/stmts/for_.snap rename to crates/parser2/test_files/syntax_node/stmts/for.snap diff --git a/crates/parser2/test_files/syntax_node/stmts/let_.fe b/crates/parser2/test_files/syntax_node/stmts/let.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/stmts/let_.fe rename to crates/parser2/test_files/syntax_node/stmts/let.fe diff --git a/crates/parser2/test_files/syntax_node/stmts/let_.snap b/crates/parser2/test_files/syntax_node/stmts/let.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/stmts/let_.snap rename to crates/parser2/test_files/syntax_node/stmts/let.snap diff --git a/crates/parser2/test_files/syntax_node/stmts/while_.fe b/crates/parser2/test_files/syntax_node/stmts/while.fe similarity index 100% rename from crates/parser2/test_files/syntax_node/stmts/while_.fe rename to crates/parser2/test_files/syntax_node/stmts/while.fe diff --git a/crates/parser2/test_files/syntax_node/stmts/while_.snap b/crates/parser2/test_files/syntax_node/stmts/while.snap similarity index 100% rename from crates/parser2/test_files/syntax_node/stmts/while_.snap rename to crates/parser2/test_files/syntax_node/stmts/while.snap diff --git a/crates/parser2/tests/error_recovery.rs b/crates/parser2/tests/error_recovery.rs index 1154c0a2ad..18635f1e91 100644 --- a/crates/parser2/tests/error_recovery.rs +++ b/crates/parser2/tests/error_recovery.rs @@ -1,64 
+1,78 @@ -use fe_parser2::{ - parser::{expr::parse_expr, item::ItemListScope, stmt::parse_stmt}, - syntax_node::SyntaxNode, -}; +use dir_test::{dir_test, Fixture}; + +use fe_compiler_test_utils::snap_test; + mod test_runner; use test_runner::*; -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/error_recovery/items", - "parser2/test_files/error_recovery/items", - test_item_list -} -fn test_item_list(input: &str) -> SyntaxNode { - let runner = TestRunner::new( - |parser| { - parser.parse(ItemListScope::default(), None); - }, - false, - ); - runner.run(input) -} -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/error_recovery/exprs", - "parser2/test_files/error_recovery/exprs", - test_expr +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/items", + glob: "*.fe" +)] +fn test_item_list(fixture: Fixture<&str>) { + let runner = TestRunner::item_list(false); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fn test_expr(input: &str) -> SyntaxNode { - let runner = TestRunner::new( - |parser| { - parser.set_newline_as_trivia(false); - bump_newlines(parser); - while parser.current_kind().is_some() { - bump_newlines(parser); - parse_expr(parser); - bump_newlines(parser); - } - }, - false, - ); - runner.run(input) +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/stmts", + glob: "*.fe" +)] +fn test_stmt(fixture: Fixture<&str>) { + let runner = TestRunner::stmt_list(false); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/error_recovery/stmts", - "parser2/test_files/error_recovery/stmts", - test_stmt +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/exprs", + glob: "*.fe" +)] +fn test_expr(fixture: Fixture<&str>) { + let runner = TestRunner::expr_list(false); + let node = format! 
{"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fn test_stmt(input: &str) -> SyntaxNode { - let runner = TestRunner::new( - |parser| { - parser.set_newline_as_trivia(false); - bump_newlines(parser); - while parser.current_kind().is_some() { - bump_newlines(parser); - parse_stmt(parser, None); - bump_newlines(parser); - } - }, - false, - ); - runner.run(input) +#[cfg(target_family = "wasm")] +mod wasm { + use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/items", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_item_list(fixture: Fixture<&str>) { + TestRunner::item_list(false).run(fixture.content()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/stmts", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_stmt(fixture: Fixture<&str>) { + TestRunner::stmt_list(false).run(fixture.content()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/exprs", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_expr(fixture: Fixture<&str>) { + TestRunner::expr_list(false).run(fixture.content()); + } } diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs index 8c645760b5..c6068ef988 100644 --- a/crates/parser2/tests/syntax_node.rs +++ b/crates/parser2/tests/syntax_node.rs @@ -1,89 +1,122 @@ -use fe_parser2::{ - parser::{expr::parse_expr, item::ItemListScope, parse_pat, stmt::parse_stmt}, - syntax_node::SyntaxNode, -}; +use dir_test::{dir_test, Fixture}; + +use fe_compiler_test_utils::snap_test; mod test_runner; use test_runner::*; -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/structs", - "parser2/test_files/syntax_node/structs", - test_item_list -} -fn test_item_list(input: &str) -> SyntaxNode { - let runner = TestRunner::new( - |parser| { - parser.parse(ItemListScope::default(), None); - }, - true, - ); - runner.run(input) +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/items", + glob: "*.fe" +)] +fn test_item_list(fixture: Fixture<&str>) { + let runner = TestRunner::item_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/pats", - "parser2/test_files/syntax_node/pats", - test_pat -} -fn test_pat(input: &str) -> SyntaxNode { - let runner = TestRunner::new( - |parser| { - while parser.current_kind().is_some() { - parse_pat(parser); - } - }, - true, - ); - runner.run(input) +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/structs", + glob: "*.fe" +)] +fn test_struct(fixture: Fixture<&str>) { + let runner = TestRunner::item_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/exprs", - "parser2/test_files/syntax_node/exprs", - test_expr +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/stmts", + glob: "*.fe" +)] +fn test_stmt(fixture: Fixture<&str>) { + let runner = TestRunner::stmt_list(true); + let node = format! 
{"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fn test_expr(input: &str) -> SyntaxNode { - let runner = TestRunner::new( - |parser| { - parser.set_newline_as_trivia(false); - bump_newlines(parser); - while parser.current_kind().is_some() { - bump_newlines(parser); - parse_expr(parser); - bump_newlines(parser); - } - }, - true, - ); - runner.run(input) +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/exprs", + glob: "*.fe" +)] +fn test_expr(fixture: Fixture<&str>) { + let runner = TestRunner::expr_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fe_compiler_test_utils::build_debug_snap_tests! { - "parser2/test_files/syntax_node/stmts", - "parser2/test_files/syntax_node/stmts", - test_stmt +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/pats", + glob: "*.fe" +)] +fn test_pat(fixture: Fixture<&str>) { + let runner = TestRunner::pat_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); } -fn test_stmt(input: &str) -> SyntaxNode { - let runner = TestRunner::new( - |parser| { - parser.set_newline_as_trivia(false); - bump_newlines(parser); - while parser.current_kind().is_some() { - bump_newlines(parser); - parse_stmt(parser, None); - bump_newlines(parser); - } - }, - true, - ); - runner.run(input) -} +#[cfg(target_family = "wasm")] +mod wasm { + use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/items", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_item_list(fixture: dir_test::Fixture<&str>) { + TestRunner::item_list(true).run(fixture.content()); + } -fe_compiler_test_utils::build_debug_snap_tests!( - "parser2/test_files/syntax_node/items", - "parser2/test_files/syntax_node/items", - test_item_list -); + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/structs", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_struct(fixture: dir_test::Fixture<&str>) { + TestRunner::item_list(true).run(fixture.content()); + } + + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/stmts", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_stmt(fixture: dir_test::Fixture<&str>) { + TestRunner::stmt_list(true).run(fixture.content()); + } + + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/exprs", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_expr(fixture: dir_test::Fixture<&str>) { + TestRunner::expr_list(true).run(fixture.content()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/pats", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_pat(fixture: Fixture<&str>) { + TestRunner::pat_list(true).run(fixture.content()); + } +} diff --git a/crates/parser2/tests/test_runner.rs b/crates/parser2/tests/test_runner.rs index 88cac2888c..bc84c5c261 100644 --- a/crates/parser2/tests/test_runner.rs +++ b/crates/parser2/tests/test_runner.rs @@ -2,25 +2,81 @@ use fe_parser2::{ lexer, - parser::{Parser, RootScope}, + parser::{ + expr::parse_expr, item::ItemListScope, parse_pat, stmt::parse_stmt, Parser, RootScope, + }, syntax_node::SyntaxNode, SyntaxKind, }; -pub struct TestRunner -where - F: Fn(&mut Parser), -{ - f: F, +type BoxedParseFn = 
Box)>; +pub struct TestRunner { + f: BoxedParseFn, should_success: bool, } -impl TestRunner -where - F: Fn(&mut Parser), -{ - pub fn new(f: F, should_success: bool) -> Self { - Self { f, should_success } +impl TestRunner { + /// Constructs a new test runner. + pub fn new(f: F, should_success: bool) -> Self + where + F: Fn(&mut Parser) + 'static, + { + Self { + f: Box::new(f), + should_success, + } + } + + /// Constructs a test runner for parsing a list of expressions. + pub fn item_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + parser.parse(ItemListScope::default(), None); + } + + Self::new(parse, should_success) + } + + /// Constructs a test runner for parsing a list of statements. + pub fn stmt_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + parser.set_newline_as_trivia(false); + + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_stmt(parser, None); + bump_newlines(parser); + } + } + + Self::new(parse, should_success) + } + + /// Constructs a test runner for parsing a list of expressions. + pub fn expr_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + parser.set_newline_as_trivia(false); + + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_expr(parser); + bump_newlines(parser); + } + } + + Self::new(parse, should_success) + } + + /// Constructs a test runner for parsing a list of patterns. + pub fn pat_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + while parser.current_kind().is_some() { + parse_pat(parser); + } + } + + Self::new(parse, should_success) } pub fn run(&self, input: &str) -> SyntaxNode { diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index e9ecb879d3..beceb82903 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -15,7 +15,6 @@ fe-driver = {path = "../driver", version = "^0.20.0-alpha"} fe-yulc = {path = "../yulc", version = "^0.20.0-alpha", optional = true, features = ["solc-backend"]} fe-analyzer = {path = "../analyzer", version = "^0.20.0-alpha"} test-files = {path = "../test-files", package = "fe-test-files" } -fe-test-utils-macros = {path = "macros", version = "^0.20.0-alpha"} hex = "0.4" primitive-types = {version = "0.12", default-features = false, features = ["rlp"]} serde_json = "1.0.64" diff --git a/crates/test-utils/macros/Cargo.toml b/crates/test-utils/macros/Cargo.toml deleted file mode 100644 index 691c864b6b..0000000000 --- a/crates/test-utils/macros/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "fe-test-utils-macros" -version = "0.20.0-alpha" -edition = "2021" - -[lib] -proc_macro = true - -[dependencies] -syn = { version = "1.0", features = ["full"] } -proc-macro2 = "1.0" -quote = "1.0" \ No newline at end of file diff --git a/crates/test-utils/macros/src/lib.rs b/crates/test-utils/macros/src/lib.rs deleted file mode 100644 index 78a677e332..0000000000 --- a/crates/test-utils/macros/src/lib.rs +++ /dev/null @@ -1,185 +0,0 @@ -use std::{ - fs, - path::{Path, PathBuf}, -}; - -use quote::quote; - -type Error = syn::Error; -type Result = syn::Result; - -#[proc_macro] -pub fn build_snap_tests(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - match expand(input) { - Ok(ts) => ts, - Err(err) => err.to_compile_error().into(), - } -} - -fn expand(input: proc_macro::TokenStream) -> Result { - let args: Args = syn::parse(input)?; - - let builder = SnapTestBuilder::from_args(args)?; - 
builder.build().map(|ts| ts.into()) -} - -struct SnapTestBuilder { - fixture_dir: PathBuf, - snapshot_dir: PathBuf, - target_fn: syn::Path, - debug_snap: bool, -} - -impl SnapTestBuilder { - fn from_args(args: Args) -> Result { - let workspace_root = cargo_workspace_dir(); - - let fixture_dir: PathBuf = workspace_root.join(args.fixture_dir.value()); - let snapshot_dir: PathBuf = workspace_root.join(args.snapshot_dir.value()); - - if !fixture_dir.is_dir() | !fixture_dir.exists() { - return Err(Error::new_spanned( - args.fixture_dir, - format! {"invalid path for `fixture_dir`: `{}` is invalid path", - fixture_dir.display()}, - )); - } else if !snapshot_dir.is_dir() { - return Err(Error::new_spanned( - args.snapshot_dir, - format! {"invalid path for `snapshot_dir`: `{}` is invalid path", - snapshot_dir.display()}, - )); - } - - Ok(Self { - fixture_dir, - snapshot_dir, - target_fn: args.target_fn, - debug_snap: args.debug_snap.value(), - }) - } - - fn build(&self) -> Result { - let mut tests = Vec::new(); - - let dir = fs::read_dir(&self.fixture_dir).unwrap(); - for fixture in dir.flatten() { - let fixture_path = fixture.path(); - if fixture_path.is_file() - && fixture_path.extension().and_then(|ext| ext.to_str()) == Some("fe") - { - tests.push(self.build_test(&fixture_path)); - } - } - - Ok(quote! { - #(#tests)* - }) - } - - fn build_test(&self, fixture_file: &Path) -> proc_macro2::TokenStream { - let file_name = fixture_file.file_name().unwrap().to_str().unwrap(); - let file_stem_name = fixture_file.file_stem().unwrap().to_str().unwrap(); - let test_fn_ident = syn::Ident::new(file_stem_name, proc_macro2::Span::call_site()); - let fixture_file = fixture_file.to_str().unwrap(); - - let snapshot_dir = self.snapshot_dir.to_str().unwrap(); - let target_fn = &self.target_fn; - let snapshot = syn::Ident::new("snapshot", proc_macro2::Span::call_site()); - let get_snapshot = if self.debug_snap { - quote! { - let #snapshot = format!("{:#?}", #target_fn(&input)); - } - } else { - quote! { - let #snapshot = format!("{}", #target_fn(&input)); - } - }; - - quote! { - #[test] - fn #test_fn_ident() { - let input = ::std::fs::read_to_string(#fixture_file).unwrap(); - #get_snapshot - let mut settings = ::fe_compiler_test_utils::_macro_support::_insta::Settings::new(); - settings.set_snapshot_path(#snapshot_dir); - settings.set_input_file(#file_name); - settings.set_prepend_module_to_snapshot(false); - - - ::fe_compiler_test_utils::_insta_assert_snapshot!{ - #snapshot, - settings - } - } - } - } -} - -// FIXME: This is quite hacky and should be removed when `span::source_file` is -// stabilized. -// See [`Tracking issue for proc_macro::Span inspection APIs #54725`](https://github.com/rust-lang/rust/issues/54725) for more information. -fn cargo_workspace_dir() -> PathBuf { - let mut cargo_workspace_dir: PathBuf = env!["CARGO_MANIFEST_DIR"].into(); - - for _ in 0..2 { - cargo_workspace_dir.pop(); - } - - cargo_workspace_dir -} -struct Args { - fixture_dir: syn::LitStr, - snapshot_dir: syn::LitStr, - target_fn: syn::Path, - debug_snap: syn::LitBool, -} - -impl syn::parse::Parse for Args { - fn parse(input: syn::parse::ParseStream) -> Result { - let error_msg = "expected `build_snap_tests! { - fixture_dir: .., - snapshot_dir: .., - target_fn: .., - debug_snap: .. 
- }`"; - - let ident = input.parse::()?; - if ident != "fixture_dir" { - return Err(Error::new_spanned(ident, error_msg)); - } - input.parse::()?; - let fixture_dir = input.parse::()?; - input.parse::()?; - - let ident = input.parse::()?; - if ident != "snapshot_dir" { - return Err(Error::new_spanned(ident, error_msg)); - } - input.parse::()?; - let snapshot_dir = input.parse::()?; - input.parse::()?; - - let ident = input.parse::()?; - if ident != "target_fn" { - return Err(Error::new_spanned(ident, error_msg)); - } - input.parse::()?; - let target_fn = input.parse::()?; - input.parse::()?; - - let ident = input.parse::()?; - if ident != "debug_snap" { - return Err(Error::new_spanned(ident, error_msg)); - } - input.parse::()?; - let debug_snap = input.parse::()?; - - Ok(Self { - fixture_dir, - snapshot_dir, - target_fn, - debug_snap, - }) - } -} diff --git a/crates/test-utils/src/_macro_support.rs b/crates/test-utils/src/_macro_support.rs index b46e00043f..13b387142a 100644 --- a/crates/test-utils/src/_macro_support.rs +++ b/crates/test-utils/src/_macro_support.rs @@ -1,35 +1,26 @@ #[doc(hidden)] pub use insta as _insta; -// NOTE: Borrowed from `insta` implementation from -// [here](https://docs.rs/insta/1.26/src/insta/macros.rs.html#2-16) -/// Utility macro to return the name of the current function. -#[doc(hidden)] +/// A macro to assert that a value matches a snapshot. +/// If the snapshot does not exist, it will be created in the same directory as +/// the test file. #[macro_export] -macro_rules! _function_name { - () => {{ - fn f() {} - fn type_name_of_val(_: T) -> &'static str { - std::any::type_name::() - } - let mut name = type_name_of_val(f).strip_suffix("::f").unwrap_or(""); - while let Some(rest) = name.strip_suffix("::{{closure}}") { - name = rest; - } - name - }}; -} +macro_rules! snap_test { + ($value:expr, $fixture_path: expr) => { + let mut settings = $crate::_macro_support::_insta::Settings::new(); + let fixture_path = ::std::path::Path::new($fixture_path); + let fixture_dir = fixture_path.parent().unwrap(); + let fixture_name = fixture_path.file_stem().unwrap().to_str().unwrap(); -#[doc(hidden)] -#[macro_export] -macro_rules! _insta_assert_snapshot { - ($value: expr, $setting: expr) => { - $setting.bind(|| { + settings.set_snapshot_path(fixture_dir); + settings.set_input_file($fixture_path); + settings.set_prepend_module_to_snapshot(false); + settings.bind(|| { $crate::_macro_support::_insta::_macro_support::assert_snapshot( $crate::_macro_support::_insta::_macro_support::AutoName.into(), &$value, env!("CARGO_MANIFEST_DIR"), - $crate::_function_name!(), + fixture_name, module_path!(), file!(), line!(), @@ -39,33 +30,3 @@ macro_rules! _insta_assert_snapshot { }) }; } - -/// Build a set of snapshot tests from a directory of fixtures. -/// `fixture_dir` and `snapshot_dir` should be relative to the workspace root. -/// `target_fn` should take `&str` and return ``. -#[macro_export] -macro_rules! build_snap_tests { - ($fixture_dir: literal, $snapshot_dir: literal, $target_fn: path) => { - fe_compiler_test_utils::_build_snap_tests! { - fixture_dir: $fixture_dir, - snapshot_dir: $snapshot_dir, - target_fn: $target_fn, - debug_snap: false - } - }; -} - -/// Build a set of snapshot tests from a directory of fixtures. -/// `fixture_dir` and `snapshot_dir` should be relative to the workspace root. -/// `target_fn` should take `&str` and return ``. -#[macro_export] -macro_rules! 
build_debug_snap_tests { - ($fixture_dir: literal, $snapshot_dir: literal, $target_fn: path) => { - fe_compiler_test_utils::_build_snap_tests! { - fixture_dir: $fixture_dir, - snapshot_dir: $snapshot_dir, - target_fn: $target_fn, - debug_snap: true - } - }; -} diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index b8f385f655..9f721601d5 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -1,9 +1,6 @@ #[doc(hidden)] pub mod _macro_support; -#[doc(hidden)] -pub use fe_test_utils_macros::build_snap_tests as _build_snap_tests; - use evm_runtime::{ExitReason, Handler}; use fe_common::diagnostics::print_diagnostics; use fe_common::utils::keccak; diff --git a/crates/tests/Cargo.toml b/crates/tests/Cargo.toml index b008d72d77..77749d658e 100644 --- a/crates/tests/Cargo.toml +++ b/crates/tests/Cargo.toml @@ -33,7 +33,7 @@ wasm-bindgen-test = "0.3.24" solc-backend = ["fe-yulc/solc-backend", "fe-compiler-test-utils/solc-backend"] [dev-dependencies.proptest] -version = "1.0.0" +version = "=1.0.0" # The default feature set includes things like process forking which are not # supported in Web Assembly. default-features = false From 027bfdf5446fad4832b812bebe1dc58b79f34c17 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 8 Feb 2023 13:14:32 +0100 Subject: [PATCH 050/678] Add `ConstGenericParam` kind --- crates/parser2/src/parser/mod.rs | 6 +- crates/parser2/src/parser/param.rs | 41 ++++++++--- crates/parser2/src/syntax_kind.rs | 4 +- .../test_files/syntax_node/items/trait.snap | 18 ++--- .../syntax_node/structs/generics.fe | 4 ++ .../syntax_node/structs/generics.snap | 68 ++++++++++++++++++- 6 files changed, 116 insertions(+), 25 deletions(-) diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 3d729e99e3..0c9f0a586c 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -544,7 +544,7 @@ define_scope! { macro_rules! define_scope { ( - $(#[$attrs: expr])* + $(#[$attrs: meta])* $visibility: vis $scope_name: ident $({ $($field: ident: $ty: ty),* })?, $kind: path, Inheritance $(($($recoveries: path), *))? @@ -570,7 +570,7 @@ macro_rules! define_scope { }; ( - $(#[$attrs: expr])* + $(#[$attrs: meta])* $visibility: vis $scope_name: ident $({ $($field: ident: $ty: ty),* })?, $kind: path, Override($($recoveries: path), *) @@ -599,7 +599,7 @@ macro_rules! define_scope { macro_rules! define_scope_struct { ( - $(#[$attrs: expr])* + $(#[$attrs: meta])* $visibility: vis $scope_name: ident { $($field: ident: $ty: ty),* }, $kind: path ) => { diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 4d52be64f6..ca7abfde63 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -109,19 +109,42 @@ define_scope! 
{ impl super::Parse for GenericParamScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - parser.bump_if(SyntaxKind::ConstKw); - + let is_const = parser.bump_if(SyntaxKind::ConstKw); + if is_const { + self.set_kind(SyntaxKind::ConstGenericParam); + } parser.with_next_expected_tokens( |parser| { - if !parser.bump_if(SyntaxKind::Ident) { - parser.error_and_recover("expected type parameter", None); - } + if is_const { + parser.with_next_expected_tokens( + |parser| { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected const parameter", None); + } + }, + &[SyntaxKind::Colon], + ); - if parser.current_kind() == Some(SyntaxKind::Colon) { - parser.parse(TypeBoundListScope::default(), None); - } + if !parser.bump_if(SyntaxKind::Colon) { + parser.error_and_recover("expected `:` after const parameter", None); + return; + } + parse_type(parser, None, false); - parser.set_newline_as_trivia(true); + parser.set_newline_as_trivia(true); + } else { + if !parser.bump_if(SyntaxKind::Ident) { + parser.error_and_recover("expected type parameter", None); + } + + if parser.current_kind() == Some(SyntaxKind::Colon) { + { + parser.parse(TypeBoundListScope::default(), None); + } + } + + parser.set_newline_as_trivia(true); + } }, &[SyntaxKind::Comma, SyntaxKind::Gt], ); diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index ab5b6fc0b5..3391d87a9f 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -418,7 +418,9 @@ pub enum SyntaxKind { /// `T` /// `T: Trait` GenericParam, - /// `` + /// `const N: usize` + ConstGenericParam, + /// `` GenericParamList, /// `(x: i32, _ y: mut i32)` diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index ca105c9cd2..e0ef9c3aaf 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/trait.fe --- Root@0..588 ItemList@0..588 @@ -42,17 +43,16 @@ Root@0..588 Ident@47..52 "Trait" Comma@52..53 "," WhiteSpace@53..54 " " - GenericParam@54..66 + ConstGenericParam@54..66 ConstKw@54..59 "const" WhiteSpace@59..60 " " Ident@60..61 "U" - TypeBoundList@61..66 - Colon@61..62 ":" - WhiteSpace@62..63 " " - TypeBound@63..66 - Path@63..66 - PathSegment@63..66 - Ident@63..66 "i32" + Colon@61..62 ":" + WhiteSpace@62..63 " " + PathType@63..66 + Path@63..66 + PathSegment@63..66 + Ident@63..66 "i32" Gt@66..67 ">" FnArgList@67..79 LParen@67..68 "(" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.fe b/crates/parser2/test_files/syntax_node/structs/generics.fe index b4bf28f6b0..26bdeba066 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.fe +++ b/crates/parser2/test_files/syntax_node/structs/generics.fe @@ -27,4 +27,8 @@ pub struct StructWithGenericParam3< x: S y: T z: U +} + +pub struct MyArr { + __inner: [T; N] } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index e731c5090e..f77f144f8a 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -1,9 +1,10 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node 
+input_file: crates/parser2/test_files/syntax_node/structs/generics.fe --- -Root@0..404 - ItemList@0..404 +Root@0..480 + ItemList@0..480 Struct@0..74 ItemModifier@0..3 PubKw@0..3 "pub" @@ -316,4 +317,65 @@ Root@0..404 Ident@401..402 "U" Newline@402..403 "\n" RBrace@403..404 "}" + Newline@404..406 "\n\n" + Struct@406..480 + ItemModifier@406..409 + PubKw@406..409 "pub" + WhiteSpace@409..410 " " + StructKw@410..416 "struct" + WhiteSpace@416..417 " " + Ident@417..422 "MyArr" + GenericParamList@422..456 + Lt@422..423 "<" + GenericParam@423..439 + Ident@423..424 "T" + TypeBoundList@424..439 + Colon@424..425 ":" + WhiteSpace@425..426 " " + TypeBound@426..439 + Path@426..439 + PathSegment@426..429 + Ident@426..429 "std" + Colon2@429..431 "::" + PathSegment@431..434 + Ident@431..434 "ops" + Colon2@434..436 "::" + PathSegment@436..439 + Ident@436..439 "Add" + Comma@439..440 "," + WhiteSpace@440..441 " " + ConstGenericParam@441..455 + ConstKw@441..446 "const" + WhiteSpace@446..447 " " + Ident@447..448 "N" + Colon@448..449 ":" + WhiteSpace@449..450 " " + PathType@450..455 + Path@450..455 + PathSegment@450..455 + Ident@450..455 "usize" + Gt@455..456 ">" + WhiteSpace@456..457 " " + RecordFieldDefList@457..480 + LBrace@457..458 "{" + Newline@458..459 "\n" + WhiteSpace@459..463 " " + RecordFieldDef@463..478 + Ident@463..470 "__inner" + Colon@470..471 ":" + WhiteSpace@471..472 " " + ArrayType@472..478 + LBracket@472..473 "[" + PathType@473..474 + Path@473..474 + PathSegment@473..474 + Ident@473..474 "T" + SemiColon@474..475 ";" + WhiteSpace@475..476 " " + Path@476..477 + PathSegment@476..477 + Ident@476..477 "N" + RBracket@477..478 "]" + Newline@478..479 "\n" + RBrace@479..480 "}" From 4182c58cbb473cc1c70442d7012d11ef4385d58a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 8 Feb 2023 16:44:03 +0100 Subject: [PATCH 051/678] Rename `SyntaxToken` to `LexicalToken` --- crates/parser2/src/lexer.rs | 4 ++-- crates/parser2/src/lib.rs | 3 ++- crates/parser2/src/parser/expr.rs | 2 +- crates/parser2/src/parser/mod.rs | 4 ++-- crates/parser2/src/parser/token_stream.rs | 4 ++-- crates/parser2/src/syntax_node.rs | 1 + 6 files changed, 10 insertions(+), 8 deletions(-) diff --git a/crates/parser2/src/lexer.rs b/crates/parser2/src/lexer.rs index 8150d2843d..28558521cc 100644 --- a/crates/parser2/src/lexer.rs +++ b/crates/parser2/src/lexer.rs @@ -1,5 +1,5 @@ use crate::{ - parser::token_stream::{SyntaxToken, TokenStream}, + parser::token_stream::{LexicalToken, TokenStream}, SyntaxKind, }; @@ -46,7 +46,7 @@ pub struct Token<'s> { text: &'s str, } -impl<'s> SyntaxToken for Token<'s> { +impl<'s> LexicalToken for Token<'s> { fn syntax_kind(&self) -> SyntaxKind { self.syntax_kind } diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index 9c6848140a..cdace9107f 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -1,12 +1,13 @@ +pub mod ast; pub mod lexer; pub mod parser; pub mod syntax_kind; pub mod syntax_node; pub use syntax_kind::SyntaxKind; +pub use syntax_node::{FeLang, SyntaxNode, SyntaxToken}; use parser::RootScope; -use syntax_node::SyntaxNode; pub type TextRange = rowan::TextRange; diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index e71743f822..0065b2815c 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -3,7 +3,7 @@ use crate::SyntaxKind; use super::{ define_scope, expr_atom, param::{CallArgListScope, GenericArgListScope}, - token_stream::{SyntaxToken, TokenStream}, + 
token_stream::{LexicalToken, TokenStream}, Checkpoint, Parser, }; diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 0c9f0a586c..fb9e7ccd70 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -6,7 +6,7 @@ use fxhash::{FxHashMap, FxHashSet}; use crate::{syntax_node::SyntaxNode, ParseError, SyntaxKind, TextRange}; -use self::token_stream::{BackTrackableTokenStream, SyntaxToken, TokenStream}; +use self::token_stream::{BackTrackableTokenStream, LexicalToken, TokenStream}; pub mod token_stream; @@ -523,7 +523,7 @@ trait TextSize { impl TextSize for T where - T: SyntaxToken, + T: LexicalToken, { fn text_size(&self) -> rowan::TextSize { rowan::TextSize::of(self.text()) diff --git a/crates/parser2/src/parser/token_stream.rs b/crates/parser2/src/parser/token_stream.rs index 7043491a85..c5e93053a3 100644 --- a/crates/parser2/src/parser/token_stream.rs +++ b/crates/parser2/src/parser/token_stream.rs @@ -7,7 +7,7 @@ use crate::SyntaxKind; /// 1. text in source file /// 2. tokens stream produced by procedural macros. pub trait TokenStream { - type Token: SyntaxToken; + type Token: LexicalToken; /// Returns the next token in the stream. fn next(&mut self) -> Option; @@ -17,7 +17,7 @@ pub trait TokenStream { } /// This trait represents a single token in the token stream. -pub trait SyntaxToken: Clone { +pub trait LexicalToken: Clone { /// Returns `SyntaxKind` of the token. fn syntax_kind(&self) -> SyntaxKind; diff --git a/crates/parser2/src/syntax_node.rs b/crates/parser2/src/syntax_node.rs index a9c9a8d3c7..74b221c90c 100644 --- a/crates/parser2/src/syntax_node.rs +++ b/crates/parser2/src/syntax_node.rs @@ -16,3 +16,4 @@ impl rowan::Language for FeLang { } pub type SyntaxNode = rowan::SyntaxNode; +pub type SyntaxToken = rowan::SyntaxToken; From ea900132543ca016285967ad93e8993886d495d4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 8 Feb 2023 16:44:33 +0100 Subject: [PATCH 052/678] Add ast for `Path` --- crates/parser2/src/ast/mod.rs | 53 +++++++++++++++++++++++ crates/parser2/src/ast/path.rs | 77 ++++++++++++++++++++++++++++++++++ 2 files changed, 130 insertions(+) create mode 100644 crates/parser2/src/ast/mod.rs create mode 100644 crates/parser2/src/ast/path.rs diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs new file mode 100644 index 0000000000..9bddaf1dd9 --- /dev/null +++ b/crates/parser2/src/ast/mod.rs @@ -0,0 +1,53 @@ +pub mod item; +pub mod param; +pub mod path; +pub mod type_; + +pub use item::*; +pub use param::*; +pub use path::*; +pub use type_::*; + +pub type AstChildren = rowan::ast::AstChildren; +pub type SyntaxText = rowan::SyntaxText; + +macro_rules! ast_node { + ( + $(#[$attrs: meta])* + $visibility: vis struct $name: ident $({ + $($field_vis: vis $field: ident: $ty: ty),* + })?, + $kind: pat + ) => { + $(#[$attrs])* + #[derive(Debug, Clone, PartialEq, Eq, Hash)] + $visibility struct $name { + __syntax: crate::SyntaxNode, + $($($field: $ty),*)? + } + + impl rowan::ast::AstNode for $name { + type Language = $crate::FeLang; + fn can_cast(node: crate::SyntaxKind) -> bool { + matches!(node, $kind) + } + fn cast(node: crate::SyntaxNode) -> Option { + Self::can_cast(node.kind()).then(|| Self{ + __syntax: node.into(), + $($($field: Default::default(),)*)? 
+ }) + } + fn syntax(&self) -> &crate::SyntaxNode { + &self.__syntax + } + } + + impl std::fmt::Display for $name { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(rowan::ast::AstNode::syntax(self), f) + } + } + }; +} + +use ast_node; diff --git a/crates/parser2/src/ast/path.rs b/crates/parser2/src/ast/path.rs new file mode 100644 index 0000000000..c36e6f69b5 --- /dev/null +++ b/crates/parser2/src/ast/path.rs @@ -0,0 +1,77 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, AstChildren}; +use crate::{syntax_node::SyntaxToken, SyntaxKind as SK}; + +ast_node! { + /// A path. + /// `foo::bar::baz` + pub struct Path, + SK::Path +} +impl Path { + /// Returns the segments of the path. + pub fn segments(&self) -> AstChildren { + support::children(self.syntax()) + } +} + +ast_node! { + /// A path segment. + pub struct PathSegment, + SK::PathSegment +} +impl PathSegment { + /// Returns the identifier of the segment. + pub fn ident(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns `true` if the segment is a `self` keyword. + pub fn is_self(&self) -> bool { + support::token(self.syntax(), SK::SelfKw).is_some() + } + + /// Returns `true` if the segment is a `Self` keyword. + pub fn is_self_ty(&self) -> bool { + support::token(self.syntax(), SK::SelfTypeKw).is_some() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + lexer::Lexer, + parser::{path::PathScope, Parser}, + }; + + fn parse_path(source: &str) -> Path { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(PathScope::default(), None); + Path::cast(parser.finish().0).unwrap() + } + + #[test] + fn path_ast() { + let source = r#"self::Foo"#; + let path = parse_path(source); + let mut segments = path.segments(); + + assert!(segments.next().unwrap().is_self()); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "Foo"); + assert!(segments.next().is_none()); + } + + #[test] + fn path_ast2() { + let source = r#"Self::Dep"#; + let path = parse_path(source); + let mut segments = path.segments(); + + assert!(segments.next().unwrap().is_self_ty()); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "Dep"); + assert!(segments.next().is_none()); + } +} From 4bf420ee1518cd63aa37eaaea20afe78f04bb934 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 8 Feb 2023 21:15:00 +0100 Subject: [PATCH 053/678] Add `ConstGenericArg` kind --- crates/parser2/src/parser/param.rs | 6 ++-- crates/parser2/src/syntax_kind.rs | 6 ++-- .../test_files/error_recovery/exprs/call.snap | 9 +++--- .../error_recovery/exprs/method.snap | 13 ++++---- .../error_recovery/items/enum_.snap | 7 +++-- .../test_files/error_recovery/items/func.snap | 7 +++-- .../error_recovery/items/impl_.snap | 11 ++++--- .../error_recovery/items/impl_trait.snap | 15 ++++----- .../error_recovery/items/struct_.snap | 7 +++-- .../error_recovery/items/trait_.snap | 27 ++++++++-------- .../error_recovery/items/type_.snap | 11 ++++--- .../test_files/syntax_node/exprs/call.snap | 15 ++++----- .../test_files/syntax_node/exprs/method.snap | 7 +++-- .../test_files/syntax_node/items/enums.snap | 11 ++++--- .../test_files/syntax_node/items/func.snap | 25 ++++++++------- .../test_files/syntax_node/items/impl.snap | 13 ++++---- .../syntax_node/items/impl_trait.snap | 31 ++++++++++--------- .../test_files/syntax_node/items/trait.snap | 16 +++++----- .../test_files/syntax_node/items/type.snap | 13 ++++---- .../syntax_node/structs/generics.snap | 24 
+++++++------- 20 files changed, 147 insertions(+), 127 deletions(-) diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index ca7abfde63..d973f10afa 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -103,7 +103,7 @@ impl super::Parse for GenericParamListScope { define_scope! { GenericParamScope, - GenericParam, + TypeGenericParam, Inheritance(Comma) } impl super::Parse for GenericParamScope { @@ -206,7 +206,7 @@ impl super::Parse for GenericArgListScope { define_scope! { GenericArgScope{ allow_bounds: bool }, - GenericArg, + TypeGenericArg, Inheritance } impl super::Parse for GenericArgScope { @@ -216,10 +216,12 @@ impl super::Parse for GenericArgScope { |parser| { match parser.current_kind() { Some(SyntaxKind::LBrace) => { + self.set_kind(SyntaxKind::ConstGenericArg); parser.parse(BlockExprScope::default(), None); } Some(kind) if kind.is_literal_leaf() => { + self.set_kind(SyntaxKind::ConstGenericArg); parser.parse(LitExprScope::default(), None); } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 3391d87a9f..c2e9f096ed 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -248,7 +248,9 @@ pub enum SyntaxKind { /// `` GenericArgList, /// `T` - GenericArg, + TypeGenericArg, + /// `1` + ConstGenericArg, /// `FOO::Bar` PathExpr, /// `Foo { x: 1, y: "String"` }` @@ -417,7 +419,7 @@ pub enum SyntaxKind { /// `T` /// `T: Trait` - GenericParam, + TypeGenericParam, /// `const N: usize` ConstGenericParam, /// `` diff --git a/crates/parser2/test_files/error_recovery/exprs/call.snap b/crates/parser2/test_files/error_recovery/exprs/call.snap index 91e5142937..192293477b 100644 --- a/crates/parser2/test_files/error_recovery/exprs/call.snap +++ b/crates/parser2/test_files/error_recovery/exprs/call.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/call.fe --- Root@0..40 CallExpr@0..16 @@ -39,14 +40,14 @@ Root@0..40 Ident@18..21 "foo" GenericArgList@21..33 Lt@21..22 "<" - GenericArg@22..25 + TypeGenericArg@22..25 PathType@22..25 Path@22..25 PathSegment@22..25 Ident@22..25 "i32" Comma@25..26 "," WhiteSpace@26..27 " " - GenericArg@27..30 + TypeGenericArg@27..30 PathType@27..28 Path@27..28 PathSegment@27..28 @@ -56,7 +57,7 @@ Root@0..40 Ident@29..30 "E" Comma@30..31 "," WhiteSpace@31..32 " " - GenericArg@32..32 + TypeGenericArg@32..32 PathType@32..32 Path@32..32 PathSegment@32..32 diff --git a/crates/parser2/test_files/error_recovery/exprs/method.snap b/crates/parser2/test_files/error_recovery/exprs/method.snap index 7546669e3f..d4c4cc985e 100644 --- a/crates/parser2/test_files/error_recovery/exprs/method.snap +++ b/crates/parser2/test_files/error_recovery/exprs/method.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/method.fe --- Root@0..78 MethodCallExpr@0..31 @@ -14,14 +15,14 @@ Root@0..78 Ident@9..12 "baz" GenericArgList@12..25 Lt@12..13 "<" - GenericArg@13..16 + TypeGenericArg@13..16 PathType@13..16 Path@13..16 PathSegment@13..16 Ident@13..16 "i32" Comma@16..17 "," WhiteSpace@17..18 " " - GenericArg@18..23 + TypeGenericArg@18..23 PathType@18..21 Path@18..21 PathSegment@18..21 @@ -30,7 +31,7 @@ Root@0..78 Error@22..23 Ident@22..23 "T" Comma@23..24 "," - GenericArg@24..24 + TypeGenericArg@24..24 PathType@24..24 
Path@24..24 PathSegment@24..24 @@ -86,14 +87,14 @@ Root@0..78 Ident@63..66 "baz" GenericArgList@66..76 Lt@66..67 "<" - GenericArg@67..70 + TypeGenericArg@67..70 PathType@67..70 Path@67..70 PathSegment@67..70 Ident@67..70 "i32" Comma@70..71 "," WhiteSpace@71..72 " " - GenericArg@72..75 + TypeGenericArg@72..75 PathType@72..75 Path@72..75 PathSegment@72..75 diff --git a/crates/parser2/test_files/error_recovery/items/enum_.snap b/crates/parser2/test_files/error_recovery/items/enum_.snap index 1d34535f84..02c56b01d5 100644 --- a/crates/parser2/test_files/error_recovery/items/enum_.snap +++ b/crates/parser2/test_files/error_recovery/items/enum_.snap @@ -1,6 +1,7 @@ --- -source: crates/parser2/tests/errro_recovery.rs -expression: snapshot +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/enum_.fe --- Root@0..63 ItemList@0..63 @@ -13,7 +14,7 @@ Root@0..63 Ident@9..15 "MyEnum" GenericParamList@15..18 Lt@15..16 "<" - GenericParam@16..17 + TypeGenericParam@16..17 Ident@16..17 "T" Gt@17..18 ">" WhiteSpace@18..19 " " diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index 5078a99599..7f467bc043 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/items/func.fe --- Root@0..133 ItemList@0..133 @@ -10,7 +11,7 @@ Root@0..133 Ident@3..6 "foo" GenericParamList@6..16 Lt@6..7 "<" - GenericParam@7..15 + TypeGenericParam@7..15 Ident@7..8 "T" TypeBoundList@8..15 Colon@8..9 ":" @@ -97,7 +98,7 @@ Root@0..133 Ident@83..86 "foo" GenericParamList@86..98 Lt@86..87 "<" - GenericParam@87..98 + TypeGenericParam@87..98 Error@87..98 Lt@87..88 "<" Lt@88..89 "<" diff --git a/crates/parser2/test_files/error_recovery/items/impl_.snap b/crates/parser2/test_files/error_recovery/items/impl_.snap index e954244bd6..1b2050e0b5 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/items/impl_.fe --- Root@0..56 ItemList@0..56 @@ -13,7 +14,7 @@ Root@0..56 Ident@5..8 "Foo" GenericArgList@8..17 Lt@8..9 "<" - GenericArg@9..10 + TypeGenericArg@9..10 PathType@9..10 Path@9..10 PathSegment@9..10 @@ -22,7 +23,7 @@ Root@0..56 WhiteSpace@11..12 " " Newline@12..13 "\n" WhiteSpace@13..17 " " - GenericArg@17..17 + TypeGenericArg@17..17 PathType@17..17 Path@17..17 PathSegment@17..17 @@ -58,14 +59,14 @@ Root@0..56 Ident@44..47 "Foo" GenericArgList@47..52 Lt@47..48 "<" - GenericArg@48..49 + TypeGenericArg@48..49 PathType@48..49 Path@48..49 PathSegment@48..49 Ident@48..49 "T" Comma@49..50 "," WhiteSpace@50..51 " " - GenericArg@51..51 + TypeGenericArg@51..51 PathType@51..51 Path@51..51 PathSegment@51..51 diff --git a/crates/parser2/test_files/error_recovery/items/impl_trait.snap b/crates/parser2/test_files/error_recovery/items/impl_trait.snap index ad5c00bc50..ea7204a75d 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_trait.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_trait.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node 
+input_file: crates/parser2/test_files/error_recovery/items/impl_trait.fe --- Root@0..90 ItemList@0..90 @@ -13,14 +14,14 @@ Root@0..90 Ident@5..6 "X" GenericArgList@6..14 Lt@6..7 "<" - GenericArg@7..8 + TypeGenericArg@7..8 PathType@7..8 Path@7..8 PathSegment@7..8 Ident@7..8 "T" Comma@8..9 "," WhiteSpace@9..10 " " - GenericArg@10..13 + TypeGenericArg@10..13 PathType@10..11 Path@10..11 PathSegment@10..11 @@ -38,7 +39,7 @@ Root@0..90 Ident@19..20 "Y" GenericArgList@20..23 Lt@20..21 "<" - GenericArg@21..23 + TypeGenericArg@21..23 PathType@21..22 Path@21..22 PathSegment@21..22 @@ -75,14 +76,14 @@ Root@0..90 Ident@43..44 "X" GenericArgList@44..50 Lt@44..45 "<" - GenericArg@45..46 + TypeGenericArg@45..46 PathType@45..46 Path@45..46 PathSegment@45..46 Ident@45..46 "T" Comma@46..47 "," WhiteSpace@47..48 " " - GenericArg@48..50 + TypeGenericArg@48..50 PathType@48..49 Path@48..49 PathSegment@48..49 @@ -98,7 +99,7 @@ Root@0..90 Ident@54..55 "Y" GenericArgList@55..58 Lt@55..56 "<" - GenericArg@56..58 + TypeGenericArg@56..58 PathType@56..57 Path@56..57 PathSegment@56..57 diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index 093c9de961..626d4d20cb 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/items/struct_.fe --- Root@0..74 ItemList@0..74 @@ -12,11 +13,11 @@ Root@0..74 Error@10..10 GenericParamList@10..16 Lt@10..11 "<" - GenericParam@11..12 + TypeGenericParam@11..12 Ident@11..12 "T" Comma@12..13 "," WhiteSpace@13..14 " " - GenericParam@14..16 + TypeGenericParam@14..16 Ident@14..15 "U" Newline@15..16 "\n" Error@16..16 diff --git a/crates/parser2/test_files/error_recovery/items/trait_.snap b/crates/parser2/test_files/error_recovery/items/trait_.snap index 7b7a047ef7..812baa1e68 100644 --- a/crates/parser2/test_files/error_recovery/items/trait_.snap +++ b/crates/parser2/test_files/error_recovery/items/trait_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/items/trait_.fe --- Root@0..133 ItemList@0..133 @@ -10,14 +11,14 @@ Root@0..133 Ident@6..9 "Foo" GenericParamList@9..16 Lt@9..10 "<" - GenericParam@10..11 + TypeGenericParam@10..11 Ident@10..11 "T" Comma@11..12 "," WhiteSpace@12..13 " " - GenericParam@13..14 + TypeGenericParam@13..14 Ident@13..14 "Y" Comma@14..15 "," - GenericParam@15..15 + TypeGenericParam@15..15 Error@15..15 Gt@15..16 ">" TraitItemList@16..18 @@ -30,11 +31,11 @@ Root@0..133 Ident@26..29 "Bar" GenericParamList@29..33 Lt@29..30 "<" - GenericParam@30..31 + TypeGenericParam@30..31 Ident@30..31 "Y" Comma@31..32 "," WhiteSpace@32..33 " " - GenericParam@33..33 + TypeGenericParam@33..33 Error@33..33 Error@33..33 Error@33..33 @@ -48,11 +49,11 @@ Root@0..133 Ident@43..46 "Bar" GenericParamList@46..51 Lt@46..47 "<" - GenericParam@47..48 + TypeGenericParam@47..48 Ident@47..48 "T" Comma@48..49 "," WhiteSpace@49..50 " " - GenericParam@50..50 + TypeGenericParam@50..50 Error@50..50 Gt@50..51 ">" Newline@51..53 "\n\n" @@ -66,14 +67,14 @@ Root@0..133 Ident@59..62 "Bar" GenericParamList@62..69 Lt@62..63 "<" - GenericParam@63..64 + TypeGenericParam@63..64 Ident@63..64 "Y" Comma@64..65 "," WhiteSpace@65..66 " " - GenericParam@66..67 + TypeGenericParam@66..67 
Ident@66..67 "T" Comma@67..68 "," - GenericParam@68..68 + TypeGenericParam@68..68 Error@68..68 Gt@68..69 ">" WhiteSpace@69..70 " " @@ -103,13 +104,13 @@ Root@0..133 Ident@93..96 "Bar" GenericParamList@96..108 Lt@96..97 "<" - GenericParam@97..99 + TypeGenericParam@97..99 Error@97..99 Lt@97..98 "<" Ident@98..99 "Y" Comma@99..100 "," WhiteSpace@100..101 " " - GenericParam@101..107 + TypeGenericParam@101..107 Ident@101..102 "K" TypeBoundList@102..107 Colon@102..103 ":" diff --git a/crates/parser2/test_files/error_recovery/items/type_.snap b/crates/parser2/test_files/error_recovery/items/type_.snap index 47a1798b44..47dfa1c535 100644 --- a/crates/parser2/test_files/error_recovery/items/type_.snap +++ b/crates/parser2/test_files/error_recovery/items/type_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/items/type_.fe --- Root@0..29 ItemList@0..29 @@ -10,11 +11,11 @@ Root@0..29 Ident@5..11 "Result" GenericParamList@11..15 Lt@11..12 "<" - GenericParam@12..13 + TypeGenericParam@12..13 Ident@12..13 "T" Comma@13..14 "," WhiteSpace@14..15 " " - GenericParam@15..15 + TypeGenericParam@15..15 Error@15..15 Error@15..15 Error@15..15 @@ -26,14 +27,14 @@ Root@0..29 Ident@17..23 "Result" GenericArgList@23..29 Lt@23..24 "<" - GenericArg@24..25 + TypeGenericArg@24..25 PathType@24..25 Path@24..25 PathSegment@24..25 Ident@24..25 "T" Comma@25..26 "," WhiteSpace@26..27 " " - GenericArg@27..28 + TypeGenericArg@27..28 PathType@27..28 Path@27..28 PathSegment@27..28 diff --git a/crates/parser2/test_files/syntax_node/exprs/call.snap b/crates/parser2/test_files/syntax_node/exprs/call.snap index d01e37b579..8c81542b88 100644 --- a/crates/parser2/test_files/syntax_node/exprs/call.snap +++ b/crates/parser2/test_files/syntax_node/exprs/call.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/call.fe --- Root@0..270 CallExpr@0..5 @@ -126,14 +127,14 @@ Root@0..270 Ident@88..91 "foo" GenericArgList@91..109 Lt@91..92 "<" - GenericArg@92..95 + TypeGenericArg@92..95 PathType@92..95 Path@92..95 PathSegment@92..95 Ident@92..95 "i32" Comma@95..96 "," WhiteSpace@96..97 " " - GenericArg@97..108 + TypeGenericArg@97..108 PathType@97..108 Path@97..108 PathSegment@97..100 @@ -166,7 +167,7 @@ Root@0..270 Ident@135..138 "foo" GenericArgList@138..157 Lt@138..139 "<" - GenericArg@139..147 + TypeGenericArg@139..147 ArrayType@139..147 LBracket@139..140 "[" PathType@140..143 @@ -180,7 +181,7 @@ Root@0..270 RBracket@146..147 "]" Comma@147..148 "," WhiteSpace@148..149 " " - GenericArg@149..156 + ConstGenericArg@149..156 BlockExpr@149..156 LBrace@149..150 "{" ExprStmt@150..155 @@ -224,14 +225,14 @@ Root@0..270 GenericArgList@251..265 Lt@251..252 "<" WhiteSpace@252..253 " " - GenericArg@253..256 + TypeGenericArg@253..256 PathType@253..256 Path@253..256 PathSegment@253..256 Ident@253..256 "i32" Comma@256..257 "," WhiteSpace@257..258 " " - GenericArg@258..263 + TypeGenericArg@258..263 TupleType@258..263 LParen@258..259 "(" PathType@259..262 diff --git a/crates/parser2/test_files/syntax_node/exprs/method.snap b/crates/parser2/test_files/syntax_node/exprs/method.snap index b76eb79e16..961f51e9fa 100644 --- a/crates/parser2/test_files/syntax_node/exprs/method.snap +++ b/crates/parser2/test_files/syntax_node/exprs/method.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node 
+input_file: crates/parser2/test_files/syntax_node/exprs/method.fe --- Root@0..75 MethodCallExpr@0..5 @@ -87,14 +88,14 @@ Root@0..75 Ident@51..52 "y" GenericArgList@52..66 Lt@52..53 "<" - GenericArg@53..56 + TypeGenericArg@53..56 PathType@53..56 Path@53..56 PathSegment@53..56 Ident@53..56 "i32" Comma@56..57 "," WhiteSpace@57..58 " " - GenericArg@58..65 + ConstGenericArg@58..65 BlockExpr@58..65 LBrace@58..59 "{" ExprStmt@59..64 diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 30b91daec5..272c86eb92 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/enums.fe --- Root@0..220 ItemList@0..220 @@ -50,7 +51,7 @@ Root@0..220 Ident@63..69 "Option" GenericParamList@69..72 Lt@69..70 "<" - GenericParam@70..71 + TypeGenericParam@70..71 Ident@70..71 "T" Gt@71..72 ">" WhiteSpace@72..73 " " @@ -97,7 +98,7 @@ Root@0..220 Ident@124..133 "BoundEnum" GenericParamList@133..162 Lt@133..134 "<" - GenericParam@134..146 + TypeGenericParam@134..146 Ident@134..135 "T" TypeBoundList@135..146 Colon@135..136 ":" @@ -116,7 +117,7 @@ Root@0..220 WhiteSpace@146..147 " " Comma@147..148 "," WhiteSpace@148..149 " " - GenericParam@149..161 + TypeGenericParam@149..161 Ident@149..150 "U" TypeBoundList@150..161 Colon@150..151 ":" @@ -147,7 +148,7 @@ Root@0..220 Ident@175..178 "Bar" GenericArgList@178..181 Lt@178..179 "<" - GenericArg@179..180 + TypeGenericArg@179..180 PathType@179..180 Path@179..180 PathSegment@179..180 diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 60a23426c5..8c0eff27e7 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/func.fe --- Root@0..361 ItemList@0..361 @@ -158,7 +159,7 @@ Root@0..361 Ident@183..192 "generics1" GenericParamList@192..205 Lt@192..193 "<" - GenericParam@193..201 + TypeGenericParam@193..201 Ident@193..194 "T" TypeBoundList@194..201 Colon@194..195 ":" @@ -169,7 +170,7 @@ Root@0..361 Ident@196..201 "Trait" Comma@201..202 "," WhiteSpace@202..203 " " - GenericParam@203..204 + TypeGenericParam@203..204 Ident@203..204 "U" Gt@204..205 ">" FnArgList@205..225 @@ -194,7 +195,7 @@ Root@0..361 Ident@215..221 "Option" GenericArgList@221..224 Lt@221..222 "<" - GenericArg@222..223 + TypeGenericArg@222..223 PathType@222..223 Path@222..223 PathSegment@222..223 @@ -219,7 +220,7 @@ Root@0..361 Ident@241..247 "Result" GenericArgList@247..250 Lt@247..248 "<" - GenericArg@248..249 + TypeGenericArg@248..249 PathType@248..249 Path@248..249 PathSegment@248..249 @@ -241,7 +242,7 @@ Root@0..361 Ident@269..275 "Option" GenericArgList@275..278 Lt@275..276 "<" - GenericArg@276..277 + TypeGenericArg@276..277 PathType@276..277 Path@276..277 PathSegment@276..277 @@ -274,11 +275,11 @@ Root@0..361 Ident@311..315 "decl" GenericParamList@315..321 Lt@315..316 "<" - GenericParam@316..317 + TypeGenericParam@316..317 Ident@316..317 "T" Comma@317..318 "," WhiteSpace@318..319 " " - GenericParam@319..320 + TypeGenericParam@319..320 Ident@319..320 "U" Gt@320..321 ">" FnArgList@321..340 @@ -293,14 +294,14 @@ 
Root@0..361 Ident@325..333 "MyStruct" GenericArgList@333..339 Lt@333..334 "<" - GenericArg@334..335 + TypeGenericArg@334..335 PathType@334..335 Path@334..335 PathSegment@334..335 Ident@334..335 "T" Comma@335..336 "," WhiteSpace@336..337 " " - GenericArg@337..338 + TypeGenericArg@337..338 PathType@337..338 Path@337..338 PathSegment@337..338 @@ -316,14 +317,14 @@ Root@0..361 Ident@344..350 "Result" GenericArgList@350..358 Lt@350..351 "<" - GenericArg@351..352 + TypeGenericArg@351..352 PathType@351..352 Path@351..352 PathSegment@351..352 Ident@351..352 "T" Comma@352..353 "," WhiteSpace@353..354 " " - GenericArg@354..357 + TypeGenericArg@354..357 PathType@354..357 Path@354..357 PathSegment@354..357 diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 0bbd47f2c7..117b72b258 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/impl.fe --- Root@0..266 ItemList@0..266 @@ -16,7 +17,7 @@ Root@0..266 Ident@10..13 "Bar" GenericArgList@13..21 Lt@13..14 "<" - GenericArg@14..20 + TypeGenericArg@14..20 PathType@14..15 Path@14..15 PathSegment@14..15 @@ -112,7 +113,7 @@ Root@0..266 Ident@141..144 "Foo" GenericArgList@144..147 Lt@144..145 "<" - GenericArg@145..146 + TypeGenericArg@145..146 PathType@145..146 Path@145..146 PathSegment@145..146 @@ -129,7 +130,7 @@ Root@0..266 Ident@155..158 "Foo" GenericArgList@158..161 Lt@158..159 "<" - GenericArg@159..160 + TypeGenericArg@159..160 PathType@159..160 Path@159..160 PathSegment@159..160 @@ -153,7 +154,7 @@ Root@0..266 Ident@178..181 "add" GenericParamList@181..192 Lt@181..182 "<" - GenericParam@182..191 + TypeGenericParam@182..191 Ident@182..183 "U" TypeBoundList@183..191 Colon@183..184 ":" @@ -164,7 +165,7 @@ Root@0..266 Ident@185..188 "Add" GenericArgList@188..191 Lt@188..189 "<" - GenericArg@189..190 + TypeGenericArg@189..190 PathType@189..190 Path@189..190 PathSegment@189..190 diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index c6210832ad..6ee01b54d6 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/impl_trait.fe --- Root@0..317 ItemList@0..317 @@ -13,7 +14,7 @@ Root@0..317 Ident@5..10 "Trait" GenericArgList@10..13 Lt@10..11 "<" - GenericArg@11..12 + TypeGenericArg@11..12 PathType@11..12 Path@11..12 PathSegment@11..12 @@ -28,7 +29,7 @@ Root@0..317 Ident@18..19 "F" GenericArgList@19..22 Lt@19..20 "<" - GenericArg@20..21 + TypeGenericArg@20..21 PathType@20..21 Path@20..21 PathSegment@20..21 @@ -71,14 +72,14 @@ Root@0..317 Ident@71..76 "Trait" GenericArgList@76..82 Lt@76..77 "<" - GenericArg@77..78 + TypeGenericArg@77..78 PathType@77..78 Path@77..78 PathSegment@77..78 Ident@77..78 "T" Comma@78..79 "," WhiteSpace@79..80 " " - GenericArg@80..81 + TypeGenericArg@80..81 PathType@80..81 Path@80..81 PathSegment@80..81 @@ -93,7 +94,7 @@ Root@0..317 Ident@87..88 "F" GenericArgList@88..91 Lt@88..89 "<" - GenericArg@89..90 + TypeGenericArg@89..90 PathType@89..90 Path@89..90 PathSegment@89..90 @@ -139,7 +140,7 @@ Root@0..317 
Ident@130..133 "foo" GenericParamList@133..151 Lt@133..134 "<" - GenericParam@134..150 + TypeGenericParam@134..150 Ident@134..135 "T" TypeBoundList@135..150 Colon@135..136 ":" @@ -150,7 +151,7 @@ Root@0..317 Ident@137..147 "OtherTrait" GenericArgList@147..150 Lt@147..148 "<" - GenericArg@148..149 + TypeGenericArg@148..149 PathType@148..149 Path@148..149 PathSegment@148..149 @@ -180,7 +181,7 @@ Root@0..317 Ident@168..180 "do_something" GenericArgList@180..185 Lt@180..181 "<" - GenericArg@181..184 + TypeGenericArg@181..184 PathType@181..184 Path@181..184 PathSegment@181..184 @@ -208,7 +209,7 @@ Root@0..317 Ident@203..208 "Trait" GenericArgList@208..226 Lt@208..209 "<" - GenericArg@209..217 + TypeGenericArg@209..217 PathType@209..210 Path@209..210 PathSegment@209..210 @@ -222,7 +223,7 @@ Root@0..317 Ident@212..217 "Clone" Comma@217..218 "," WhiteSpace@218..219 " " - GenericArg@219..225 + TypeGenericArg@219..225 PathType@219..220 Path@219..220 PathSegment@219..220 @@ -244,7 +245,7 @@ Root@0..317 Ident@231..232 "F" GenericArgList@232..240 Lt@232..233 "<" - GenericArg@233..239 + TypeGenericArg@233..239 PathType@233..234 Path@233..234 PathSegment@233..234 @@ -269,7 +270,7 @@ Root@0..317 Ident@251..254 "foo" GenericParamList@254..272 Lt@254..255 "<" - GenericParam@255..271 + TypeGenericParam@255..271 Ident@255..256 "T" TypeBoundList@256..271 Colon@256..257 ":" @@ -280,7 +281,7 @@ Root@0..317 Ident@258..268 "OtherTrait" GenericArgList@268..271 Lt@268..269 "<" - GenericArg@269..270 + TypeGenericArg@269..270 PathType@269..270 Path@269..270 PathSegment@269..270 @@ -310,7 +311,7 @@ Root@0..317 Ident@289..301 "do_something" GenericArgList@301..306 Lt@301..302 "<" - GenericArg@302..305 + TypeGenericArg@302..305 PathType@302..305 Path@302..305 PathSegment@302..305 diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index e0ef9c3aaf..833eeeeff6 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -32,7 +32,7 @@ Root@0..588 Ident@40..43 "foo" GenericParamList@43..67 Lt@43..44 "<" - GenericParam@44..52 + TypeGenericParam@44..52 Ident@44..45 "T" TypeBoundList@45..52 Colon@45..46 ":" @@ -83,7 +83,7 @@ Root@0..588 Ident@88..102 "default_method" GenericParamList@102..116 Lt@102..103 "<" - GenericParam@103..115 + TypeGenericParam@103..115 Ident@103..104 "T" TypeBoundList@104..115 Colon@104..105 ":" @@ -176,7 +176,7 @@ Root@0..588 Ident@194..197 "Add" GenericParamList@197..207 Lt@197..198 "<" - GenericParam@198..206 + TypeGenericParam@198..206 Ident@198..201 "RHS" TypeBoundList@201..206 Colon@201..202 ":" @@ -254,7 +254,7 @@ Root@0..588 Ident@299..304 "parse" GenericParamList@304..320 Lt@304..305 "<" - GenericParam@305..319 + TypeGenericParam@305..319 Ident@305..306 "S" TypeBoundList@306..319 Colon@306..307 ":" @@ -284,7 +284,7 @@ Root@0..588 Ident@343..349 "Parser" GenericArgList@349..352 Lt@349..350 "<" - GenericArg@350..351 + TypeGenericArg@350..351 PathType@350..351 Path@350..351 PathSegment@350..351 @@ -303,7 +303,7 @@ Root@0..588 Ident@362..368 "Parser" GenericArgList@368..384 Lt@368..369 "<" - GenericArg@369..383 + TypeGenericArg@369..383 PathType@369..370 Path@369..370 PathSegment@369..370 @@ -347,7 +347,7 @@ Root@0..588 Ident@418..423 "parse" GenericParamList@423..433 Lt@423..424 "<" - GenericParam@424..432 + TypeGenericParam@424..432 Ident@424..425 "T" TypeBoundList@425..432 Colon@425..426 ":" @@ -387,7 +387,7 @@ Root@0..588 Ident@470..476 "Option" 
GenericArgList@476..488 Lt@476..477 "<" - GenericArg@477..487 + TypeGenericArg@477..487 PathType@477..487 Path@477..487 PathSegment@477..487 diff --git a/crates/parser2/test_files/syntax_node/items/type.snap b/crates/parser2/test_files/syntax_node/items/type.snap index 01facad8e9..9a4a5c48ab 100644 --- a/crates/parser2/test_files/syntax_node/items/type.snap +++ b/crates/parser2/test_files/syntax_node/items/type.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/type.fe --- Root@0..98 ItemList@0..98 @@ -26,7 +27,7 @@ Root@0..98 Ident@26..32 "Result" GenericParamList@32..35 Lt@32..33 "<" - GenericParam@33..34 + TypeGenericParam@33..34 Ident@33..34 "T" Gt@34..35 ">" WhiteSpace@35..36 " " @@ -38,14 +39,14 @@ Root@0..98 Ident@38..44 "Result" GenericArgList@44..54 Lt@44..45 "<" - GenericArg@45..46 + TypeGenericArg@45..46 PathType@45..46 Path@45..46 PathSegment@45..46 Ident@45..46 "T" Comma@46..47 "," WhiteSpace@47..48 " " - GenericArg@48..53 + TypeGenericArg@48..53 PathType@48..53 Path@48..53 PathSegment@48..53 @@ -58,7 +59,7 @@ Root@0..98 Ident@61..70 "WithBound" GenericParamList@70..85 Lt@70..71 "<" - GenericParam@71..84 + TypeGenericParam@71..84 Ident@71..72 "T" TypeBoundList@72..84 Colon@72..73 ":" @@ -77,7 +78,7 @@ Root@0..98 Ident@88..95 "NoBound" GenericArgList@95..98 Lt@95..96 "<" - GenericArg@96..97 + TypeGenericArg@96..97 PathType@96..97 Path@96..97 PathSegment@96..97 diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index f77f144f8a..71a60a2e0a 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -14,15 +14,15 @@ Root@0..480 Ident@11..33 "StructWithGenericParam" GenericParamList@33..42 Lt@33..34 "<" - GenericParam@34..35 + TypeGenericParam@34..35 Ident@34..35 "S" Comma@35..36 "," WhiteSpace@36..37 " " - GenericParam@37..38 + TypeGenericParam@37..38 Ident@37..38 "T" Comma@38..39 "," WhiteSpace@39..40 " " - GenericParam@40..41 + TypeGenericParam@40..41 Ident@40..41 "U" Gt@41..42 ">" WhiteSpace@42..43 " " @@ -75,12 +75,12 @@ Root@0..480 Lt@111..112 "<" Newline@112..113 "\n" WhiteSpace@113..117 " " - GenericParam@117..118 + TypeGenericParam@117..118 Ident@117..118 "S" Comma@118..119 "," Newline@119..120 "\n" WhiteSpace@120..124 " " - GenericParam@124..137 + TypeGenericParam@124..137 Ident@124..125 "T" TypeBoundList@125..137 Colon@125..126 ":" @@ -95,7 +95,7 @@ Root@0..480 Comma@137..138 "," Newline@138..139 "\n" WhiteSpace@139..143 " " - GenericParam@143..144 + TypeGenericParam@143..144 Ident@143..144 "U" Newline@144..145 "\n" Gt@145..146 ">" @@ -159,7 +159,7 @@ Root@0..480 Lt@222..223 "<" Newline@223..224 "\n" WhiteSpace@224..228 " " - GenericParam@228..254 + TypeGenericParam@228..254 Ident@228..229 "S" TypeBoundList@229..254 Colon@229..230 ":" @@ -184,12 +184,12 @@ Root@0..480 Comma@254..255 "," Newline@255..256 "\n" WhiteSpace@256..260 " " - GenericParam@260..261 + TypeGenericParam@260..261 Ident@260..261 "T" Comma@261..262 "," Newline@262..263 "\n" WhiteSpace@263..267 " " - GenericParam@267..280 + TypeGenericParam@267..280 Ident@267..268 "U" TypeBoundList@268..280 Colon@268..269 ":" @@ -234,7 +234,7 @@ Root@0..480 Ident@316..322 "Option" GenericArgList@322..325 Lt@322..323 "<" - GenericArg@323..324 + TypeGenericArg@323..324 PathType@323..324 Path@323..324 PathSegment@323..324 @@ -262,7 +262,7 @@ 
Root@0..480 Ident@347..353 "Result" GenericArgList@353..356 Lt@353..354 "<" - GenericArg@354..355 + TypeGenericArg@354..355 PathType@354..355 Path@354..355 PathSegment@354..355 @@ -327,7 +327,7 @@ Root@0..480 Ident@417..422 "MyArr" GenericParamList@422..456 Lt@422..423 "<" - GenericParam@423..439 + TypeGenericParam@423..439 Ident@423..424 "T" TypeBoundList@424..439 Colon@424..425 ":" From 5ce3eaa844c7880708c38aefc05c3119032e6007 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 8 Feb 2023 22:05:04 +0100 Subject: [PATCH 054/678] Add ast for params --- crates/parser2/src/ast/mod.rs | 2 + crates/parser2/src/ast/param.rs | 275 ++++++++++++++++++++++++++++++++ 2 files changed, 277 insertions(+) create mode 100644 crates/parser2/src/ast/param.rs diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index 9bddaf1dd9..d3e60d05df 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -1,8 +1,10 @@ +pub mod expr; pub mod item; pub mod param; pub mod path; pub mod type_; +pub use expr::*; pub use item::*; pub use param::*; pub use path::*; diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs new file mode 100644 index 0000000000..6bbbcddabf --- /dev/null +++ b/crates/parser2/src/ast/param.rs @@ -0,0 +1,275 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, AstChildren}; +use crate::{SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A list of generic parameters. + /// `` + pub struct GenericParamList, + SK::GenericParamList +} +impl GenericParamList { + pub fn params(&self) -> AstChildren { + support::children(self.syntax()) + } +} +ast_node! { + /// A generic parameter. + /// `T` + /// `T: Trait` + /// `const N: usize` + pub struct GenericParam, + SK::TypeGenericParam | SK::ConstGenericParam +} +impl GenericParam { + /// Returns the specific kind of the generic parameter. + pub fn kind(&self) -> GenericParamKind { + match self.syntax().kind() { + SK::TypeGenericParam => { + GenericParamKind::Type(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::ConstGenericParam => { + GenericParamKind::Const(AstNode::cast(self.syntax().clone()).unwrap()) + } + _ => unreachable!(), + } + } +} + +/// A generic parameter kind. +/// `Type` is either `T` or `T: Trait`. +/// `Const` is `const N: usize`. +pub enum GenericParamKind { + Type(TypeGenericParam), + Const(ConstGenericParam), +} + +ast_node! { + /// A type generic parameter. + /// `T` + /// `T: Trait` + pub struct TypeGenericParam, + SK::TypeGenericParam +} +impl TypeGenericParam { + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + pub fn bounds(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A const generic parameter. + /// `const N: usize`. + pub struct ConstGenericParam, + SK::ConstGenericParam +} +impl ConstGenericParam { + /// Returns the name of the const generic parameter. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type of the const generic parameter. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A list of generic arguments. + /// ` AstChildren { + support::children(self.syntax()) + } +} + +ast_node! { + /// A generic argument. 
+ /// `T` + /// `T: Trait` + /// `{expr}` + /// `lit` + pub struct GenericArg, + SK::TypeGenericArg | SK::ConstGenericArg +} +impl GenericArg { + pub fn kind(&self) -> GenericArgKind { + match self.syntax().kind() { + SK::TypeGenericArg => { + GenericArgKind::Type(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::ConstGenericArg => { + GenericArgKind::Const(AstNode::cast(self.syntax().clone()).unwrap()) + } + _ => unreachable!(), + } + } +} + +ast_node! { + pub struct TypeGenericArg, + SK::TypeGenericArg +} +impl TypeGenericArg { + pub fn type_(&self) -> Option { + support::child(self.syntax()) + } + + pub fn bounds(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct ConstGenericArg, + SK::ConstGenericArg +} +impl ConstGenericArg { + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +/// A generic argument kind. +/// `Type` is either `Type` or `T: Trait`. +/// `Const` is either `{expr}` or `lit`. +pub enum GenericArgKind { + Type(TypeGenericArg), + Const(ConstGenericArg), +} + +ast_node! { + /// A type bound list. + /// `: Trait + Trait2` + pub struct TypeBoundList, + SK::TypeBoundList +} +impl TypeBoundList { + pub fn bounds(&self) -> AstChildren { + support::children(self.syntax()) + } +} + +ast_node! { + /// A type bound. + /// `Trait` + /// `Trait` + pub struct TypeBound, + SK::TypeBound +} +impl TypeBound { + /// A path of the type bound. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// A generic argument list of the type bound. + pub fn generic_args(&self) -> Option { + support::child(self.syntax()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + lexer::Lexer, + parser::{ + param::{GenericArgListScope, GenericParamListScope}, + Parser, + }, + }; + fn parse_generic_params(source: &str) -> GenericParamList { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(GenericParamListScope::default(), None); + GenericParamList::cast(parser.finish().0).unwrap() + } + + fn parse_generic_arg(source: &str) -> GenericArgList { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(GenericArgListScope::new(true), None); + GenericArgList::cast(parser.finish().0).unwrap() + } + + #[test] + fn generic_param() { + let source = r#", U, const N: usize>"#; + let gp = parse_generic_params(source); + let mut params = gp.params(); + + let GenericParamKind::Type(p1) = params.next().unwrap().kind() else { + panic!("expected type param"); + }; + assert_eq!(p1.name().unwrap().text(), "T"); + let p1_bounds = p1.bounds().unwrap(); + let mut p1_bounds = p1_bounds.bounds(); + + assert_eq!( + p1_bounds + .next() + .unwrap() + .path() + .unwrap() + .segments() + .next() + .unwrap() + .ident() + .unwrap() + .text(), + "Trait" + ); + let p1_bounds_trait2 = p1_bounds.next().unwrap(); + + assert_eq!( + p1_bounds_trait2 + .path() + .unwrap() + .segments() + .next() + .unwrap() + .ident() + .unwrap() + .text(), + "Trait2" + ); + + let GenericParamKind::Type(p2) = params.next().unwrap().kind() else { + panic!("expected type param"); + }; + assert_eq!(p2.name().unwrap().text(), "U"); + + let GenericParamKind::Const(p3) = params.next().unwrap().kind() else { + panic!("expected const param"); + }; + assert_eq!(p3.name().unwrap().text(), "N"); + assert!(p3.ty().is_some()); + } + + #[test] + fn generic_arg() { + let source = r#""#; + let ga = parse_generic_arg(source); + let mut args = ga.args(); + + let GenericArgKind::Type(a1) = 
args.next().unwrap().kind() else { + panic!("expected type arg"); + }; + assert!(a1.bounds().is_some()); + + let GenericArgKind::Const(a2) = args.next().unwrap().kind() else { + panic!("expected const arg"); + }; + assert!(a2.expr().is_some()); + } +} From 1193c5d29099bd70cefc7d31dbf1e8fc7f34a4f6 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 8 Feb 2023 22:09:41 +0100 Subject: [PATCH 055/678] Add ast for types --- crates/parser2/src/ast/type_.rs | 115 ++++++++++++++++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 crates/parser2/src/ast/type_.rs diff --git a/crates/parser2/src/ast/type_.rs b/crates/parser2/src/ast/type_.rs new file mode 100644 index 0000000000..5a83e9b5fb --- /dev/null +++ b/crates/parser2/src/ast/type_.rs @@ -0,0 +1,115 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, AstChildren}; +use crate::{SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A type node. + /// If you want to match a specific kind of type, use `[Type::kind]`. + pub struct Type, + SK::PtrType + | SK::PathType + | SK::SelfType + | SK::TupleType + | SK::ArrayType +} +impl Type { + pub fn kind(&self) -> TypeKind { + match self.syntax().kind() { + SK::PtrType => TypeKind::Ptr(AstNode::cast(self.syntax().clone()).unwrap()), + SK::PathType => TypeKind::Path(AstNode::cast(self.syntax().clone()).unwrap()), + SK::SelfType => TypeKind::SelfType(AstNode::cast(self.syntax().clone()).unwrap()), + SK::TupleType => TypeKind::Tuple(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ArrayType => TypeKind::Array(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// A pointer type. + /// `*i32` + pub struct PtrType, + SK::PtrType +} +impl PtrType { + /// Returns the `*` token. + pub fn star(&self) -> Option { + support::token(self.syntax(), SK::Star) + } + + /// Returns the type pointed to. + pub fn inner(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A path type. + /// `foo::Type` + pub struct PathType, + SK::PathType +} +impl PathType { + /// Returns the path of the type. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + pub fn generic_args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A self type. + /// `Self` + pub struct SelfType, + SK::SelfType +} +impl SelfType { + /// Returns the `Self` keyword. + pub fn self_kw(&self) -> Option { + support::token(self.syntax(), SK::SelfTypeKw) + } +} + +ast_node! { + /// A tuple type. + /// `(i32, foo::Bar)` + pub struct TupleType, + SK::TupleType +} +impl TupleType { + /// Returns the types in the tuple. + pub fn elem_tys(&self) -> AstChildren { + support::children(self.syntax()) + } +} + +ast_node! { + /// An array type. + /// `[i32; 4]` + pub struct ArrayType, + SK::ArrayType +} +impl ArrayType { + /// Returns the type of the array elements. + pub fn elem_ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the length of the array. + pub fn len(&self) -> Option { + support::child(self.syntax()) + } +} + +/// A specific kind of type. 
+pub enum TypeKind { + Ptr(PtrType), + Path(PathType), + SelfType(SelfType), + Tuple(TupleType), + Array(ArrayType), +} From fd8802aece8dd67aa6e7e29854ecf563b5dffbf4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 15:22:10 +0100 Subject: [PATCH 056/678] Rename `AttrParam` to `AttrArg` --- crates/parser2/src/parser/attr.rs | 20 +++++++++---------- crates/parser2/src/syntax_kind.rs | 4 ++-- .../test_files/syntax_node/structs/attr.snap | 7 ++++--- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs index e4426c9e36..827d2fb073 100644 --- a/crates/parser2/src/parser/attr.rs +++ b/crates/parser2/src/parser/attr.rs @@ -11,7 +11,7 @@ pub(super) fn parse_attr_list(parser: &mut Parser) -> Option< } define_scope! { - AttrListScope, + pub(crate) AttrListScope, AttrList, Override( Newline @@ -53,19 +53,19 @@ impl super::Parse for AttrScope { ); if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(AttrParamListScope::default(), None); + parser.parse(AttrArgListScope::default(), None); } } } define_scope! { - AttrParamListScope, - AttrParamList, + AttrArgListScope, + AttrArgList, Override( RParen ) } -impl super::Parse for AttrParamListScope { +impl super::Parse for AttrArgListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); if parser.bump_if(SyntaxKind::RParen) { @@ -73,12 +73,12 @@ impl super::Parse for AttrParamListScope { } parser.with_next_expected_tokens( - |parser| parser.parse(AttrParam::default(), None), + |parser| parser.parse(AttrArgScope::default(), None), &[SyntaxKind::Comma, SyntaxKind::RParen], ); while parser.bump_if(SyntaxKind::Comma) { parser.with_next_expected_tokens( - |parser| parser.parse(AttrParam::default(), None), + |parser| parser.parse(AttrArgScope::default(), None), &[SyntaxKind::Comma, SyntaxKind::RParen], ); } @@ -88,14 +88,14 @@ impl super::Parse for AttrParamListScope { } define_scope! 
{ - AttrParam, - AttrParam, + AttrArgScope, + AttrArg, Override( Comma, RParen ) } -impl super::Parse for AttrParam { +impl super::Parse for AttrArgScope { fn parse(&mut self, parser: &mut Parser) { parser.with_next_expected_tokens( |parser| parser.bump_or_recover(SyntaxKind::Ident, "Expected `key: value`", None), diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index c2e9f096ed..3abe7443b4 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -399,9 +399,9 @@ pub enum SyntaxKind { /// `#attr` Attr, /// `(key1: value1, key2: value2)` - AttrParamList, + AttrArgList, /// `key: value` - AttrParam, + AttrArg, /// `/// Comment` DocCommentAttr, AttrList, diff --git a/crates/parser2/test_files/syntax_node/structs/attr.snap b/crates/parser2/test_files/syntax_node/structs/attr.snap index ddacf555a7..2de2602739 100644 --- a/crates/parser2/test_files/syntax_node/structs/attr.snap +++ b/crates/parser2/test_files/syntax_node/structs/attr.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/structs/attr.fe --- Root@0..170 ItemList@0..170 @@ -56,9 +57,9 @@ Root@0..170 Attr@140..157 Pound@140..141 "#" Ident@141..144 "cfg" - AttrParamList@144..157 + AttrArgList@144..157 LParen@144..145 "(" - AttrParam@145..156 + AttrArg@145..156 Ident@145..151 "target" Colon@151..152 ":" WhiteSpace@152..153 " " From 000d11280add4a428a4281785d4d8b23cf538263 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 15:24:30 +0100 Subject: [PATCH 057/678] Extend `ast_node!` macro to automatically implement `IntoIterator` --- crates/parser2/src/ast/mod.rs | 47 +++++++++++++++++++++++- crates/parser2/src/ast/param.rs | 65 ++++++++++++++++++--------------- crates/parser2/src/ast/path.rs | 3 +- crates/parser2/src/ast/type_.rs | 10 +++-- 4 files changed, 90 insertions(+), 35 deletions(-) diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index d3e60d05df..f31b9b5c28 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -1,9 +1,11 @@ +pub mod attr; pub mod expr; pub mod item; pub mod param; pub mod path; pub mod type_; +pub use attr::*; pub use expr::*; pub use item::*; pub use param::*; @@ -13,13 +15,17 @@ pub use type_::*; pub type AstChildren = rowan::ast::AstChildren; pub type SyntaxText = rowan::SyntaxText; +pub mod prelude { + pub use super::{GenericArgsOwner, GenericParamsOwner}; +} + macro_rules! ast_node { ( $(#[$attrs: meta])* $visibility: vis struct $name: ident $({ $($field_vis: vis $field: ident: $ty: ty),* })?, - $kind: pat + $kind: pat $(,)? ) => { $(#[$attrs])* #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -50,6 +56,45 @@ macro_rules! ast_node { } } }; + ( + $(#[$attrs: meta])* + $visibility: vis struct $name: ident $({ + $($field_vis: vis $field: ident: $ty: ty),* + })?, + $kind: pat, + IntoIterator $(,)? + ) => { + ast_node!{ + $(#[$attrs])* + $visibility struct $name $({ + $($field_vis $field: $ty),* + })?, + $kind + } + impl IntoIterator for $name { + type Item = $item_ty; + type IntoIter = AstChildren<$item_ty>; + + fn into_iter(self) -> Self::IntoIter { + support::children(self.syntax()) + } + } + impl IntoIterator for &$name { + type Item = $item_ty; + type IntoIter = AstChildren<$item_ty>; + + fn into_iter(self) -> Self::IntoIter { + support::children(self.syntax()) + } + } + + impl $name { + /// Returns an iterator over the children of this node. 
+ pub fn iter(&self) -> AstChildren<$item_ty> { + self.into_iter() + } + } + }; } use ast_node; diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 6bbbcddabf..b1c5506c9a 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -1,26 +1,24 @@ use rowan::ast::{support, AstNode}; use super::{ast_node, AstChildren}; -use crate::{SyntaxKind as SK, SyntaxToken}; +use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; ast_node! { /// A list of generic parameters. /// `` pub struct GenericParamList, - SK::GenericParamList -} -impl GenericParamList { - pub fn params(&self) -> AstChildren { - support::children(self.syntax()) - } + SK::GenericParamList, + IntoIterator, + } + ast_node! { /// A generic parameter. /// `T` /// `T: Trait` /// `const N: usize` pub struct GenericParam, - SK::TypeGenericParam | SK::ConstGenericParam + SK::TypeGenericParam | SK::ConstGenericParam, } impl GenericParam { /// Returns the specific kind of the generic parameter. @@ -50,7 +48,7 @@ ast_node! { /// `T` /// `T: Trait` pub struct TypeGenericParam, - SK::TypeGenericParam + SK::TypeGenericParam, } impl TypeGenericParam { pub fn name(&self) -> Option { @@ -66,7 +64,7 @@ ast_node! { /// A const generic parameter. /// `const N: usize`. pub struct ConstGenericParam, - SK::ConstGenericParam + SK::ConstGenericParam, } impl ConstGenericParam { /// Returns the name of the const generic parameter. @@ -84,12 +82,9 @@ ast_node! { /// A list of generic arguments. /// ` AstChildren { - support::children(self.syntax()) - } + SK::GenericArgList, + IntoIterator, + } ast_node! { @@ -99,7 +94,7 @@ ast_node! { /// `{expr}` /// `lit` pub struct GenericArg, - SK::TypeGenericArg | SK::ConstGenericArg + SK::TypeGenericArg | SK::ConstGenericArg, } impl GenericArg { pub fn kind(&self) -> GenericArgKind { @@ -117,7 +112,7 @@ impl GenericArg { ast_node! { pub struct TypeGenericArg, - SK::TypeGenericArg + SK::TypeGenericArg, } impl TypeGenericArg { pub fn type_(&self) -> Option { @@ -131,7 +126,7 @@ impl TypeGenericArg { ast_node! { pub struct ConstGenericArg, - SK::ConstGenericArg + SK::ConstGenericArg, } impl ConstGenericArg { pub fn expr(&self) -> Option { @@ -151,12 +146,8 @@ ast_node! { /// A type bound list. /// `: Trait + Trait2` pub struct TypeBoundList, - SK::TypeBoundList -} -impl TypeBoundList { - pub fn bounds(&self) -> AstChildren { - support::children(self.syntax()) - } + SK::TypeBoundList, + IntoIterator, } ast_node! { @@ -164,7 +155,7 @@ ast_node! { /// `Trait` /// `Trait` pub struct TypeBound, - SK::TypeBound + SK::TypeBound, } impl TypeBound { /// A path of the type bound. @@ -178,6 +169,22 @@ impl TypeBound { } } +/// A trait for AST nodes that can have generic parameters. +pub trait GenericParamsOwner: AstNode { + /// Returns the generic parameter list of the node. + fn generic_params(&self) -> Option { + support::child(self.syntax()) + } +} + +/// A trait for AST nodes that can have generic arguments. +pub trait GenericArgsOwner: AstNode { + /// Returns the generic argument list of the node. 
+ fn generic_args(&self) -> Option { + support::child(self.syntax()) + } +} + #[cfg(test)] mod tests { use super::*; @@ -206,14 +213,14 @@ mod tests { fn generic_param() { let source = r#", U, const N: usize>"#; let gp = parse_generic_params(source); - let mut params = gp.params(); + let mut params = gp.into_iter(); let GenericParamKind::Type(p1) = params.next().unwrap().kind() else { panic!("expected type param"); }; assert_eq!(p1.name().unwrap().text(), "T"); let p1_bounds = p1.bounds().unwrap(); - let mut p1_bounds = p1_bounds.bounds(); + let mut p1_bounds = p1_bounds.iter(); assert_eq!( p1_bounds @@ -260,7 +267,7 @@ mod tests { fn generic_arg() { let source = r#""#; let ga = parse_generic_arg(source); - let mut args = ga.args(); + let mut args = ga.iter(); let GenericArgKind::Type(a1) = args.next().unwrap().kind() else { panic!("expected type arg"); diff --git a/crates/parser2/src/ast/path.rs b/crates/parser2/src/ast/path.rs index c36e6f69b5..baea1e3de0 100644 --- a/crates/parser2/src/ast/path.rs +++ b/crates/parser2/src/ast/path.rs @@ -7,7 +7,8 @@ ast_node! { /// A path. /// `foo::bar::baz` pub struct Path, - SK::Path + SK::Path, + IntoIterator, } impl Path { /// Returns the segments of the path. diff --git a/crates/parser2/src/ast/type_.rs b/crates/parser2/src/ast/type_.rs index 5a83e9b5fb..0d94b546e9 100644 --- a/crates/parser2/src/ast/type_.rs +++ b/crates/parser2/src/ast/type_.rs @@ -30,7 +30,7 @@ ast_node! { /// A pointer type. /// `*i32` pub struct PtrType, - SK::PtrType + SK::PtrType, } impl PtrType { /// Returns the `*` token. @@ -60,12 +60,13 @@ impl PathType { support::child(self.syntax()) } } +impl super::GenericArgsOwner for PathType {} ast_node! { /// A self type. /// `Self` pub struct SelfType, - SK::SelfType + SK::SelfType, } impl SelfType { /// Returns the `Self` keyword. @@ -78,7 +79,8 @@ ast_node! { /// A tuple type. /// `(i32, foo::Bar)` pub struct TupleType, - SK::TupleType + SK::TupleType, + IntoIterator, } impl TupleType { /// Returns the types in the tuple. @@ -91,7 +93,7 @@ ast_node! { /// An array type. /// `[i32; 4]` pub struct ArrayType, - SK::ArrayType + SK::ArrayType, } impl ArrayType { /// Returns the type of the array elements. From a0ee534295b97052c9d3dcd207cc56f91e210fec Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 15:25:03 +0100 Subject: [PATCH 058/678] Add ast for attributes --- crates/parser2/src/ast/attr.rs | 186 +++++++++++++++++++++++++++++++++ 1 file changed, 186 insertions(+) create mode 100644 crates/parser2/src/ast/attr.rs diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs new file mode 100644 index 0000000000..e02e01cb53 --- /dev/null +++ b/crates/parser2/src/ast/attr.rs @@ -0,0 +1,186 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, AstChildren}; +use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; + +ast_node! { + pub struct AttrList, + SK::AttrList, + IntoIterator, +} +impl AttrList { + /// Returns only normal attributes in the attribute list. + pub fn normal_attrs(&self) -> impl Iterator { + self.iter().filter_map(|attr| match attr.kind() { + AttrKind::Normal(attr) => Some(attr), + AttrKind::DocComment(_) => None, + }) + } + + /// Returns only doc comment attributes in the attribute list. + pub fn doc_attrs(&self) -> impl Iterator { + self.iter().filter_map(|attr| match attr.kind() { + AttrKind::Normal(_) => None, + AttrKind::DocComment(attr) => Some(attr), + }) + } +} + +ast_node! { + /// An attribute, which can be either a normal attribute or a doc comment attribute. 
+ pub struct Attr, + SK::Attr | SK::DocCommentAttr, +} +impl Attr { + /// Returns the kind of the attribute. + pub fn kind(&self) -> AttrKind { + match self.syntax().kind() { + SK::Attr => AttrKind::Normal(AstNode::cast(self.syntax().clone()).unwrap()), + SK::DocCommentAttr => { + AttrKind::DocComment(AstNode::cast(self.syntax().clone()).unwrap()) + } + _ => unreachable!(), + } + } +} + +ast_node! { + /// A normal attribute. + /// `#attr(arg1: Arg, arg2: Arg)` + pub struct NormalAttr, + SK::Attr, +} +impl NormalAttr { + /// Returns the name of the attribute. + /// `foo` in `#foo(..)` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + pub fn args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// An attribute argument list. + /// `(arg1: Arg, arg2: Arg)` in `#foo(arg1: Arg, arg2: Arg)` + pub struct AttrArgList, + SK::AttrArgList, + IntoIterator, +} + +ast_node! { + /// An Attribute argument. + /// `arg1: Arg` in `#foo(arg1: Arg, arg2: Arg)` + pub struct AttrArg, + SK::AttrArg +} +impl AttrArg { + /// Returns the key of the attribute argument. + /// `arg1` in `arg1: Arg`. + pub fn key(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the value of the attribute argument. + /// `Arg` in `arg1: Arg`. + pub fn value(&self) -> Option { + self.syntax() + .children_with_tokens() + .filter_map(|it| match it.into_token() { + Some(it) if it.kind() == SK::Ident => Some(it), + _ => None, + }) + .nth(1) + } +} + +ast_node! { + pub struct DocCommentAttr, + SK::DocCommentAttr, +} +impl DocCommentAttr { + /// Returns the underlying token of the doc comment, which includes `///`. + pub fn text(&self) -> Option { + support::token(self.syntax(), SK::DocComment) + } +} + +pub enum AttrKind { + /// A normal attribute. + Normal(NormalAttr), + /// A doc comment attribute. + DocComment(DocCommentAttr), +} + +/// A trait for AST nodes that can have an attributes. +pub trait AttrListOwner: AstNode { + /// Returns the attribute list of the node. 
+ fn attr_list(&self) -> Option { + support::child(self.syntax()) + } +} + +#[cfg(test)] +mod tests { + use crate::{ + lexer::Lexer, + parser::{attr::AttrListScope, Parser}, + }; + + use super::*; + + fn parse_attr_list(source: &str) -> AttrList { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(AttrListScope::default(), None); + AttrList::cast(parser.finish().0).unwrap() + } + + #[test] + fn attr_list() { + let source = r#" + #foo + /// Doc1 + #cfg(target: evm, abi: solidity) + /// Doc2 + "#; + let attr_list = parse_attr_list(source); + for (i, attr) in attr_list.doc_attrs().enumerate() { + match i { + 0 => assert_eq!(attr.text().unwrap().text(), "/// Doc1"), + 1 => assert_eq!(attr.text().unwrap().text(), "/// Doc2"), + _ => unreachable!(), + } + } + + for (i, attr) in attr_list.normal_attrs().enumerate() { + match i { + 0 => { + assert_eq!(attr.name().unwrap().text(), "foo"); + assert!(attr.args().is_none()); + } + + 1 => { + assert_eq!(attr.name().unwrap().text(), "cfg"); + for (i, arg) in attr.args().unwrap().iter().enumerate() { + match i { + 0 => { + assert_eq!(arg.key().unwrap().text(), "target"); + assert_eq!(arg.value().unwrap().text(), "evm"); + } + 1 => { + assert_eq!(arg.key().unwrap().text(), "abi"); + assert_eq!(arg.value().unwrap().text(), "solidity"); + } + _ => unreachable!(), + } + } + } + + _ => unreachable!(), + } + } + } +} From 080e85227e03b77e649b287611b4bf8635759863 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 17:15:05 +0100 Subject: [PATCH 059/678] Add `Lit` kind --- crates/parser2/src/parser/expr_atom.rs | 14 +-- crates/parser2/src/parser/lit.rs | 26 +++++ crates/parser2/src/parser/mod.rs | 1 + crates/parser2/src/parser/pat.rs | 12 +- crates/parser2/src/syntax_kind.rs | 5 +- .../error_recovery/exprs/array.snap | 18 ++- .../error_recovery/exprs/block.snap | 8 +- .../test_files/error_recovery/exprs/if_.snap | 11 +- .../error_recovery/exprs/index.snap | 15 ++- .../error_recovery/exprs/match_.snap | 9 +- .../error_recovery/exprs/method.snap | 12 +- .../error_recovery/items/const_.snap | 9 +- .../error_recovery/stmts/while_.snap | 12 +- .../test_files/syntax_node/exprs/array.snap | 18 ++- .../test_files/syntax_node/exprs/binop.snap | 105 ++++++++++++------ .../test_files/syntax_node/exprs/call.snap | 54 ++++++--- .../test_files/syntax_node/exprs/if.snap | 24 ++-- .../test_files/syntax_node/exprs/index.snap | 15 ++- .../test_files/syntax_node/exprs/match.snap | 21 ++-- .../test_files/syntax_node/exprs/method.snap | 21 ++-- .../syntax_node/exprs/struct_init.snap | 9 +- .../test_files/syntax_node/items/const.snap | 24 ++-- .../test_files/syntax_node/items/func.snap | 9 +- .../syntax_node/items/impl_trait.snap | 3 +- .../test_files/syntax_node/pats/lit.snap | 9 +- .../test_files/syntax_node/pats/or.snap | 24 ++-- .../syntax_node/pats/path_tuple.snap | 18 ++- .../test_files/syntax_node/pats/record.snap | 6 +- .../test_files/syntax_node/stmts/let.snap | 27 +++-- .../test_files/syntax_node/stmts/while.snap | 15 ++- 30 files changed, 373 insertions(+), 181 deletions(-) create mode 100644 crates/parser2/src/parser/lit.rs diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 8b76c00c8e..f747f81f8d 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -1,6 +1,9 @@ use rowan::Checkpoint; -use crate::{parser::path, SyntaxKind}; +use crate::{ + parser::{lit, path}, + SyntaxKind, +}; use super::{ attr::parse_attr_list, @@ -18,12 
+21,12 @@ pub(super) fn parse_expr_atom( ) -> (bool, Checkpoint) { use SyntaxKind::*; match parser.current_kind() { - Some(Int | String | TrueKw | FalseKw) => parser.parse(LitExprScope::default(), None), Some(IfKw) => parser.parse(IfExprScope::default(), None), Some(MatchKw) => parser.parse(MatchExprScope::default(), None), Some(LBrace) => parser.parse(BlockExprScope::default(), None), Some(LParen) => parser.parse(ParenScope::default(), None), Some(LBracket) => parser.parse(ArrayScope::default(), None), + Some(kind) if lit::is_lit(kind) => parser.parse(LitExprScope::default(), None), Some(kind) if path::is_path_segment(kind) => { let (success, checkpoint) = parser.parse(path::PathScope::default(), None); if success && parser.current_kind() == Some(LBrace) && allow_struct_init { @@ -177,12 +180,7 @@ impl super::Parse for MatchArmScope { define_scope! { pub(crate) LitExprScope, LitExpr, Inheritance } impl super::Parse for LitExprScope { fn parse(&mut self, parser: &mut Parser) { - match parser.current_kind() { - Some( - SyntaxKind::Int | SyntaxKind::String | SyntaxKind::TrueKw | SyntaxKind::FalseKw, - ) => parser.bump(), - _ => unreachable!(), - } + parser.parse(lit::LitScope::default(), None); } } diff --git a/crates/parser2/src/parser/lit.rs b/crates/parser2/src/parser/lit.rs new file mode 100644 index 0000000000..a9a3c43ca9 --- /dev/null +++ b/crates/parser2/src/parser/lit.rs @@ -0,0 +1,26 @@ +use crate::SyntaxKind; + +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! { + pub(crate) LitScope, + Lit, + Inheritance +} +impl super::Parse for LitScope { + fn parse(&mut self, parser: &mut Parser) { + match parser.current_kind() { + Some(kind) if is_lit(kind) => { + parser.bump(); + } + _ => parser.error_and_recover("expected literal", None), + } + } +} + +pub fn is_lit(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::Int | SyntaxKind::TrueKw | SyntaxKind::FalseKw | SyntaxKind::String + ) +} diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index fb9e7ccd70..927e697ed6 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -16,6 +16,7 @@ pub mod attr; pub mod expr; pub mod func; pub mod item; +pub mod lit; pub mod param; pub mod pat; pub mod path; diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index 05a57dcaea..5a51e4fc2d 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -1,4 +1,7 @@ -use crate::SyntaxKind; +use crate::{ + parser::lit::{is_lit, LitScope}, + SyntaxKind, +}; use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser}; @@ -8,7 +11,7 @@ pub fn parse_pat(parser: &mut Parser) -> bool { Some(Underscore) => parser.parse(WildCardPatScope::default(), None), Some(Dot2) => parser.parse(RestPatScope::default(), None), Some(LParen) => parser.parse(TuplePatScope::default(), None), - Some(Int | String) => parser.parse(LitPatScope::default(), None), + Some(kind) if is_lit(kind) => parser.parse(LitPatScope::default(), None), _ => parser.parse(PathPatScope::default(), None), }; @@ -39,10 +42,7 @@ define_scope! 
{ LitPatScope, LitPat, Inheritance(SyntaxKind::Pipe) } impl super::Parse for LitPatScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - match parser.current_kind() { - Some(SyntaxKind::Int | SyntaxKind::String) => parser.bump(), - _ => unreachable!(), - } + parser.parse(LitScope::default(), None); } } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 3abe7443b4..952b2bc058 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -1,4 +1,4 @@ -//! This module contains the definition of the `SyntaxKind`. +//! This module contains the definition of the [`SyntaxKind`]. use logos::Logos; @@ -230,6 +230,9 @@ pub enum SyntaxKind { /// `>=` GtEq, + /// `1', `false`, `"String" + Lit, + // Expressions. These are non-leaf nodes. /// { statement-list } BlockExpr, diff --git a/crates/parser2/test_files/error_recovery/exprs/array.snap b/crates/parser2/test_files/error_recovery/exprs/array.snap index 49038a9db8..fc44b5596d 100644 --- a/crates/parser2/test_files/error_recovery/exprs/array.snap +++ b/crates/parser2/test_files/error_recovery/exprs/array.snap @@ -1,33 +1,39 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/array.fe --- Root@0..19 ArrayExpr@0..11 LBracket@0..1 "[" LitExpr@1..2 - Int@1..2 "1" + Lit@1..2 + Int@1..2 "1" Comma@2..3 "," WhiteSpace@3..4 " " LitExpr@4..5 - Int@4..5 "2" + Lit@4..5 + Int@4..5 "2" WhiteSpace@5..6 " " Error@6..7 Ident@6..7 "a" Comma@7..8 "," WhiteSpace@8..9 " " LitExpr@9..10 - Int@9..10 "3" + Lit@9..10 + Int@9..10 "3" RBracket@10..11 "]" Newline@11..12 "\n" ArrayExpr@12..19 LBracket@12..13 "[" LitExpr@13..14 - Int@13..14 "1" + Lit@13..14 + Int@13..14 "1" Comma@14..15 "," WhiteSpace@15..16 " " LitExpr@16..17 - Int@16..17 "2" + Lit@16..17 + Int@16..17 "2" Comma@17..18 "," Error@18..18 RBracket@18..19 "]" diff --git a/crates/parser2/test_files/error_recovery/exprs/block.snap b/crates/parser2/test_files/error_recovery/exprs/block.snap index 95d15e556e..997d333a67 100644 --- a/crates/parser2/test_files/error_recovery/exprs/block.snap +++ b/crates/parser2/test_files/error_recovery/exprs/block.snap @@ -1,6 +1,7 @@ --- -source: crates/parser2/tests/errro_recovery.rs -expression: snapshot +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/block.fe --- Root@0..43 BlockExpr@0..43 @@ -40,7 +41,8 @@ Root@0..43 Eq@36..37 "=" WhiteSpace@37..38 " " LitExpr@38..40 - Int@38..40 "10" + Lit@38..40 + Int@38..40 "10" Newline@40..42 "\n\n" RBrace@42..43 "}" diff --git a/crates/parser2/test_files/error_recovery/exprs/if_.snap b/crates/parser2/test_files/error_recovery/exprs/if_.snap index 9e36f6254d..d8fe748040 100644 --- a/crates/parser2/test_files/error_recovery/exprs/if_.snap +++ b/crates/parser2/test_files/error_recovery/exprs/if_.snap @@ -1,6 +1,7 @@ --- -source: crates/parser2/tests/errro_recovery.rs -expression: snapshot +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/if_.fe --- Root@0..101 IfExpr@0..10 @@ -88,7 +89,8 @@ Root@0..101 WhiteSpace@75..79 " " ExprStmt@79..81 LitExpr@79..81 - Int@79..81 "10" + Lit@79..81 + Int@79..81 "10" WhiteSpace@81..85 " " Newline@85..86 "\n" ExprStmt@86..92 @@ -100,7 +102,8 @@ Root@0..101 WhiteSpace@93..97 " " ExprStmt@97..98 LitExpr@97..98 - Int@97..98 "1" + Lit@97..98 + Int@97..98 "1" 
Newline@98..99 "\n" RBrace@99..100 "}" Newline@100..101 "\n" diff --git a/crates/parser2/test_files/error_recovery/exprs/index.snap b/crates/parser2/test_files/error_recovery/exprs/index.snap index 6b93380f86..8d2c53794e 100644 --- a/crates/parser2/test_files/error_recovery/exprs/index.snap +++ b/crates/parser2/test_files/error_recovery/exprs/index.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/index.fe --- Root@0..20 IndexExpr@0..6 @@ -9,7 +10,8 @@ Root@0..20 Ident@0..1 "x" LBracket@1..2 "[" LitExpr@2..3 - Int@2..3 "1" + Lit@2..3 + Int@2..3 "1" WhiteSpace@3..4 " " Error@4..5 Ident@4..5 "a" @@ -22,12 +24,14 @@ Root@0..20 LBracket@8..9 "[" BinExpr@9..14 LitExpr@9..10 - Int@9..10 "2" + Lit@9..10 + Int@9..10 "2" WhiteSpace@10..11 " " Plus@11..12 "+" WhiteSpace@12..13 " " LitExpr@13..14 - Int@13..14 "3" + Lit@13..14 + Int@13..14 "3" Error@14..14 Error@14..14 Newline@14..15 "\n" @@ -37,6 +41,7 @@ Root@0..20 Ident@15..16 "x" LBracket@16..17 "[" LitExpr@17..19 - Int@17..19 "41" + Lit@17..19 + Int@17..19 "41" RBracket@19..20 "]" diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.snap b/crates/parser2/test_files/error_recovery/exprs/match_.snap index 0b908cafc9..d39ea9bb98 100644 --- a/crates/parser2/test_files/error_recovery/exprs/match_.snap +++ b/crates/parser2/test_files/error_recovery/exprs/match_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/match_.fe --- Root@0..94 MatchExpr@0..40 @@ -30,7 +31,8 @@ Root@0..94 FatArrow@24..26 "=>" WhiteSpace@26..27 " " LitExpr@27..31 - TrueKw@27..31 "true" + Lit@27..31 + TrueKw@27..31 "true" Newline@31..32 "\n" WhiteSpace@32..35 " " MatchArm@35..38 @@ -88,7 +90,8 @@ Root@0..94 FatArrow@70..72 "=>" WhiteSpace@72..73 " " LitExpr@73..77 - TrueKw@73..77 "true" + Lit@73..77 + TrueKw@73..77 "true" WhiteSpace@77..78 " " Error@78..79 Ident@78..79 "x" diff --git a/crates/parser2/test_files/error_recovery/exprs/method.snap b/crates/parser2/test_files/error_recovery/exprs/method.snap index d4c4cc985e..c09bdc33d3 100644 --- a/crates/parser2/test_files/error_recovery/exprs/method.snap +++ b/crates/parser2/test_files/error_recovery/exprs/method.snap @@ -41,12 +41,14 @@ Root@0..78 LParen@25..26 "(" CallArg@26..27 LitExpr@26..27 - Int@26..27 "1" + Lit@26..27 + Int@26..27 "1" Comma@27..28 "," WhiteSpace@28..29 " " CallArg@29..30 LitExpr@29..30 - Int@29..30 "2" + Lit@29..30 + Int@29..30 "2" RParen@30..31 ")" Newline@31..33 "\n\n" MethodCallExpr@33..52 @@ -62,12 +64,14 @@ Root@0..78 LParen@43..44 "(" CallArg@44..45 LitExpr@44..45 - Int@44..45 "1" + Lit@44..45 + Int@44..45 "1" Comma@45..46 "," WhiteSpace@46..47 " " CallArg@47..50 LitExpr@47..48 - Int@47..48 "2" + Lit@47..48 + Int@47..48 "2" WhiteSpace@48..49 " " Error@49..50 Ident@49..50 "E" diff --git a/crates/parser2/test_files/error_recovery/items/const_.snap b/crates/parser2/test_files/error_recovery/items/const_.snap index ebd87c8439..090df6462f 100644 --- a/crates/parser2/test_files/error_recovery/items/const_.snap +++ b/crates/parser2/test_files/error_recovery/items/const_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/items/const_.fe --- Root@0..44 ItemList@0..44 @@ -17,7 +18,8 @@ Root@0..44 Eq@8..9 "=" WhiteSpace@9..10 " " LitExpr@10..12 - 
Int@10..12 "10" + Lit@10..12 + Int@10..12 "10" Newline@12..14 "\n\n" Const@14..27 ConstKw@14..19 "const" @@ -49,5 +51,6 @@ Root@0..44 Eq@41..42 "=" WhiteSpace@42..43 " " LitExpr@43..44 - Int@43..44 "1" + Lit@43..44 + Int@43..44 "1" diff --git a/crates/parser2/test_files/error_recovery/stmts/while_.snap b/crates/parser2/test_files/error_recovery/stmts/while_.snap index 1f705ce8e5..8221a2bdf9 100644 --- a/crates/parser2/test_files/error_recovery/stmts/while_.snap +++ b/crates/parser2/test_files/error_recovery/stmts/while_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/stmts/while_.fe --- Root@0..56 WhileStmt@0..10 @@ -17,7 +18,8 @@ Root@0..56 WhileKw@12..17 "while" WhiteSpace@17..18 " " LitExpr@18..22 - TrueKw@18..22 "true" + Lit@18..22 + TrueKw@18..22 "true" WhiteSpace@22..23 " " BlockExpr@23..36 LBrace@23..24 "{" @@ -32,7 +34,8 @@ Root@0..56 Plus@31..32 "+" WhiteSpace@32..33 " " LitExpr@33..34 - Int@33..34 "1" + Lit@33..34 + Int@33..34 "1" Newline@34..35 "\n" RBrace@35..36 "}" ExprStmt@36..37 @@ -45,7 +48,8 @@ Root@0..56 WhileKw@43..48 "while" WhiteSpace@48..49 " " LitExpr@49..53 - TrueKw@49..53 "true" + Lit@49..53 + TrueKw@49..53 "true" WhiteSpace@53..54 " " BlockExpr@54..56 LBrace@54..55 "{" diff --git a/crates/parser2/test_files/syntax_node/exprs/array.snap b/crates/parser2/test_files/syntax_node/exprs/array.snap index 8eb26cd584..5514973774 100644 --- a/crates/parser2/test_files/syntax_node/exprs/array.snap +++ b/crates/parser2/test_files/syntax_node/exprs/array.snap @@ -1,12 +1,14 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/array.fe --- Root@0..20 ArrayExpr@0..12 LBracket@0..1 "[" LitExpr@1..2 - Int@1..2 "1" + Lit@1..2 + Int@1..2 "1" Comma@2..3 "," WhiteSpace@3..4 " " BlockExpr@4..11 @@ -14,22 +16,26 @@ Root@0..20 ExprStmt@5..10 BinExpr@5..10 LitExpr@5..6 - Int@5..6 "1" + Lit@5..6 + Int@5..6 "1" WhiteSpace@6..7 " " Plus@7..8 "+" WhiteSpace@8..9 " " LitExpr@9..10 - Int@9..10 "2" + Lit@9..10 + Int@9..10 "2" RBrace@10..11 "}" RBracket@11..12 "]" Newline@12..13 "\n" ArrayRepExpr@13..20 LBracket@13..14 "[" LitExpr@14..15 - Int@14..15 "1" + Lit@14..15 + Int@14..15 "1" SemiColon@15..16 ";" WhiteSpace@16..17 " " LitExpr@17..19 - Int@17..19 "16" + Lit@17..19 + Int@17..19 "16" RBracket@19..20 "]" diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.snap b/crates/parser2/test_files/syntax_node/exprs/binop.snap index 34f7f736de..9b970f7ebd 100644 --- a/crates/parser2/test_files/syntax_node/exprs/binop.snap +++ b/crates/parser2/test_files/syntax_node/exprs/binop.snap @@ -1,50 +1,60 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/binop.fe --- Root@0..164 BinExpr@0..9 LitExpr@0..1 - Int@0..1 "1" + Lit@0..1 + Int@0..1 "1" WhiteSpace@1..2 " " Plus@2..3 "+" WhiteSpace@3..4 " " BinExpr@4..9 LitExpr@4..5 - Int@4..5 "2" + Lit@4..5 + Int@4..5 "2" WhiteSpace@5..6 " " Star@6..7 "*" WhiteSpace@7..8 " " LitExpr@8..9 - Int@8..9 "3" + Lit@8..9 + Int@8..9 "3" Newline@9..10 "\n" BinExpr@10..19 BinExpr@10..15 LitExpr@10..11 - Int@10..11 "1" + Lit@10..11 + Int@10..11 "1" WhiteSpace@11..12 " " Star@12..13 "*" WhiteSpace@13..14 " " LitExpr@14..15 - Int@14..15 "2" + Lit@14..15 + Int@14..15 "2" WhiteSpace@15..16 " " Plus@16..17 "+" WhiteSpace@17..18 " " LitExpr@18..19 - Int@18..19 "3" + 
Lit@18..19 + Int@18..19 "3" Newline@19..20 "\n" BinExpr@20..25 LitExpr@20..21 - Int@20..21 "1" + Lit@20..21 + Int@20..21 "1" WhiteSpace@21..22 " " Lt@22..23 "<" WhiteSpace@23..24 " " LitExpr@24..25 - Int@24..25 "2" + Lit@24..25 + Int@24..25 "2" Newline@25..26 "\n" BinExpr@26..37 LitExpr@26..27 - Int@26..27 "1" + Lit@26..27 + Int@26..27 "1" WhiteSpace@27..28 " " Lt@28..29 "<" WhiteSpace@29..30 " " @@ -52,17 +62,20 @@ Root@0..164 LParen@30..31 "(" BinExpr@31..36 LitExpr@31..32 - Int@31..32 "2" + Lit@31..32 + Int@31..32 "2" WhiteSpace@32..33 " " Plus@33..34 "+" WhiteSpace@34..35 " " LitExpr@35..36 - Int@35..36 "3" + Lit@35..36 + Int@35..36 "3" RParen@36..37 ")" Newline@37..38 "\n" BinExpr@38..48 LitExpr@38..39 - Int@38..39 "1" + Lit@38..39 + Int@38..39 "1" WhiteSpace@39..40 " " Lt@40..41 "<" WhiteSpace@41..42 " " @@ -80,56 +93,66 @@ Root@0..164 Newline@48..49 "\n" BinExpr@49..55 LitExpr@49..50 - Int@49..50 "1" + Lit@49..50 + Int@49..50 "1" WhiteSpace@50..51 " " LtEq@51..53 Lt@51..52 "<" Eq@52..53 "=" WhiteSpace@53..54 " " LitExpr@54..55 - Int@54..55 "2" + Lit@54..55 + Int@54..55 "2" Newline@55..56 "\n" BinExpr@56..62 LitExpr@56..57 - Int@56..57 "1" + Lit@56..57 + Int@56..57 "1" WhiteSpace@57..58 " " GtEq@58..60 Gt@58..59 ">" Eq@59..60 "=" WhiteSpace@60..61 " " LitExpr@61..62 - Int@61..62 "2" + Lit@61..62 + Int@61..62 "2" Newline@62..63 "\n" BinExpr@63..85 LitExpr@63..67 - TrueKw@63..67 "true" + Lit@63..67 + TrueKw@63..67 "true" WhiteSpace@67..68 " " Pipe2@68..70 "||" WhiteSpace@70..71 " " BinExpr@71..85 LitExpr@71..76 - FalseKw@71..76 "false" + Lit@71..76 + FalseKw@71..76 "false" WhiteSpace@76..77 " " Amp2@77..79 "&&" WhiteSpace@79..80 " " BinExpr@80..85 LitExpr@80..81 - Int@80..81 "1" + Lit@80..81 + Int@80..81 "1" WhiteSpace@81..82 " " Lt@82..83 "<" WhiteSpace@83..84 " " LitExpr@84..85 - Int@84..85 "2" + Lit@84..85 + Int@84..85 "2" Newline@85..86 "\n" BinExpr@86..118 LitExpr@86..90 - TrueKw@86..90 "true" + Lit@86..90 + TrueKw@86..90 "true" WhiteSpace@90..91 " " Pipe2@91..93 "||" WhiteSpace@93..94 " " BinExpr@94..118 LitExpr@94..99 - FalseKw@94..99 "false" + Lit@94..99 + FalseKw@94..99 "false" WhiteSpace@99..100 " " Amp2@100..102 "&&" WhiteSpace@102..103 " " @@ -138,24 +161,28 @@ Root@0..164 LParen@103..104 "(" BinExpr@104..109 LitExpr@104..105 - Int@104..105 "1" + Lit@104..105 + Int@104..105 "1" WhiteSpace@105..106 " " Lt@106..107 "<" WhiteSpace@107..108 " " LitExpr@108..109 - Int@108..109 "2" + Lit@108..109 + Int@108..109 "2" RParen@109..110 ")" WhiteSpace@110..111 " " Gt@111..112 ">" WhiteSpace@112..113 " " BinExpr@113..118 LitExpr@113..114 - Int@113..114 "3" + Lit@113..114 + Int@113..114 "3" WhiteSpace@114..115 " " Hat@115..116 "^" WhiteSpace@116..117 " " LitExpr@117..118 - Int@117..118 "2" + Lit@117..118 + Int@117..118 "2" Newline@118..119 "\n" BinExpr@119..130 Path@119..120 @@ -166,46 +193,54 @@ Root@0..164 WhiteSpace@123..124 " " BinExpr@124..130 LitExpr@124..125 - Int@124..125 "2" + Lit@124..125 + Int@124..125 "2" WhiteSpace@125..126 " " Star2@126..128 "**" WhiteSpace@128..129 " " LitExpr@129..130 - Int@129..130 "3" + Lit@129..130 + Int@129..130 "3" Newline@130..131 "\n" BinExpr@131..140 BinExpr@131..136 LitExpr@131..132 - Int@131..132 "1" + Lit@131..132 + Int@131..132 "1" WhiteSpace@132..133 " " Minus@133..134 "-" WhiteSpace@134..135 " " LitExpr@135..136 - Int@135..136 "2" + Lit@135..136 + Int@135..136 "2" WhiteSpace@136..137 " " Minus@137..138 "-" WhiteSpace@138..139 " " LitExpr@139..140 - Int@139..140 "3" + Lit@139..140 + Int@139..140 "3" Newline@140..141 "\n" BinExpr@141..152 
BinExpr@141..147 LitExpr@141..142 - Int@141..142 "1" + Lit@141..142 + Int@141..142 "1" WhiteSpace@142..143 " " LShift@143..145 Lt@143..144 "<" Lt@144..145 "<" WhiteSpace@145..146 " " LitExpr@146..147 - Int@146..147 "3" + Lit@146..147 + Int@146..147 "3" WhiteSpace@147..148 " " RShift@148..150 Gt@148..149 ">" Gt@149..150 ">" WhiteSpace@150..151 " " LitExpr@151..152 - Int@151..152 "2" + Lit@151..152 + Int@151..152 "2" Newline@152..153 "\n" FieldExpr@153..158 FieldExpr@153..156 diff --git a/crates/parser2/test_files/syntax_node/exprs/call.snap b/crates/parser2/test_files/syntax_node/exprs/call.snap index 8c81542b88..182591fe7b 100644 --- a/crates/parser2/test_files/syntax_node/exprs/call.snap +++ b/crates/parser2/test_files/syntax_node/exprs/call.snap @@ -34,7 +34,8 @@ Root@0..270 Colon@22..23 ":" WhiteSpace@23..24 " " LitExpr@24..25 - Int@24..25 "1" + Lit@24..25 + Int@24..25 "1" Comma@25..26 "," WhiteSpace@26..27 " " CallArg@27..31 @@ -42,7 +43,8 @@ Root@0..270 Colon@28..29 ":" WhiteSpace@29..30 " " LitExpr@30..31 - Int@30..31 "3" + Lit@30..31 + Int@30..31 "3" RParen@31..32 ")" Newline@32..33 "\n" CallExpr@33..48 @@ -56,7 +58,8 @@ Root@0..270 Colon@38..39 ":" WhiteSpace@39..40 " " LitExpr@40..41 - Int@40..41 "1" + Lit@40..41 + Int@40..41 "1" Comma@41..42 "," WhiteSpace@42..43 " " CallArg@43..47 @@ -64,7 +67,8 @@ Root@0..270 Colon@44..45 ":" WhiteSpace@45..46 " " LitExpr@46..47 - Int@46..47 "3" + Lit@46..47 + Int@46..47 "3" RParen@47..48 ")" Newline@48..49 "\n" CallExpr@49..67 @@ -78,12 +82,14 @@ Root@0..270 Colon@54..55 ":" WhiteSpace@55..56 " " LitExpr@56..57 - Int@56..57 "1" + Lit@56..57 + Int@56..57 "1" Comma@57..58 "," WhiteSpace@58..59 " " CallArg@59..60 LitExpr@59..60 - Int@59..60 "2" + Lit@59..60 + Int@59..60 "2" Comma@60..61 "," WhiteSpace@61..62 " " CallArg@62..66 @@ -91,7 +97,8 @@ Root@0..270 Colon@63..64 ":" WhiteSpace@64..65 " " LitExpr@65..66 - Int@65..66 "3" + Lit@65..66 + Int@65..66 "3" RParen@66..67 ")" Newline@67..68 "\n" CallExpr@68..86 @@ -102,7 +109,8 @@ Root@0..270 LParen@71..72 "(" CallArg@72..73 LitExpr@72..73 - Int@72..73 "1" + Lit@72..73 + Int@72..73 "1" Comma@73..74 "," WhiteSpace@74..75 " " CallArg@75..79 @@ -110,7 +118,8 @@ Root@0..270 Colon@76..77 ":" WhiteSpace@77..78 " " LitExpr@78..79 - Int@78..79 "2" + Lit@78..79 + Int@78..79 "2" Comma@79..80 "," WhiteSpace@80..81 " " CallArg@81..85 @@ -118,7 +127,8 @@ Root@0..270 Colon@82..83 ":" WhiteSpace@83..84 " " LitExpr@84..85 - Int@84..85 "3" + Lit@84..85 + Int@84..85 "3" RParen@85..86 ")" Newline@86..88 "\n\n" CallExpr@88..134 @@ -150,7 +160,8 @@ Root@0..270 Colon@114..115 ":" WhiteSpace@115..116 " " LitExpr@116..117 - Int@116..117 "2" + Lit@116..117 + Int@116..117 "2" Comma@117..118 "," WhiteSpace@118..119 " " CallArg@119..133 @@ -158,7 +169,8 @@ Root@0..270 Colon@123..124 ":" WhiteSpace@124..125 " " LitExpr@125..133 - String@125..133 "\"String\"" + Lit@125..133 + String@125..133 "\"String\"" RParen@133..134 ")" Newline@134..135 "\n" CallExpr@135..169 @@ -177,7 +189,8 @@ Root@0..270 SemiColon@143..144 ";" WhiteSpace@144..145 " " LitExpr@145..146 - Int@145..146 "1" + Lit@145..146 + Int@145..146 "1" RBracket@146..147 "]" Comma@147..148 "," WhiteSpace@148..149 " " @@ -187,12 +200,14 @@ Root@0..270 ExprStmt@150..155 BinExpr@150..155 LitExpr@150..151 - Int@150..151 "3" + Lit@150..151 + Int@150..151 "3" WhiteSpace@151..152 " " Plus@152..153 "+" WhiteSpace@153..154 " " LitExpr@154..155 - Int@154..155 "4" + Lit@154..155 + Int@154..155 "4" RBrace@155..156 "}" Gt@156..157 ">" CallArgList@157..169 @@ -202,7 +217,8 @@ 
Root@0..270 Colon@159..160 ":" WhiteSpace@160..161 " " LitExpr@161..162 - Int@161..162 "1" + Lit@161..162 + Int@161..162 "1" Comma@162..163 "," WhiteSpace@163..164 " " CallArg@164..168 @@ -210,7 +226,8 @@ Root@0..270 Colon@165..166 ":" WhiteSpace@166..167 " " LitExpr@167..168 - Int@167..168 "2" + Lit@167..168 + Int@167..168 "2" RParen@168..169 ")" Newline@169..171 "\n\n" Comment@171..245 "// Ths should be pars ..." @@ -247,7 +264,8 @@ Root@0..270 LParen@266..267 "(" CallArg@267..268 LitExpr@267..268 - Int@267..268 "1" + Lit@267..268 + Int@267..268 "1" RParen@268..269 ")" RParen@269..270 ")" diff --git a/crates/parser2/test_files/syntax_node/exprs/if.snap b/crates/parser2/test_files/syntax_node/exprs/if.snap index 7930ec57ee..ee28efb4a2 100644 --- a/crates/parser2/test_files/syntax_node/exprs/if.snap +++ b/crates/parser2/test_files/syntax_node/exprs/if.snap @@ -49,7 +49,8 @@ Root@0..279 Eq@42..43 "=" WhiteSpace@43..44 " " LitExpr@44..45 - Int@44..45 "1" + Lit@44..45 + Int@44..45 "1" Newline@45..46 "\n" WhiteSpace@46..50 " " ExprStmt@50..51 @@ -81,7 +82,8 @@ Root@0..279 Eq@72..73 "=" WhiteSpace@73..74 " " LitExpr@74..75 - Int@74..75 "1" + Lit@74..75 + Int@74..75 "1" Newline@75..76 "\n" WhiteSpace@76..80 " " ExprStmt@80..81 @@ -119,7 +121,8 @@ Root@0..279 Eq@110..111 "=" WhiteSpace@111..112 " " LitExpr@112..113 - Int@112..113 "1" + Lit@112..113 + Int@112..113 "1" Newline@113..114 "\n" WhiteSpace@114..118 " " ExprStmt@118..119 @@ -151,7 +154,8 @@ Root@0..279 Eq@140..141 "=" WhiteSpace@141..142 " " LitExpr@142..143 - Int@142..143 "1" + Lit@142..143 + Int@142..143 "1" Newline@143..144 "\n" WhiteSpace@144..148 " " ExprStmt@148..149 @@ -178,7 +182,8 @@ Root@0..279 Eq@169..170 "=" WhiteSpace@170..171 " " LitExpr@171..172 - Int@171..172 "1" + Lit@171..172 + Int@171..172 "1" Newline@172..173 "\n" WhiteSpace@173..177 " " ExprStmt@177..178 @@ -214,7 +219,8 @@ Root@0..279 FatArrow@213..215 "=>" WhiteSpace@215..216 " " LitExpr@216..220 - TrueKw@216..220 "true" + Lit@216..220 + TrueKw@216..220 "true" Newline@220..221 "\n" WhiteSpace@221..225 " " MatchArm@225..246 @@ -229,7 +235,8 @@ Root@0..279 FatArrow@238..240 "=>" WhiteSpace@240..241 " " LitExpr@241..246 - FalseKw@241..246 "false" + Lit@241..246 + FalseKw@241..246 "false" Newline@246..247 "\n" RBrace@247..248 "}" WhiteSpace@248..249 " " @@ -251,7 +258,8 @@ Root@0..279 WhiteSpace@272..276 " " ExprStmt@276..277 LitExpr@276..277 - Int@276..277 "1" + Lit@276..277 + Int@276..277 "1" Newline@277..278 "\n" RBrace@278..279 "}" diff --git a/crates/parser2/test_files/syntax_node/exprs/index.snap b/crates/parser2/test_files/syntax_node/exprs/index.snap index b9dd501fce..32eb3ccaa3 100644 --- a/crates/parser2/test_files/syntax_node/exprs/index.snap +++ b/crates/parser2/test_files/syntax_node/exprs/index.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/index.fe --- Root@0..23 IndexExpr@0..8 @@ -10,12 +11,14 @@ Root@0..23 LBracket@1..2 "[" BinExpr@2..7 LitExpr@2..3 - Int@2..3 "1" + Lit@2..3 + Int@2..3 "1" WhiteSpace@3..4 " " Plus@4..5 "+" WhiteSpace@5..6 " " LitExpr@6..7 - Int@6..7 "2" + Lit@6..7 + Int@6..7 "2" RBracket@7..8 "]" Newline@8..9 "\n" IndexExpr@9..23 @@ -33,12 +36,14 @@ Root@0..23 LParen@16..17 "(" CallArg@17..18 LitExpr@17..18 - Int@17..18 "1" + Lit@17..18 + Int@17..18 "1" Comma@18..19 "," WhiteSpace@19..20 " " CallArg@20..21 LitExpr@20..21 - Int@20..21 "2" + Lit@20..21 + Int@20..21 "2" RParen@21..22 ")" RBracket@22..23 "]" diff --git 
a/crates/parser2/test_files/syntax_node/exprs/match.snap b/crates/parser2/test_files/syntax_node/exprs/match.snap index b5cbf36cdd..25bef2e692 100644 --- a/crates/parser2/test_files/syntax_node/exprs/match.snap +++ b/crates/parser2/test_files/syntax_node/exprs/match.snap @@ -120,7 +120,8 @@ Root@0..516 Colon@94..95 ":" WhiteSpace@95..96 " " LitExpr@96..97 - Int@96..97 "1" + Lit@96..97 + Int@96..97 "1" Comma@97..98 "," WhiteSpace@98..99 " " RecordField@99..103 @@ -128,7 +129,8 @@ Root@0..516 Colon@100..101 ":" WhiteSpace@101..102 " " LitExpr@102..103 - Int@102..103 "2" + Lit@102..103 + Int@102..103 "2" RBrace@103..104 "}" RParen@104..105 ")" WhiteSpace@105..106 " " @@ -143,7 +145,8 @@ Root@0..516 FatArrow@114..116 "=>" WhiteSpace@116..117 " " LitExpr@117..118 - Int@117..118 "1" + Lit@117..118 + Int@117..118 "1" Newline@118..119 "\n" RBrace@119..120 "}" Newline@120..122 "\n\n" @@ -430,7 +433,8 @@ Root@0..516 Eq@368..369 "=" WhiteSpace@369..370 " " LitExpr@370..371 - Int@370..371 "1" + Lit@370..371 + Int@370..371 "1" Newline@371..372 "\n" WhiteSpace@372..380 " " ExprStmt@380..392 @@ -521,7 +525,8 @@ Root@0..516 Colon@458..459 ":" WhiteSpace@459..460 " " LitExpr@460..461 - Int@460..461 "2" + Lit@460..461 + Int@460..461 "2" RBrace@461..462 "}" RParen@462..463 ")" WhiteSpace@463..464 " " @@ -568,7 +573,8 @@ Root@0..516 FatArrow@492..494 "=>" WhiteSpace@494..495 " " LitExpr@495..499 - TrueKw@495..499 "true" + Lit@495..499 + TrueKw@495..499 "true" Newline@499..500 "\n" WhiteSpace@500..504 " " MatchArm@504..514 @@ -578,7 +584,8 @@ Root@0..516 FatArrow@506..508 "=>" WhiteSpace@508..509 " " LitExpr@509..514 - FalseKw@509..514 "false" + Lit@509..514 + FalseKw@509..514 "false" Newline@514..515 "\n" RBrace@515..516 "}" diff --git a/crates/parser2/test_files/syntax_node/exprs/method.snap b/crates/parser2/test_files/syntax_node/exprs/method.snap index 961f51e9fa..c81ead0de1 100644 --- a/crates/parser2/test_files/syntax_node/exprs/method.snap +++ b/crates/parser2/test_files/syntax_node/exprs/method.snap @@ -24,12 +24,14 @@ Root@0..75 LParen@9..10 "(" CallArg@10..11 LitExpr@10..11 - Int@10..11 "1" + Lit@10..11 + Int@10..11 "1" Comma@11..12 "," WhiteSpace@12..13 " " CallArg@13..14 LitExpr@13..14 - Int@13..14 "2" + Lit@13..14 + Int@13..14 "2" RParen@14..15 ")" Newline@15..17 "\n\n" MethodCallExpr@17..34 @@ -48,7 +50,8 @@ Root@0..75 Colon@24..25 ":" WhiteSpace@25..26 " " LitExpr@26..27 - Int@26..27 "1" + Lit@26..27 + Int@26..27 "1" Comma@27..28 "," WhiteSpace@28..29 " " CallArg@29..33 @@ -56,7 +59,8 @@ Root@0..75 Colon@30..31 ":" WhiteSpace@31..32 " " LitExpr@32..33 - Int@32..33 "2" + Lit@32..33 + Int@32..33 "2" RParen@33..34 ")" Newline@34..35 "\n" MethodCallExpr@35..47 @@ -66,7 +70,8 @@ Root@0..75 Ident@35..36 "x" LBracket@36..37 "[" LitExpr@37..38 - Int@37..38 "0" + Lit@37..38 + Int@37..38 "0" RBracket@38..39 "]" Dot@39..40 "." 
Ident@40..41 "z" @@ -77,7 +82,8 @@ Root@0..75 Colon@43..44 ":" WhiteSpace@44..45 " " LitExpr@45..46 - Int@45..46 "1" + Lit@45..46 + Int@45..46 "1" RParen@46..47 ")" Newline@47..49 "\n\n" MethodCallExpr@49..75 @@ -118,7 +124,8 @@ Root@0..75 Colon@68..69 ":" WhiteSpace@69..70 " " LitExpr@70..71 - Int@70..71 "1" + Lit@70..71 + Int@70..71 "1" Comma@71..72 "," WhiteSpace@72..73 " " CallArg@73..74 diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap index 291a3e6872..09a3f1b5d3 100644 --- a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/struct_init.fe --- Root@0..40 RecordInitExpr@0..13 @@ -31,12 +32,14 @@ Root@0..40 WhiteSpace@24..25 " " BinExpr@25..30 LitExpr@25..26 - Int@25..26 "1" + Lit@25..26 + Int@25..26 "1" WhiteSpace@26..27 " " Plus@27..28 "+" WhiteSpace@28..29 " " LitExpr@29..30 - Int@29..30 "2" + Lit@29..30 + Int@29..30 "2" RBrace@30..31 "}" Newline@31..32 "\n" RecordInitExpr@32..40 diff --git a/crates/parser2/test_files/syntax_node/items/const.snap b/crates/parser2/test_files/syntax_node/items/const.snap index c6eceb2337..c5a30332bc 100644 --- a/crates/parser2/test_files/syntax_node/items/const.snap +++ b/crates/parser2/test_files/syntax_node/items/const.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/const.fe --- Root@0..160 ItemList@0..160 @@ -21,7 +22,8 @@ Root@0..160 Eq@19..20 "=" WhiteSpace@20..21 " " LitExpr@21..22 - Int@21..22 "1" + Lit@21..22 + Int@21..22 "1" Newline@22..24 "\n\n" Const@24..159 ConstKw@24..29 "const" @@ -51,7 +53,8 @@ Root@0..160 Eq@54..55 "=" WhiteSpace@55..56 " " LitExpr@56..60 - TrueKw@56..60 "true" + Lit@56..60 + TrueKw@56..60 "true" Newline@60..61 "\n" WhiteSpace@61..65 " " LetStmt@65..74 @@ -65,7 +68,8 @@ Root@0..160 Eq@71..72 "=" WhiteSpace@72..73 " " LitExpr@73..74 - Int@73..74 "1" + Lit@73..74 + Int@73..74 "1" Newline@74..75 "\n" WhiteSpace@75..79 " " ExprStmt@79..157 @@ -82,7 +86,8 @@ Root@0..160 WhiteSpace@86..94 " " ExprStmt@94..95 LitExpr@94..95 - Int@94..95 "1" + Lit@94..95 + Int@94..95 "1" Newline@95..96 "\n" WhiteSpace@96..100 " " RBrace@100..101 "}" @@ -100,7 +105,8 @@ Root@0..160 Eq2@112..114 "==" WhiteSpace@114..115 " " LitExpr@115..116 - Int@115..116 "1" + Lit@115..116 + Int@115..116 "1" WhiteSpace@116..117 " " BlockExpr@117..134 LBrace@117..118 "{" @@ -108,7 +114,8 @@ Root@0..160 WhiteSpace@119..127 " " ExprStmt@127..128 LitExpr@127..128 - Int@127..128 "2" + Lit@127..128 + Int@127..128 "2" Newline@128..129 "\n" WhiteSpace@129..133 " " RBrace@133..134 "}" @@ -121,7 +128,8 @@ Root@0..160 WhiteSpace@142..150 " " ExprStmt@150..151 LitExpr@150..151 - Int@150..151 "3" + Lit@150..151 + Int@150..151 "3" Newline@151..152 "\n" WhiteSpace@152..156 " " RBrace@156..157 "}" diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 8c0eff27e7..c31f42f4f4 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -31,7 +31,8 @@ Root@0..361 Eq@25..26 "=" WhiteSpace@26..27 " " LitExpr@27..28 - Int@27..28 "1" + Lit@27..28 + Int@27..28 "1" Newline@28..29 "\n" RBrace@29..30 "}" 
Newline@30..32 "\n\n" @@ -76,7 +77,8 @@ Root@0..361 WhiteSpace@73..77 " " ExprStmt@77..78 LitExpr@77..78 - Int@77..78 "1" + Lit@77..78 + Int@77..78 "1" Newline@78..79 "\n" RBrace@79..80 "}" Newline@80..82 "\n\n" @@ -149,7 +151,8 @@ Root@0..361 WhiteSpace@171..175 " " ExprStmt@175..176 LitExpr@175..176 - Int@175..176 "1" + Lit@175..176 + Int@175..176 "1" Newline@176..177 "\n" RBrace@177..178 "}" Newline@178..180 "\n\n" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index 6ee01b54d6..f66b586162 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -56,7 +56,8 @@ Root@0..317 ReturnKw@48..54 "return" WhiteSpace@54..55 " " LitExpr@55..56 - Int@55..56 "1" + Lit@55..56 + Int@55..56 "1" Newline@56..57 "\n" WhiteSpace@57..61 " " RBrace@61..62 "}" diff --git a/crates/parser2/test_files/syntax_node/pats/lit.snap b/crates/parser2/test_files/syntax_node/pats/lit.snap index 2608ab927b..6f652353b6 100644 --- a/crates/parser2/test_files/syntax_node/pats/lit.snap +++ b/crates/parser2/test_files/syntax_node/pats/lit.snap @@ -1,11 +1,14 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/pats/lit.fe --- Root@0..12 LitPat@0..3 - Int@0..3 "0x1" + Lit@0..3 + Int@0..3 "0x1" Newline@3..4 "\n" LitPat@4..12 - String@4..12 "\"String\"" + Lit@4..12 + String@4..12 "\"String\"" diff --git a/crates/parser2/test_files/syntax_node/pats/or.snap b/crates/parser2/test_files/syntax_node/pats/or.snap index acabcd61ff..75fe119301 100644 --- a/crates/parser2/test_files/syntax_node/pats/or.snap +++ b/crates/parser2/test_files/syntax_node/pats/or.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/pats/or.fe --- Root@0..117 OrPat@0..19 @@ -35,12 +36,14 @@ Root@0..117 TuplePatElem@30..35 OrPat@30..35 LitPat@30..31 - Int@30..31 "1" + Lit@30..31 + Int@30..31 "1" WhiteSpace@31..32 " " Pipe@32..33 "|" WhiteSpace@33..34 " " LitPat@34..35 - Int@34..35 "2" + Lit@34..35 + Int@34..35 "2" RParen@35..36 ")" WhiteSpace@36..37 " " Pipe@37..38 "|" @@ -72,12 +75,14 @@ Root@0..117 TuplePatElem@62..67 OrPat@62..67 LitPat@62..63 - Int@62..63 "1" + Lit@62..63 + Int@62..63 "1" WhiteSpace@63..64 " " Pipe@64..65 "|" WhiteSpace@65..66 " " LitPat@66..67 - Int@66..67 "2" + Lit@66..67 + Int@66..67 "2" RParen@67..68 ")" WhiteSpace@68..69 " " Pipe@69..70 "|" @@ -105,12 +110,14 @@ Root@0..117 TuplePatElem@89..94 OrPat@89..94 LitPat@89..90 - Int@89..90 "1" + Lit@89..90 + Int@89..90 "1" WhiteSpace@90..91 " " Pipe@91..92 "|" WhiteSpace@92..93 " " LitPat@93..94 - Int@93..94 "2" + Lit@93..94 + Int@93..94 "2" RParen@94..95 ")" WhiteSpace@95..96 " " Pipe@96..97 "|" @@ -126,7 +133,8 @@ Root@0..117 LParen@106..107 "(" TuplePatElem@107..115 LitPat@107..115 - String@107..115 "\"STRING\"" + Lit@107..115 + String@107..115 "\"STRING\"" RParen@115..116 ")" RParen@116..117 ")" diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap index 1c6db30315..d4b0a3a7f5 100644 --- a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: 
crates/parser2/test_files/syntax_node/pats/path_tuple.fe --- Root@0..203 PathTuplePat@0..7 @@ -53,12 +54,14 @@ Root@0..203 LParen@52..53 "(" TuplePatElem@53..54 LitPat@53..54 - Int@53..54 "1" + Lit@53..54 + Int@53..54 "1" Comma@54..55 "," WhiteSpace@55..56 " " TuplePatElem@56..57 LitPat@56..57 - Int@56..57 "2" + Lit@56..57 + Int@56..57 "2" RParen@57..58 ")" Comma@58..59 "," WhiteSpace@59..61 " " @@ -107,12 +110,14 @@ Root@0..203 LParen@106..107 "(" TuplePatElem@107..108 LitPat@107..108 - Int@107..108 "1" + Lit@107..108 + Int@107..108 "1" Comma@108..109 "," WhiteSpace@109..110 " " TuplePatElem@110..111 LitPat@110..111 - Int@110..111 "2" + Lit@110..111 + Int@110..111 "2" RParen@111..112 ")" Comma@112..113 "," WhiteSpace@113..114 " " @@ -190,7 +195,8 @@ Root@0..203 LParen@198..199 "(" TuplePatElem@199..201 LitPat@199..201 - Int@199..201 "10" + Lit@199..201 + Int@199..201 "10" RParen@201..202 ")" RParen@202..203 ")" diff --git a/crates/parser2/test_files/syntax_node/pats/record.snap b/crates/parser2/test_files/syntax_node/pats/record.snap index 01ff9af07b..9f5fbfc887 100644 --- a/crates/parser2/test_files/syntax_node/pats/record.snap +++ b/crates/parser2/test_files/syntax_node/pats/record.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/pats/record.fe --- Root@0..96 RecordPat@0..9 @@ -93,7 +94,8 @@ Root@0..96 LParen@76..77 "(" TuplePatElem@77..78 LitPat@77..78 - Int@77..78 "1" + Lit@77..78 + Int@77..78 "1" Comma@78..79 "," WhiteSpace@79..80 " " TuplePatElem@80..81 diff --git a/crates/parser2/test_files/syntax_node/stmts/let.snap b/crates/parser2/test_files/syntax_node/stmts/let.snap index 6c6aac9ea3..3923e012ee 100644 --- a/crates/parser2/test_files/syntax_node/stmts/let.snap +++ b/crates/parser2/test_files/syntax_node/stmts/let.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/let.fe --- Root@0..231 LetStmt@0..5 @@ -22,7 +23,8 @@ Root@0..231 Eq@13..14 "=" WhiteSpace@14..15 " " LitExpr@15..16 - Int@15..16 "1" + Lit@15..16 + Int@15..16 "1" Newline@16..17 "\n" LetStmt@17..31 LetKw@17..20 "let" @@ -41,7 +43,8 @@ Root@0..231 Eq@28..29 "=" WhiteSpace@29..30 " " LitExpr@30..31 - Int@30..31 "1" + Lit@30..31 + Int@30..31 "1" Newline@31..32 "\n" LetStmt@32..50 LetKw@32..35 "let" @@ -62,7 +65,8 @@ Root@0..231 Eq@47..48 "=" WhiteSpace@48..49 " " LitExpr@49..50 - Int@49..50 "1" + Lit@49..50 + Int@49..50 "1" Newline@50..52 "\n\n" AugAssignStmt@52..62 PathPat@52..53 @@ -75,12 +79,14 @@ Root@0..231 WhiteSpace@56..57 " " BinExpr@57..62 LitExpr@57..58 - Int@57..58 "1" + Lit@57..58 + Int@57..58 "1" WhiteSpace@58..59 " " Plus@59..60 "+" WhiteSpace@60..61 " " LitExpr@61..62 - Int@61..62 "1" + Lit@61..62 + Int@61..62 "1" Newline@62..63 "\n" AugAssignStmt@63..75 PathPat@63..64 @@ -95,14 +101,16 @@ Root@0..231 WhiteSpace@68..69 " " BinExpr@69..75 LitExpr@69..70 - Int@69..70 "1" + Lit@69..70 + Int@69..70 "1" WhiteSpace@70..71 " " RShift@71..73 Gt@71..72 ">" Gt@72..73 ">" WhiteSpace@73..74 " " LitExpr@74..75 - Int@74..75 "2" + Lit@74..75 + Int@74..75 "2" Newline@75..77 "\n\n" LetStmt@77..102 LetKw@77..80 "let" @@ -281,7 +289,8 @@ Root@0..231 FatArrow@225..227 "=>" WhiteSpace@227..228 " " LitExpr@228..229 - Int@228..229 "0" + Lit@228..229 + Int@228..229 "0" Newline@229..230 "\n" RBrace@230..231 "}" diff --git a/crates/parser2/test_files/syntax_node/stmts/while.snap 
b/crates/parser2/test_files/syntax_node/stmts/while.snap index 70c6c59ff3..cd3cd4ae05 100644 --- a/crates/parser2/test_files/syntax_node/stmts/while.snap +++ b/crates/parser2/test_files/syntax_node/stmts/while.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/while.fe --- Root@0..46 WhileStmt@0..46 @@ -14,7 +15,8 @@ Root@0..46 Lt@8..9 "<" WhiteSpace@9..10 " " LitExpr@10..12 - Int@10..12 "10" + Lit@10..12 + Int@10..12 "10" WhiteSpace@12..13 " " BlockExpr@13..46 LBrace@13..14 "{" @@ -30,12 +32,14 @@ Root@0..46 WhiteSpace@24..25 " " BinExpr@25..30 LitExpr@25..26 - Int@25..26 "1" + Lit@25..26 + Int@25..26 "1" WhiteSpace@26..27 " " Plus@27..28 "+" WhiteSpace@28..29 " " LitExpr@29..30 - Int@29..30 "2" + Lit@29..30 + Int@29..30 "2" Newline@30..31 "\n" WhiteSpace@31..35 " " AssignStmt@35..44 @@ -54,7 +58,8 @@ Root@0..46 Plus@41..42 "+" WhiteSpace@42..43 " " LitExpr@43..44 - Int@43..44 "1" + Lit@43..44 + Int@43..44 "1" Newline@44..45 "\n" RBrace@45..46 "}" From dd80c88432a84483594d9569f2f842bf17ba4d15 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 17:26:49 +0100 Subject: [PATCH 060/678] Add ast for literal --- crates/parser2/src/ast/lit.rs | 54 +++++++++++++++++++++++++++++++++++ crates/parser2/src/ast/mod.rs | 6 ++++ 2 files changed, 60 insertions(+) create mode 100644 crates/parser2/src/ast/lit.rs diff --git a/crates/parser2/src/ast/lit.rs b/crates/parser2/src/ast/lit.rs new file mode 100644 index 0000000000..63eb70fb10 --- /dev/null +++ b/crates/parser2/src/ast/lit.rs @@ -0,0 +1,54 @@ +use rowan::ast::AstNode; + +use crate::{syntax_kind::SyntaxKind as SK, SyntaxToken}; + +use super::ast_node; + +ast_node! { + pub struct Lit, + SK::Lit +} +impl Lit { + pub fn kind(&self) -> LitKind { + let token = self.syntax().first_token().unwrap(); + match token.kind() { + SK::Int => LitKind::Int(LitInt { token }), + SK::TrueKw | SK::FalseKw => LitKind::Bool(LitBool { token }), + SK::String => LitKind::String(LitString { token }), + _ => unreachable!(), + } + } +} + +pub struct LitInt { + token: SyntaxToken, +} +impl LitInt { + pub fn token(&self) -> &SyntaxToken { + &self.token + } +} + +pub struct LitBool { + token: SyntaxToken, +} +impl LitBool { + pub fn token(&self) -> &SyntaxToken { + &self.token + } +} + +pub struct LitString { + token: SyntaxToken, +} +impl LitString { + pub fn token(&self) -> &SyntaxToken { + &self.token + } +} + +pub enum LitKind { + Int(LitInt), + Bool(LitBool), + String(LitString), +} diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index f31b9b5c28..8ebaf31679 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -1,15 +1,21 @@ pub mod attr; pub mod expr; pub mod item; +pub mod lit; pub mod param; +pub mod pat; pub mod path; +pub mod stmt; pub mod type_; pub use attr::*; pub use expr::*; pub use item::*; +pub use lit::*; pub use param::*; +pub use pat::*; pub use path::*; +pub use stmt::*; pub use type_::*; pub type AstChildren = rowan::ast::AstChildren; From 3cc3da83d5a66f2caffd1c8006958062f0432f87 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 17:39:22 +0100 Subject: [PATCH 061/678] Add ast for pat --- crates/parser2/src/ast/attr.rs | 2 +- crates/parser2/src/ast/mod.rs | 6 +- crates/parser2/src/ast/param.rs | 2 +- crates/parser2/src/ast/pat.rs | 306 ++++++++++++++++++ crates/parser2/src/parser/pat.rs | 11 +- crates/parser2/src/syntax_kind.rs | 2 - 
.../error_recovery/exprs/match_.snap | 29 +- .../test_files/syntax_node/exprs/match.snap | 153 ++++----- .../test_files/syntax_node/pats/or.snap | 124 ++++--- .../syntax_node/pats/path_tuple.snap | 209 ++++++------ .../test_files/syntax_node/pats/record.snap | 16 +- .../test_files/syntax_node/stmts/let.snap | 36 +-- 12 files changed, 573 insertions(+), 323 deletions(-) create mode 100644 crates/parser2/src/ast/pat.rs diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs index e02e01cb53..dc48d63f4b 100644 --- a/crates/parser2/src/ast/attr.rs +++ b/crates/parser2/src/ast/attr.rs @@ -1,6 +1,6 @@ use rowan::ast::{support, AstNode}; -use super::{ast_node, AstChildren}; +use super::ast_node; use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; ast_node! { diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index 8ebaf31679..69bbf20212 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -79,7 +79,7 @@ macro_rules! ast_node { } impl IntoIterator for $name { type Item = $item_ty; - type IntoIter = AstChildren<$item_ty>; + type IntoIter = crate::ast::AstChildren<$item_ty>; fn into_iter(self) -> Self::IntoIter { support::children(self.syntax()) @@ -87,7 +87,7 @@ macro_rules! ast_node { } impl IntoIterator for &$name { type Item = $item_ty; - type IntoIter = AstChildren<$item_ty>; + type IntoIter = crate::ast::AstChildren<$item_ty>; fn into_iter(self) -> Self::IntoIter { support::children(self.syntax()) @@ -96,7 +96,7 @@ macro_rules! ast_node { impl $name { /// Returns an iterator over the children of this node. - pub fn iter(&self) -> AstChildren<$item_ty> { + pub fn iter(&self) -> crate::ast::AstChildren<$item_ty> { self.into_iter() } } diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index b1c5506c9a..ec664371d1 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -1,6 +1,6 @@ use rowan::ast::{support, AstNode}; -use super::{ast_node, AstChildren}; +use super::ast_node; use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; ast_node! { diff --git a/crates/parser2/src/ast/pat.rs b/crates/parser2/src/ast/pat.rs new file mode 100644 index 0000000000..0376c59d09 --- /dev/null +++ b/crates/parser2/src/ast/pat.rs @@ -0,0 +1,306 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; +use crate::{SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A pattern. + /// Use [`Self::kind`] to get the specific kind of the pattern. + pub struct Pat, + SK::WildCardPat + | SK::RestPat + | SK::LitPat + | SK::TuplePat + | SK::PathPat + | SK::PathTuplePat + | SK::RecordPat + | SK::OrPat +} +impl Pat { + /// Returns the specific kind of the pattern. + pub fn kind(&self) -> PatKind { + match self.syntax().kind() { + SK::WildCardPat => PatKind::WildCard(AstNode::cast(self.syntax().clone()).unwrap()), + SK::RestPat => PatKind::Rest(AstNode::cast(self.syntax().clone()).unwrap()), + SK::LitPat => PatKind::Lit(AstNode::cast(self.syntax().clone()).unwrap()), + SK::TuplePat => PatKind::Tuple(AstNode::cast(self.syntax().clone()).unwrap()), + SK::PathPat => PatKind::Path(AstNode::cast(self.syntax().clone()).unwrap()), + SK::PathTuplePat => { + PatKind::PathTuple(PathTuplePat::cast(self.syntax().clone()).unwrap()) + } + SK::RecordPat => PatKind::Record(AstNode::cast(self.syntax().clone()).unwrap()), + SK::OrPat => PatKind::Or(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! 
{
+    /// `_`
+    pub struct WildCardPat,
+    SK::WildCardPat,
+}
+
+ast_node! {
+    /// `..`
+    pub struct RestPat,
+    SK::RestPat,
+}
+
+ast_node! {
+    /// `1`
+    pub struct LitPat,
+    SK::LitPat,
+}
+impl LitPat {
+    /// Returns the underlying literal.
+    pub fn lit(&self) -> Option<Lit> {
+        support::child(self.syntax())
+    }
+}
+
+ast_node! {
+    /// `(Foo::Bar, 1, ..)`
+    pub struct TuplePat,
+    SK::TuplePat,
+}
+impl TuplePat {
+    pub fn elems(&self) -> Option<TuplePatElemList> {
+        support::child(self.syntax())
+    }
+}
+
+ast_node! {
+    /// `(Foo::Bar, 1, ..)`
+    pub struct TuplePatElemList,
+    SK::TuplePatElemList,
+    IntoIterator<Item=Pat>
+}
+
+ast_node! {
+    /// `Foo::Bar`
+    pub struct PathPat,
+    SK::PathPat,
+}
+impl PathPat {
+    pub fn path(&self) -> Option<Path> {
+        support::child(self.syntax())
+    }
+}
+
+ast_node! {
+    /// `Foo::Bar(1, 2)`
+    pub struct PathTuplePat,
+    SK::PathTuplePat,
+}
+impl PathTuplePat {
+    pub fn path(&self) -> Option<Path> {
+        support::child(self.syntax())
+    }
+    pub fn elems(&self) -> Option<TuplePatElemList> {
+        support::child(self.syntax())
+    }
+}
+
+ast_node! {
+    /// `Foo::Bar{a: 1, b: Foo::baz, c}`
+    pub struct RecordPat,
+    SK::RecordPat,
+}
+impl RecordPat {
+    pub fn path(&self) -> Option<Path> {
+        support::child(self.syntax())
+    }
+
+    pub fn fields(&self) -> Option<RecordPatFieldList> {
+        support::child(self.syntax())
+    }
+}
+
+ast_node! {
+    /// `{a: 1, b: Foo::baz, c}`
+    pub struct RecordPatFieldList,
+    SK::RecordPatFieldList,
+    IntoIterator<Item=RecordPatField>
+}
+
+ast_node! {
+    /// `a: 1`
+    pub struct RecordPatField,
+    SK::RecordPatField,
+}
+impl RecordPatField {
+    /// Returns the field name.
+    pub fn name(&self) -> Option<SyntaxToken> {
+        support::token(self.syntax(), SK::Ident)
+    }
+
+    /// Returns the field pattern.
+    pub fn pat(&self) -> Option<Pat> {
+        support::child(self.syntax())
+    }
+}
+
+ast_node! {
+    /// `Foo::Bar | 1`
+    pub struct OrPat,
+    SK::OrPat,
+}
+impl OrPat {
+    pub fn lhs(&self) -> Option<Pat> {
+        support::child(self.syntax())
+    }
+    pub fn rhs(&self) -> Option<Pat> {
+        support::children(self.syntax()).nth(1)
+    }
+}
+
+/// A specific pattern kind.
+pub enum PatKind { + WildCard(WildCardPat), + Rest(RestPat), + Lit(LitPat), + Tuple(TuplePat), + Path(PathPat), + PathTuple(PathTuplePat), + Record(RecordPat), + Or(OrPat), +} + +#[cfg(test)] +mod tests { + use crate::{lexer::Lexer, parser::Parser}; + + use super::*; + + fn parse_pat(source: &str) -> Pat { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + crate::parser::pat::parse_pat(&mut parser); + Pat::cast(parser.finish().0).unwrap() + } + + #[test] + fn wildcard() { + let pat = parse_pat("_"); + assert!(matches!(pat.kind(), PatKind::WildCard(_))) + } + + #[test] + fn rest() { + let pat = parse_pat(".."); + assert!(matches!(pat.kind(), PatKind::Rest(_))); + } + + #[test] + fn lit() { + let lit_int = parse_pat("0x1"); + let lit_bool = parse_pat("true"); + let lit_str = parse_pat(r#""foo""#); + assert!(matches!(lit_int.kind(), PatKind::Lit(_))); + assert!(matches!(lit_bool.kind(), PatKind::Lit(_))); + assert!(matches!(lit_str.kind(), PatKind::Lit(_))); + } + + #[test] + fn tuple() { + let source = r#"(Foo::Bar, true, ..)"#; + let pat = parse_pat(source); + let tuple_pat = match pat.kind() { + PatKind::Tuple(tuple_pat) => tuple_pat, + _ => panic!("expected tuple pat"), + }; + + for (i, pat) in tuple_pat.elems().unwrap().iter().enumerate() { + match i { + 0 => assert!(matches!(pat.kind(), PatKind::Path(_))), + 1 => assert!(matches!(pat.kind(), PatKind::Lit(_))), + 2 => assert!(matches!(pat.kind(), PatKind::Rest(_))), + _ => panic!("unexpected tuple pat"), + } + } + + let pat = parse_pat("()"); + let tuple_pat = match pat.kind() { + PatKind::Tuple(tuple_pat) => tuple_pat, + _ => panic!("expected tuple pat"), + }; + + assert!(tuple_pat.elems().unwrap().iter().next().is_none()); + } + + #[test] + fn path_tuple() { + let source = r#"Self::Bar(1, Foo::Bar)"#; + let pat = parse_pat(source); + let path_tuple_pat = match pat.kind() { + PatKind::PathTuple(path_tuple_pat) => path_tuple_pat, + _ => panic!("expected path tuple pat"), + }; + + for (i, seg) in path_tuple_pat.path().unwrap().segments().enumerate() { + match i { + 0 => assert!(seg.is_self_ty()), + 1 => assert_eq!(seg.ident().unwrap().text(), "Bar"), + _ => panic!("unexpected path tuple pat"), + } + } + + for (i, pat) in path_tuple_pat.elems().unwrap().iter().enumerate() { + match i { + 0 => assert!(matches!(pat.kind(), PatKind::Lit(_))), + 1 => assert!(matches!(pat.kind(), PatKind::Path(_))), + _ => panic!("unexpected path tuple pat"), + } + } + } + + #[test] + fn record() { + let source = r#"Foo::Bar{a: 1, b: Foo::baz, c}"#; + let pat = parse_pat(source); + let record_pat = match pat.kind() { + PatKind::Record(record_pat) => record_pat, + _ => panic!("expected record pat"), + }; + + for (i, seg) in record_pat.path().unwrap().segments().enumerate() { + match i { + 0 => assert_eq!(seg.ident().unwrap().text(), "Foo"), + 1 => assert_eq!(seg.ident().unwrap().text(), "Bar"), + _ => panic!("unexpected record pat"), + } + } + + for (i, field) in record_pat.fields().unwrap().iter().enumerate() { + match i { + 0 => { + assert_eq!(field.name().unwrap().text(), "a"); + assert!(matches!(field.pat().unwrap().kind(), PatKind::Lit(_))); + } + 1 => { + assert_eq!(field.name().unwrap().text(), "b"); + assert!(matches!(field.pat().unwrap().kind(), PatKind::Path(_))); + } + 2 => { + assert!(field.name().is_none()); + assert!(matches!(field.pat().unwrap().kind(), PatKind::Path(_))); + } + _ => panic!("unexpected record pat"), + } + } + } + + #[test] + fn or() { + let source = r#"Foo::Int | Foo::Float | Foo::Str "#; + let pat = 
parse_pat(source); + let or_pat = match pat.kind() { + PatKind::Or(or_pat) => or_pat, + _ => panic!("expected or pat"), + }; + + assert!(matches!(or_pat.lhs().unwrap().kind(), PatKind::Path(_))); + assert!(matches!(or_pat.rhs().unwrap().kind(), PatKind::Or(_))); + } +} diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index 5a51e4fc2d..fb6c572c0f 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -61,22 +61,15 @@ impl super::Parse for TuplePatElemListScope { return; } - parser.parse(TuplePatElemScope::default(), None); + parser.with_next_expected_tokens(parse_pat, &[SyntaxKind::RParen, SyntaxKind::Comma]); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(TuplePatElemScope::default(), None); + parser.with_next_expected_tokens(parse_pat, &[SyntaxKind::RParen, SyntaxKind::Comma]); } parser.bump_or_recover(SyntaxKind::RParen, "expected `)`", None); } } -define_scope! { TuplePatElemScope, TuplePatElem, Inheritance } -impl super::Parse for TuplePatElemScope { - fn parse(&mut self, parser: &mut Parser) { - parser.with_next_expected_tokens(parse_pat, &[SyntaxKind::RParen, SyntaxKind::Comma]); - } -} - define_scope! { PathPatScope, PathPat, Inheritance(Pipe) } impl super::Parse for PathPatScope { fn parse(&mut self, parser: &mut Parser) { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 952b2bc058..eeb48a727e 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -315,8 +315,6 @@ pub enum SyntaxKind { TuplePat, /// `(x, y)` TuplePatElemList, - /// `x` - TuplePatElem, /// `Enum::Variant` PathPat, /// `Enum::Variant(x, y)` diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.snap b/crates/parser2/test_files/error_recovery/exprs/match_.snap index d39ea9bb98..c6e1e6a428 100644 --- a/crates/parser2/test_files/error_recovery/exprs/match_.snap +++ b/crates/parser2/test_files/error_recovery/exprs/match_.snap @@ -66,26 +66,23 @@ Root@0..94 Ident@58..61 "Foo" TuplePatElemList@61..70 LParen@61..62 "(" - TuplePatElem@62..63 - PathPat@62..63 - Path@62..63 - PathSegment@62..63 - Ident@62..63 "i" + PathPat@62..63 + Path@62..63 + PathSegment@62..63 + Ident@62..63 "i" Comma@63..64 "," WhiteSpace@64..65 " " - TuplePatElem@65..66 - PathPat@65..66 - Path@65..66 - PathSegment@65..66 - Ident@65..66 "j" + PathPat@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "j" Comma@66..67 "," WhiteSpace@67..70 " " - TuplePatElem@70..70 - PathPat@70..70 - Path@70..70 - PathSegment@70..70 - Error@70..70 - Error@70..70 + PathPat@70..70 + Path@70..70 + PathSegment@70..70 + Error@70..70 + Error@70..70 Error@70..70 FatArrow@70..72 "=>" WhiteSpace@72..73 " " diff --git a/crates/parser2/test_files/syntax_node/exprs/match.snap b/crates/parser2/test_files/syntax_node/exprs/match.snap index 25bef2e692..477f381db4 100644 --- a/crates/parser2/test_files/syntax_node/exprs/match.snap +++ b/crates/parser2/test_files/syntax_node/exprs/match.snap @@ -36,18 +36,16 @@ Root@0..516 Ident@32..35 "Add" TuplePatElemList@35..41 LParen@35..36 "(" - TuplePatElem@36..37 - PathPat@36..37 - Path@36..37 - PathSegment@36..37 - Ident@36..37 "x" + PathPat@36..37 + Path@36..37 + PathSegment@36..37 + Ident@36..37 "x" Comma@37..38 "," WhiteSpace@38..39 " " - TuplePatElem@39..40 - PathPat@39..40 - Path@39..40 - PathSegment@39..40 - Ident@39..40 "y" + PathPat@39..40 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "y" RParen@40..41 ")" WhiteSpace@41..42 " " FatArrow@42..44 "=>" @@ -74,18 +72,16 @@ 
Root@0..516 Ident@61..64 "Sub" TuplePatElemList@64..70 LParen@64..65 "(" - TuplePatElem@65..66 - PathPat@65..66 - Path@65..66 - PathSegment@65..66 - Ident@65..66 "x" + PathPat@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "x" Comma@66..67 "," WhiteSpace@67..68 " " - TuplePatElem@68..69 - PathPat@68..69 - Path@68..69 - PathSegment@68..69 - Ident@68..69 "y" + PathPat@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "y" RParen@69..70 ")" WhiteSpace@70..71 " " FatArrow@71..73 "=>" @@ -171,18 +167,16 @@ Root@0..516 Ident@142..145 "Add" TuplePatElemList@145..151 LParen@145..146 "(" - TuplePatElem@146..147 - PathPat@146..147 - Path@146..147 - PathSegment@146..147 - Ident@146..147 "x" + PathPat@146..147 + Path@146..147 + PathSegment@146..147 + Ident@146..147 "x" Comma@147..148 "," WhiteSpace@148..149 " " - TuplePatElem@149..150 - PathPat@149..150 - Path@149..150 - PathSegment@149..150 - Ident@149..150 "y" + PathPat@149..150 + Path@149..150 + PathSegment@149..150 + Ident@149..150 "y" RParen@150..151 ")" WhiteSpace@151..152 " " FatArrow@152..154 "=>" @@ -209,18 +203,16 @@ Root@0..516 Ident@171..174 "Sub" TuplePatElemList@174..180 LParen@174..175 "(" - TuplePatElem@175..176 - PathPat@175..176 - Path@175..176 - PathSegment@175..176 - Ident@175..176 "x" + PathPat@175..176 + Path@175..176 + PathSegment@175..176 + Ident@175..176 "x" Comma@176..177 "," WhiteSpace@177..178 " " - TuplePatElem@178..179 - PathPat@178..179 - Path@178..179 - PathSegment@178..179 - Ident@178..179 "y" + PathPat@178..179 + Path@178..179 + PathSegment@178..179 + Ident@178..179 "y" RParen@179..180 ")" WhiteSpace@180..181 " " FatArrow@181..183 "=>" @@ -259,18 +251,16 @@ Root@0..516 Ident@213..216 "Add" TuplePatElemList@216..222 LParen@216..217 "(" - TuplePatElem@217..218 - PathPat@217..218 - Path@217..218 - PathSegment@217..218 - Ident@217..218 "x" + PathPat@217..218 + Path@217..218 + PathSegment@217..218 + Ident@217..218 "x" Comma@218..219 "," WhiteSpace@219..220 " " - TuplePatElem@220..221 - PathPat@220..221 - Path@220..221 - PathSegment@220..221 - Ident@220..221 "y" + PathPat@220..221 + Path@220..221 + PathSegment@220..221 + Ident@220..221 "y" RParen@221..222 ")" WhiteSpace@222..223 " " FatArrow@223..225 "=>" @@ -307,18 +297,16 @@ Root@0..516 Ident@260..263 "Sub" TuplePatElemList@263..269 LParen@263..264 "(" - TuplePatElem@264..265 - PathPat@264..265 - Path@264..265 - PathSegment@264..265 - Ident@264..265 "x" + PathPat@264..265 + Path@264..265 + PathSegment@264..265 + Ident@264..265 "x" Comma@265..266 "," WhiteSpace@266..267 " " - TuplePatElem@267..268 - PathPat@267..268 - Path@267..268 - PathSegment@267..268 - Ident@267..268 "y" + PathPat@267..268 + Path@267..268 + PathSegment@267..268 + Ident@267..268 "y" RParen@268..269 ")" WhiteSpace@269..270 " " FatArrow@270..272 "=>" @@ -345,18 +333,16 @@ Root@0..516 Ident@289..292 "Mul" TuplePatElemList@292..298 LParen@292..293 "(" - TuplePatElem@293..294 - PathPat@293..294 - Path@293..294 - PathSegment@293..294 - Ident@293..294 "x" + PathPat@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "x" Comma@294..295 "," WhiteSpace@295..296 " " - TuplePatElem@296..297 - PathPat@296..297 - Path@296..297 - PathSegment@296..297 - Ident@296..297 "y" + PathPat@296..297 + Path@296..297 + PathSegment@296..297 + Ident@296..297 "y" RParen@297..298 ")" WhiteSpace@298..299 " " FatArrow@299..301 "=>" @@ -400,11 +386,10 @@ Root@0..516 Ident@331..334 "Var" TuplePatElemList@334..337 LParen@334..335 "(" - TuplePatElem@335..336 - PathPat@335..336 - Path@335..336 - 
PathSegment@335..336 - Ident@335..336 "s" + PathPat@335..336 + Path@335..336 + PathSegment@335..336 + Ident@335..336 "s" RParen@336..337 ")" WhiteSpace@337..338 " " FatArrow@338..340 "=>" @@ -472,11 +457,10 @@ Root@0..516 Ident@415..418 "Var" TuplePatElemList@418..421 LParen@418..419 "(" - TuplePatElem@419..420 - PathPat@419..420 - Path@419..420 - PathSegment@419..420 - Ident@419..420 "s" + PathPat@419..420 + Path@419..420 + PathSegment@419..420 + Ident@419..420 "s" RParen@420..421 ")" WhiteSpace@421..422 " " FatArrow@422..424 "=>" @@ -555,11 +539,10 @@ Root@0..516 Ident@481..484 "Bar" TuplePatElemList@484..487 LParen@484..485 "(" - TuplePatElem@485..486 - PathPat@485..486 - Path@485..486 - PathSegment@485..486 - Ident@485..486 "x" + PathPat@485..486 + Path@485..486 + PathSegment@485..486 + Ident@485..486 "x" RParen@486..487 ")" Comma@487..488 "," WhiteSpace@488..489 " " diff --git a/crates/parser2/test_files/syntax_node/pats/or.snap b/crates/parser2/test_files/syntax_node/pats/or.snap index 75fe119301..2a8c75fd6c 100644 --- a/crates/parser2/test_files/syntax_node/pats/or.snap +++ b/crates/parser2/test_files/syntax_node/pats/or.snap @@ -33,17 +33,16 @@ Root@0..117 Ident@26..29 "Bar" TuplePatElemList@29..36 LParen@29..30 "(" - TuplePatElem@30..35 - OrPat@30..35 - LitPat@30..31 - Lit@30..31 - Int@30..31 "1" - WhiteSpace@31..32 " " - Pipe@32..33 "|" - WhiteSpace@33..34 " " - LitPat@34..35 - Lit@34..35 - Int@34..35 "2" + OrPat@30..35 + LitPat@30..31 + Lit@30..31 + Int@30..31 "1" + WhiteSpace@31..32 " " + Pipe@32..33 "|" + WhiteSpace@33..34 " " + LitPat@34..35 + Lit@34..35 + Int@34..35 "2" RParen@35..36 ")" WhiteSpace@36..37 " " Pipe@37..38 "|" @@ -57,9 +56,8 @@ Root@0..117 Ident@44..47 "Baz" TuplePatElemList@47..51 LParen@47..48 "(" - TuplePatElem@48..50 - RestPat@48..50 - Dot2@48..50 ".." + RestPat@48..50 + Dot2@48..50 ".." 
RParen@50..51 ")" Newline@51..53 "\n\n" OrPat@53..117 @@ -72,17 +70,16 @@ Root@0..117 Ident@58..61 "Bar" TuplePatElemList@61..68 LParen@61..62 "(" - TuplePatElem@62..67 - OrPat@62..67 - LitPat@62..63 - Lit@62..63 - Int@62..63 "1" - WhiteSpace@63..64 " " - Pipe@64..65 "|" - WhiteSpace@65..66 " " - LitPat@66..67 - Lit@66..67 - Int@66..67 "2" + OrPat@62..67 + LitPat@62..63 + Lit@62..63 + Int@62..63 "1" + WhiteSpace@63..64 " " + Pipe@64..65 "|" + WhiteSpace@65..66 " " + LitPat@66..67 + Lit@66..67 + Int@66..67 "2" RParen@67..68 ")" WhiteSpace@68..69 " " Pipe@69..70 "|" @@ -96,45 +93,42 @@ Root@0..117 Ident@76..79 "Baz" TuplePatElemList@79..117 LParen@79..80 "(" - TuplePatElem@80..116 - OrPat@80..116 - PathTuplePat@80..95 - Path@80..88 - PathSegment@80..83 - Ident@80..83 "Foo" - Colon2@83..85 "::" - PathSegment@85..88 - Ident@85..88 "Bar" - TuplePatElemList@88..95 - LParen@88..89 "(" - TuplePatElem@89..94 - OrPat@89..94 - LitPat@89..90 - Lit@89..90 - Int@89..90 "1" - WhiteSpace@90..91 " " - Pipe@91..92 "|" - WhiteSpace@92..93 " " - LitPat@93..94 - Lit@93..94 - Int@93..94 "2" - RParen@94..95 ")" - WhiteSpace@95..96 " " - Pipe@96..97 "|" - WhiteSpace@97..98 " " - PathTuplePat@98..116 - Path@98..106 - PathSegment@98..101 - Ident@98..101 "Bar" - Colon2@101..103 "::" - PathSegment@103..106 - Ident@103..106 "Baz" - TuplePatElemList@106..116 - LParen@106..107 "(" - TuplePatElem@107..115 - LitPat@107..115 - Lit@107..115 - String@107..115 "\"STRING\"" - RParen@115..116 ")" + OrPat@80..116 + PathTuplePat@80..95 + Path@80..88 + PathSegment@80..83 + Ident@80..83 "Foo" + Colon2@83..85 "::" + PathSegment@85..88 + Ident@85..88 "Bar" + TuplePatElemList@88..95 + LParen@88..89 "(" + OrPat@89..94 + LitPat@89..90 + Lit@89..90 + Int@89..90 "1" + WhiteSpace@90..91 " " + Pipe@91..92 "|" + WhiteSpace@92..93 " " + LitPat@93..94 + Lit@93..94 + Int@93..94 "2" + RParen@94..95 ")" + WhiteSpace@95..96 " " + Pipe@96..97 "|" + WhiteSpace@97..98 " " + PathTuplePat@98..116 + Path@98..106 + PathSegment@98..101 + Ident@98..101 "Bar" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Baz" + TuplePatElemList@106..116 + LParen@106..107 "(" + LitPat@107..115 + Lit@107..115 + String@107..115 "\"STRING\"" + RParen@115..116 ")" RParen@116..117 ")" diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap index d4b0a3a7f5..01dd16e43a 100644 --- a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap @@ -32,47 +32,41 @@ Root@0..203 Ident@34..37 "Foo" TuplePatElemList@37..67 LParen@37..38 "(" - TuplePatElem@38..44 - PathPat@38..44 - Path@38..44 - PathSegment@38..39 - Ident@38..39 "X" - Colon2@39..41 "::" - PathSegment@41..44 - Ident@41..44 "Foo" + PathPat@38..44 + Path@38..44 + PathSegment@38..39 + Ident@38..39 "X" + Colon2@39..41 "::" + PathSegment@41..44 + Ident@41..44 "Foo" Comma@44..45 "," WhiteSpace@45..46 " " - TuplePatElem@46..58 - PathTuplePat@46..58 - Path@46..52 - PathSegment@46..47 - Ident@46..47 "Z" - Colon2@47..49 "::" - PathSegment@49..52 - Ident@49..52 "Bar" - TuplePatElemList@52..58 - LParen@52..53 "(" - TuplePatElem@53..54 - LitPat@53..54 - Lit@53..54 - Int@53..54 "1" - Comma@54..55 "," - WhiteSpace@55..56 " " - TuplePatElem@56..57 - LitPat@56..57 - Lit@56..57 - Int@56..57 "2" - RParen@57..58 ")" + PathTuplePat@46..58 + Path@46..52 + PathSegment@46..47 + Ident@46..47 "Z" + Colon2@47..49 "::" + PathSegment@49..52 + Ident@49..52 "Bar" + TuplePatElemList@52..58 + 
LParen@52..53 "(" + LitPat@53..54 + Lit@53..54 + Int@53..54 "1" + Comma@54..55 "," + WhiteSpace@55..56 " " + LitPat@56..57 + Lit@56..57 + Int@56..57 "2" + RParen@57..58 ")" Comma@58..59 "," WhiteSpace@59..61 " " - TuplePatElem@61..62 - WildCardPat@61..62 - Underscore@61..62 "_" + WildCardPat@61..62 + Underscore@61..62 "_" Comma@62..63 "," WhiteSpace@63..64 " " - TuplePatElem@64..66 - RestPat@64..66 - Dot2@64..66 ".." + RestPat@64..66 + Dot2@64..66 ".." RParen@66..67 ")" Newline@67..69 "\n\n" PathTuplePat@69..133 @@ -86,53 +80,47 @@ Root@0..203 LParen@81..82 "(" Newline@82..83 "\n" WhiteSpace@83..87 " " - TuplePatElem@87..93 - PathPat@87..93 - Path@87..93 - PathSegment@87..88 - Ident@87..88 "X" - Colon2@88..90 "::" - PathSegment@90..93 - Ident@90..93 "Foo" + PathPat@87..93 + Path@87..93 + PathSegment@87..88 + Ident@87..88 "X" + Colon2@88..90 "::" + PathSegment@90..93 + Ident@90..93 "Foo" Comma@93..94 "," WhiteSpace@94..95 " " Newline@95..96 "\n" WhiteSpace@96..100 " " - TuplePatElem@100..112 - PathTuplePat@100..112 - Path@100..106 - PathSegment@100..101 - Ident@100..101 "Z" - Colon2@101..103 "::" - PathSegment@103..106 - Ident@103..106 "Bar" - TuplePatElemList@106..112 - LParen@106..107 "(" - TuplePatElem@107..108 - LitPat@107..108 - Lit@107..108 - Int@107..108 "1" - Comma@108..109 "," - WhiteSpace@109..110 " " - TuplePatElem@110..111 - LitPat@110..111 - Lit@110..111 - Int@110..111 "2" - RParen@111..112 ")" + PathTuplePat@100..112 + Path@100..106 + PathSegment@100..101 + Ident@100..101 "Z" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Bar" + TuplePatElemList@106..112 + LParen@106..107 "(" + LitPat@107..108 + Lit@107..108 + Int@107..108 "1" + Comma@108..109 "," + WhiteSpace@109..110 " " + LitPat@110..111 + Lit@110..111 + Int@110..111 "2" + RParen@111..112 ")" Comma@112..113 "," WhiteSpace@113..114 " " Newline@114..115 "\n" WhiteSpace@115..120 " " - TuplePatElem@120..121 - WildCardPat@120..121 - Underscore@120..121 "_" + WildCardPat@120..121 + Underscore@120..121 "_" Comma@121..122 "," WhiteSpace@122..123 " " Newline@123..124 "\n" WhiteSpace@124..129 " " - TuplePatElem@129..131 - RestPat@129..131 - Dot2@129..131 ".." + RestPat@129..131 + Dot2@129..131 ".." 
Newline@131..132 "\n" RParen@132..133 ")" Newline@133..135 "\n\n" @@ -145,11 +133,10 @@ Root@0..203 Ident@143..147 "Bind" TuplePatElemList@147..150 LParen@147..148 "(" - TuplePatElem@148..149 - PathPat@148..149 - Path@148..149 - PathSegment@148..149 - Ident@148..149 "x" + PathPat@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "x" RParen@149..150 ")" Newline@150..152 "\n\n" PathTuplePat@152..203 @@ -161,42 +148,40 @@ Root@0..203 Ident@160..167 "OrTuple" TuplePatElemList@167..203 LParen@167..168 "(" - TuplePatElem@168..202 - OrPat@168..202 - PathPat@168..176 - Path@168..176 - PathSegment@168..171 - Ident@168..171 "Int" - Colon2@171..173 "::" - PathSegment@173..176 - Ident@173..176 "I32" - WhiteSpace@176..177 " " - Pipe@177..178 "|" - WhiteSpace@178..179 " " - OrPat@179..202 - PathPat@179..187 - Path@179..187 - PathSegment@179..182 - Ident@179..182 "Int" - Colon2@182..184 "::" - PathSegment@184..187 - Ident@184..187 "I64" - WhiteSpace@187..188 " " - Pipe@188..189 "|" - WhiteSpace@189..190 " " - PathTuplePat@190..202 - Path@190..198 - PathSegment@190..193 - Ident@190..193 "Int" - Colon2@193..195 "::" - PathSegment@195..198 - Ident@195..198 "Any" - TuplePatElemList@198..202 - LParen@198..199 "(" - TuplePatElem@199..201 - LitPat@199..201 - Lit@199..201 - Int@199..201 "10" - RParen@201..202 ")" + OrPat@168..202 + PathPat@168..176 + Path@168..176 + PathSegment@168..171 + Ident@168..171 "Int" + Colon2@171..173 "::" + PathSegment@173..176 + Ident@173..176 "I32" + WhiteSpace@176..177 " " + Pipe@177..178 "|" + WhiteSpace@178..179 " " + OrPat@179..202 + PathPat@179..187 + Path@179..187 + PathSegment@179..182 + Ident@179..182 "Int" + Colon2@182..184 "::" + PathSegment@184..187 + Ident@184..187 "I64" + WhiteSpace@187..188 " " + Pipe@188..189 "|" + WhiteSpace@189..190 " " + PathTuplePat@190..202 + Path@190..198 + PathSegment@190..193 + Ident@190..193 "Int" + Colon2@193..195 "::" + PathSegment@195..198 + Ident@195..198 "Any" + TuplePatElemList@198..202 + LParen@198..199 "(" + LitPat@199..201 + Lit@199..201 + Int@199..201 "10" + RParen@201..202 ")" RParen@202..203 ")" diff --git a/crates/parser2/test_files/syntax_node/pats/record.snap b/crates/parser2/test_files/syntax_node/pats/record.snap index 9f5fbfc887..cc833d76ce 100644 --- a/crates/parser2/test_files/syntax_node/pats/record.snap +++ b/crates/parser2/test_files/syntax_node/pats/record.snap @@ -92,17 +92,15 @@ Root@0..96 TuplePat@76..82 TuplePatElemList@76..82 LParen@76..77 "(" - TuplePatElem@77..78 - LitPat@77..78 - Lit@77..78 - Int@77..78 "1" + LitPat@77..78 + Lit@77..78 + Int@77..78 "1" Comma@78..79 "," WhiteSpace@79..80 " " - TuplePatElem@80..81 - PathPat@80..81 - Path@80..81 - PathSegment@80..81 - Ident@80..81 "a" + PathPat@80..81 + Path@80..81 + PathSegment@80..81 + Ident@80..81 "a" RParen@81..82 ")" Comma@82..83 "," WhiteSpace@83..84 " " diff --git a/crates/parser2/test_files/syntax_node/stmts/let.snap b/crates/parser2/test_files/syntax_node/stmts/let.snap index 3923e012ee..919c059efa 100644 --- a/crates/parser2/test_files/syntax_node/stmts/let.snap +++ b/crates/parser2/test_files/syntax_node/stmts/let.snap @@ -124,18 +124,16 @@ Root@0..231 Ident@89..92 "Foo" TuplePatElemList@92..98 LParen@92..93 "(" - TuplePatElem@93..94 - PathPat@93..94 - Path@93..94 - PathSegment@93..94 - Ident@93..94 "x" + PathPat@93..94 + Path@93..94 + PathSegment@93..94 + Ident@93..94 "x" Comma@94..95 "," WhiteSpace@95..96 " " - TuplePatElem@96..97 - PathPat@96..97 - Path@96..97 - PathSegment@96..97 - Ident@96..97 "y" + PathPat@96..97 + Path@96..97 + 
PathSegment@96..97 + Ident@96..97 "y" RParen@97..98 ")" WhiteSpace@98..99 " " Eq@99..100 "=" @@ -250,11 +248,10 @@ Root@0..231 Ident@194..195 "A" TuplePatElemList@195..198 LParen@195..196 "(" - TuplePatElem@196..197 - PathPat@196..197 - Path@196..197 - PathSegment@196..197 - Ident@196..197 "x" + PathPat@196..197 + Path@196..197 + PathSegment@196..197 + Ident@196..197 "x" RParen@197..198 ")" WhiteSpace@198..199 " " Pipe@199..200 "|" @@ -268,11 +265,10 @@ Root@0..231 Ident@209..210 "B" TuplePatElemList@210..213 LParen@210..211 "(" - TuplePatElem@211..212 - PathPat@211..212 - Path@211..212 - PathSegment@211..212 - Ident@211..212 "x" + PathPat@211..212 + Path@211..212 + PathSegment@211..212 + Ident@211..212 "x" RParen@212..213 ")" WhiteSpace@213..214 " " FatArrow@214..216 "=>" From 80aa7ff35dffd8f79530cda5996e6b2b355bfede Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 21:20:36 +0100 Subject: [PATCH 062/678] Add missing assert message parsing --- crates/parser2/src/parser/expr_atom.rs | 3 +- crates/parser2/src/parser/stmt.rs | 8 +++- .../test_files/syntax_node/stmts/assert.fe | 2 + .../test_files/syntax_node/stmts/assert.snap | 39 +++++++++++++++++++ 4 files changed, 48 insertions(+), 4 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/stmts/assert.fe create mode 100644 crates/parser2/test_files/syntax_node/stmts/assert.snap diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index f747f81f8d..69044410c5 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -68,8 +68,7 @@ impl super::Parse for BlockExprScope { { break; } - let checkpoint = parse_attr_list(parser); - if !parse_stmt(parser, checkpoint) { + if !parse_stmt(parser, None) { continue; } diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index f60668f28a..6e52215a60 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -3,6 +3,7 @@ use crate::{parser::expr, SyntaxKind}; use super::{ define_scope, expr::{parse_expr, parse_expr_no_struct}, + expr_atom::BlockExprScope, pat::parse_pat, token_stream::TokenStream, type_::parse_type, @@ -69,7 +70,7 @@ impl super::Parse for ForStmtScope { parser.error_and_recover("expected block", None); return; } - parse_expr(parser); + parser.parse(BlockExprScope::default(), None); } } @@ -84,7 +85,7 @@ impl super::Parse for WhileStmtScope { parser.error_and_recover("expected block", None); return; } - parse_expr(parser); + parser.parse(BlockExprScope::default(), None); } } @@ -108,6 +109,9 @@ impl super::Parse for AssertStmtScope { parser.bump_expected(SyntaxKind::AssertKw); parser.set_newline_as_trivia(false); parse_expr(parser); + if parser.bump_if(SyntaxKind::Comma) { + parse_expr(parser); + } } } diff --git a/crates/parser2/test_files/syntax_node/stmts/assert.fe b/crates/parser2/test_files/syntax_node/stmts/assert.fe new file mode 100644 index 0000000000..2e4d7b8c44 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/assert.fe @@ -0,0 +1,2 @@ +assert 0 < x +assert 0 < x, "`x` must be positive" \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/assert.snap b/crates/parser2/test_files/syntax_node/stmts/assert.snap new file mode 100644 index 0000000000..aeeb5e835a --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/assert.snap @@ -0,0 +1,39 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: 
crates/parser2/test_files/syntax_node/stmts/assert.fe +--- +Root@0..49 + AssertStmt@0..12 + AssertKw@0..6 "assert" + WhiteSpace@6..7 " " + BinExpr@7..12 + LitExpr@7..8 + Lit@7..8 + Int@7..8 "0" + WhiteSpace@8..9 " " + Lt@9..10 "<" + WhiteSpace@10..11 " " + Path@11..12 + PathSegment@11..12 + Ident@11..12 "x" + Newline@12..13 "\n" + AssertStmt@13..49 + AssertKw@13..19 "assert" + WhiteSpace@19..20 " " + BinExpr@20..25 + LitExpr@20..21 + Lit@20..21 + Int@20..21 "0" + WhiteSpace@21..22 " " + Lt@22..23 "<" + WhiteSpace@23..24 " " + Path@24..25 + PathSegment@24..25 + Ident@24..25 "x" + Comma@25..26 "," + WhiteSpace@26..27 " " + LitExpr@27..49 + Lit@27..49 + String@27..49 "\"`x` must be positive\"" + From ae02dbfa14e9bffbd01817081c7071c0d31a1b19 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 21:52:08 +0100 Subject: [PATCH 063/678] Add missing `PathExprScope` --- crates/parser2/src/parser/expr_atom.rs | 10 +- .../test_files/error_recovery/exprs/call.snap | 49 +++-- .../test_files/error_recovery/exprs/if_.snap | 35 +-- .../error_recovery/exprs/index.snap | 21 +- .../error_recovery/exprs/match_.snap | 21 +- .../error_recovery/exprs/method.snap | 39 ++-- .../test_files/error_recovery/stmts/for_.snap | 17 +- .../error_recovery/stmts/while_.snap | 7 +- .../test_files/syntax_node/exprs/binop.snap | 35 +-- .../test_files/syntax_node/exprs/call.snap | 69 +++--- .../test_files/syntax_node/exprs/if.snap | 77 ++++--- .../test_files/syntax_node/exprs/index.snap | 21 +- .../test_files/syntax_node/exprs/match.snap | 201 ++++++++++-------- .../test_files/syntax_node/exprs/method.snap | 56 ++--- .../syntax_node/exprs/struct_init.snap | 21 +- .../test_files/syntax_node/items/const.snap | 14 +- .../test_files/syntax_node/items/func.snap | 7 +- .../test_files/syntax_node/items/impl.snap | 35 +-- .../syntax_node/items/impl_trait.snap | 28 +-- .../test_files/syntax_node/items/trait.snap | 55 ++--- .../test_files/syntax_node/stmts/assert.snap | 14 +- .../test_files/syntax_node/stmts/assign.fe | 2 + .../test_files/syntax_node/stmts/assign.snap | 46 ++++ .../test_files/syntax_node/stmts/for.snap | 52 +++-- .../test_files/syntax_node/stmts/let.snap | 49 +++-- .../test_files/syntax_node/stmts/while.snap | 14 +- .../syntax_node/structs/generics.snap | 7 +- 27 files changed, 593 insertions(+), 409 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/stmts/assign.fe create mode 100644 crates/parser2/test_files/syntax_node/stmts/assign.snap diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 69044410c5..71f6346cff 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -6,7 +6,6 @@ use crate::{ }; use super::{ - attr::parse_attr_list, define_scope, expr::{parse_expr, parse_expr_no_struct}, parse_pat, @@ -28,7 +27,7 @@ pub(super) fn parse_expr_atom( Some(LBracket) => parser.parse(ArrayScope::default(), None), Some(kind) if lit::is_lit(kind) => parser.parse(LitExprScope::default(), None), Some(kind) if path::is_path_segment(kind) => { - let (success, checkpoint) = parser.parse(path::PathScope::default(), None); + let (success, checkpoint) = parser.parse(PathExprScope::default(), None); if success && parser.current_kind() == Some(LBrace) && allow_struct_init { let (success, _) = parser.parse(RecordInitExprScope::default(), Some(checkpoint)); (success, checkpoint) @@ -183,6 +182,13 @@ impl super::Parse for LitExprScope { } } +define_scope! 
{ PathExprScope, PathExpr, Inheritance } +impl super::Parse for PathExprScope { + fn parse(&mut self, parser: &mut Parser) { + parser.parse(path::PathScope::default(), None); + } +} + define_scope! { RecordInitExprScope, RecordInitExpr, Inheritance } impl super::Parse for RecordInitExprScope { fn parse(&mut self, parser: &mut Parser) { diff --git a/crates/parser2/test_files/error_recovery/exprs/call.snap b/crates/parser2/test_files/error_recovery/exprs/call.snap index 192293477b..1573ecfd13 100644 --- a/crates/parser2/test_files/error_recovery/exprs/call.snap +++ b/crates/parser2/test_files/error_recovery/exprs/call.snap @@ -5,39 +5,44 @@ input_file: crates/parser2/test_files/error_recovery/exprs/call.fe --- Root@0..40 CallExpr@0..16 - Path@0..3 - PathSegment@0..3 - Ident@0..3 "foo" + PathExpr@0..3 + Path@0..3 + PathSegment@0..3 + Ident@0..3 "foo" CallArgList@3..16 LParen@3..4 "(" CallArg@4..5 - Path@4..5 - PathSegment@4..5 - Ident@4..5 "x" + PathExpr@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "x" Comma@5..6 "," WhiteSpace@6..7 " " CallArg@7..10 - Path@7..8 - PathSegment@7..8 - Ident@7..8 "y" + PathExpr@7..8 + Path@7..8 + PathSegment@7..8 + Ident@7..8 "y" WhiteSpace@8..9 " " Error@9..10 Ident@9..10 "a" Comma@10..11 "," WhiteSpace@11..12 " " CallArg@12..15 - Path@12..13 - PathSegment@12..13 - Ident@12..13 "z" + PathExpr@12..13 + Path@12..13 + PathSegment@12..13 + Ident@12..13 "z" WhiteSpace@13..14 " " Error@14..15 SemiColon@14..15 ";" RParen@15..16 ")" Newline@16..18 "\n\n" CallExpr@18..39 - Path@18..21 - PathSegment@18..21 - Ident@18..21 "foo" + PathExpr@18..21 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "foo" GenericArgList@21..33 Lt@21..22 "<" TypeGenericArg@22..25 @@ -66,15 +71,17 @@ Root@0..40 CallArgList@33..39 LParen@33..34 "(" CallArg@34..35 - Path@34..35 - PathSegment@34..35 - Ident@34..35 "x" + PathExpr@34..35 + Path@34..35 + PathSegment@34..35 + Ident@34..35 "x" Comma@35..36 "," WhiteSpace@36..37 " " CallArg@37..38 - Path@37..38 - PathSegment@37..38 - Ident@37..38 "y" + PathExpr@37..38 + Path@37..38 + PathSegment@37..38 + Ident@37..38 "y" RParen@38..39 ")" Newline@39..40 "\n" diff --git a/crates/parser2/test_files/error_recovery/exprs/if_.snap b/crates/parser2/test_files/error_recovery/exprs/if_.snap index d8fe748040..116fcd04b5 100644 --- a/crates/parser2/test_files/error_recovery/exprs/if_.snap +++ b/crates/parser2/test_files/error_recovery/exprs/if_.snap @@ -7,9 +7,10 @@ Root@0..101 IfExpr@0..10 IfKw@0..2 "if" WhiteSpace@2..3 " " - Path@3..4 - PathSegment@3..4 - Ident@3..4 "x" + PathExpr@3..4 + Path@3..4 + PathSegment@3..4 + Ident@3..4 "x" WhiteSpace@4..5 " " Error@5..6 Ident@5..6 "y" @@ -22,9 +23,10 @@ Root@0..101 IfExpr@12..31 IfKw@12..14 "if" WhiteSpace@14..15 " " - Path@15..16 - PathSegment@15..16 - Ident@15..16 "x" + PathExpr@15..16 + Path@15..16 + PathSegment@15..16 + Ident@15..16 "x" WhiteSpace@16..17 " " BlockExpr@17..21 LBrace@17..18 "{" @@ -43,9 +45,10 @@ Root@0..101 IfExpr@33..66 IfKw@33..35 "if" WhiteSpace@35..36 " " - Path@36..37 - PathSegment@36..37 - Ident@36..37 "x" + PathExpr@36..37 + Path@36..37 + PathSegment@36..37 + Ident@36..37 "x" WhiteSpace@37..38 " " BlockExpr@38..41 LBrace@38..39 "{" @@ -60,9 +63,10 @@ Root@0..101 IfExpr@49..66 IfKw@49..51 "if" WhiteSpace@51..52 " " - Path@52..53 - PathSegment@52..53 - Ident@52..53 "x" + PathExpr@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "x" WhiteSpace@53..54 " " BlockExpr@54..57 LBrace@54..55 "{" @@ -79,9 +83,10 @@ Root@0..101 IfExpr@68..100 IfKw@68..70 "if" WhiteSpace@70..71 " " - 
Path@71..72 - PathSegment@71..72 - Ident@71..72 "x" + PathExpr@71..72 + Path@71..72 + PathSegment@71..72 + Ident@71..72 "x" WhiteSpace@72..73 " " BlockExpr@73..100 LBrace@73..74 "{" diff --git a/crates/parser2/test_files/error_recovery/exprs/index.snap b/crates/parser2/test_files/error_recovery/exprs/index.snap index 8d2c53794e..cd5a52c44d 100644 --- a/crates/parser2/test_files/error_recovery/exprs/index.snap +++ b/crates/parser2/test_files/error_recovery/exprs/index.snap @@ -5,9 +5,10 @@ input_file: crates/parser2/test_files/error_recovery/exprs/index.fe --- Root@0..20 IndexExpr@0..6 - Path@0..1 - PathSegment@0..1 - Ident@0..1 "x" + PathExpr@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" LBracket@1..2 "[" LitExpr@2..3 Lit@2..3 @@ -18,9 +19,10 @@ Root@0..20 RBracket@5..6 "]" Newline@6..7 "\n" IndexExpr@7..14 - Path@7..8 - PathSegment@7..8 - Ident@7..8 "x" + PathExpr@7..8 + Path@7..8 + PathSegment@7..8 + Ident@7..8 "x" LBracket@8..9 "[" BinExpr@9..14 LitExpr@9..10 @@ -36,9 +38,10 @@ Root@0..20 Error@14..14 Newline@14..15 "\n" IndexExpr@15..20 - Path@15..16 - PathSegment@15..16 - Ident@15..16 "x" + PathExpr@15..16 + Path@15..16 + PathSegment@15..16 + Ident@15..16 "x" LBracket@16..17 "[" LitExpr@17..19 Lit@17..19 diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.snap b/crates/parser2/test_files/error_recovery/exprs/match_.snap index c6e1e6a428..39f6b55436 100644 --- a/crates/parser2/test_files/error_recovery/exprs/match_.snap +++ b/crates/parser2/test_files/error_recovery/exprs/match_.snap @@ -7,9 +7,10 @@ Root@0..94 MatchExpr@0..40 MatchKw@0..5 "match" WhiteSpace@5..6 " " - Path@6..7 - PathSegment@6..7 - Ident@6..7 "X" + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "X" WhiteSpace@7..9 " " Error@9..11 FatArrow@9..11 "=>" @@ -50,9 +51,10 @@ Root@0..94 MatchExpr@43..93 MatchKw@43..48 "match" WhiteSpace@48..49 " " - Path@49..50 - PathSegment@49..50 - Ident@49..50 "X" + PathExpr@49..50 + Path@49..50 + PathSegment@49..50 + Ident@49..50 "X" WhiteSpace@50..52 " " MatchArmList@52..93 LBrace@52..53 "{" @@ -102,9 +104,10 @@ Root@0..94 WhiteSpace@86..87 " " FatArrow@87..89 "=>" WhiteSpace@89..90 " " - Path@90..91 - PathSegment@90..91 - Ident@90..91 "x" + PathExpr@90..91 + Path@90..91 + PathSegment@90..91 + Ident@90..91 "x" Newline@91..92 "\n" RBrace@92..93 "}" WhiteSpace@93..94 " " diff --git a/crates/parser2/test_files/error_recovery/exprs/method.snap b/crates/parser2/test_files/error_recovery/exprs/method.snap index c09bdc33d3..98ad9e7542 100644 --- a/crates/parser2/test_files/error_recovery/exprs/method.snap +++ b/crates/parser2/test_files/error_recovery/exprs/method.snap @@ -5,12 +5,13 @@ input_file: crates/parser2/test_files/error_recovery/exprs/method.fe --- Root@0..78 MethodCallExpr@0..31 - Path@0..8 - PathSegment@0..3 - Ident@0..3 "foo" - Colon2@3..5 "::" - PathSegment@5..8 - Ident@5..8 "bar" + PathExpr@0..8 + Path@0..8 + PathSegment@0..3 + Ident@0..3 "foo" + Colon2@3..5 "::" + PathSegment@5..8 + Ident@5..8 "bar" Dot@8..9 "." Ident@9..12 "baz" GenericArgList@12..25 @@ -52,12 +53,13 @@ Root@0..78 RParen@30..31 ")" Newline@31..33 "\n\n" MethodCallExpr@33..52 - Path@33..41 - PathSegment@33..36 - Ident@33..36 "foo" - Colon2@36..38 "::" - PathSegment@38..41 - Ident@38..41 "bar" + PathExpr@33..41 + Path@33..41 + PathSegment@33..36 + Ident@33..36 "foo" + Colon2@36..38 "::" + PathSegment@38..41 + Ident@38..41 "bar" Dot@41..42 "." 
Ident@42..43 "x" CallArgList@43..52 @@ -81,12 +83,13 @@ Root@0..78 RParen@51..52 ")" Newline@52..54 "\n\n" MethodCallExpr@54..78 - Path@54..62 - PathSegment@54..57 - Ident@54..57 "foo" - Colon2@57..59 "::" - PathSegment@59..62 - Ident@59..62 "bar" + PathExpr@54..62 + Path@54..62 + PathSegment@54..57 + Ident@54..57 "foo" + Colon2@57..59 "::" + PathSegment@59..62 + Ident@59..62 "bar" Dot@62..63 "." Ident@63..66 "baz" GenericArgList@66..76 diff --git a/crates/parser2/test_files/error_recovery/stmts/for_.snap b/crates/parser2/test_files/error_recovery/stmts/for_.snap index 739df48836..1d0c95cb47 100644 --- a/crates/parser2/test_files/error_recovery/stmts/for_.snap +++ b/crates/parser2/test_files/error_recovery/stmts/for_.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/error_recovery.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/error_recovery/stmts/for_.fe --- Root@0..67 ForStmt@0..29 @@ -42,9 +43,10 @@ Root@0..67 WhiteSpace@36..37 " " InKw@37..39 "in" WhiteSpace@39..40 " " - Path@40..43 - PathSegment@40..43 - Ident@40..43 "arr" + PathExpr@40..43 + Path@40..43 + PathSegment@40..43 + Ident@40..43 "arr" WhiteSpace@43..44 " " BlockExpr@44..46 LBrace@44..45 "{" @@ -61,9 +63,10 @@ Root@0..67 WhiteSpace@53..54 " " InKw@54..56 "in" WhiteSpace@56..57 " " - Path@57..60 - PathSegment@57..60 - Ident@57..60 "arr" + PathExpr@57..60 + Path@57..60 + PathSegment@57..60 + Ident@57..60 "arr" WhiteSpace@60..61 " " Error@61..64 Ident@61..62 "x" diff --git a/crates/parser2/test_files/error_recovery/stmts/while_.snap b/crates/parser2/test_files/error_recovery/stmts/while_.snap index 8221a2bdf9..262ba63de1 100644 --- a/crates/parser2/test_files/error_recovery/stmts/while_.snap +++ b/crates/parser2/test_files/error_recovery/stmts/while_.snap @@ -27,9 +27,10 @@ Root@0..56 WhiteSpace@25..29 " " ExprStmt@29..34 BinExpr@29..34 - Path@29..30 - PathSegment@29..30 - Ident@29..30 "x" + PathExpr@29..30 + Path@29..30 + PathSegment@29..30 + Ident@29..30 "x" WhiteSpace@30..31 " " Plus@31..32 "+" WhiteSpace@32..33 " " diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.snap b/crates/parser2/test_files/syntax_node/exprs/binop.snap index 9b970f7ebd..4bdb51021c 100644 --- a/crates/parser2/test_files/syntax_node/exprs/binop.snap +++ b/crates/parser2/test_files/syntax_node/exprs/binop.snap @@ -80,15 +80,17 @@ Root@0..164 Lt@40..41 "<" WhiteSpace@41..42 " " CallExpr@42..48 - Path@42..43 - PathSegment@42..43 - Ident@42..43 "a" + PathExpr@42..43 + Path@42..43 + PathSegment@42..43 + Ident@42..43 "a" CallArgList@43..48 LParen@43..44 "(" CallArg@44..47 - Path@44..47 - PathSegment@44..47 - Ident@44..47 "foo" + PathExpr@44..47 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "foo" RParen@47..48 ")" Newline@48..49 "\n" BinExpr@49..55 @@ -185,9 +187,10 @@ Root@0..164 Int@117..118 "2" Newline@118..119 "\n" BinExpr@119..130 - Path@119..120 - PathSegment@119..120 - Ident@119..120 "a" + PathExpr@119..120 + Path@119..120 + PathSegment@119..120 + Ident@119..120 "a" WhiteSpace@120..121 " " Star2@121..123 "**" WhiteSpace@123..124 " " @@ -244,9 +247,10 @@ Root@0..164 Newline@152..153 "\n" FieldExpr@153..158 FieldExpr@153..156 - Path@153..154 - PathSegment@153..154 - Ident@153..154 "a" + PathExpr@153..154 + Path@153..154 + PathSegment@153..154 + Ident@153..154 "a" Dot@154..155 "." Ident@155..156 "b" Dot@156..157 "." 
@@ -254,9 +258,10 @@ Root@0..164 Newline@158..159 "\n" FieldExpr@159..164 FieldExpr@159..162 - Path@159..160 - PathSegment@159..160 - Ident@159..160 "a" + PathExpr@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "a" Dot@160..161 "." Int@161..162 "0" Dot@162..163 "." diff --git a/crates/parser2/test_files/syntax_node/exprs/call.snap b/crates/parser2/test_files/syntax_node/exprs/call.snap index 182591fe7b..039785a82e 100644 --- a/crates/parser2/test_files/syntax_node/exprs/call.snap +++ b/crates/parser2/test_files/syntax_node/exprs/call.snap @@ -5,28 +5,31 @@ input_file: crates/parser2/test_files/syntax_node/exprs/call.fe --- Root@0..270 CallExpr@0..5 - Path@0..3 - PathSegment@0..3 - Ident@0..3 "foo" + PathExpr@0..3 + Path@0..3 + PathSegment@0..3 + Ident@0..3 "foo" CallArgList@3..5 LParen@3..4 "(" RParen@4..5 ")" Newline@5..6 "\n" CallExpr@6..16 - Path@6..14 - PathSegment@6..9 - Ident@6..9 "foo" - Colon2@9..11 "::" - PathSegment@11..14 - Ident@11..14 "Bar" + PathExpr@6..14 + Path@6..14 + PathSegment@6..9 + Ident@6..9 "foo" + Colon2@9..11 "::" + PathSegment@11..14 + Ident@11..14 "Bar" CallArgList@14..16 LParen@14..15 "(" RParen@15..16 ")" Newline@16..17 "\n" CallExpr@17..32 - Path@17..20 - PathSegment@17..20 - Ident@17..20 "foo" + PathExpr@17..20 + Path@17..20 + PathSegment@17..20 + Ident@17..20 "foo" CallArgList@20..32 LParen@20..21 "(" CallArg@21..25 @@ -48,9 +51,10 @@ Root@0..270 RParen@31..32 ")" Newline@32..33 "\n" CallExpr@33..48 - Path@33..36 - PathSegment@33..36 - Ident@33..36 "foo" + PathExpr@33..36 + Path@33..36 + PathSegment@33..36 + Ident@33..36 "foo" CallArgList@36..48 LParen@36..37 "(" CallArg@37..41 @@ -72,9 +76,10 @@ Root@0..270 RParen@47..48 ")" Newline@48..49 "\n" CallExpr@49..67 - Path@49..52 - PathSegment@49..52 - Ident@49..52 "foo" + PathExpr@49..52 + Path@49..52 + PathSegment@49..52 + Ident@49..52 "foo" CallArgList@52..67 LParen@52..53 "(" CallArg@53..57 @@ -102,9 +107,10 @@ Root@0..270 RParen@66..67 ")" Newline@67..68 "\n" CallExpr@68..86 - Path@68..71 - PathSegment@68..71 - Ident@68..71 "foo" + PathExpr@68..71 + Path@68..71 + PathSegment@68..71 + Ident@68..71 "foo" CallArgList@71..86 LParen@71..72 "(" CallArg@72..73 @@ -132,9 +138,10 @@ Root@0..270 RParen@85..86 ")" Newline@86..88 "\n\n" CallExpr@88..134 - Path@88..91 - PathSegment@88..91 - Ident@88..91 "foo" + PathExpr@88..91 + Path@88..91 + PathSegment@88..91 + Ident@88..91 "foo" GenericArgList@91..109 Lt@91..92 "<" TypeGenericArg@92..95 @@ -174,9 +181,10 @@ Root@0..270 RParen@133..134 ")" Newline@134..135 "\n" CallExpr@135..169 - Path@135..138 - PathSegment@135..138 - Ident@135..138 "foo" + PathExpr@135..138 + Path@135..138 + PathSegment@135..138 + Ident@135..138 "foo" GenericArgList@138..157 Lt@138..139 "<" TypeGenericArg@139..147 @@ -235,9 +243,10 @@ Root@0..270 ParenExpr@246..270 LParen@246..247 "(" CallExpr@247..269 - Path@247..250 - PathSegment@247..250 - Ident@247..250 "foo" + PathExpr@247..250 + Path@247..250 + PathSegment@247..250 + Ident@247..250 "foo" WhiteSpace@250..251 " " GenericArgList@251..265 Lt@251..252 "<" diff --git a/crates/parser2/test_files/syntax_node/exprs/if.snap b/crates/parser2/test_files/syntax_node/exprs/if.snap index ee28efb4a2..4fe25e692b 100644 --- a/crates/parser2/test_files/syntax_node/exprs/if.snap +++ b/crates/parser2/test_files/syntax_node/exprs/if.snap @@ -7,9 +7,10 @@ Root@0..279 IfExpr@0..15 IfKw@0..2 "if" WhiteSpace@2..3 " " - Path@3..4 - PathSegment@3..4 - Ident@3..4 "b" + PathExpr@3..4 + Path@3..4 + PathSegment@3..4 + Ident@3..4 "b" WhiteSpace@4..5 " " 
BlockExpr@5..7 LBrace@5..6 "{" @@ -24,9 +25,10 @@ Root@0..279 IfExpr@17..53 IfKw@17..19 "if" WhiteSpace@19..20 " " - Path@20..21 - PathSegment@20..21 - Ident@20..21 "b" + PathExpr@20..21 + Path@20..21 + PathSegment@20..21 + Ident@20..21 "b" WhiteSpace@21..22 " " BlockExpr@22..24 LBrace@22..23 "{" @@ -54,18 +56,20 @@ Root@0..279 Newline@45..46 "\n" WhiteSpace@46..50 " " ExprStmt@50..51 - Path@50..51 - PathSegment@50..51 - Ident@50..51 "x" + PathExpr@50..51 + Path@50..51 + PathSegment@50..51 + Ident@50..51 "x" Newline@51..52 "\n" RBrace@52..53 "}" Newline@53..55 "\n\n" IfExpr@55..91 IfKw@55..57 "if" WhiteSpace@57..58 " " - Path@58..59 - PathSegment@58..59 - Ident@58..59 "b" + PathExpr@58..59 + Path@58..59 + PathSegment@58..59 + Ident@58..59 "b" WhiteSpace@59..60 " " BlockExpr@60..83 LBrace@60..61 "{" @@ -87,9 +91,10 @@ Root@0..279 Newline@75..76 "\n" WhiteSpace@76..80 " " ExprStmt@80..81 - Path@80..81 - PathSegment@80..81 - Ident@80..81 "x" + PathExpr@80..81 + Path@80..81 + PathSegment@80..81 + Ident@80..81 "x" Newline@81..82 "\n" RBrace@82..83 "}" WhiteSpace@83..84 " " @@ -102,9 +107,10 @@ Root@0..279 IfExpr@93..121 IfKw@93..95 "if" WhiteSpace@95..96 " " - Path@96..97 - PathSegment@96..97 - Ident@96..97 "b" + PathExpr@96..97 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "b" WhiteSpace@97..98 " " BlockExpr@98..121 LBrace@98..99 "{" @@ -126,18 +132,20 @@ Root@0..279 Newline@113..114 "\n" WhiteSpace@114..118 " " ExprStmt@118..119 - Path@118..119 - PathSegment@118..119 - Ident@118..119 "x" + PathExpr@118..119 + Path@118..119 + PathSegment@118..119 + Ident@118..119 "x" Newline@119..120 "\n" RBrace@120..121 "}" Newline@121..123 "\n\n" IfExpr@123..180 IfKw@123..125 "if" WhiteSpace@125..126 " " - Path@126..127 - PathSegment@126..127 - Ident@126..127 "b" + PathExpr@126..127 + Path@126..127 + PathSegment@126..127 + Ident@126..127 "b" WhiteSpace@127..128 " " BlockExpr@128..151 LBrace@128..129 "{" @@ -159,9 +167,10 @@ Root@0..279 Newline@143..144 "\n" WhiteSpace@144..148 " " ExprStmt@148..149 - Path@148..149 - PathSegment@148..149 - Ident@148..149 "x" + PathExpr@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "x" Newline@149..150 "\n" RBrace@150..151 "}" WhiteSpace@151..152 " " @@ -187,9 +196,10 @@ Root@0..279 Newline@172..173 "\n" WhiteSpace@173..177 " " ExprStmt@177..178 - Path@177..178 - PathSegment@177..178 - Ident@177..178 "y" + PathExpr@177..178 + Path@177..178 + PathSegment@177..178 + Ident@177..178 "y" Newline@178..179 "\n" RBrace@179..180 "}" Newline@180..182 "\n\n" @@ -199,9 +209,10 @@ Root@0..279 MatchExpr@185..248 MatchKw@185..190 "match" WhiteSpace@190..191 " " - Path@191..192 - PathSegment@191..192 - Ident@191..192 "x" + PathExpr@191..192 + Path@191..192 + PathSegment@191..192 + Ident@191..192 "x" WhiteSpace@192..193 " " MatchArmList@193..248 LBrace@193..194 "{" diff --git a/crates/parser2/test_files/syntax_node/exprs/index.snap b/crates/parser2/test_files/syntax_node/exprs/index.snap index 32eb3ccaa3..94a7173502 100644 --- a/crates/parser2/test_files/syntax_node/exprs/index.snap +++ b/crates/parser2/test_files/syntax_node/exprs/index.snap @@ -5,9 +5,10 @@ input_file: crates/parser2/test_files/syntax_node/exprs/index.fe --- Root@0..23 IndexExpr@0..8 - Path@0..1 - PathSegment@0..1 - Ident@0..1 "x" + PathExpr@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" LBracket@1..2 "[" BinExpr@2..7 LitExpr@2..3 @@ -22,14 +23,16 @@ Root@0..23 RBracket@7..8 "]" Newline@8..9 "\n" IndexExpr@9..23 - Path@9..10 - PathSegment@9..10 - Ident@9..10 "x" + PathExpr@9..10 + Path@9..10 + 
PathSegment@9..10 + Ident@9..10 "x" LBracket@10..11 "[" MethodCallExpr@11..22 - Path@11..14 - PathSegment@11..14 - Ident@11..14 "foo" + PathExpr@11..14 + Path@11..14 + PathSegment@11..14 + Ident@11..14 "foo" Dot@14..15 "." Ident@15..16 "y" CallArgList@16..22 diff --git a/crates/parser2/test_files/syntax_node/exprs/match.snap b/crates/parser2/test_files/syntax_node/exprs/match.snap index 477f381db4..206420bc23 100644 --- a/crates/parser2/test_files/syntax_node/exprs/match.snap +++ b/crates/parser2/test_files/syntax_node/exprs/match.snap @@ -7,9 +7,10 @@ Root@0..516 MatchExpr@0..10 MatchKw@0..5 "match" WhiteSpace@5..6 " " - Path@6..7 - PathSegment@6..7 - Ident@6..7 "e" + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "e" WhiteSpace@7..8 " " MatchArmList@8..10 LBrace@8..9 "{" @@ -18,9 +19,10 @@ Root@0..516 MatchExpr@12..81 MatchKw@12..17 "match" WhiteSpace@17..18 " " - Path@18..19 - PathSegment@18..19 - Ident@18..19 "e" + PathExpr@18..19 + Path@18..19 + PathSegment@18..19 + Ident@18..19 "e" WhiteSpace@19..20 " " MatchArmList@20..81 LBrace@20..21 "{" @@ -51,15 +53,17 @@ Root@0..516 FatArrow@42..44 "=>" WhiteSpace@44..45 " " BinExpr@45..50 - Path@45..46 - PathSegment@45..46 - Ident@45..46 "x" + PathExpr@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "x" WhiteSpace@46..47 " " Plus@47..48 "+" WhiteSpace@48..49 " " - Path@49..50 - PathSegment@49..50 - Ident@49..50 "y" + PathExpr@49..50 + Path@49..50 + PathSegment@49..50 + Ident@49..50 "y" Newline@50..51 "\n" WhiteSpace@51..55 " " MatchArm@55..79 @@ -87,15 +91,17 @@ Root@0..516 FatArrow@71..73 "=>" WhiteSpace@73..74 " " BinExpr@74..79 - Path@74..75 - PathSegment@74..75 - Ident@74..75 "x" + PathExpr@74..75 + Path@74..75 + PathSegment@74..75 + Ident@74..75 "x" WhiteSpace@75..76 " " Minus@76..77 "-" WhiteSpace@77..78 " " - Path@78..79 - PathSegment@78..79 - Ident@78..79 "y" + PathExpr@78..79 + Path@78..79 + PathSegment@78..79 + Ident@78..79 "y" Newline@79..80 "\n" RBrace@80..81 "}" Newline@81..83 "\n\n" @@ -105,9 +111,10 @@ Root@0..516 ParenExpr@89..105 LParen@89..90 "(" RecordInitExpr@90..104 - Path@90..91 - PathSegment@90..91 - Ident@90..91 "S" + PathExpr@90..91 + Path@90..91 + PathSegment@90..91 + Ident@90..91 "S" WhiteSpace@91..92 " " RecordFieldList@92..104 LBrace@92..93 "{" @@ -149,9 +156,10 @@ Root@0..516 MatchExpr@122..191 MatchKw@122..127 "match" WhiteSpace@127..128 " " - Path@128..129 - PathSegment@128..129 - Ident@128..129 "e" + PathExpr@128..129 + Path@128..129 + PathSegment@128..129 + Ident@128..129 "e" WhiteSpace@129..130 " " MatchArmList@130..191 LBrace@130..131 "{" @@ -182,15 +190,17 @@ Root@0..516 FatArrow@152..154 "=>" WhiteSpace@154..155 " " BinExpr@155..160 - Path@155..156 - PathSegment@155..156 - Ident@155..156 "x" + PathExpr@155..156 + Path@155..156 + PathSegment@155..156 + Ident@155..156 "x" WhiteSpace@156..157 " " Plus@157..158 "+" WhiteSpace@158..159 " " - Path@159..160 - PathSegment@159..160 - Ident@159..160 "y" + PathExpr@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "y" Newline@160..161 "\n" WhiteSpace@161..165 " " MatchArm@165..189 @@ -218,24 +228,27 @@ Root@0..516 FatArrow@181..183 "=>" WhiteSpace@183..184 " " BinExpr@184..189 - Path@184..185 - PathSegment@184..185 - Ident@184..185 "x" + PathExpr@184..185 + Path@184..185 + PathSegment@184..185 + Ident@184..185 "x" WhiteSpace@185..186 " " Minus@186..187 "-" WhiteSpace@187..188 " " - Path@188..189 - PathSegment@188..189 - Ident@188..189 "y" + PathExpr@188..189 + Path@188..189 + PathSegment@188..189 + Ident@188..189 "y" 
Newline@189..190 "\n" RBrace@190..191 "}" Newline@191..193 "\n\n" MatchExpr@193..313 MatchKw@193..198 "match" WhiteSpace@198..199 " " - Path@199..200 - PathSegment@199..200 - Ident@199..200 "e" + PathExpr@199..200 + Path@199..200 + PathSegment@199..200 + Ident@199..200 "e" WhiteSpace@200..201 " " MatchArmList@201..313 LBrace@201..202 "{" @@ -272,15 +285,17 @@ Root@0..516 WhiteSpace@229..237 " " ExprStmt@237..242 BinExpr@237..242 - Path@237..238 - PathSegment@237..238 - Ident@237..238 "x" + PathExpr@237..238 + Path@237..238 + PathSegment@237..238 + Ident@237..238 "x" WhiteSpace@238..239 " " Plus@239..240 "+" WhiteSpace@240..241 " " - Path@241..242 - PathSegment@241..242 - Ident@241..242 "y" + PathExpr@241..242 + Path@241..242 + PathSegment@241..242 + Ident@241..242 "y" WhiteSpace@242..243 " " Newline@243..244 "\n" WhiteSpace@244..248 " " @@ -312,15 +327,17 @@ Root@0..516 FatArrow@270..272 "=>" WhiteSpace@272..273 " " BinExpr@273..278 - Path@273..274 - PathSegment@273..274 - Ident@273..274 "x" + PathExpr@273..274 + Path@273..274 + PathSegment@273..274 + Ident@273..274 "x" WhiteSpace@274..275 " " Minus@275..276 "-" WhiteSpace@276..277 " " - Path@277..278 - PathSegment@277..278 - Ident@277..278 "y" + PathExpr@277..278 + Path@277..278 + PathSegment@277..278 + Ident@277..278 "y" Newline@278..279 "\n" WhiteSpace@279..283 " " MatchArm@283..311 @@ -352,15 +369,17 @@ Root@0..516 WhiteSpace@303..304 " " ExprStmt@304..309 BinExpr@304..309 - Path@304..305 - PathSegment@304..305 - Ident@304..305 "x" + PathExpr@304..305 + Path@304..305 + PathSegment@304..305 + Ident@304..305 "x" WhiteSpace@305..306 " " Star@306..307 "*" WhiteSpace@307..308 " " - Path@308..309 - PathSegment@308..309 - Ident@308..309 "y" + PathExpr@308..309 + Path@308..309 + PathSegment@308..309 + Ident@308..309 "y" WhiteSpace@309..310 " " RBrace@310..311 "}" Newline@311..312 "\n" @@ -369,9 +388,10 @@ Root@0..516 MatchExpr@315..344 MatchKw@315..320 "match" WhiteSpace@320..321 " " - Path@321..322 - PathSegment@321..322 - Ident@321..322 "e" + PathExpr@321..322 + Path@321..322 + PathSegment@321..322 + Ident@321..322 "e" WhiteSpace@322..323 " " MatchArmList@323..344 LBrace@323..324 "{" @@ -394,9 +414,10 @@ Root@0..516 WhiteSpace@337..338 " " FatArrow@338..340 "=>" WhiteSpace@340..341 " " - Path@341..342 - PathSegment@341..342 - Ident@341..342 "s" + PathExpr@341..342 + Path@341..342 + PathSegment@341..342 + Ident@341..342 "s" WhiteSpace@342..343 " " RBrace@343..344 "}" Newline@344..346 "\n\n" @@ -424,18 +445,20 @@ Root@0..516 WhiteSpace@372..380 " " ExprStmt@380..392 CallExpr@380..392 - Path@380..389 - PathSegment@380..384 - Ident@380..384 "Enum" - Colon2@384..386 "::" - PathSegment@386..389 - Ident@386..389 "Var" + PathExpr@380..389 + Path@380..389 + PathSegment@380..384 + Ident@380..384 "Enum" + Colon2@384..386 "::" + PathSegment@386..389 + Ident@386..389 "Var" CallArgList@389..392 LParen@389..390 "(" CallArg@390..391 - Path@390..391 - PathSegment@390..391 - Ident@390..391 "x" + PathExpr@390..391 + Path@390..391 + PathSegment@390..391 + Ident@390..391 "x" RParen@391..392 ")" Newline@392..393 "\n" WhiteSpace@393..399 " " @@ -465,9 +488,10 @@ Root@0..516 WhiteSpace@421..422 " " FatArrow@422..424 "=>" WhiteSpace@424..425 " " - Path@425..426 - PathSegment@425..426 - Ident@425..426 "s" + PathExpr@425..426 + Path@425..426 + PathSegment@425..426 + Ident@425..426 "s" WhiteSpace@426..427 " " Newline@427..428 "\n" RBrace@428..429 "}" @@ -478,9 +502,10 @@ Root@0..516 ParenExpr@437..463 LParen@437..438 "(" RecordInitExpr@438..462 - Path@438..439 - 
PathSegment@438..439 - Ident@438..439 "S" + PathExpr@438..439 + Path@438..439 + PathSegment@438..439 + Ident@438..439 "S" WhiteSpace@439..440 " " RecordFieldList@440..462 LBrace@440..441 "{" @@ -489,18 +514,20 @@ Root@0..516 Colon@442..443 ":" WhiteSpace@443..444 " " CallExpr@444..455 - Path@444..452 - PathSegment@444..447 - Ident@444..447 "Foo" - Colon2@447..449 "::" - PathSegment@449..452 - Ident@449..452 "Bar" + PathExpr@444..452 + Path@444..452 + PathSegment@444..447 + Ident@444..447 "Foo" + Colon2@447..449 "::" + PathSegment@449..452 + Ident@449..452 "Bar" CallArgList@452..455 LParen@452..453 "(" CallArg@453..454 - Path@453..454 - PathSegment@453..454 - Ident@453..454 "x" + PathExpr@453..454 + Path@453..454 + PathSegment@453..454 + Ident@453..454 "x" RParen@454..455 ")" Comma@455..456 "," WhiteSpace@456..457 " " diff --git a/crates/parser2/test_files/syntax_node/exprs/method.snap b/crates/parser2/test_files/syntax_node/exprs/method.snap index c81ead0de1..62132d71e5 100644 --- a/crates/parser2/test_files/syntax_node/exprs/method.snap +++ b/crates/parser2/test_files/syntax_node/exprs/method.snap @@ -5,9 +5,10 @@ input_file: crates/parser2/test_files/syntax_node/exprs/method.fe --- Root@0..75 MethodCallExpr@0..5 - Path@0..1 - PathSegment@0..1 - Ident@0..1 "x" + PathExpr@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" Dot@1..2 "." Ident@2..3 "y" CallArgList@3..5 @@ -15,9 +16,10 @@ Root@0..75 RParen@4..5 ")" Newline@5..6 "\n" MethodCallExpr@6..15 - Path@6..7 - PathSegment@6..7 - Ident@6..7 "x" + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "x" Dot@7..8 "." Ident@8..9 "y" CallArgList@9..15 @@ -36,9 +38,10 @@ Root@0..75 Newline@15..17 "\n\n" MethodCallExpr@17..34 FieldExpr@17..20 - Path@17..18 - PathSegment@17..18 - Ident@17..18 "x" + PathExpr@17..18 + Path@17..18 + PathSegment@17..18 + Ident@17..18 "x" Dot@18..19 "." Ident@19..20 "y" Dot@20..21 "." @@ -65,9 +68,10 @@ Root@0..75 Newline@34..35 "\n" MethodCallExpr@35..47 IndexExpr@35..39 - Path@35..36 - PathSegment@35..36 - Ident@35..36 "x" + PathExpr@35..36 + Path@35..36 + PathSegment@35..36 + Ident@35..36 "x" LBracket@36..37 "[" LitExpr@37..38 Lit@37..38 @@ -87,9 +91,10 @@ Root@0..75 RParen@46..47 ")" Newline@47..49 "\n\n" MethodCallExpr@49..75 - Path@49..50 - PathSegment@49..50 - Ident@49..50 "x" + PathExpr@49..50 + Path@49..50 + PathSegment@49..50 + Ident@49..50 "x" Dot@50..51 "." 
Ident@51..52 "y" GenericArgList@52..66 @@ -106,15 +111,17 @@ Root@0..75 LBrace@58..59 "{" ExprStmt@59..64 BinExpr@59..64 - Path@59..60 - PathSegment@59..60 - Ident@59..60 "x" + PathExpr@59..60 + Path@59..60 + PathSegment@59..60 + Ident@59..60 "x" WhiteSpace@60..61 " " Plus@61..62 "+" WhiteSpace@62..63 " " - Path@63..64 - PathSegment@63..64 - Ident@63..64 "y" + PathExpr@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "y" RBrace@64..65 "}" Gt@65..66 ">" CallArgList@66..75 @@ -129,8 +136,9 @@ Root@0..75 Comma@71..72 "," WhiteSpace@72..73 " " CallArg@73..74 - Path@73..74 - PathSegment@73..74 - Ident@73..74 "y" + PathExpr@73..74 + Path@73..74 + PathSegment@73..74 + Ident@73..74 "y" RParen@74..75 ")" diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap index 09a3f1b5d3..e22d6fd9f5 100644 --- a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap @@ -5,9 +5,10 @@ input_file: crates/parser2/test_files/syntax_node/exprs/struct_init.fe --- Root@0..40 RecordInitExpr@0..13 - Path@0..6 - PathSegment@0..6 - Ident@0..6 "Struct" + PathExpr@0..6 + Path@0..6 + PathSegment@0..6 + Ident@0..6 "Struct" WhiteSpace@6..7 " " RecordFieldList@7..13 LBrace@7..8 "{" @@ -20,9 +21,10 @@ Root@0..40 RBrace@12..13 "}" Newline@13..14 "\n" RecordInitExpr@14..31 - Path@14..20 - PathSegment@14..20 - Ident@14..20 "Struct" + PathExpr@14..20 + Path@14..20 + PathSegment@14..20 + Ident@14..20 "Struct" WhiteSpace@20..21 " " RecordFieldList@21..31 LBrace@21..22 "{" @@ -43,9 +45,10 @@ Root@0..40 RBrace@30..31 "}" Newline@31..32 "\n" RecordInitExpr@32..40 - Path@32..37 - PathSegment@32..37 - Ident@32..37 "Empty" + PathExpr@32..37 + Path@32..37 + PathSegment@32..37 + Ident@32..37 "Empty" WhiteSpace@37..38 " " RecordFieldList@38..40 LBrace@38..39 "{" diff --git a/crates/parser2/test_files/syntax_node/items/const.snap b/crates/parser2/test_files/syntax_node/items/const.snap index c5a30332bc..02f1b689c2 100644 --- a/crates/parser2/test_files/syntax_node/items/const.snap +++ b/crates/parser2/test_files/syntax_node/items/const.snap @@ -76,9 +76,10 @@ Root@0..160 IfExpr@79..157 IfKw@79..81 "if" WhiteSpace@81..82 " " - Path@82..83 - PathSegment@82..83 - Ident@82..83 "b" + PathExpr@82..83 + Path@82..83 + PathSegment@82..83 + Ident@82..83 "b" WhiteSpace@83..84 " " BlockExpr@84..101 LBrace@84..85 "{" @@ -98,9 +99,10 @@ Root@0..160 IfKw@107..109 "if" WhiteSpace@109..110 " " BinExpr@110..116 - Path@110..111 - PathSegment@110..111 - Ident@110..111 "x" + PathExpr@110..111 + Path@110..111 + PathSegment@110..111 + Ident@110..111 "x" WhiteSpace@111..112 " " Eq2@112..114 "==" WhiteSpace@114..115 " " diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index c31f42f4f4..7f19a8d2bb 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -266,9 +266,10 @@ Root@0..361 Newline@298..299 "\n" WhiteSpace@299..303 " " ExprStmt@303..304 - Path@303..304 - PathSegment@303..304 - Ident@303..304 "t" + PathExpr@303..304 + Path@303..304 + PathSegment@303..304 + Ident@303..304 "t" Newline@304..305 "\n" RBrace@305..306 "}" Newline@306..308 "\n\n" diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 117b72b258..951c9a2e75 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ 
b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -67,9 +67,10 @@ Root@0..266 WhiteSpace@66..74 " " ExprStmt@74..126 RecordInitExpr@74..126 - Path@74..78 - PathSegment@74..78 - SelfTypeKw@74..78 "Self" + PathExpr@74..78 + Path@74..78 + PathSegment@74..78 + SelfTypeKw@74..78 "Self" WhiteSpace@78..79 " " RecordFieldList@79..126 LBrace@79..80 "{" @@ -81,18 +82,20 @@ Root@0..266 WhiteSpace@97..98 " " BinExpr@98..116 FieldExpr@98..106 - Path@98..102 - PathSegment@98..102 - SelfKw@98..102 "self" + PathExpr@98..102 + Path@98..102 + PathSegment@98..102 + SelfKw@98..102 "self" Dot@102..103 "." Ident@103..106 "val" WhiteSpace@106..107 " " Plus@107..108 "+" WhiteSpace@108..109 " " FieldExpr@109..116 - Path@109..112 - PathSegment@109..112 - Ident@109..112 "rhs" + PathExpr@109..112 + Path@109..112 + PathSegment@109..112 + Ident@109..112 "rhs" Dot@112..113 "." Ident@113..116 "val" Newline@116..117 "\n" @@ -214,16 +217,18 @@ Root@0..266 ParenExpr@244..258 LParen@244..245 "(" BinExpr@245..257 - Path@245..248 - PathSegment@245..248 - Ident@245..248 "rhs" + PathExpr@245..248 + Path@245..248 + PathSegment@245..248 + Ident@245..248 "rhs" WhiteSpace@248..249 " " Minus@249..250 "-" WhiteSpace@250..251 " " FieldExpr@251..257 - Path@251..255 - PathSegment@251..255 - SelfKw@251..255 "self" + PathExpr@251..255 + Path@251..255 + PathSegment@251..255 + SelfKw@251..255 "self" Dot@255..256 "." Ident@256..257 "t" RParen@257..258 ")" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index f66b586162..32444f2c1e 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -177,9 +177,10 @@ Root@0..317 WhiteSpace@160..168 " " ExprStmt@168..188 CallExpr@168..188 - Path@168..180 - PathSegment@168..180 - Ident@168..180 "do_something" + PathExpr@168..180 + Path@168..180 + PathSegment@168..180 + Ident@168..180 "do_something" GenericArgList@180..185 Lt@180..181 "<" TypeGenericArg@181..184 @@ -191,9 +192,10 @@ Root@0..317 CallArgList@185..188 LParen@185..186 "(" CallArg@186..187 - Path@186..187 - PathSegment@186..187 - Ident@186..187 "t" + PathExpr@186..187 + Path@186..187 + PathSegment@186..187 + Ident@186..187 "t" RParen@187..188 ")" Newline@188..189 "\n" WhiteSpace@189..193 " " @@ -307,9 +309,10 @@ Root@0..317 WhiteSpace@281..289 " " ExprStmt@289..309 CallExpr@289..309 - Path@289..301 - PathSegment@289..301 - Ident@289..301 "do_something" + PathExpr@289..301 + Path@289..301 + PathSegment@289..301 + Ident@289..301 "do_something" GenericArgList@301..306 Lt@301..302 "<" TypeGenericArg@302..305 @@ -321,9 +324,10 @@ Root@0..317 CallArgList@306..309 LParen@306..307 "(" CallArg@307..308 - Path@307..308 - PathSegment@307..308 - Ident@307..308 "t" + PathExpr@307..308 + Path@307..308 + PathSegment@307..308 + Ident@307..308 "t" RParen@308..309 ")" Newline@309..310 "\n" WhiteSpace@310..314 " " diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index 833eeeeff6..a38a042c1e 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -136,30 +136,34 @@ Root@0..588 ExprStmt@151..174 BinExpr@151..174 BinExpr@151..160 - Path@151..154 - PathSegment@151..154 - Ident@151..154 "lhs" + PathExpr@151..154 + Path@151..154 + PathSegment@151..154 + Ident@151..154 "lhs" WhiteSpace@154..155 " " Plus@155..156 "+" WhiteSpace@156..157 " " - 
Path@157..160 - PathSegment@157..160 - Ident@157..160 "lhs" + PathExpr@157..160 + Path@157..160 + PathSegment@157..160 + Ident@157..160 "lhs" WhiteSpace@160..161 " " Minus@161..162 "-" WhiteSpace@162..163 " " ParenExpr@163..174 LParen@163..164 "(" BinExpr@164..173 - Path@164..167 - PathSegment@164..167 - Ident@164..167 "rhs" + PathExpr@164..167 + Path@164..167 + PathSegment@164..167 + Ident@164..167 "rhs" WhiteSpace@167..168 " " Plus@168..169 "+" WhiteSpace@169..170 " " - Path@170..173 - PathSegment@170..173 - Ident@170..173 "rhs" + PathExpr@170..173 + Path@170..173 + PathSegment@170..173 + Ident@170..173 "rhs" RParen@173..174 ")" Newline@174..175 "\n" WhiteSpace@175..179 " " @@ -419,20 +423,22 @@ Root@0..588 TupleExpr@522..580 LParen@522..523 "(" CallExpr@523..566 - Path@523..543 - PathSegment@523..533 - Ident@523..533 "SyntaxNode" - Colon2@533..535 "::" - PathSegment@535..543 - Ident@535..543 "new_root" + PathExpr@523..543 + Path@523..543 + PathSegment@523..533 + Ident@523..533 "SyntaxNode" + Colon2@533..535 "::" + PathSegment@535..543 + Ident@535..543 "new_root" CallArgList@543..566 LParen@543..544 "(" CallArg@544..565 MethodCallExpr@544..565 FieldExpr@544..556 - Path@544..548 - PathSegment@544..548 - SelfKw@544..548 "self" + PathExpr@544..548 + Path@544..548 + PathSegment@544..548 + SelfKw@544..548 "self" Dot@548..549 "." Ident@549..556 "builder" Dot@556..557 "." @@ -444,9 +450,10 @@ Root@0..588 Comma@566..567 "," WhiteSpace@567..568 " " FieldExpr@568..579 - Path@568..572 - PathSegment@568..572 - SelfKw@568..572 "self" + PathExpr@568..572 + Path@568..572 + PathSegment@568..572 + SelfKw@568..572 "self" Dot@572..573 "." Ident@573..579 "errors" RParen@579..580 ")" diff --git a/crates/parser2/test_files/syntax_node/stmts/assert.snap b/crates/parser2/test_files/syntax_node/stmts/assert.snap index aeeb5e835a..ca935998e2 100644 --- a/crates/parser2/test_files/syntax_node/stmts/assert.snap +++ b/crates/parser2/test_files/syntax_node/stmts/assert.snap @@ -14,9 +14,10 @@ Root@0..49 WhiteSpace@8..9 " " Lt@9..10 "<" WhiteSpace@10..11 " " - Path@11..12 - PathSegment@11..12 - Ident@11..12 "x" + PathExpr@11..12 + Path@11..12 + PathSegment@11..12 + Ident@11..12 "x" Newline@12..13 "\n" AssertStmt@13..49 AssertKw@13..19 "assert" @@ -28,9 +29,10 @@ Root@0..49 WhiteSpace@21..22 " " Lt@22..23 "<" WhiteSpace@23..24 " " - Path@24..25 - PathSegment@24..25 - Ident@24..25 "x" + PathExpr@24..25 + Path@24..25 + PathSegment@24..25 + Ident@24..25 "x" Comma@25..26 "," WhiteSpace@26..27 " " LitExpr@27..49 diff --git a/crates/parser2/test_files/syntax_node/stmts/assign.fe b/crates/parser2/test_files/syntax_node/stmts/assign.fe new file mode 100644 index 0000000000..42a9d86103 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/assign.fe @@ -0,0 +1,2 @@ +x = 1 +Foo{x, y} = foo \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/assign.snap b/crates/parser2/test_files/syntax_node/stmts/assign.snap new file mode 100644 index 0000000000..e62c0752fd --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/assign.snap @@ -0,0 +1,46 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/assign.fe +--- +Root@0..21 + AssignStmt@0..5 + PathPat@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + WhiteSpace@1..2 " " + Eq@2..3 "=" + WhiteSpace@3..4 " " + LitExpr@4..5 + Lit@4..5 + Int@4..5 "1" + Newline@5..6 "\n" + AssignStmt@6..21 + RecordPat@6..15 + Path@6..9 + PathSegment@6..9 + Ident@6..9 "Foo" + 
RecordPatFieldList@9..15 + LBrace@9..10 "{" + RecordPatField@10..11 + PathPat@10..11 + Path@10..11 + PathSegment@10..11 + Ident@10..11 "x" + Comma@11..12 "," + WhiteSpace@12..13 " " + RecordPatField@13..14 + PathPat@13..14 + Path@13..14 + PathSegment@13..14 + Ident@13..14 "y" + RBrace@14..15 "}" + WhiteSpace@15..16 " " + Eq@16..17 "=" + WhiteSpace@17..18 " " + PathExpr@18..21 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "foo" + diff --git a/crates/parser2/test_files/syntax_node/stmts/for.snap b/crates/parser2/test_files/syntax_node/stmts/for.snap index e411c28968..4f24a411cc 100644 --- a/crates/parser2/test_files/syntax_node/stmts/for.snap +++ b/crates/parser2/test_files/syntax_node/stmts/for.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/for.fe --- Root@0..96 ForStmt@0..34 @@ -13,9 +14,10 @@ Root@0..96 WhiteSpace@5..6 " " InKw@6..8 "in" WhiteSpace@8..9 " " - Path@9..12 - PathSegment@9..12 - Ident@9..12 "arr" + PathExpr@9..12 + Path@9..12 + PathSegment@9..12 + Ident@9..12 "arr" WhiteSpace@12..13 " " BlockExpr@13..34 LBrace@13..14 "{" @@ -30,15 +32,17 @@ Root@0..96 Eq@23..24 "=" WhiteSpace@24..25 " " BinExpr@25..32 - Path@25..28 - PathSegment@25..28 - Ident@25..28 "sum" + PathExpr@25..28 + Path@25..28 + PathSegment@25..28 + Ident@25..28 "sum" WhiteSpace@28..29 " " Plus@29..30 "+" WhiteSpace@30..31 " " - Path@31..32 - PathSegment@31..32 - Ident@31..32 "i" + PathExpr@31..32 + Path@31..32 + PathSegment@31..32 + Ident@31..32 "i" Newline@32..33 "\n" RBrace@33..34 "}" Newline@34..36 "\n\n" @@ -69,9 +73,10 @@ Root@0..96 InKw@54..56 "in" WhiteSpace@56..57 " " MethodCallExpr@57..70 - Path@57..63 - PathSegment@57..63 - Ident@57..63 "s_list" + PathExpr@57..63 + Path@57..63 + PathSegment@57..63 + Ident@57..63 "s_list" Dot@63..64 "." 
Ident@64..68 "iter" CallArgList@68..70 @@ -92,21 +97,24 @@ Root@0..96 WhiteSpace@82..83 " " BinExpr@83..94 BinExpr@83..90 - Path@83..86 - PathSegment@83..86 - Ident@83..86 "sum" + PathExpr@83..86 + Path@83..86 + PathSegment@83..86 + Ident@83..86 "sum" WhiteSpace@86..87 " " Plus@87..88 "+" WhiteSpace@88..89 " " - Path@89..90 - PathSegment@89..90 - Ident@89..90 "x" + PathExpr@89..90 + Path@89..90 + PathSegment@89..90 + Ident@89..90 "x" WhiteSpace@90..91 " " Plus@91..92 "+" WhiteSpace@92..93 " " - Path@93..94 - PathSegment@93..94 - Ident@93..94 "y" + PathExpr@93..94 + Path@93..94 + PathSegment@93..94 + Ident@93..94 "y" Newline@94..95 "\n" RBrace@95..96 "}" diff --git a/crates/parser2/test_files/syntax_node/stmts/let.snap b/crates/parser2/test_files/syntax_node/stmts/let.snap index 919c059efa..d0bc0fe2de 100644 --- a/crates/parser2/test_files/syntax_node/stmts/let.snap +++ b/crates/parser2/test_files/syntax_node/stmts/let.snap @@ -138,9 +138,10 @@ Root@0..231 WhiteSpace@98..99 " " Eq@99..100 "=" WhiteSpace@100..101 " " - Path@101..102 - PathSegment@101..102 - Ident@101..102 "e" + PathExpr@101..102 + Path@101..102 + PathSegment@101..102 + Ident@101..102 "e" Newline@102..104 "\n\n" LetStmt@104..123 LetKw@104..107 "let" @@ -171,9 +172,10 @@ Root@0..231 WhiteSpace@119..120 " " Eq@120..121 "=" WhiteSpace@121..122 " " - Path@122..123 - PathSegment@122..123 - Ident@122..123 "s" + PathExpr@122..123 + Path@122..123 + PathSegment@122..123 + Ident@122..123 "s" Newline@123..125 "\n\n" LetStmt@125..162 LetKw@125..128 "let" @@ -188,18 +190,20 @@ Root@0..231 IfExpr@133..162 IfKw@133..135 "if" WhiteSpace@135..136 " " - Path@136..137 - PathSegment@136..137 - Ident@136..137 "b" + PathExpr@136..137 + Path@136..137 + PathSegment@136..137 + Ident@136..137 "b" WhiteSpace@137..138 " " BlockExpr@138..147 LBrace@138..139 "{" Newline@139..140 "\n" WhiteSpace@140..144 " " ExprStmt@144..145 - Path@144..145 - PathSegment@144..145 - Ident@144..145 "y" + PathExpr@144..145 + Path@144..145 + PathSegment@144..145 + Ident@144..145 "y" Newline@145..146 "\n" RBrace@146..147 "}" WhiteSpace@147..148 " " @@ -210,9 +214,10 @@ Root@0..231 Newline@154..155 "\n" WhiteSpace@155..159 " " ExprStmt@159..160 - Path@159..160 - PathSegment@159..160 - Ident@159..160 "z" + PathExpr@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "z" Newline@160..161 "\n" RBrace@161..162 "}" Newline@162..164 "\n\n" @@ -229,9 +234,10 @@ Root@0..231 MatchExpr@172..231 MatchKw@172..177 "match" WhiteSpace@177..178 " " - Path@178..179 - PathSegment@178..179 - Ident@178..179 "b" + PathExpr@178..179 + Path@178..179 + PathSegment@178..179 + Ident@178..179 "b" WhiteSpace@179..180 " " MatchArmList@180..231 LBrace@180..181 "{" @@ -273,9 +279,10 @@ Root@0..231 WhiteSpace@213..214 " " FatArrow@214..216 "=>" WhiteSpace@216..217 " " - Path@217..218 - PathSegment@217..218 - Ident@217..218 "x" + PathExpr@217..218 + Path@217..218 + PathSegment@217..218 + Ident@217..218 "x" Newline@218..219 "\n" WhiteSpace@219..223 " " MatchArm@223..229 diff --git a/crates/parser2/test_files/syntax_node/stmts/while.snap b/crates/parser2/test_files/syntax_node/stmts/while.snap index cd3cd4ae05..da4486b5da 100644 --- a/crates/parser2/test_files/syntax_node/stmts/while.snap +++ b/crates/parser2/test_files/syntax_node/stmts/while.snap @@ -8,9 +8,10 @@ Root@0..46 WhileKw@0..5 "while" WhiteSpace@5..6 " " BinExpr@6..12 - Path@6..7 - PathSegment@6..7 - Ident@6..7 "i" + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "i" WhiteSpace@7..8 " " Lt@8..9 "<" WhiteSpace@9..10 " " @@ 
-51,9 +52,10 @@ Root@0..46 Eq@37..38 "=" WhiteSpace@38..39 " " BinExpr@39..44 - Path@39..40 - PathSegment@39..40 - Ident@39..40 "i" + PathExpr@39..40 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "i" WhiteSpace@40..41 " " Plus@41..42 "+" WhiteSpace@42..43 " " diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index 71a60a2e0a..18a611032b 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -372,9 +372,10 @@ Root@0..480 Ident@473..474 "T" SemiColon@474..475 ";" WhiteSpace@475..476 " " - Path@476..477 - PathSegment@476..477 - Ident@476..477 "N" + PathExpr@476..477 + Path@476..477 + PathSegment@476..477 + Ident@476..477 "N" RBracket@477..478 "]" Newline@478..479 "\n" RBrace@479..480 "}" From 7d40739b88121d85a0287322a7023cb4198e32a4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 21:54:24 +0100 Subject: [PATCH 064/678] Add ast for stmt --- crates/parser2/src/ast/mod.rs | 4 +- crates/parser2/src/ast/stmt.rs | 364 +++++++++++++++++++++++++++++++++ 2 files changed, 366 insertions(+), 2 deletions(-) create mode 100644 crates/parser2/src/ast/stmt.rs diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index 69bbf20212..1915b374de 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -82,7 +82,7 @@ macro_rules! ast_node { type IntoIter = crate::ast::AstChildren<$item_ty>; fn into_iter(self) -> Self::IntoIter { - support::children(self.syntax()) + rowan::ast::support::children(rowan::ast::AstNode::syntax(&self)) } } impl IntoIterator for &$name { @@ -90,7 +90,7 @@ macro_rules! ast_node { type IntoIter = crate::ast::AstChildren<$item_ty>; fn into_iter(self) -> Self::IntoIter { - support::children(self.syntax()) + rowan::ast::support::children(rowan::ast::AstNode::syntax(self)) } } diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs new file mode 100644 index 0000000000..823aa49cbe --- /dev/null +++ b/crates/parser2/src/ast/stmt.rs @@ -0,0 +1,364 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; +use crate::SyntaxKind as SK; + +ast_node! { + /// A statement. + /// Use [`Self::kind`] to get the specific kind of the statement. + pub struct Stmt, + SK::LetStmt + | SK::AssignStmt + | SK::AugAssignStmt + | SK::ForStmt + | SK::WhileStmt + | SK::ContinueStmt + | SK::BreakStmt + | SK::AssertStmt + | SK::ReturnStmt + | SK::ExprStmt +} +impl Stmt { + /// Returns the specific kind of the statement. 
+ pub fn kind(&self) -> StmtKind { + match self.syntax().kind() { + SK::LetStmt => StmtKind::Let(AstNode::cast(self.syntax().clone()).unwrap()), + SK::AssignStmt => StmtKind::Assign(AstNode::cast(self.syntax().clone()).unwrap()), + SK::AugAssignStmt => StmtKind::AugAssign(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ForStmt => StmtKind::For(AstNode::cast(self.syntax().clone()).unwrap()), + SK::WhileStmt => StmtKind::While(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ContinueStmt => StmtKind::Continue(AstNode::cast(self.syntax().clone()).unwrap()), + SK::BreakStmt => StmtKind::Break(AstNode::cast(self.syntax().clone()).unwrap()), + SK::AssertStmt => StmtKind::Assert(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ReturnStmt => StmtKind::Return(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ExprStmt => StmtKind::Expr(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// `let x: i32 = 1` + pub struct LetStmt, + SK::LetStmt, +} +impl LetStmt { + /// Returns the pattern of the binding. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the type annotation. + pub fn type_annotation(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the initializer. + pub fn initializer(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `x = 1` + pub struct AssignStmt, + SK::AssignStmt, +} +impl AssignStmt { + /// Returns the pattern of the lhs of the assignment. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the expression of the rhs of the assignment. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `x += 1` + pub struct AugAssignStmt, + SK::AugAssignStmt, +} +impl AugAssignStmt { + /// Returns the pattern of the lhs of the assignment. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + pub fn op(&self) -> Option { + self.syntax() + .children_with_tokens() + .find_map(|it| match it { + rowan::NodeOrToken::Node(it) => super::ArithBinOp::from_node(it), + rowan::NodeOrToken::Token(it) => super::ArithBinOp::from_token(it), + }) + } + + /// Returns the expression of the rhs of the assignment. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `for pat in expr {..}` + pub struct ForStmt, + SK::ForStmt +} +impl ForStmt { + /// Returns the pattern of the binding in the for loop. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the expression of the iterator in the for loop. + pub fn iterable(&self) -> Option { + support::child(self.syntax()) + } + + pub fn body(&self) -> Option { + let mut block_exprs = support::children(self.syntax()); + let first = block_exprs.next(); + match block_exprs.next() { + Some(expr) => Some(expr), + None => first, + } + } +} + +ast_node! { + /// `while cond {..}` + pub struct WhileStmt, + SK::WhileStmt +} +impl WhileStmt { + /// Returns the condition of the while loop. + pub fn cond(&self) -> Option { + support::child(self.syntax()) + } + + pub fn body(&self) -> Option { + let mut block_exprs = support::children(self.syntax()); + let first = block_exprs.next(); + match block_exprs.next() { + Some(expr) => Some(expr), + None => first, + } + } +} + +ast_node! { + /// `continue` + pub struct ContinueStmt, + SK::ContinueStmt +} + +ast_node! { + /// `break` + pub struct BreakStmt, + SK::BreakStmt +} + +ast_node! 
{ + /// `assert cond` or + /// `assert cond, message` + pub struct AssertStmt, + SK::AssertStmt +} +impl AssertStmt { + /// Returns the condition of the assert statement. + pub fn cond(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the message of the assert statement. + pub fn message(&self) -> Option { + let mut exprs = support::children(self.syntax()); + let first = exprs.next(); + match exprs.next() { + Some(expr) => Some(expr), + None => first, + } + } +} + +ast_node! { + /// `return` or + /// `return expr` + pub struct ReturnStmt, + SK::ReturnStmt +} +impl ReturnStmt { + /// Returns the expression of the return statement. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct ExprStmt, + SK::ExprStmt +} +impl ExprStmt { + /// Returns the expression of the expression statement. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +pub enum StmtKind { + Let(LetStmt), + Assign(AssignStmt), + AugAssign(AugAssignStmt), + For(ForStmt), + While(WhileStmt), + Continue(ContinueStmt), + Break(BreakStmt), + Assert(AssertStmt), + Return(ReturnStmt), + Expr(ExprStmt), +} + +#[cfg(test)] +mod tests { + use crate::{ + ast::{PatKind, TypeKind}, + lexer::Lexer, + parser::Parser, + }; + + use super::*; + + fn parse_stmt(source: &str) -> Stmt { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + crate::parser::stmt::parse_stmt(&mut parser, None); + Stmt::cast(parser.finish().0).unwrap() + } + + #[test] + fn let_() { + let stmt = parse_stmt("let x: i32 = 1"); + let let_stmt = match stmt.kind() { + StmtKind::Let(it) => it, + _ => panic!("expected let statement"), + }; + assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); + assert!(matches!( + let_stmt.type_annotation().unwrap().kind(), + TypeKind::Path(_) + )); + assert!(let_stmt.initializer().is_some()); + + let stmt = parse_stmt("let x"); + let let_stmt = match stmt.kind() { + StmtKind::Let(it) => it, + _ => panic!("expected let statement"), + }; + assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); + assert!(let_stmt.type_annotation().is_none()); + assert!(let_stmt.initializer().is_none()); + } + + #[test] + fn assign() { + let stmt = parse_stmt(r#"Foo{x, y} = foo"#); + let assign_stmt = match stmt.kind() { + StmtKind::Assign(it) => it, + _ => panic!("expected assign statement"), + }; + assert!(matches!( + assign_stmt.pat().unwrap().kind(), + PatKind::Record(_) + )); + assert!(assign_stmt.expr().is_some()); + } + + #[test] + fn aug_assign() { + let stmt = parse_stmt("x += 1"); + let aug_assign_stmt = match stmt.kind() { + StmtKind::AugAssign(it) => it, + _ => panic!("expected aug assign statement"), + }; + + assert!(matches!( + aug_assign_stmt.pat().unwrap().kind(), + PatKind::Path(_) + )); + assert!(matches!( + aug_assign_stmt.op().unwrap(), + crate::ast::ArithBinOp::Add(_) + )); + + let stmt = parse_stmt("x <<= 1"); + let aug_assign_stmt = match stmt.kind() { + StmtKind::AugAssign(it) => it, + _ => panic!("expected aug assign statement"), + }; + + assert!(matches!( + aug_assign_stmt.pat().unwrap().kind(), + PatKind::Path(_) + )); + assert!(matches!( + aug_assign_stmt.op().unwrap(), + crate::ast::ArithBinOp::LShift(_) + )); + } + + #[test] + fn r#for() { + let source = r#" + for x in foo { + bar + } + "#; + + let stmt = parse_stmt(source); + let for_stmt = match stmt.kind() { + StmtKind::For(it) => it, + _ => panic!("expected for statement"), + }; + assert!(matches!(for_stmt.pat().unwrap().kind(), 
PatKind::Path(_))); + assert!(for_stmt.iterable().is_some()); + assert!(for_stmt.body().is_some()); + } + + #[test] + fn r#while() { + let source = r#" + while { x } { + bar + } + "#; + + let stmt = parse_stmt(source); + let while_stmt = match stmt.kind() { + StmtKind::While(it) => it, + _ => panic!("expected for statement"), + }; + assert!(while_stmt.cond().is_some()); + assert!(while_stmt.body().is_some()); + assert_ne!(while_stmt.cond(), while_stmt.body()); + } + + #[test] + fn r#return() { + let stmt = parse_stmt("return x"); + let return_stmt = match stmt.kind() { + StmtKind::Return(it) => it, + _ => panic!("expected return statement"), + }; + assert!(return_stmt.expr().is_some()); + + let stmt = parse_stmt("return"); + let return_stmt = match stmt.kind() { + StmtKind::Return(it) => it, + _ => panic!("expected return statement"), + }; + assert!(return_stmt.expr().is_none()); + } +} From ffb9d88d8d6285c76edbabe614555da3f561f213 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 9 Feb 2023 22:16:03 +0100 Subject: [PATCH 065/678] Fix subtraction overflow when `override` is not found in scope stack --- crates/parser2/src/parser/mod.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 927e697ed6..8416e4ca8b 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -325,6 +325,9 @@ impl Parser { { Some(RecoveryMethod::Inheritance(set)) => { recovery_set.extend(set.iter()); + if scope_index == 0 { + break; + } scope_index -= 1; } Some(RecoveryMethod::Override(set)) => { From ea4fefa52a31b5c87a91dd989e334fd20e17bf32 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 10 Feb 2023 20:27:03 +0100 Subject: [PATCH 066/678] Add ast for expr --- crates/parser2/src/ast/attr.rs | 4 +- crates/parser2/src/ast/expr.rs | 912 +++++++++++++++++++++++++++++ crates/parser2/src/ast/lit.rs | 4 + crates/parser2/src/ast/param.rs | 28 + crates/parser2/src/ast/pat.rs | 1 + crates/parser2/src/ast/stmt.rs | 22 +- crates/parser2/src/ast/type_.rs | 96 ++- crates/parser2/src/parser/type_.rs | 2 +- 8 files changed, 1050 insertions(+), 19 deletions(-) create mode 100644 crates/parser2/src/ast/expr.rs diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs index dc48d63f4b..a01040ee50 100644 --- a/crates/parser2/src/ast/attr.rs +++ b/crates/parser2/src/ast/attr.rs @@ -88,8 +88,8 @@ impl AttrArg { pub fn value(&self) -> Option { self.syntax() .children_with_tokens() - .filter_map(|it| match it.into_token() { - Some(it) if it.kind() == SK::Ident => Some(it), + .filter_map(|c| match c.into_token() { + Some(c) if c.kind() == SK::Ident => Some(c), _ => None, }) .nth(1) diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs new file mode 100644 index 0000000000..8cdd70f205 --- /dev/null +++ b/crates/parser2/src/ast/expr.rs @@ -0,0 +1,912 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, GenericArgsOwner}; +use crate::{SyntaxKind as SK, SyntaxNode, SyntaxToken}; + +ast_node! { + /// An expression. + /// Use [`Self::kind`] to determine the type of expression. + pub struct Expr, + SK::BlockExpr + | SK::BinExpr + | SK::UnExpr + | SK::CallExpr + | SK::MethodCallExpr + | SK::PathExpr + | SK::RecordInitExpr + | SK::FieldExpr + | SK::IndexExpr + | SK::TupleExpr + | SK::ArrayExpr + | SK::ArrayRepExpr + | SK::LitExpr + | SK::IfExpr + | SK::MatchExpr + | SK::ParenExpr, +} + +impl Expr { + /// Returns the kind of expression. 
+ pub fn kind(&self) -> ExprKind { + match self.syntax().kind() { + SK::BlockExpr => ExprKind::Block(AstNode::cast(self.syntax().clone()).unwrap()), + SK::BinExpr => ExprKind::Bin(AstNode::cast(self.syntax().clone()).unwrap()), + SK::UnExpr => ExprKind::Un(AstNode::cast(self.syntax().clone()).unwrap()), + SK::CallExpr => ExprKind::Call(AstNode::cast(self.syntax().clone()).unwrap()), + SK::MethodCallExpr => { + ExprKind::MethodCall(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::PathExpr => ExprKind::Path(AstNode::cast(self.syntax().clone()).unwrap()), + SK::RecordInitExpr => { + ExprKind::RecordInit(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::FieldExpr => ExprKind::Field(AstNode::cast(self.syntax().clone()).unwrap()), + SK::IndexExpr => ExprKind::Index(AstNode::cast(self.syntax().clone()).unwrap()), + SK::TupleExpr => ExprKind::Tuple(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ArrayExpr => ExprKind::Array(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ArrayRepExpr => ExprKind::ArrayRep(AstNode::cast(self.syntax().clone()).unwrap()), + SK::LitExpr => ExprKind::Lit(AstNode::cast(self.syntax().clone()).unwrap()), + SK::IfExpr => ExprKind::If(AstNode::cast(self.syntax().clone()).unwrap()), + SK::MatchExpr => ExprKind::Match(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ParenExpr => ExprKind::Paren(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// `{ stmt1\n stmt2\n ..}` + pub struct BlockExpr, + SK::BlockExpr, + IntoIterator, +} +impl BlockExpr { + /// Returns the statements in the block. + pub fn stmts(&self) -> impl Iterator { + self.iter() + } +} + +ast_node! { + /// `lhs op rhs` + pub struct BinExpr, + SK::BinExpr +} +impl BinExpr { + /// Returns the left-hand side of the binary operation. + pub fn lhs(&self) -> Option { + support::children(self.syntax()).next() + } + + /// Returns the right-hand side of the binary operation. + pub fn rhs(&self) -> Option { + support::children(self.syntax()).nth(1) + } + + /// Returns the operator of the binary operation. + pub fn op(&self) -> Option { + self.syntax() + .children_with_tokens() + .find_map(BinOp::from_node_or_token) + } +} + +ast_node! { + /// `op expr` + pub struct UnExpr, + SK::UnExpr +} +impl UnExpr { + /// Returns the operand of the unary operation. + pub fn expr(&self) -> Option { + support::children(self.syntax()).next() + } + + /// Returns the operator of the unary operation. + pub fn op(&self) -> Option { + self.syntax().children_with_tokens().find_map(|c| match c { + rowan::NodeOrToken::Token(token) => UnOp::from_token(token), + rowan::NodeOrToken::Node(_) => None, + }) + } +} + +ast_node! { + /// `func(arg1, arg2, ..)` + pub struct CallExpr, + SK::CallExpr, +} +impl GenericArgsOwner for CallExpr {} +impl CallExpr { + /// Returns the callee of the call expression. + pub fn callee(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the arguments of the call expression. + pub fn args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `obj.method(arg1, arg2, ..)` + pub struct MethodCallExpr, + SK::MethodCallExpr +} +impl GenericArgsOwner for MethodCallExpr {} +impl MethodCallExpr { + /// Returns the receiver of the method call expression. + pub fn receiver(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the name of the method being called. 
+ pub fn method_name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the arguments of the method call expression. + pub fn args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `path` + pub struct PathExpr, + SK::PathExpr +} +impl PathExpr { + /// Returns the path of the path expression. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `path { field1: expr1, field2: expr2, .. }` + pub struct RecordInitExpr, + SK::RecordInitExpr +} +impl RecordInitExpr { + /// Returns the path of the record init expression. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the fields of the record init expression. + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `expr.field` or `expr.0` + pub struct FieldExpr, + SK::FieldExpr +} +impl FieldExpr { + /// Returns the expression being accessed. + pub fn receiver(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the name of the field. + pub fn field_name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the index number of the field. + pub fn field_index(&self) -> Option { + support::token(self.syntax(), SK::Int) + } +} + +ast_node! { + /// `expr[index]` + pub struct IndexExpr, + SK::IndexExpr +} +impl IndexExpr { + /// Returns the expression being indexed. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the index of the index expression. + pub fn index(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +ast_node! { + /// `(expr1, expr2, ..)` + pub struct TupleExpr, + SK::TupleExpr, + IntoIterator, +} +impl TupleExpr { + /// Returns the expressions in the tuple. + pub fn elems(&self) -> impl Iterator { + self.iter() + } +} + +ast_node! { + /// `[expr1, expr2, ..]` + pub struct ArrayExpr, + SK::ArrayExpr, + IntoIterator, +} +impl ArrayExpr { + /// Returns the expressions in the array. + pub fn elems(&self) -> impl Iterator { + self.iter() + } +} + +ast_node! { + /// `[expr; size]` + pub struct ArrayRepExpr, + SK::ArrayRepExpr, +} +impl ArrayRepExpr { + /// Returns the expression being repeated. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the size of the array. + pub fn size(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +ast_node! { + pub struct LitExpr, + SK::LitExpr +} +impl LitExpr { + /// Returns the literal of the literal expression. + pub fn lit(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `if cond { then } else { else_ }` + pub struct IfExpr, + SK::IfExpr +} +impl IfExpr { + /// Returns the condition of the if expression. + pub fn cond(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the then block of the if expression. + pub fn then(&self) -> Option { + self.syntax().children().skip(1).find_map(BlockExpr::cast) + } + + /// Returns the else block of the if expression. + pub fn else_(&self) -> Option { + self.syntax() + .children() + .skip(1) + .filter_map(BlockExpr::cast) + .nth(1) + } + + /// Returns the else if expression of the if expression. + pub fn else_if(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `match expr { arm1, arm2, .. }` + pub struct MatchExpr, + SK::MatchExpr +} +impl MatchExpr { + /// Returns the expression being matched. 
+ pub fn scrutinee(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the arms of the match expression. + pub fn arms(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `(expr)` + pub struct ParenExpr, + SK::ParenExpr +} +impl ParenExpr { + /// Returns the expression in the parentheses. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ExprKind { + Block(BlockExpr), + Bin(BinExpr), + Un(UnExpr), + Call(CallExpr), + MethodCall(MethodCallExpr), + Path(PathExpr), + RecordInit(RecordInitExpr), + Field(FieldExpr), + Index(IndexExpr), + Tuple(TupleExpr), + Array(ArrayExpr), + ArrayRep(ArrayRepExpr), + Lit(LitExpr), + If(IfExpr), + Match(MatchExpr), + Paren(ParenExpr), +} + +ast_node! { + /// `{ label1: expr1, expr2 }` + pub struct RecordFieldList, + SK::RecordFieldList, + IntoIterator +} +ast_node! { + pub struct RecordField, + SK::RecordField, +} +impl RecordField { + /// Returns the name of the field. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the expression of the field. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct MatchArmList, + SK::MatchArmList, + IntoIterator +} +ast_node! { + pub struct MatchArm, + SK::MatchArm +} +impl MatchArm { + /// Returns the pattern of the match arm. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the body of the match arm. + pub fn body(&self) -> Option { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum BinOp { + Arith(ArithBinOp), + Comp(CompBinOp), + Logical(LogicalBinOp), +} +impl BinOp { + pub(super) fn from_node_or_token( + node_or_token: rowan::NodeOrToken, + ) -> Option { + match node_or_token { + rowan::NodeOrToken::Token(token) => Self::from_token(token), + rowan::NodeOrToken::Node(node) => Self::from_node(node), + } + } + pub(super) fn from_token(token: SyntaxToken) -> Option { + ArithBinOp::from_token(token.clone()) + .map(Self::Arith) + .or_else(|| CompBinOp::from_token(token.clone()).map(Self::Comp)) + .or_else(move || LogicalBinOp::from_token(token).map(Self::Logical)) + } + + pub(super) fn from_node(node: SyntaxNode) -> Option { + ArithBinOp::from_node(node.clone()) + .map(Self::Arith) + .or_else(|| CompBinOp::from_node(node).map(Self::Comp)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum UnOp { + /// `+` + Plus(SyntaxToken), + /// `-` + Minus(SyntaxToken), + /// `!` + Not(SyntaxToken), + /// `~` + BitNot(SyntaxToken), +} +impl UnOp { + fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Plus => Some(Self::Plus(token)), + SK::Minus => Some(Self::Minus(token)), + SK::Not => Some(Self::Not(token)), + SK::Tilde => Some(Self::BitNot(token)), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ArithBinOp { + /// `+` + Add(SyntaxToken), + /// `-` + Sub(SyntaxToken), + /// `*` + Mul(SyntaxToken), + /// `/` + Div(SyntaxToken), + /// `%` + Mod(SyntaxToken), + /// `**` + Pow(SyntaxToken), + /// `<<` + LShift(SyntaxNode), + /// `>>` + RShift(SyntaxNode), + /// `&` + BitAnd(SyntaxToken), + /// `|` + BitOr(SyntaxToken), + /// `^` + BitXor(SyntaxToken), +} +impl ArithBinOp { + pub(super) fn from_node_or_token( + node_or_token: rowan::NodeOrToken, + ) -> Option { + match node_or_token { + rowan::NodeOrToken::Token(token) => Self::from_token(token), + 
rowan::NodeOrToken::Node(node) => Self::from_node(node), + } + } + + // NOTE: We need to have `from_node` because `<<` and `>>` are not primitive + // tokens in our lexer. + pub(super) fn from_node(node: SyntaxNode) -> Option { + match node.kind() { + SK::LShift => Some(Self::LShift(node)), + SK::RShift => Some(Self::RShift(node)), + _ => None, + } + } + + pub(super) fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Plus => Some(Self::Add(token)), + SK::Minus => Some(Self::Sub(token)), + SK::Star => Some(Self::Mul(token)), + SK::Slash => Some(Self::Div(token)), + SK::Percent => Some(Self::Mod(token)), + SK::Star2 => Some(Self::Pow(token)), + SK::Amp => Some(Self::BitAnd(token)), + SK::Pipe => Some(Self::BitOr(token)), + SK::Hat => Some(Self::BitXor(token)), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum CompBinOp { + /// `==` + Eq(SyntaxToken), + /// `!=` + NotEq(SyntaxToken), + /// `<` + Lt(SyntaxToken), + /// `<=` + LtEq(SyntaxNode), + /// `>` + Gt(SyntaxToken), + /// `>=` + GtEq(SyntaxNode), +} +impl CompBinOp { + pub(super) fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Eq2 => Some(Self::Eq(token)), + SK::NotEq => Some(Self::NotEq(token)), + SK::Lt => Some(Self::Lt(token)), + SK::Gt => Some(Self::Gt(token)), + _ => None, + } + } + + pub(super) fn from_node(node: SyntaxNode) -> Option { + match node.kind() { + SK::LtEq => Some(Self::LtEq(node)), + SK::GtEq => Some(Self::GtEq(node)), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum LogicalBinOp { + /// `&&` + And(SyntaxToken), + /// `||` + Or(SyntaxToken), +} +impl LogicalBinOp { + pub(super) fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Amp2 => Some(Self::And(token)), + SK::Pipe2 => Some(Self::Or(token)), + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ast::*, lexer::Lexer, parser::Parser}; + + fn parse_expr(source: &str) -> Expr { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + crate::parser::expr::parse_expr(&mut parser); + Expr::cast(parser.finish().0).unwrap() + } + + #[test] + fn bin_expr() { + let expr = parse_expr("1 + 2"); + let bin_expr = match expr.kind() { + ExprKind::Bin(bin_expr) => bin_expr, + _ => panic!("expected BinExpr"), + }; + assert!(matches!(bin_expr.lhs().unwrap().kind(), ExprKind::Lit(_))); + assert!(matches!( + bin_expr.op().unwrap(), + BinOp::Arith(ArithBinOp::Add(_)) + )); + assert!(matches!(bin_expr.rhs().unwrap().kind(), ExprKind::Lit(_))); + + let expr = parse_expr("1 <= 2"); + let bin_expr = match expr.kind() { + ExprKind::Bin(bin_expr) => bin_expr, + _ => panic!("expected BinExpr"), + }; + assert!(matches!( + bin_expr.op().unwrap(), + BinOp::Comp(CompBinOp::LtEq(_)) + )); + } + + #[test] + fn un_expr() { + let expr = parse_expr("-1"); + let un_expr = match expr.kind() { + ExprKind::Un(un_expr) => un_expr, + _ => panic!("expected UnExpr"), + }; + assert!(matches!(un_expr.op().unwrap(), UnOp::Minus(_))); + assert!(matches!(un_expr.expr().unwrap().kind(), ExprKind::Lit(_))); + } + + #[test] + fn call_expr() { + let expr = parse_expr("foo(1, label: 2, 3 + 4)"); + let call_expr = match expr.kind() { + ExprKind::Call(call_expr) => call_expr, + _ => panic!("expected CallExpr"), + }; + + assert!(matches!( + call_expr.callee().unwrap().kind(), + ExprKind::Path(_) + )); + assert!(matches!( + call_expr + .generic_args() + .unwrap() + .into_iter() + .collect::>() + .len(), + 2 + )); + + for (i, arg) in 
call_expr.args().unwrap().into_iter().enumerate() { + match i { + 0 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 1 => { + assert_eq!(arg.label().unwrap().text(), "label"); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 2 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Bin(_))) + } + _ => panic!("unexpected arg"), + } + } + } + + #[test] + fn method_call_expr() { + let expr = parse_expr("foo.bar(1, label: 2, 3 + 4)"); + + let method_call_expr = match expr.kind() { + ExprKind::MethodCall(method_call_expr) => method_call_expr, + _ => panic!("expected MethodCallExpr"), + }; + + assert!(matches!( + method_call_expr.receiver().unwrap().kind(), + ExprKind::Path(_) + )); + + assert_eq!(method_call_expr.method_name().unwrap().text(), "bar"); + + assert!(matches!( + method_call_expr + .generic_args() + .unwrap() + .into_iter() + .collect::>() + .len(), + 1 + )); + + for (i, arg) in method_call_expr.args().unwrap().into_iter().enumerate() { + match i { + 0 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 1 => { + assert_eq!(arg.label().unwrap().text(), "label"); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 2 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Bin(_))) + } + _ => panic!("unexpected arg"), + } + } + } + + #[test] + fn record_init_expr() { + let expr = parse_expr("Foo { a: 1, b: 2, c: 3 }"); + let record_init_expr = match expr.kind() { + ExprKind::RecordInit(record_init_expr) => record_init_expr, + _ => panic!("expected RecordInitExpr"), + }; + + assert!(record_init_expr.path().is_some()); + for (i, field) in record_init_expr.fields().unwrap().into_iter().enumerate() { + match i { + 0 => { + assert_eq!(field.name().unwrap().text(), "a"); + assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 1 => { + assert_eq!(field.name().unwrap().text(), "b"); + assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 2 => { + assert_eq!(field.name().unwrap().text(), "c"); + assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) + } + _ => panic!("unexpected field"), + } + } + } + + #[test] + fn field_expr() { + let expr = parse_expr("foo(1, 2).bar"); + let field_expr = match expr.kind() { + ExprKind::Field(field_expr) => field_expr, + _ => panic!("expected FieldExpr"), + }; + + assert!(matches!( + field_expr.receiver().unwrap().kind(), + ExprKind::Call(_) + )); + assert_eq!(field_expr.field_name().unwrap().text(), "bar"); + + let expr = parse_expr("(1, 2).1"); + let field_expr = match expr.kind() { + ExprKind::Field(field_expr) => field_expr, + _ => panic!("expected FieldExpr"), + }; + + assert!(matches!( + field_expr.receiver().unwrap().kind(), + ExprKind::Tuple(_) + )); + assert_eq!(field_expr.field_index().unwrap().text(), "1"); + } + + #[test] + fn tuple_expr() { + let expr = parse_expr("(1, 2, 3)"); + let tuple_expr = match expr.kind() { + ExprKind::Tuple(tuple_expr) => tuple_expr, + _ => panic!("expected TupleExpr"), + }; + + for (i, expr) in tuple_expr.elems().into_iter().enumerate() { + match i { + 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 2 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + _ => panic!("unexpected expr"), + } + } + } + + #[test] + fn array_expr() { + let expr = parse_expr("[1, 2, 3]"); + 
let array_expr = match expr.kind() { + ExprKind::Array(array_expr) => array_expr, + _ => panic!("expected ArrayExpr"), + }; + + for (i, expr) in array_expr.elems().into_iter().enumerate() { + match i { + 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 2 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + _ => panic!("unexpected expr"), + } + } + } + + #[test] + fn index_expr() { + let expr = parse_expr("foo[1]"); + let index_expr = match expr.kind() { + ExprKind::Index(index_expr) => index_expr, + _ => panic!("expected IndexExpr"), + }; + + assert!(matches!( + index_expr.expr().unwrap().kind(), + ExprKind::Path(_) + )); + assert!(matches!( + index_expr.index().unwrap().kind(), + ExprKind::Lit(_) + )); + } + + #[test] + fn array_rep_expr() { + let expr = parse_expr("[1; 2]"); + let array_rep_expr = match expr.kind() { + ExprKind::ArrayRep(array_rep_expr) => array_rep_expr, + _ => panic!("expected ArrayRepExpr"), + }; + + assert!(matches!( + array_rep_expr.expr().unwrap().kind(), + ExprKind::Lit(_) + )); + assert!(matches!( + array_rep_expr.size().unwrap().kind(), + ExprKind::Lit(_) + )); + } + + #[test] + fn if_expr() { + let expr = parse_expr("if true { 1 } else { 2 }"); + let if_expr = match expr.kind() { + ExprKind::If(if_expr) => if_expr, + _ => panic!("expected IfExpr"), + }; + assert!(matches!(if_expr.cond().unwrap().kind(), ExprKind::Lit(_))); + assert!(if_expr.then().is_some()); + assert_ne!(if_expr.then().unwrap(), if_expr.else_().unwrap(),); + assert!(if_expr.else_if().is_none()); + + let expr = parse_expr("if { true } { return } else { continue }"); + let if_expr = match expr.kind() { + ExprKind::If(if_expr) => if_expr, + _ => panic!("expected IfExpr"), + }; + if let ExprKind::Block(stmts) = if_expr.cond().unwrap().kind() { + assert!(matches!( + stmts.into_iter().next().unwrap().kind(), + crate::ast::StmtKind::Expr(_) + )) + } else { + panic!("expected block statement"); + }; + matches!( + if_expr.then().unwrap().into_iter().next().unwrap().kind(), + crate::ast::StmtKind::Return(_) + ); + matches!( + if_expr.else_().unwrap().into_iter().next().unwrap().kind(), + crate::ast::StmtKind::Return(_) + ); + assert!(if_expr.else_if().is_none()); + + let expr = parse_expr("if false { return } else if true { continue }"); + let if_expr = match expr.kind() { + ExprKind::If(if_expr) => if_expr, + _ => panic!("expected IfExpr"), + }; + assert!(if_expr.else_().is_none()); + assert!(if_expr.else_if().is_some()); + } + + #[test] + fn match_expr() { + let source = r#" + match foo { + Foo::Bar => { 2 }, + Bar::Baz(Int) => (4), + _ => 5, + } + }"#; + + let expr = parse_expr(source); + let match_expr = match expr.kind() { + ExprKind::Match(match_expr) => match_expr, + _ => panic!("expected MatchExpr"), + }; + assert!(matches!( + match_expr.scrutinee().unwrap().kind(), + ExprKind::Path(_) + )); + let mut count = 0; + for arm in match_expr.arms().unwrap() { + match count { + 0 => { + assert!(matches!(arm.pat().unwrap().kind(), PatKind::Path(_))); + assert!(matches!(arm.body().unwrap().kind(), ExprKind::Block(_))); + } + + 1 => { + assert!(matches!(arm.pat().unwrap().kind(), PatKind::PathTuple(_))); + assert!(matches!(arm.body().unwrap().kind(), ExprKind::Paren(_))); + } + + 2 => { + assert!(matches!(arm.pat().unwrap().kind(), PatKind::WildCard(_))); + assert!(matches!(arm.body().unwrap().kind(), ExprKind::Lit(_))); + } + _ => panic!("unexpected arm"), + } + count += 1; + } + assert_eq!(count, 3) + } +} diff --git 
a/crates/parser2/src/ast/lit.rs b/crates/parser2/src/ast/lit.rs index 63eb70fb10..2bad548b37 100644 --- a/crates/parser2/src/ast/lit.rs +++ b/crates/parser2/src/ast/lit.rs @@ -20,6 +20,7 @@ impl Lit { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LitInt { token: SyntaxToken, } @@ -29,6 +30,7 @@ impl LitInt { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LitBool { token: SyntaxToken, } @@ -38,6 +40,7 @@ impl LitBool { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LitString { token: SyntaxToken, } @@ -47,6 +50,7 @@ impl LitString { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum LitKind { Int(LitInt), Bool(LitBool), diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index ec664371d1..054cc2edd4 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -38,11 +38,38 @@ impl GenericParam { /// A generic parameter kind. /// `Type` is either `T` or `T: Trait`. /// `Const` is `const N: usize`. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum GenericParamKind { Type(TypeGenericParam), Const(ConstGenericParam), } +ast_node! { + /// `(label1: arg1, arg2, ..)` + pub struct CallArgList, + SK::CallArgList, + IntoIterator, +} + +ast_node! { + /// `label1: arg1` + pub struct CallArg, + SK::CallArg, +} +impl CallArg { + /// Returns the label of the argument. + /// `label1` in `label1: arg1`. + pub fn label(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the expression of the argument. + /// `arg1` in `label1: arg1`. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + ast_node! { /// A type generic parameter. /// `T` @@ -137,6 +164,7 @@ impl ConstGenericArg { /// A generic argument kind. /// `Type` is either `Type` or `T: Trait`. /// `Const` is either `{expr}` or `lit`. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum GenericArgKind { Type(TypeGenericArg), Const(ConstGenericArg), diff --git a/crates/parser2/src/ast/pat.rs b/crates/parser2/src/ast/pat.rs index 0376c59d09..538112177b 100644 --- a/crates/parser2/src/ast/pat.rs +++ b/crates/parser2/src/ast/pat.rs @@ -156,6 +156,7 @@ impl OrPat { } /// A specific pattern kind. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum PatKind { WildCard(WildCardPat), Rest(RestPat), diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs index 823aa49cbe..81d8e665d8 100644 --- a/crates/parser2/src/ast/stmt.rs +++ b/crates/parser2/src/ast/stmt.rs @@ -90,10 +90,7 @@ impl AugAssignStmt { pub fn op(&self) -> Option { self.syntax() .children_with_tokens() - .find_map(|it| match it { - rowan::NodeOrToken::Node(it) => super::ArithBinOp::from_node(it), - rowan::NodeOrToken::Token(it) => super::ArithBinOp::from_token(it), - }) + .find_map(|n| super::ArithBinOp::from_node_or_token(n)) } /// Returns the expression of the rhs of the assignment. 
@@ -208,6 +205,7 @@ impl ExprStmt { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum StmtKind { Let(LetStmt), Assign(AssignStmt), @@ -242,7 +240,7 @@ mod tests { fn let_() { let stmt = parse_stmt("let x: i32 = 1"); let let_stmt = match stmt.kind() { - StmtKind::Let(it) => it, + StmtKind::Let(n) => n, _ => panic!("expected let statement"), }; assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); @@ -254,7 +252,7 @@ mod tests { let stmt = parse_stmt("let x"); let let_stmt = match stmt.kind() { - StmtKind::Let(it) => it, + StmtKind::Let(n) => n, _ => panic!("expected let statement"), }; assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); @@ -280,7 +278,7 @@ mod tests { fn aug_assign() { let stmt = parse_stmt("x += 1"); let aug_assign_stmt = match stmt.kind() { - StmtKind::AugAssign(it) => it, + StmtKind::AugAssign(n) => n, _ => panic!("expected aug assign statement"), }; @@ -295,7 +293,7 @@ mod tests { let stmt = parse_stmt("x <<= 1"); let aug_assign_stmt = match stmt.kind() { - StmtKind::AugAssign(it) => it, + StmtKind::AugAssign(n) => n, _ => panic!("expected aug assign statement"), }; @@ -319,7 +317,7 @@ mod tests { let stmt = parse_stmt(source); let for_stmt = match stmt.kind() { - StmtKind::For(it) => it, + StmtKind::For(n) => n, _ => panic!("expected for statement"), }; assert!(matches!(for_stmt.pat().unwrap().kind(), PatKind::Path(_))); @@ -337,7 +335,7 @@ mod tests { let stmt = parse_stmt(source); let while_stmt = match stmt.kind() { - StmtKind::While(it) => it, + StmtKind::While(n) => n, _ => panic!("expected for statement"), }; assert!(while_stmt.cond().is_some()); @@ -349,14 +347,14 @@ mod tests { fn r#return() { let stmt = parse_stmt("return x"); let return_stmt = match stmt.kind() { - StmtKind::Return(it) => it, + StmtKind::Return(n) => n, _ => panic!("expected return statement"), }; assert!(return_stmt.expr().is_some()); let stmt = parse_stmt("return"); let return_stmt = match stmt.kind() { - StmtKind::Return(it) => it, + StmtKind::Return(n) => n, _ => panic!("expected return statement"), }; assert!(return_stmt.expr().is_none()); diff --git a/crates/parser2/src/ast/type_.rs b/crates/parser2/src/ast/type_.rs index 0d94b546e9..dd0f38b301 100644 --- a/crates/parser2/src/ast/type_.rs +++ b/crates/parser2/src/ast/type_.rs @@ -55,10 +55,6 @@ impl PathType { pub fn path(&self) -> Option { support::child(self.syntax()) } - - pub fn generic_args(&self) -> Option { - support::child(self.syntax()) - } } impl super::GenericArgsOwner for PathType {} @@ -108,6 +104,7 @@ impl ArrayType { } /// A specific kind of type. 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TypeKind { Ptr(PtrType), Path(PathType), @@ -115,3 +112,94 @@ pub enum TypeKind { Tuple(TupleType), Array(ArrayType), } + +#[cfg(test)] +mod tests { + use super::*; + use crate::ast::prelude::*; + use crate::{lexer::Lexer, parser}; + + fn parse_type(source: &str) -> Type { + let lexer = Lexer::new(source); + let mut parser = parser::Parser::new(lexer); + parser::type_::parse_type(&mut parser, None, true); + Type::cast(parser.finish().0).unwrap() + } + + #[test] + fn ptr_type() { + let ty = parse_type("*i32"); + let ptr_ty = match ty.kind() { + TypeKind::Ptr(ptr_ty) => ptr_ty, + _ => panic!(), + }; + + assert_eq!(ptr_ty.star().unwrap().text(), "*"); + assert!(matches!(ptr_ty.inner().unwrap().kind(), TypeKind::Path(_))); + } + + #[test] + fn path_type() { + let ty = parse_type("Foo::Bar"); + let path_type = match ty.kind() { + TypeKind::Path(ptr_ty) => ptr_ty, + _ => panic!(), + }; + + for (i, segment) in path_type.path().unwrap().segments().enumerate() { + match i { + 0 => assert_eq!(segment.ident().unwrap().text(), "Foo"), + 1 => assert_eq!(segment.ident().unwrap().text(), "Bar"), + _ => panic!(), + } + } + + let generic_args = path_type.generic_args().unwrap(); + for (i, arg) in generic_args.iter().enumerate() { + match i { + 0 => assert!(matches!(arg.kind(), crate::ast::GenericArgKind::Type(_))), + 1 => assert!(matches!(arg.kind(), crate::ast::GenericArgKind::Const(_))), + _ => panic!(), + } + } + } + + #[test] + fn self_type() { + let ty = parse_type("Self"); + assert!(matches!(ty.kind(), TypeKind::SelfType(_))); + } + + #[test] + fn tuple_type() { + let ty = parse_type("((i32, u32), foo::Bar, *usize"); + let tuple_type = match ty.kind() { + TypeKind::Tuple(tuple_type) => tuple_type, + _ => panic!(), + }; + + for (i, ty) in tuple_type.elem_tys().enumerate() { + match i { + 0 => assert!(matches!(ty.kind(), TypeKind::Tuple(_))), + 1 => assert!(matches!(ty.kind(), TypeKind::Path(_))), + 2 => assert!(matches!(ty.kind(), TypeKind::Ptr(_))), + _ => panic!(), + } + } + } + + #[test] + fn array_type() { + let ty = parse_type("[(i32, u32); 1]"); + let array_type = match ty.kind() { + TypeKind::Array(array_type) => array_type, + _ => panic!(), + }; + + assert!(matches!( + array_type.elem_ty().unwrap().kind(), + TypeKind::Tuple(_) + )); + assert!(array_type.len().is_some()); + } +} diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index 8c3296c1eb..0e6bb56a25 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -5,7 +5,7 @@ use super::{ token_stream::TokenStream, Checkpoint, Parser, }; -pub(super) fn parse_type( +pub fn parse_type( parser: &mut Parser, checkpoint: Option, allow_bounds: bool, From 3edc766477e8fe948f0202da55cc8a592aca5105 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 10 Feb 2023 21:31:49 +0100 Subject: [PATCH 067/678] Add `WherePredicate` syntax kind --- crates/parser2/src/parser/param.rs | 35 +- crates/parser2/src/parser/type_.rs | 18 +- crates/parser2/src/syntax_kind.rs | 2 + .../test_files/error_recovery/items/func.snap | 54 ++-- .../error_recovery/items/impl_.snap | 25 +- .../error_recovery/items/impl_trait.snap | 54 ++-- .../error_recovery/items/struct_.snap | 38 +-- .../error_recovery/items/trait_.snap | 54 ++-- .../test_files/syntax_node/items/enums.snap | 72 +++-- .../test_files/syntax_node/items/func.snap | 84 ++--- .../test_files/syntax_node/items/impl.snap | 66 ++-- .../syntax_node/items/impl_trait.snap | 50 +-- 
.../test_files/syntax_node/items/trait.snap | 50 +-- .../syntax_node/structs/generics.fe | 6 +- .../syntax_node/structs/generics.snap | 298 +++++++++++------- 15 files changed, 514 insertions(+), 392 deletions(-) diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index d973f10afa..7b742c7340 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -4,9 +4,9 @@ use super::{ define_scope, expr::parse_expr, expr_atom::{BlockExprScope, LitExprScope}, - path::{is_path_segment, PathScope}, + path::PathScope, token_stream::TokenStream, - type_::parse_type, + type_::{is_type_start, parse_type}, Parser, }; @@ -283,7 +283,7 @@ impl super::Parse for CallArgScope { } } -define_scope! { WhereClauseScope, WhereClause, Inheritance(Newline) } +define_scope! { pub(crate) WhereClauseScope, WhereClause, Inheritance(Newline) } impl super::Parse for WhereClauseScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::WhereKw); @@ -291,17 +291,8 @@ impl super::Parse for WhereClauseScope { loop { parser.set_newline_as_trivia(true); match parser.current_kind() { - Some(kind) if is_path_segment(kind) => { - parse_type(parser, None, false); - parser.set_newline_as_trivia(false); - if parser.current_kind() == Some(SyntaxKind::Colon) { - parser.parse(TypeBoundListScope::default(), None); - if !parser.bump_if(SyntaxKind::Newline) { - parser.error_and_recover("expected newline after type bounds", None); - } - } else { - parser.error_and_recover("expected `:` for type bounds", None); - } + Some(kind) if is_type_start(kind) => { + parser.parse(WherePredicateScope::default(), None); } _ => break, } @@ -309,6 +300,22 @@ impl super::Parse for WhereClauseScope { } } +define_scope! { pub(crate) WherePredicateScope, WherePredicate, Inheritance } +impl super::Parse for WherePredicateScope { + fn parse(&mut self, parser: &mut Parser) { + parse_type(parser, None, false); + parser.set_newline_as_trivia(false); + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(TypeBoundListScope::default(), None); + if !parser.bump_if(SyntaxKind::Newline) { + parser.error_and_recover("expected newline after type bounds", None); + } + } else { + parser.error_and_recover("expected `:` for type bounds", None); + } + } +} + pub(crate) fn parse_where_clause_opt(parser: &mut Parser) { let newline_as_trivia = parser.set_newline_as_trivia(true); if parser.current_kind() == Some(SyntaxKind::WhereKw) { diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index 0e6bb56a25..427ed1fe6e 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -1,8 +1,12 @@ use crate::SyntaxKind; use super::{ - define_scope, expr::parse_expr, param::GenericArgListScope, path::PathScope, - token_stream::TokenStream, Checkpoint, Parser, + define_scope, + expr::parse_expr, + param::GenericArgListScope, + path::{is_path_segment, PathScope}, + token_stream::TokenStream, + Checkpoint, Parser, }; pub fn parse_type( @@ -20,6 +24,16 @@ pub fn parse_type( .0 } +pub(crate) fn is_type_start(kind: SyntaxKind) -> bool { + match kind { + SyntaxKind::Star | SyntaxKind::SelfTypeKw | SyntaxKind::LParen | SyntaxKind::LBracket => { + true + } + kind if is_path_segment(kind) => true, + _ => false, + } +} + define_scope!(PtrTypeScope { allow_bounds: bool }, PtrType, Inheritance); impl super::Parse for PtrTypeScope { fn parse(&mut self, parser: &mut Parser) { diff --git a/crates/parser2/src/syntax_kind.rs 
b/crates/parser2/src/syntax_kind.rs index eeb48a727e..4c9521d920 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -438,6 +438,8 @@ pub enum SyntaxKind { TypeBound, /// `where Option: Trait1 + Trait2` WhereClause, + /// `Option: Trait1 + Trait2` + WherePredicate, /// Root node of the input source. Root, diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index 7f467bc043..33a0d497b6 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -74,19 +74,20 @@ Root@0..133 WhereClause@57..74 WhereKw@57..62 "where" WhiteSpace@62..63 " " - PathType@63..64 - Path@63..64 - PathSegment@63..64 - Ident@63..64 "T" - TypeBoundList@64..72 - Colon@64..65 ":" - WhiteSpace@65..66 " " - TypeBound@66..72 - Path@66..72 - PathSegment@66..72 - Ident@66..72 "Trait2" - WhiteSpace@72..73 " " - Newline@73..74 "\n" + WherePredicate@63..74 + PathType@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "T" + TypeBoundList@64..72 + Colon@64..65 ":" + WhiteSpace@65..66 " " + TypeBound@66..72 + Path@66..72 + PathSegment@66..72 + Ident@66..72 "Trait2" + WhiteSpace@72..73 " " + Newline@73..74 "\n" BlockExpr@74..78 LBrace@74..75 "{" Newline@75..77 "\n\n" @@ -125,19 +126,20 @@ Root@0..133 WhereClause@111..128 WhereKw@111..116 "where" WhiteSpace@116..117 " " - PathType@117..118 - Path@117..118 - PathSegment@117..118 - Ident@117..118 "T" - TypeBoundList@118..126 - Colon@118..119 ":" - WhiteSpace@119..120 " " - TypeBound@120..126 - Path@120..126 - PathSegment@120..126 - Ident@120..126 "Trait2" - WhiteSpace@126..127 " " - Newline@127..128 "\n" + WherePredicate@117..128 + PathType@117..118 + Path@117..118 + PathSegment@117..118 + Ident@117..118 "T" + TypeBoundList@118..126 + Colon@118..119 ":" + WhiteSpace@119..120 " " + TypeBound@120..126 + Path@120..126 + PathSegment@120..126 + Ident@120..126 "Trait2" + WhiteSpace@126..127 " " + Newline@127..128 "\n" BlockExpr@128..132 LBrace@128..129 "{" Newline@129..131 "\n\n" diff --git a/crates/parser2/test_files/error_recovery/items/impl_.snap b/crates/parser2/test_files/error_recovery/items/impl_.snap index 1b2050e0b5..ea8bdf1119 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_.snap @@ -33,18 +33,19 @@ Root@0..56 WhereClause@17..34 WhereKw@17..22 "where" WhiteSpace@22..23 " " - PathType@23..24 - Path@23..24 - PathSegment@23..24 - Ident@23..24 "T" - TypeBoundList@24..33 - Colon@24..25 ":" - WhiteSpace@25..26 " " - TypeBound@26..33 - Path@26..33 - PathSegment@26..33 - Ident@26..33 "Integer" - Newline@33..34 "\n" + WherePredicate@23..34 + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + TypeBoundList@24..33 + Colon@24..25 ":" + WhiteSpace@25..26 " " + TypeBound@26..33 + Path@26..33 + PathSegment@26..33 + Ident@26..33 "Integer" + Newline@33..34 "\n" ImplItemList@34..37 LBrace@34..35 "{" WhiteSpace@35..36 " " diff --git a/crates/parser2/test_files/error_recovery/items/impl_trait.snap b/crates/parser2/test_files/error_recovery/items/impl_trait.snap index ea7204a75d..1e1b017b2a 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_trait.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_trait.snap @@ -50,19 +50,20 @@ Root@0..90 WhereClause@23..34 WhereKw@23..28 "where" WhiteSpace@28..29 " " - PathType@29..30 - Path@29..30 - PathSegment@29..30 - Ident@29..30 "T" - 
TypeBoundList@30..33 - Colon@30..31 ":" - WhiteSpace@31..32 " " - TypeBound@32..33 - Path@32..33 - PathSegment@32..33 - Ident@32..33 "X" - WhiteSpace@33..34 " " - Error@34..34 + WherePredicate@29..34 + PathType@29..30 + Path@29..30 + PathSegment@29..30 + Ident@29..30 "T" + TypeBoundList@30..33 + Colon@30..31 ":" + WhiteSpace@31..32 " " + TypeBound@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "X" + WhiteSpace@33..34 " " + Error@34..34 ImplTraitItemList@34..36 LBrace@34..35 "{" RBrace@35..36 "}" @@ -110,19 +111,20 @@ Root@0..90 WhereClause@58..69 WhereKw@58..63 "where" WhiteSpace@63..64 " " - PathType@64..65 - Path@64..65 - PathSegment@64..65 - Ident@64..65 "T" - TypeBoundList@65..68 - Colon@65..66 ":" - WhiteSpace@66..67 " " - TypeBound@67..68 - Path@67..68 - PathSegment@67..68 - Ident@67..68 "X" - WhiteSpace@68..69 " " - Error@69..69 + WherePredicate@64..69 + PathType@64..65 + Path@64..65 + PathSegment@64..65 + Ident@64..65 "T" + TypeBoundList@65..68 + Colon@65..66 ":" + WhiteSpace@66..67 " " + TypeBound@67..68 + Path@67..68 + PathSegment@67..68 + Ident@67..68 "X" + WhiteSpace@68..69 " " + Error@69..69 ImplTraitItemList@69..71 LBrace@69..70 "{" RBrace@70..71 "}" diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index 626d4d20cb..05f61427bb 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -25,26 +25,28 @@ Root@0..74 WhereClause@16..40 WhereKw@16..21 "where" WhiteSpace@21..22 " " - PathType@22..23 - Path@22..23 - PathSegment@22..23 - Ident@22..23 "T" - WhiteSpace@23..24 " " - Error@24..24 + WherePredicate@22..24 + PathType@22..23 + Path@22..23 + PathSegment@22..23 + Ident@22..23 "T" + WhiteSpace@23..24 " " + Error@24..24 Newline@24..25 "\n" WhiteSpace@25..31 " " - PathType@31..32 - Path@31..32 - PathSegment@31..32 - Ident@31..32 "U" - TypeBoundList@32..39 - Colon@32..33 ":" - WhiteSpace@33..34 " " - TypeBound@34..39 - Path@34..39 - PathSegment@34..39 - Ident@34..39 "Trait" - Newline@39..40 "\n" + WherePredicate@31..40 + PathType@31..32 + Path@31..32 + PathSegment@31..32 + Ident@31..32 "U" + TypeBoundList@32..39 + Colon@32..33 ":" + WhiteSpace@33..34 " " + TypeBound@34..39 + Path@34..39 + PathSegment@34..39 + Ident@34..39 "Trait" + Newline@39..40 "\n" WhiteSpace@40..44 " " Newline@44..45 "\n" RecordFieldDefList@45..74 diff --git a/crates/parser2/test_files/error_recovery/items/trait_.snap b/crates/parser2/test_files/error_recovery/items/trait_.snap index 812baa1e68..8df2b3d109 100644 --- a/crates/parser2/test_files/error_recovery/items/trait_.snap +++ b/crates/parser2/test_files/error_recovery/items/trait_.snap @@ -81,19 +81,20 @@ Root@0..133 WhereClause@70..83 WhereKw@70..75 "where" WhiteSpace@75..76 " " - PathType@76..77 - Path@76..77 - PathSegment@76..77 - Ident@76..77 "T" - TypeBoundList@77..82 - Colon@77..78 ":" - WhiteSpace@78..79 " " - TypeBound@79..82 - Path@79..82 - PathSegment@79..82 - Ident@79..82 "Add" - WhiteSpace@82..83 " " - Error@83..83 + WherePredicate@76..83 + PathType@76..77 + Path@76..77 + PathSegment@76..77 + Ident@76..77 "T" + TypeBoundList@77..82 + Colon@77..78 ":" + WhiteSpace@78..79 " " + TypeBound@79..82 + Path@79..82 + PathSegment@79..82 + Ident@79..82 "Add" + WhiteSpace@82..83 " " + Error@83..83 TraitItemList@83..85 LBrace@83..84 "{" RBrace@84..85 "}" @@ -126,19 +127,20 @@ Root@0..133 WhereClause@115..129 WhereKw@115..120 "where" WhiteSpace@120..121 " " - PathType@121..122 - 
Path@121..122 - PathSegment@121..122 - Ident@121..122 "T" - TypeBoundList@122..127 - Colon@122..123 ":" - WhiteSpace@123..124 " " - TypeBound@124..127 - Path@124..127 - PathSegment@124..127 - Ident@124..127 "Add" - WhiteSpace@127..128 " " - Newline@128..129 "\n" + WherePredicate@121..129 + PathType@121..122 + Path@121..122 + PathSegment@121..122 + Ident@121..122 "T" + TypeBoundList@122..127 + Colon@122..123 ":" + WhiteSpace@123..124 " " + TypeBound@124..127 + Path@124..127 + PathSegment@124..127 + Ident@124..127 "Add" + WhiteSpace@127..128 " " + Newline@128..129 "\n" TraitItemList@129..133 LBrace@129..130 "{" Newline@130..132 "\n\n" diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 272c86eb92..071ab01ac8 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -60,18 +60,19 @@ Root@0..220 WhereClause@78..93 WhereKw@78..83 "where" WhiteSpace@83..84 " " - PathType@84..85 - Path@84..85 - PathSegment@84..85 - Ident@84..85 "T" - TypeBoundList@85..92 - Colon@85..86 ":" - WhiteSpace@86..87 " " - TypeBound@87..92 - Path@87..92 - PathSegment@87..92 - Ident@87..92 "Clone" - Newline@92..93 "\n" + WherePredicate@84..93 + PathType@84..85 + Path@84..85 + PathSegment@84..85 + Ident@84..85 "T" + TypeBoundList@85..92 + Colon@85..86 ":" + WhiteSpace@86..87 " " + TypeBound@87..92 + Path@87..92 + PathSegment@87..92 + Ident@87..92 "Clone" + Newline@92..93 "\n" VariantDefList@93..117 LBrace@93..94 "{" Newline@94..95 "\n" @@ -139,29 +140,30 @@ Root@0..220 WhereClause@164..189 WhereKw@164..169 "where" WhiteSpace@169..170 " " - PathType@170..181 - Path@170..178 - PathSegment@170..173 - Ident@170..173 "Foo" - Colon2@173..175 "::" - PathSegment@175..178 - Ident@175..178 "Bar" - GenericArgList@178..181 - Lt@178..179 "<" - TypeGenericArg@179..180 - PathType@179..180 - Path@179..180 - PathSegment@179..180 - Ident@179..180 "T" - Gt@180..181 ">" - TypeBoundList@181..188 - Colon@181..182 ":" - WhiteSpace@182..183 " " - TypeBound@183..188 - Path@183..188 - PathSegment@183..188 - Ident@183..188 "Trait" - Newline@188..189 "\n" + WherePredicate@170..189 + PathType@170..181 + Path@170..178 + PathSegment@170..173 + Ident@170..173 "Foo" + Colon2@173..175 "::" + PathSegment@175..178 + Ident@175..178 "Bar" + GenericArgList@178..181 + Lt@178..179 "<" + TypeGenericArg@179..180 + PathType@179..180 + Path@179..180 + PathSegment@179..180 + Ident@179..180 "T" + Gt@180..181 ">" + TypeBoundList@181..188 + Colon@181..182 ":" + WhiteSpace@182..183 " " + TypeBound@183..188 + Path@183..188 + PathSegment@183..188 + Ident@183..188 "Trait" + Newline@188..189 "\n" VariantDefList@189..220 LBrace@189..190 "{" Newline@190..191 "\n" diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 7f19a8d2bb..af95ed76ed 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -217,48 +217,50 @@ Root@0..361 WhereClause@235..286 WhereKw@235..240 "where" WhiteSpace@240..241 " " - PathType@241..250 - Path@241..247 - PathSegment@241..247 - Ident@241..247 "Result" - GenericArgList@247..250 - Lt@247..248 "<" - TypeGenericArg@248..249 - PathType@248..249 - Path@248..249 - PathSegment@248..249 - Ident@248..249 "T" - Gt@249..250 ">" - TypeBoundList@250..257 - Colon@250..251 ":" - WhiteSpace@251..252 " " - TypeBound@252..257 - Path@252..257 - PathSegment@252..257 - 
Ident@252..257 "Trait" - WhiteSpace@257..258 " " - Newline@258..259 "\n" + WherePredicate@241..259 + PathType@241..250 + Path@241..247 + PathSegment@241..247 + Ident@241..247 "Result" + GenericArgList@247..250 + Lt@247..248 "<" + TypeGenericArg@248..249 + PathType@248..249 + Path@248..249 + PathSegment@248..249 + Ident@248..249 "T" + Gt@249..250 ">" + TypeBoundList@250..257 + Colon@250..251 ":" + WhiteSpace@251..252 " " + TypeBound@252..257 + Path@252..257 + PathSegment@252..257 + Ident@252..257 "Trait" + WhiteSpace@257..258 " " + Newline@258..259 "\n" WhiteSpace@259..269 " " - PathType@269..278 - Path@269..275 - PathSegment@269..275 - Ident@269..275 "Option" - GenericArgList@275..278 - Lt@275..276 "<" - TypeGenericArg@276..277 - PathType@276..277 - Path@276..277 - PathSegment@276..277 - Ident@276..277 "U" - Gt@277..278 ">" - TypeBoundList@278..285 - Colon@278..279 ":" - WhiteSpace@279..280 " " - TypeBound@280..285 - Path@280..285 - PathSegment@280..285 - Ident@280..285 "Clone" - Newline@285..286 "\n" + WherePredicate@269..286 + PathType@269..278 + Path@269..275 + PathSegment@269..275 + Ident@269..275 "Option" + GenericArgList@275..278 + Lt@275..276 "<" + TypeGenericArg@276..277 + PathType@276..277 + Path@276..277 + PathSegment@276..277 + Ident@276..277 "U" + Gt@277..278 ">" + TypeBoundList@278..285 + Colon@278..279 ":" + WhiteSpace@279..280 " " + TypeBound@280..285 + Path@280..285 + PathSegment@280..285 + Ident@280..285 "Clone" + Newline@285..286 "\n" WhiteSpace@286..296 " " Newline@296..297 "\n" BlockExpr@297..306 diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 951c9a2e75..122d8a4d75 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -127,26 +127,27 @@ Root@0..266 WhereClause@149..169 WhereKw@149..154 "where" WhiteSpace@154..155 " " - PathType@155..161 - Path@155..158 - PathSegment@155..158 - Ident@155..158 "Foo" - GenericArgList@158..161 - Lt@158..159 "<" - TypeGenericArg@159..160 - PathType@159..160 - Path@159..160 - PathSegment@159..160 - Ident@159..160 "T" - Gt@160..161 ">" - TypeBoundList@161..168 - Colon@161..162 ":" - WhiteSpace@162..163 " " - TypeBound@163..168 - Path@163..168 - PathSegment@163..168 - Ident@163..168 "Clone" - Newline@168..169 "\n" + WherePredicate@155..169 + PathType@155..161 + Path@155..158 + PathSegment@155..158 + Ident@155..158 "Foo" + GenericArgList@158..161 + Lt@158..159 "<" + TypeGenericArg@159..160 + PathType@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "T" + Gt@160..161 ">" + TypeBoundList@161..168 + Colon@161..162 ":" + WhiteSpace@162..163 " " + TypeBound@163..168 + Path@163..168 + PathSegment@163..168 + Ident@163..168 "Clone" + Newline@168..169 "\n" ImplItemList@169..266 LBrace@169..170 "{" Newline@170..171 "\n" @@ -196,18 +197,19 @@ Root@0..266 WhereClause@216..230 WhereKw@216..221 "where" WhiteSpace@221..222 " " - PathType@222..223 - Path@222..223 - PathSegment@222..223 - Ident@222..223 "T" - TypeBoundList@223..229 - Colon@223..224 ":" - WhiteSpace@224..225 " " - TypeBound@225..229 - Path@225..229 - PathSegment@225..229 - Ident@225..229 "Copy" - Newline@229..230 "\n" + WherePredicate@222..230 + PathType@222..223 + Path@222..223 + PathSegment@222..223 + Ident@222..223 "T" + TypeBoundList@223..229 + Colon@223..224 ":" + WhiteSpace@224..225 " " + TypeBound@225..229 + Path@225..229 + PathSegment@225..229 + Ident@225..229 "Copy" + Newline@229..230 "\n" WhiteSpace@230..234 " " 
BlockExpr@234..264 LBrace@234..235 "{" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index 32444f2c1e..c651b72e5c 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -106,31 +106,33 @@ Root@0..317 WhereClause@93..121 WhereKw@93..98 "where" WhiteSpace@98..99 " " - PathType@99..100 - Path@99..100 - PathSegment@99..100 - Ident@99..100 "T" - TypeBoundList@100..107 - Colon@100..101 ":" - WhiteSpace@101..102 " " - TypeBound@102..107 - Path@102..107 - PathSegment@102..107 - Ident@102..107 "Clone" - Newline@107..108 "\n" + WherePredicate@99..108 + PathType@99..100 + Path@99..100 + PathSegment@99..100 + Ident@99..100 "T" + TypeBoundList@100..107 + Colon@100..101 ":" + WhiteSpace@101..102 " " + TypeBound@102..107 + Path@102..107 + PathSegment@102..107 + Ident@102..107 "Clone" + Newline@107..108 "\n" WhiteSpace@108..114 " " - PathType@114..115 - Path@114..115 - PathSegment@114..115 - Ident@114..115 "U" - TypeBoundList@115..120 - Colon@115..116 ":" - WhiteSpace@116..117 " " - TypeBound@117..120 - Path@117..120 - PathSegment@117..120 - Ident@117..120 "Bar" - Newline@120..121 "\n" + WherePredicate@114..121 + PathType@114..115 + Path@114..115 + PathSegment@114..115 + Ident@114..115 "U" + TypeBoundList@115..120 + Colon@115..116 ":" + WhiteSpace@116..117 " " + TypeBound@117..120 + Path@117..120 + PathSegment@117..120 + Ident@117..120 "Bar" + Newline@120..121 "\n" ImplTraitItemList@121..196 LBrace@121..122 "{" Newline@122..123 "\n" diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index a38a042c1e..ac9c0d8559 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -226,18 +226,19 @@ Root@0..588 WhereClause@255..270 WhereKw@255..260 "where" WhiteSpace@260..261 " " - PathType@261..264 - Path@261..264 - PathSegment@261..264 - Ident@261..264 "RHS" - TypeBoundList@264..269 - Colon@264..265 ":" - WhiteSpace@265..266 " " - TypeBound@266..269 - Path@266..269 - PathSegment@266..269 - Ident@266..269 "Sub" - Newline@269..270 "\n" + WherePredicate@261..270 + PathType@261..264 + Path@261..264 + PathSegment@261..264 + Ident@261..264 "RHS" + TypeBoundList@264..269 + Colon@264..265 ":" + WhiteSpace@265..266 " " + TypeBound@266..269 + Path@266..269 + PathSegment@266..269 + Ident@266..269 "Sub" + Newline@269..270 "\n" RBrace@270..271 "}" Newline@271..274 "\n\n\n" Trait@274..355 @@ -326,18 +327,19 @@ Root@0..588 WhereClause@390..405 WhereKw@390..395 "where" WhiteSpace@395..396 " " - PathType@396..397 - Path@396..397 - PathSegment@396..397 - Ident@396..397 "S" - TypeBoundList@397..404 - Colon@397..398 ":" - WhiteSpace@398..399 " " - TypeBound@399..404 - Path@399..404 - PathSegment@399..404 - Ident@399..404 "Clone" - Newline@404..405 "\n" + WherePredicate@396..405 + PathType@396..397 + Path@396..397 + PathSegment@396..397 + Ident@396..397 "S" + TypeBoundList@397..404 + Colon@397..398 ":" + WhiteSpace@398..399 " " + TypeBound@399..404 + Path@399..404 + PathSegment@399..404 + Ident@399..404 "Clone" + Newline@404..405 "\n" ImplItemList@405..588 LBrace@405..406 "{" Newline@406..407 "\n" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.fe b/crates/parser2/test_files/syntax_node/structs/generics.fe index 26bdeba066..c808d82bf3 100644 --- 
a/crates/parser2/test_files/syntax_node/structs/generics.fe +++ b/crates/parser2/test_files/syntax_node/structs/generics.fe @@ -29,6 +29,10 @@ pub struct StructWithGenericParam3< z: U } -pub struct MyArr { +pub struct MyArr + where + (T, U): Trait + Trait +{ __inner: [T; N] + __inner2: (T, U) } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index 18a611032b..f414ac2ebe 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/structs/generics.fe --- -Root@0..480 - ItemList@0..480 +Root@0..553 + ItemList@0..553 Struct@0..74 ItemModifier@0..3 PubKw@0..3 "pub" @@ -208,81 +208,84 @@ Root@0..480 WhereKw@283..288 "where" Newline@288..289 "\n" WhiteSpace@289..293 " " - PathType@293..294 - Path@293..294 - PathSegment@293..294 - Ident@293..294 "T" - TypeBoundList@294..311 - Colon@294..295 ":" - WhiteSpace@295..296 " " - TypeBound@296..302 - Path@296..302 - PathSegment@296..302 - Ident@296..302 "Trait1" - WhiteSpace@302..303 " " - Plus@303..304 "+" - WhiteSpace@304..305 " " - TypeBound@305..311 - Path@305..311 - PathSegment@305..311 - Ident@305..311 "Trait2" - Newline@311..312 "\n" + WherePredicate@293..312 + PathType@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "T" + TypeBoundList@294..311 + Colon@294..295 ":" + WhiteSpace@295..296 " " + TypeBound@296..302 + Path@296..302 + PathSegment@296..302 + Ident@296..302 "Trait1" + WhiteSpace@302..303 " " + Plus@303..304 "+" + WhiteSpace@304..305 " " + TypeBound@305..311 + Path@305..311 + PathSegment@305..311 + Ident@305..311 "Trait2" + Newline@311..312 "\n" WhiteSpace@312..316 " " - PathType@316..325 - Path@316..322 - PathSegment@316..322 - Ident@316..322 "Option" - GenericArgList@322..325 - Lt@322..323 "<" - TypeGenericArg@323..324 - PathType@323..324 - Path@323..324 - PathSegment@323..324 - Ident@323..324 "T" - Gt@324..325 ">" - TypeBoundList@325..342 - Colon@325..326 ":" - WhiteSpace@326..327 " " - TypeBound@327..333 - Path@327..333 - PathSegment@327..333 - Ident@327..333 "Trait1" - WhiteSpace@333..334 " " - Plus@334..335 "+" - WhiteSpace@335..336 " " - TypeBound@336..342 - Path@336..342 - PathSegment@336..342 - Ident@336..342 "Trait2" - Newline@342..343 "\n" + WherePredicate@316..343 + PathType@316..325 + Path@316..322 + PathSegment@316..322 + Ident@316..322 "Option" + GenericArgList@322..325 + Lt@322..323 "<" + TypeGenericArg@323..324 + PathType@323..324 + Path@323..324 + PathSegment@323..324 + Ident@323..324 "T" + Gt@324..325 ">" + TypeBoundList@325..342 + Colon@325..326 ":" + WhiteSpace@326..327 " " + TypeBound@327..333 + Path@327..333 + PathSegment@327..333 + Ident@327..333 "Trait1" + WhiteSpace@333..334 " " + Plus@334..335 "+" + WhiteSpace@335..336 " " + TypeBound@336..342 + Path@336..342 + PathSegment@336..342 + Ident@336..342 "Trait2" + Newline@342..343 "\n" WhiteSpace@343..347 " " - PathType@347..356 - Path@347..353 - PathSegment@347..353 - Ident@347..353 "Result" - GenericArgList@353..356 - Lt@353..354 "<" - TypeGenericArg@354..355 - PathType@354..355 - Path@354..355 - PathSegment@354..355 - Ident@354..355 "U" - Gt@355..356 ">" - TypeBoundList@356..373 - Colon@356..357 ":" - WhiteSpace@357..358 " " - TypeBound@358..364 - Path@358..364 - PathSegment@358..364 - Ident@358..364 "Trait2" - WhiteSpace@364..365 " 
" - Plus@365..366 "+" - WhiteSpace@366..367 " " - TypeBound@367..373 - Path@367..373 - PathSegment@367..373 - Ident@367..373 "Trait3" - Newline@373..374 "\n" + WherePredicate@347..374 + PathType@347..356 + Path@347..353 + PathSegment@347..353 + Ident@347..353 "Result" + GenericArgList@353..356 + Lt@353..354 "<" + TypeGenericArg@354..355 + PathType@354..355 + Path@354..355 + PathSegment@354..355 + Ident@354..355 "U" + Gt@355..356 ">" + TypeBoundList@356..373 + Colon@356..357 ":" + WhiteSpace@357..358 " " + TypeBound@358..364 + Path@358..364 + PathSegment@358..364 + Ident@358..364 "Trait2" + WhiteSpace@364..365 " " + Plus@365..366 "+" + WhiteSpace@366..367 " " + TypeBound@367..373 + Path@367..373 + PathSegment@367..373 + Ident@367..373 "Trait3" + Newline@373..374 "\n" RecordFieldDefList@374..404 LBrace@374..375 "{" Newline@375..376 "\n" @@ -318,14 +321,14 @@ Root@0..480 Newline@402..403 "\n" RBrace@403..404 "}" Newline@404..406 "\n\n" - Struct@406..480 + Struct@406..553 ItemModifier@406..409 PubKw@406..409 "pub" WhiteSpace@409..410 " " StructKw@410..416 "struct" WhiteSpace@416..417 " " Ident@417..422 "MyArr" - GenericParamList@422..456 + GenericParamList@422..459 Lt@422..423 "<" TypeGenericParam@423..439 Ident@423..424 "T" @@ -344,39 +347,112 @@ Root@0..480 Ident@436..439 "Add" Comma@439..440 "," WhiteSpace@440..441 " " - ConstGenericParam@441..455 - ConstKw@441..446 "const" - WhiteSpace@446..447 " " - Ident@447..448 "N" - Colon@448..449 ":" + TypeGenericParam@441..442 + Ident@441..442 "U" + Comma@442..443 "," + WhiteSpace@443..444 " " + ConstGenericParam@444..458 + ConstKw@444..449 "const" WhiteSpace@449..450 " " - PathType@450..455 - Path@450..455 - PathSegment@450..455 - Ident@450..455 "usize" - Gt@455..456 ">" - WhiteSpace@456..457 " " - RecordFieldDefList@457..480 - LBrace@457..458 "{" - Newline@458..459 "\n" - WhiteSpace@459..463 " " - RecordFieldDef@463..478 - Ident@463..470 "__inner" - Colon@470..471 ":" - WhiteSpace@471..472 " " - ArrayType@472..478 - LBracket@472..473 "[" - PathType@473..474 - Path@473..474 - PathSegment@473..474 - Ident@473..474 "T" - SemiColon@474..475 ";" - WhiteSpace@475..476 " " - PathExpr@476..477 - Path@476..477 - PathSegment@476..477 - Ident@476..477 "N" - RBracket@477..478 "]" - Newline@478..479 "\n" - RBrace@479..480 "}" + Ident@450..451 "N" + Colon@451..452 ":" + WhiteSpace@452..453 " " + PathType@453..458 + Path@453..458 + PathSegment@453..458 + Ident@453..458 "usize" + Gt@458..459 ">" + WhiteSpace@459..460 " " + Newline@460..461 "\n" + WhiteSpace@461..465 " " + WhereClause@465..509 + WhereKw@465..470 "where" + Newline@470..471 "\n" + WhiteSpace@471..479 " " + WherePredicate@479..509 + TupleType@479..485 + LParen@479..480 "(" + PathType@480..481 + Path@480..481 + PathSegment@480..481 + Ident@480..481 "T" + Comma@481..482 "," + WhiteSpace@482..483 " " + PathType@483..484 + Path@483..484 + PathSegment@483..484 + Ident@483..484 "U" + RParen@484..485 ")" + TypeBoundList@485..508 + Colon@485..486 ":" + WhiteSpace@486..487 " " + TypeBound@487..492 + Path@487..492 + PathSegment@487..492 + Ident@487..492 "Trait" + WhiteSpace@492..493 " " + Plus@493..494 "+" + WhiteSpace@494..495 " " + TypeBound@495..508 + Path@495..500 + PathSegment@495..500 + Ident@495..500 "Trait" + GenericArgList@500..508 + Lt@500..501 "<" + TypeGenericArg@501..504 + PathType@501..504 + Path@501..504 + PathSegment@501..504 + Ident@501..504 "i32" + Comma@504..505 "," + WhiteSpace@505..506 " " + TypeGenericArg@506..507 + PathType@506..507 + Path@506..507 + PathSegment@506..507 + Ident@506..507 
"Y" + Gt@507..508 ">" + Newline@508..509 "\n" + RecordFieldDefList@509..553 + LBrace@509..510 "{" + Newline@510..511 "\n" + WhiteSpace@511..515 " " + RecordFieldDef@515..530 + Ident@515..522 "__inner" + Colon@522..523 ":" + WhiteSpace@523..524 " " + ArrayType@524..530 + LBracket@524..525 "[" + PathType@525..526 + Path@525..526 + PathSegment@525..526 + Ident@525..526 "T" + SemiColon@526..527 ";" + WhiteSpace@527..528 " " + PathExpr@528..529 + Path@528..529 + PathSegment@528..529 + Ident@528..529 "N" + RBracket@529..530 "]" + Newline@530..531 "\n" + WhiteSpace@531..535 " " + RecordFieldDef@535..551 + Ident@535..543 "__inner2" + Colon@543..544 ":" + WhiteSpace@544..545 " " + TupleType@545..551 + LParen@545..546 "(" + PathType@546..547 + Path@546..547 + PathSegment@546..547 + Ident@546..547 "T" + Comma@547..548 "," + WhiteSpace@548..549 " " + PathType@549..550 + Path@549..550 + PathSegment@549..550 + Ident@549..550 "U" + RParen@550..551 ")" + Newline@551..552 "\n" + RBrace@552..553 "}" From 4a0cda8f49298ef51759ed6accc19b12fa8dca12 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 10 Feb 2023 22:36:35 +0100 Subject: [PATCH 068/678] Add `derive_more` for `try_into` implementation --- Cargo.lock | 20 +++++- crates/parser2/Cargo.toml | 1 + crates/parser2/src/ast/attr.rs | 1 + crates/parser2/src/ast/expr.rs | 111 ++++++++------------------------ crates/parser2/src/ast/lit.rs | 2 +- crates/parser2/src/ast/mod.rs | 3 +- crates/parser2/src/ast/param.rs | 76 +++++++++++++++++++++- crates/parser2/src/ast/pat.rs | 59 ++++++----------- crates/parser2/src/ast/stmt.rs | 75 +++++++-------------- crates/parser2/src/ast/type_.rs | 50 ++++++-------- 10 files changed, 187 insertions(+), 211 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 95b698ea52..b1cae935b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -242,6 +242,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "countme" version = "3.0.1" @@ -390,8 +396,10 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ + "convert_case", "proc-macro2", "quote", + "rustc_version 0.4.0", "syn", ] @@ -765,6 +773,7 @@ dependencies = [ name = "fe-parser2" version = "0.20.0-alpha" dependencies = [ + "derive_more", "dir-test", "fe-compiler-test-utils", "fxhash", @@ -1637,7 +1646,7 @@ dependencies = [ "cfg-if 0.1.10", "proc-macro2", "quote", - "rustc_version", + "rustc_version 0.2.3", "syn", ] @@ -1662,6 +1671,15 @@ dependencies = [ "semver 0.9.0", ] +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver 1.0.16", +] + [[package]] name = "rustversion" version = "1.0.11" diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index 5a1955b25a..b308b10cbe 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -14,6 +14,7 @@ rowan = "0.15.10" logos = "0.12.1" fxhash = "0.2.1" lazy_static = "1.4.0" +derive_more = "0.99" [dev-dependencies] fe-compiler-test-utils = { path = "../test-utils" } diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs index a01040ee50..ddcbe954c0 100644 --- 
a/crates/parser2/src/ast/attr.rs +++ b/crates/parser2/src/ast/attr.rs @@ -107,6 +107,7 @@ impl DocCommentAttr { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum AttrKind { /// A normal attribute. Normal(NormalAttr), diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index 8cdd70f205..3395f2652b 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -334,7 +334,7 @@ impl ParenExpr { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum ExprKind { Block(BlockExpr), Bin(BinExpr), @@ -567,20 +567,23 @@ mod tests { use super::*; use crate::{ast::*, lexer::Lexer, parser::Parser}; - fn parse_expr(source: &str) -> Expr { + fn parse_expr(source: &str) -> T + where + T: TryFrom, + { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); crate::parser::expr::parse_expr(&mut parser); - Expr::cast(parser.finish().0).unwrap() + Expr::cast(parser.finish().0) + .unwrap() + .kind() + .try_into() + .unwrap() } #[test] fn bin_expr() { - let expr = parse_expr("1 + 2"); - let bin_expr = match expr.kind() { - ExprKind::Bin(bin_expr) => bin_expr, - _ => panic!("expected BinExpr"), - }; + let bin_expr: BinExpr = parse_expr("1 + 2"); assert!(matches!(bin_expr.lhs().unwrap().kind(), ExprKind::Lit(_))); assert!(matches!( bin_expr.op().unwrap(), @@ -588,11 +591,7 @@ mod tests { )); assert!(matches!(bin_expr.rhs().unwrap().kind(), ExprKind::Lit(_))); - let expr = parse_expr("1 <= 2"); - let bin_expr = match expr.kind() { - ExprKind::Bin(bin_expr) => bin_expr, - _ => panic!("expected BinExpr"), - }; + let bin_expr: BinExpr = parse_expr("1 <= 2"); assert!(matches!( bin_expr.op().unwrap(), BinOp::Comp(CompBinOp::LtEq(_)) @@ -601,22 +600,14 @@ mod tests { #[test] fn un_expr() { - let expr = parse_expr("-1"); - let un_expr = match expr.kind() { - ExprKind::Un(un_expr) => un_expr, - _ => panic!("expected UnExpr"), - }; + let un_expr: UnExpr = parse_expr("-1"); assert!(matches!(un_expr.op().unwrap(), UnOp::Minus(_))); assert!(matches!(un_expr.expr().unwrap().kind(), ExprKind::Lit(_))); } #[test] fn call_expr() { - let expr = parse_expr("foo(1, label: 2, 3 + 4)"); - let call_expr = match expr.kind() { - ExprKind::Call(call_expr) => call_expr, - _ => panic!("expected CallExpr"), - }; + let call_expr: CallExpr = parse_expr("foo(1, label: 2, 3 + 4)"); assert!(matches!( call_expr.callee().unwrap().kind(), @@ -653,12 +644,7 @@ mod tests { #[test] fn method_call_expr() { - let expr = parse_expr("foo.bar(1, label: 2, 3 + 4)"); - - let method_call_expr = match expr.kind() { - ExprKind::MethodCall(method_call_expr) => method_call_expr, - _ => panic!("expected MethodCallExpr"), - }; + let method_call_expr: MethodCallExpr = parse_expr("foo.bar(1, label: 2, 3 + 4)"); assert!(matches!( method_call_expr.receiver().unwrap().kind(), @@ -698,11 +684,7 @@ mod tests { #[test] fn record_init_expr() { - let expr = parse_expr("Foo { a: 1, b: 2, c: 3 }"); - let record_init_expr = match expr.kind() { - ExprKind::RecordInit(record_init_expr) => record_init_expr, - _ => panic!("expected RecordInitExpr"), - }; + let record_init_expr: RecordInitExpr = parse_expr("Foo { a: 1, b: 2, c: 3 }"); assert!(record_init_expr.path().is_some()); for (i, field) in record_init_expr.fields().unwrap().into_iter().enumerate() { @@ -726,11 +708,7 @@ mod tests { #[test] fn field_expr() { - let expr = parse_expr("foo(1, 2).bar"); - let field_expr = match 
expr.kind() { - ExprKind::Field(field_expr) => field_expr, - _ => panic!("expected FieldExpr"), - }; + let field_expr: FieldExpr = parse_expr("foo(1, 2).bar"); assert!(matches!( field_expr.receiver().unwrap().kind(), @@ -738,11 +716,7 @@ mod tests { )); assert_eq!(field_expr.field_name().unwrap().text(), "bar"); - let expr = parse_expr("(1, 2).1"); - let field_expr = match expr.kind() { - ExprKind::Field(field_expr) => field_expr, - _ => panic!("expected FieldExpr"), - }; + let field_expr: FieldExpr = parse_expr("(1, 2).1"); assert!(matches!( field_expr.receiver().unwrap().kind(), @@ -753,11 +727,7 @@ mod tests { #[test] fn tuple_expr() { - let expr = parse_expr("(1, 2, 3)"); - let tuple_expr = match expr.kind() { - ExprKind::Tuple(tuple_expr) => tuple_expr, - _ => panic!("expected TupleExpr"), - }; + let tuple_expr: TupleExpr = parse_expr("(1, 2, 3)"); for (i, expr) in tuple_expr.elems().into_iter().enumerate() { match i { @@ -771,11 +741,7 @@ mod tests { #[test] fn array_expr() { - let expr = parse_expr("[1, 2, 3]"); - let array_expr = match expr.kind() { - ExprKind::Array(array_expr) => array_expr, - _ => panic!("expected ArrayExpr"), - }; + let array_expr: ArrayExpr = parse_expr("[1, 2, 3]"); for (i, expr) in array_expr.elems().into_iter().enumerate() { match i { @@ -789,11 +755,7 @@ mod tests { #[test] fn index_expr() { - let expr = parse_expr("foo[1]"); - let index_expr = match expr.kind() { - ExprKind::Index(index_expr) => index_expr, - _ => panic!("expected IndexExpr"), - }; + let index_expr: IndexExpr = parse_expr("foo[1]"); assert!(matches!( index_expr.expr().unwrap().kind(), @@ -807,11 +769,7 @@ mod tests { #[test] fn array_rep_expr() { - let expr = parse_expr("[1; 2]"); - let array_rep_expr = match expr.kind() { - ExprKind::ArrayRep(array_rep_expr) => array_rep_expr, - _ => panic!("expected ArrayRepExpr"), - }; + let array_rep_expr: ArrayRepExpr = parse_expr("[1; 2]"); assert!(matches!( array_rep_expr.expr().unwrap().kind(), @@ -825,21 +783,13 @@ mod tests { #[test] fn if_expr() { - let expr = parse_expr("if true { 1 } else { 2 }"); - let if_expr = match expr.kind() { - ExprKind::If(if_expr) => if_expr, - _ => panic!("expected IfExpr"), - }; + let if_expr: IfExpr = parse_expr("if true { 1 } else { 2 }"); assert!(matches!(if_expr.cond().unwrap().kind(), ExprKind::Lit(_))); assert!(if_expr.then().is_some()); assert_ne!(if_expr.then().unwrap(), if_expr.else_().unwrap(),); assert!(if_expr.else_if().is_none()); - let expr = parse_expr("if { true } { return } else { continue }"); - let if_expr = match expr.kind() { - ExprKind::If(if_expr) => if_expr, - _ => panic!("expected IfExpr"), - }; + let if_expr: IfExpr = parse_expr("if { true } { return } else { continue }"); if let ExprKind::Block(stmts) = if_expr.cond().unwrap().kind() { assert!(matches!( stmts.into_iter().next().unwrap().kind(), @@ -858,11 +808,7 @@ mod tests { ); assert!(if_expr.else_if().is_none()); - let expr = parse_expr("if false { return } else if true { continue }"); - let if_expr = match expr.kind() { - ExprKind::If(if_expr) => if_expr, - _ => panic!("expected IfExpr"), - }; + let if_expr: IfExpr = parse_expr("if false { return } else if true { continue }"); assert!(if_expr.else_().is_none()); assert!(if_expr.else_if().is_some()); } @@ -877,11 +823,8 @@ mod tests { } }"#; - let expr = parse_expr(source); - let match_expr = match expr.kind() { - ExprKind::Match(match_expr) => match_expr, - _ => panic!("expected MatchExpr"), - }; + let match_expr: MatchExpr = parse_expr(source); + assert!(matches!( 
match_expr.scrutinee().unwrap().kind(), ExprKind::Path(_) diff --git a/crates/parser2/src/ast/lit.rs b/crates/parser2/src/ast/lit.rs index 2bad548b37..86e7a7c8db 100644 --- a/crates/parser2/src/ast/lit.rs +++ b/crates/parser2/src/ast/lit.rs @@ -50,7 +50,7 @@ impl LitString { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum LitKind { Int(LitInt), Bool(LitBool), diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index 1915b374de..f3a8782455 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -22,7 +22,8 @@ pub type AstChildren = rowan::ast::AstChildren; pub type SyntaxText = rowan::SyntaxText; pub mod prelude { - pub use super::{GenericArgsOwner, GenericParamsOwner}; + pub use super::AttrListOwner; + pub use super::{GenericArgsOwner, GenericParamsOwner, WhereClauseOwner}; } macro_rules! ast_node { diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 054cc2edd4..991716b550 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -38,7 +38,7 @@ impl GenericParam { /// A generic parameter kind. /// `Type` is either `T` or `T: Trait`. /// `Const` is `const N: usize`. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum GenericParamKind { Type(TypeGenericParam), Const(ConstGenericParam), @@ -161,10 +161,34 @@ impl ConstGenericArg { } } +ast_node! { + /// `where T: Trait` + pub struct WhereClause, + SK::WhereClause, + IntoIterator, +} + +ast_node! { + /// `T: Trait` + pub struct WherePredicate, + SK::WherePredicate, +} +impl WherePredicate { + /// Returns `T` in `T: Trait`. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns `Trait` in `T: Trait`. + pub fn bounds(&self) -> Option { + support::child(self.syntax()) + } +} + /// A generic argument kind. /// `Type` is either `Type` or `T: Trait`. /// `Const` is either `{expr}` or `lit`. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum GenericArgKind { Type(TypeGenericArg), Const(ConstGenericArg), @@ -213,13 +237,22 @@ pub trait GenericArgsOwner: AstNode { } } +/// A trait for AST nodes that can have a where clause. +pub trait WhereClauseOwner: AstNode { + /// Returns the where clause of the node. 
+ fn where_clause(&self) -> Option { + support::child(self.syntax()) + } +} + #[cfg(test)] mod tests { use super::*; use crate::{ + ast::TypeKind, lexer::Lexer, parser::{ - param::{GenericArgListScope, GenericParamListScope}, + param::{GenericArgListScope, GenericParamListScope, WhereClauseScope}, Parser, }, }; @@ -237,6 +270,13 @@ mod tests { GenericArgList::cast(parser.finish().0).unwrap() } + fn parse_where_clause(source: &str) -> WhereClause { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(WhereClauseScope::default(), None); + WhereClause::cast(parser.finish().0).unwrap() + } + #[test] fn generic_param() { let source = r#", U, const N: usize>"#; @@ -307,4 +347,34 @@ mod tests { }; assert!(a2.expr().is_some()); } + + #[test] + fn where_clause() { + let source = r#"where + T: Trait + Trait2 + *U: Trait3 + (T, U): Trait4 + Trait5 + "#; + let wc = parse_where_clause(source); + let mut count = 0; + for pred in wc { + match count { + 0 => { + assert!(matches!(pred.ty().unwrap().kind(), TypeKind::Path(_))); + assert_eq!(pred.bounds().unwrap().iter().count(), 2); + } + 1 => { + assert!(matches!(pred.ty().unwrap().kind(), TypeKind::Ptr(_))); + assert_eq!(pred.bounds().unwrap().iter().count(), 1); + } + 2 => { + assert!(matches!(pred.ty().unwrap().kind(), TypeKind::Tuple(_))); + assert_eq!(pred.bounds().unwrap().iter().count(), 2); + } + _ => panic!("unexpected predicate"), + } + count += 1; + } + assert!(count == 3); + } } diff --git a/crates/parser2/src/ast/pat.rs b/crates/parser2/src/ast/pat.rs index 538112177b..33b03fad02 100644 --- a/crates/parser2/src/ast/pat.rs +++ b/crates/parser2/src/ast/pat.rs @@ -156,7 +156,7 @@ impl OrPat { } /// A specific pattern kind. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum PatKind { WildCard(WildCardPat), Rest(RestPat), @@ -174,43 +174,41 @@ mod tests { use super::*; - fn parse_pat(source: &str) -> Pat { + fn parse_pat(source: &str) -> T + where + T: TryFrom, + { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); crate::parser::pat::parse_pat(&mut parser); - Pat::cast(parser.finish().0).unwrap() + Pat::cast(parser.finish().0) + .unwrap() + .kind() + .try_into() + .unwrap() } #[test] fn wildcard() { - let pat = parse_pat("_"); - assert!(matches!(pat.kind(), PatKind::WildCard(_))) + let _: WildCardPat = parse_pat("_"); } #[test] fn rest() { - let pat = parse_pat(".."); - assert!(matches!(pat.kind(), PatKind::Rest(_))); + let _: RestPat = parse_pat(".."); } #[test] fn lit() { - let lit_int = parse_pat("0x1"); - let lit_bool = parse_pat("true"); - let lit_str = parse_pat(r#""foo""#); - assert!(matches!(lit_int.kind(), PatKind::Lit(_))); - assert!(matches!(lit_bool.kind(), PatKind::Lit(_))); - assert!(matches!(lit_str.kind(), PatKind::Lit(_))); + let _: LitPat = parse_pat("0x1"); + let _: LitPat = parse_pat("true"); + let _: LitPat = parse_pat(r#""foo""#); } #[test] fn tuple() { let source = r#"(Foo::Bar, true, ..)"#; - let pat = parse_pat(source); - let tuple_pat = match pat.kind() { - PatKind::Tuple(tuple_pat) => tuple_pat, - _ => panic!("expected tuple pat"), - }; + let tuple_pat: TuplePat = parse_pat(source); for (i, pat) in tuple_pat.elems().unwrap().iter().enumerate() { match i { @@ -221,23 +219,14 @@ mod tests { } } - let pat = parse_pat("()"); - let tuple_pat = match pat.kind() { - PatKind::Tuple(tuple_pat) => tuple_pat, - _ => panic!("expected tuple pat"), - }; - + let tuple_pat: TuplePat = 
parse_pat("()"); assert!(tuple_pat.elems().unwrap().iter().next().is_none()); } #[test] fn path_tuple() { let source = r#"Self::Bar(1, Foo::Bar)"#; - let pat = parse_pat(source); - let path_tuple_pat = match pat.kind() { - PatKind::PathTuple(path_tuple_pat) => path_tuple_pat, - _ => panic!("expected path tuple pat"), - }; + let path_tuple_pat: PathTuplePat = parse_pat(source); for (i, seg) in path_tuple_pat.path().unwrap().segments().enumerate() { match i { @@ -259,11 +248,7 @@ mod tests { #[test] fn record() { let source = r#"Foo::Bar{a: 1, b: Foo::baz, c}"#; - let pat = parse_pat(source); - let record_pat = match pat.kind() { - PatKind::Record(record_pat) => record_pat, - _ => panic!("expected record pat"), - }; + let record_pat: RecordPat = parse_pat(source); for (i, seg) in record_pat.path().unwrap().segments().enumerate() { match i { @@ -295,11 +280,7 @@ mod tests { #[test] fn or() { let source = r#"Foo::Int | Foo::Float | Foo::Str "#; - let pat = parse_pat(source); - let or_pat = match pat.kind() { - PatKind::Or(or_pat) => or_pat, - _ => panic!("expected or pat"), - }; + let or_pat: OrPat = parse_pat(source); assert!(matches!(or_pat.lhs().unwrap().kind(), PatKind::Path(_))); assert!(matches!(or_pat.rhs().unwrap().kind(), PatKind::Or(_))); diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs index 81d8e665d8..6f9f8c0465 100644 --- a/crates/parser2/src/ast/stmt.rs +++ b/crates/parser2/src/ast/stmt.rs @@ -205,7 +205,7 @@ impl ExprStmt { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum StmtKind { Let(LetStmt), Assign(AssignStmt), @@ -229,20 +229,24 @@ mod tests { use super::*; - fn parse_stmt(source: &str) -> Stmt { + fn parse_stmt(source: &str) -> T + where + T: TryFrom, + { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); crate::parser::stmt::parse_stmt(&mut parser, None); - Stmt::cast(parser.finish().0).unwrap() + Stmt::cast(parser.finish().0) + .unwrap() + .kind() + .try_into() + .unwrap() } #[test] fn let_() { - let stmt = parse_stmt("let x: i32 = 1"); - let let_stmt = match stmt.kind() { - StmtKind::Let(n) => n, - _ => panic!("expected let statement"), - }; + let let_stmt: LetStmt = parse_stmt("let x: i32 = 1"); + assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); assert!(matches!( let_stmt.type_annotation().unwrap().kind(), @@ -250,11 +254,7 @@ mod tests { )); assert!(let_stmt.initializer().is_some()); - let stmt = parse_stmt("let x"); - let let_stmt = match stmt.kind() { - StmtKind::Let(n) => n, - _ => panic!("expected let statement"), - }; + let let_stmt: LetStmt = parse_stmt("let x"); assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); assert!(let_stmt.type_annotation().is_none()); assert!(let_stmt.initializer().is_none()); @@ -262,11 +262,7 @@ mod tests { #[test] fn assign() { - let stmt = parse_stmt(r#"Foo{x, y} = foo"#); - let assign_stmt = match stmt.kind() { - StmtKind::Assign(it) => it, - _ => panic!("expected assign statement"), - }; + let assign_stmt: AssignStmt = parse_stmt(r#"Foo{x, y} = foo"#); assert!(matches!( assign_stmt.pat().unwrap().kind(), PatKind::Record(_) @@ -276,12 +272,7 @@ mod tests { #[test] fn aug_assign() { - let stmt = parse_stmt("x += 1"); - let aug_assign_stmt = match stmt.kind() { - StmtKind::AugAssign(n) => n, - _ => panic!("expected aug assign statement"), - }; - + let aug_assign_stmt: AugAssignStmt = parse_stmt("x += 1"); assert!(matches!( 
aug_assign_stmt.pat().unwrap().kind(), PatKind::Path(_) @@ -291,11 +282,7 @@ mod tests { crate::ast::ArithBinOp::Add(_) )); - let stmt = parse_stmt("x <<= 1"); - let aug_assign_stmt = match stmt.kind() { - StmtKind::AugAssign(n) => n, - _ => panic!("expected aug assign statement"), - }; + let aug_assign_stmt: AugAssignStmt = parse_stmt("x <<= 1"); assert!(matches!( aug_assign_stmt.pat().unwrap().kind(), @@ -315,11 +302,7 @@ mod tests { } "#; - let stmt = parse_stmt(source); - let for_stmt = match stmt.kind() { - StmtKind::For(n) => n, - _ => panic!("expected for statement"), - }; + let for_stmt: ForStmt = parse_stmt(source); assert!(matches!(for_stmt.pat().unwrap().kind(), PatKind::Path(_))); assert!(for_stmt.iterable().is_some()); assert!(for_stmt.body().is_some()); @@ -333,11 +316,7 @@ mod tests { } "#; - let stmt = parse_stmt(source); - let while_stmt = match stmt.kind() { - StmtKind::While(n) => n, - _ => panic!("expected for statement"), - }; + let while_stmt: WhileStmt = parse_stmt(source); assert!(while_stmt.cond().is_some()); assert!(while_stmt.body().is_some()); assert_ne!(while_stmt.cond(), while_stmt.body()); @@ -345,18 +324,10 @@ mod tests { #[test] fn r#return() { - let stmt = parse_stmt("return x"); - let return_stmt = match stmt.kind() { - StmtKind::Return(n) => n, - _ => panic!("expected return statement"), - }; - assert!(return_stmt.expr().is_some()); - - let stmt = parse_stmt("return"); - let return_stmt = match stmt.kind() { - StmtKind::Return(n) => n, - _ => panic!("expected return statement"), - }; - assert!(return_stmt.expr().is_none()); + let ret_stmt: ReturnStmt = parse_stmt("return x"); + assert!(ret_stmt.expr().is_some()); + + let ret_stmt: ReturnStmt = parse_stmt("return"); + assert!(ret_stmt.expr().is_none()); } } diff --git a/crates/parser2/src/ast/type_.rs b/crates/parser2/src/ast/type_.rs index dd0f38b301..e1888225b5 100644 --- a/crates/parser2/src/ast/type_.rs +++ b/crates/parser2/src/ast/type_.rs @@ -104,7 +104,7 @@ impl ArrayType { } /// A specific kind of type. 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum TypeKind { Ptr(PtrType), Path(PathType), @@ -119,20 +119,23 @@ mod tests { use crate::ast::prelude::*; use crate::{lexer::Lexer, parser}; - fn parse_type(source: &str) -> Type { + fn parse_type(source: &str) -> T + where + T: TryFrom, + { let lexer = Lexer::new(source); let mut parser = parser::Parser::new(lexer); parser::type_::parse_type(&mut parser, None, true); - Type::cast(parser.finish().0).unwrap() + Type::cast(parser.finish().0) + .unwrap() + .kind() + .try_into() + .unwrap() } #[test] fn ptr_type() { - let ty = parse_type("*i32"); - let ptr_ty = match ty.kind() { - TypeKind::Ptr(ptr_ty) => ptr_ty, - _ => panic!(), - }; + let ptr_ty: PtrType = parse_type("*i32"); assert_eq!(ptr_ty.star().unwrap().text(), "*"); assert!(matches!(ptr_ty.inner().unwrap().kind(), TypeKind::Path(_))); @@ -140,13 +143,9 @@ mod tests { #[test] fn path_type() { - let ty = parse_type("Foo::Bar"); - let path_type = match ty.kind() { - TypeKind::Path(ptr_ty) => ptr_ty, - _ => panic!(), - }; + let path_ty: PathType = parse_type("Foo::Bar"); - for (i, segment) in path_type.path().unwrap().segments().enumerate() { + for (i, segment) in path_ty.path().unwrap().segments().enumerate() { match i { 0 => assert_eq!(segment.ident().unwrap().text(), "Foo"), 1 => assert_eq!(segment.ident().unwrap().text(), "Bar"), @@ -154,7 +153,7 @@ mod tests { } } - let generic_args = path_type.generic_args().unwrap(); + let generic_args = path_ty.generic_args().unwrap(); for (i, arg) in generic_args.iter().enumerate() { match i { 0 => assert!(matches!(arg.kind(), crate::ast::GenericArgKind::Type(_))), @@ -166,19 +165,14 @@ mod tests { #[test] fn self_type() { - let ty = parse_type("Self"); - assert!(matches!(ty.kind(), TypeKind::SelfType(_))); + let _: SelfType = parse_type("Self"); } #[test] fn tuple_type() { - let ty = parse_type("((i32, u32), foo::Bar, *usize"); - let tuple_type = match ty.kind() { - TypeKind::Tuple(tuple_type) => tuple_type, - _ => panic!(), - }; + let tuple_ty: TupleType = parse_type("((i32, u32), foo::Bar, *usize"); - for (i, ty) in tuple_type.elem_tys().enumerate() { + for (i, ty) in tuple_ty.elem_tys().enumerate() { match i { 0 => assert!(matches!(ty.kind(), TypeKind::Tuple(_))), 1 => assert!(matches!(ty.kind(), TypeKind::Path(_))), @@ -190,16 +184,12 @@ mod tests { #[test] fn array_type() { - let ty = parse_type("[(i32, u32); 1]"); - let array_type = match ty.kind() { - TypeKind::Array(array_type) => array_type, - _ => panic!(), - }; + let array_ty: ArrayType = parse_type("[(i32, u32); 1]"); assert!(matches!( - array_type.elem_ty().unwrap().kind(), + array_ty.elem_ty().unwrap().kind(), TypeKind::Tuple(_) )); - assert!(array_type.len().is_some()); + assert!(array_ty.len().is_some()); } } From c84d5f99abb307ca8d476121243f3fbfa67bdf96 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 10 Feb 2023 22:51:04 +0100 Subject: [PATCH 069/678] Rename `FnArg` to `FnParam` --- crates/parser2/src/parser/func.rs | 8 ++--- crates/parser2/src/parser/param.rs | 16 +++++----- crates/parser2/src/syntax_kind.rs | 4 +-- .../error_recovery/items/extern_.snap | 11 ++++--- .../test_files/error_recovery/items/func.snap | 12 +++---- .../test_files/syntax_node/items/extern.snap | 17 +++++----- .../test_files/syntax_node/items/func.snap | 28 ++++++++-------- .../test_files/syntax_node/items/impl.snap | 12 +++---- .../syntax_node/items/impl_trait.snap | 10 +++--- 
.../test_files/syntax_node/items/trait.snap | 32 +++++++++---------- 10 files changed, 76 insertions(+), 74 deletions(-) diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index b2491d882c..916ab804c6 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -3,7 +3,7 @@ use crate::SyntaxKind; use super::{ define_scope, expr_atom::BlockExprScope, - param::{parse_where_clause_opt, FnArgListScope, GenericParamListScope}, + param::{parse_where_clause_opt, FnParamListScope, GenericParamListScope}, token_stream::TokenStream, type_::parse_type, Parser, @@ -65,7 +65,7 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { parser.with_next_expected_tokens( |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnArgListScope::default(), None); + parser.parse(FnParamListScope::default(), None); } else { parser.error_and_recover("expected `(` for the function arguments", None); } @@ -114,7 +114,7 @@ fn parse_trait_fn_def_impl(parser: &mut Parser) { parser.with_recovery_tokens( |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnArgListScope::default(), None); + parser.parse(FnParamListScope::default(), None); } else { parser.error_and_recover("expected `(` for the function arguments", None); } @@ -152,7 +152,7 @@ fn parse_extern_fn_def_impl(parser: &mut Parser) { parser.with_recovery_tokens( |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnArgListScope::default(), None); + parser.parse(FnParamListScope::default(), None); } else { parser.error_and_recover("expected `(` for the function arguments", None); } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 7b742c7340..a46fb84ebd 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -11,11 +11,11 @@ use super::{ }; define_scope! { - pub(crate) FnArgListScope, - FnArgList, + pub(crate) FnParamListScope, + FnParamList, Override(RParen, Comma) } -impl super::Parse for FnArgListScope { +impl super::Parse for FnParamListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); if parser.bump_if(SyntaxKind::RParen) { @@ -23,12 +23,12 @@ impl super::Parse for FnArgListScope { } parser.with_next_expected_tokens( - |parser| parser.parse(FnArgScope::default(), None), + |parser| parser.parse(FnParamScope::default(), None), &[SyntaxKind::Comma, SyntaxKind::RParen], ); while parser.bump_if(SyntaxKind::Comma) { parser.with_next_expected_tokens( - |parser| parser.parse(FnArgScope::default(), None), + |parser| parser.parse(FnParamScope::default(), None), &[SyntaxKind::Comma, SyntaxKind::RParen], ); } @@ -38,11 +38,11 @@ impl super::Parse for FnArgListScope { } define_scope! 
{ - FnArgScope, - FnArg, + FnParamScope, + FnParam, Inheritance } -impl super::Parse for FnArgScope { +impl super::Parse for FnParamScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_if(SyntaxKind::MutKw); diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 4c9521d920..ea489c4e01 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -427,10 +427,10 @@ pub enum SyntaxKind { GenericParamList, /// `(x: i32, _ y: mut i32)` - FnArgList, + FnParamList, /// `_ x: mut i32` - FnArg, + FnParam, /// `foo::Trait1 + Trait2` TypeBoundList, diff --git a/crates/parser2/test_files/error_recovery/items/extern_.snap b/crates/parser2/test_files/error_recovery/items/extern_.snap index 8c017a1e0f..373eb022c6 100644 --- a/crates/parser2/test_files/error_recovery/items/extern_.snap +++ b/crates/parser2/test_files/error_recovery/items/extern_.snap @@ -1,6 +1,7 @@ --- -source: crates/parser2/tests/errro_recovery.rs -expression: snapshot +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/extern_.fe --- Root@0..90 ItemList@0..90 @@ -20,9 +21,9 @@ Root@0..90 FnKw@24..26 "fn" WhiteSpace@26..27 " " Ident@27..30 "Foo" - FnArgList@30..41 + FnParamList@30..41 LParen@30..31 "(" - FnArg@31..40 + FnParam@31..40 Ident@31..32 "x" Colon@32..33 ":" WhiteSpace@33..34 " " @@ -54,7 +55,7 @@ Root@0..90 FnKw@80..82 "fn" WhiteSpace@82..83 " " Ident@83..86 "foo" - FnArgList@86..88 + FnParamList@86..88 LParen@86..87 "(" RParen@87..88 ")" Newline@88..89 "\n" diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index 33a0d497b6..7bebf11c78 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -23,9 +23,9 @@ Root@0..133 Gt@15..16 ">" Error@16..17 Gt@16..17 ">" - FnArgList@17..47 + FnParamList@17..47 LParen@17..18 "(" - FnArg@18..24 + FnParam@18..24 Ident@18..19 "x" Colon@19..20 ":" WhiteSpace@20..21 " " @@ -35,7 +35,7 @@ Root@0..133 Ident@21..24 "i32" Comma@24..25 "," WhiteSpace@25..26 " " - FnArg@26..38 + FnParam@26..38 Underscore@26..27 "_" WhiteSpace@27..28 " " Error@28..33 @@ -50,7 +50,7 @@ Root@0..133 Ident@35..38 "u32" Comma@38..39 "," WhiteSpace@39..40 " " - FnArg@40..46 + FnParam@40..46 Ident@40..41 "z" Colon@41..42 ":" WhiteSpace@42..43 " " @@ -110,9 +110,9 @@ Root@0..133 Gt@97..98 ">" Error@98..98 Error@98..98 - FnArgList@98..106 + FnParamList@98..106 LParen@98..99 "(" - FnArg@99..105 + FnParam@99..105 Ident@99..100 "x" Colon@100..101 ":" WhiteSpace@101..102 " " diff --git a/crates/parser2/test_files/syntax_node/items/extern.snap b/crates/parser2/test_files/syntax_node/items/extern.snap index 8c2df581f4..08b5f3a644 100644 --- a/crates/parser2/test_files/syntax_node/items/extern.snap +++ b/crates/parser2/test_files/syntax_node/items/extern.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/extern.fe --- Root@0..146 ItemList@0..146 @@ -28,9 +29,9 @@ Root@0..146 FnKw@37..39 "fn" WhiteSpace@39..40 " " Ident@40..45 "write" - FnArgList@45..68 + FnParamList@45..68 LParen@45..46 "(" - FnArg@46..55 + FnParam@46..55 Ident@46..49 "loc" Colon@49..50 ":" WhiteSpace@50..51 " " @@ -42,7 +43,7 @@ Root@0..146 Ident@52..55 "u32" Comma@55..56 "," WhiteSpace@56..57 " " - FnArg@57..67 + FnParam@57..67 Ident@57..62 "value" 
Colon@62..63 ":" WhiteSpace@63..64 " " @@ -69,9 +70,9 @@ Root@0..146 FnKw@92..94 "fn" WhiteSpace@94..95 " " Ident@95..99 "read" - FnArgList@99..122 + FnParamList@99..122 LParen@99..100 "(" - FnArg@100..109 + FnParam@100..109 Ident@100..103 "loc" Colon@103..104 ":" WhiteSpace@104..105 " " @@ -83,7 +84,7 @@ Root@0..146 Ident@106..109 "u32" Comma@109..110 "," WhiteSpace@110..111 " " - FnArg@111..121 + FnParam@111..121 Ident@111..114 "len" Colon@114..115 ":" WhiteSpace@115..116 " " @@ -105,7 +106,7 @@ Root@0..146 FnKw@136..138 "fn" WhiteSpace@138..139 " " Ident@139..142 "foo" - FnArgList@142..144 + FnParamList@142..144 LParen@142..143 "(" RParen@143..144 ")" Newline@144..145 "\n" diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index af95ed76ed..dca917002e 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -12,7 +12,7 @@ Root@0..361 FnKw@4..6 "fn" WhiteSpace@6..7 " " Ident@7..10 "foo" - FnArgList@10..12 + FnParamList@10..12 LParen@10..11 "(" RParen@11..12 ")" WhiteSpace@12..13 " " @@ -40,9 +40,9 @@ Root@0..361 FnKw@32..34 "fn" WhiteSpace@34..35 " " Ident@35..38 "bar" - FnArgList@38..63 + FnParamList@38..63 LParen@38..39 "(" - FnArg@39..47 + FnParam@39..47 Ident@39..42 "bar" Colon@42..43 ":" WhiteSpace@43..44 " " @@ -52,7 +52,7 @@ Root@0..361 Ident@44..47 "i32" Comma@47..48 "," WhiteSpace@48..49 " " - FnArg@49..62 + FnParam@49..62 MutKw@49..52 "mut" WhiteSpace@52..53 " " Ident@53..56 "baz" @@ -86,9 +86,9 @@ Root@0..361 FnKw@82..84 "fn" WhiteSpace@84..85 " " Ident@85..88 "baz" - FnArgList@88..161 + FnParamList@88..161 LParen@88..89 "(" - FnArg@89..109 + FnParam@89..109 Ident@89..93 "from" WhiteSpace@93..94 " " Ident@94..100 "sender" @@ -100,7 +100,7 @@ Root@0..361 Ident@102..109 "address" Comma@109..110 "," WhiteSpace@110..111 " " - FnArg@111..136 + FnParam@111..136 MutKw@111..114 "mut" WhiteSpace@114..115 " " Ident@115..117 "to" @@ -114,7 +114,7 @@ Root@0..361 Ident@129..136 "address" Comma@136..137 "," WhiteSpace@137..138 " " - FnArg@138..149 + FnParam@138..149 Underscore@138..139 "_" WhiteSpace@139..140 " " Ident@140..143 "val" @@ -126,7 +126,7 @@ Root@0..361 Ident@145..149 "u256" Comma@149..150 "," WhiteSpace@150..151 " " - FnArg@151..160 + FnParam@151..160 Underscore@151..152 "_" WhiteSpace@152..153 " " Underscore@153..154 "_" @@ -176,9 +176,9 @@ Root@0..361 TypeGenericParam@203..204 Ident@203..204 "U" Gt@204..205 ">" - FnArgList@205..225 + FnParamList@205..225 LParen@205..206 "(" - FnArg@206..210 + FnParam@206..210 Ident@206..207 "t" Colon@207..208 ":" WhiteSpace@208..209 " " @@ -188,7 +188,7 @@ Root@0..361 Ident@209..210 "T" Comma@210..211 "," WhiteSpace@211..212 " " - FnArg@212..224 + FnParam@212..224 Ident@212..213 "u" Colon@213..214 ":" WhiteSpace@214..215 " " @@ -288,9 +288,9 @@ Root@0..361 TypeGenericParam@319..320 Ident@319..320 "U" Gt@320..321 ">" - FnArgList@321..340 + FnParamList@321..340 LParen@321..322 "(" - FnArg@322..339 + FnParam@322..339 Ident@322..323 "t" Colon@323..324 ":" WhiteSpace@324..325 " " diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 122d8a4d75..748be7f5a7 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -42,13 +42,13 @@ Root@0..266 FnKw@32..34 "fn" WhiteSpace@34..35 " " Ident@35..38 "add" - FnArgList@38..55 + FnParamList@38..55 LParen@38..39 "(" - 
FnArg@39..43 + FnParam@39..43 SelfKw@39..43 "self" Comma@43..44 "," WhiteSpace@44..45 " " - FnArg@45..54 + FnParam@45..54 Ident@45..48 "rhs" Colon@48..49 ":" WhiteSpace@49..50 " " @@ -176,13 +176,13 @@ Root@0..266 Ident@189..190 "T" Gt@190..191 ">" Gt@191..192 ">" - FnArgList@192..206 + FnParamList@192..206 LParen@192..193 "(" - FnArg@193..197 + FnParam@193..197 SelfKw@193..197 "self" Comma@197..198 "," WhiteSpace@198..199 " " - FnArg@199..205 + FnParam@199..205 Ident@199..202 "rhs" Colon@202..203 ":" WhiteSpace@203..204 " " diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index c651b72e5c..48686870b8 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -44,7 +44,7 @@ Root@0..317 FnKw@29..31 "fn" WhiteSpace@31..32 " " Ident@32..35 "foo" - FnArgList@35..37 + FnParamList@35..37 LParen@35..36 "(" RParen@36..37 ")" WhiteSpace@37..38 " " @@ -161,9 +161,9 @@ Root@0..317 Ident@148..149 "U" Gt@149..150 ">" Gt@150..151 ">" - FnArgList@151..157 + FnParamList@151..157 LParen@151..152 "(" - FnArg@152..156 + FnParam@152..156 Ident@152..153 "t" Colon@153..154 ":" WhiteSpace@154..155 " " @@ -293,9 +293,9 @@ Root@0..317 Ident@269..270 "U" Gt@270..271 ">" Gt@271..272 ">" - FnArgList@272..278 + FnParamList@272..278 LParen@272..273 "(" - FnArg@273..277 + FnParam@273..277 Ident@273..274 "t" Colon@274..275 ":" WhiteSpace@275..276 " " diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index ac9c0d8559..2bfa70cf7c 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -54,9 +54,9 @@ Root@0..588 PathSegment@63..66 Ident@63..66 "i32" Gt@66..67 ">" - FnArgList@67..79 + FnParamList@67..79 LParen@67..68 "(" - FnArg@68..72 + FnParam@68..72 Ident@68..69 "t" Colon@69..70 ":" WhiteSpace@70..71 " " @@ -66,7 +66,7 @@ Root@0..588 Ident@71..72 "T" Comma@72..73 "," WhiteSpace@73..74 " " - FnArg@74..78 + FnParam@74..78 Ident@74..75 "u" Colon@75..76 ":" WhiteSpace@76..77 " " @@ -100,9 +100,9 @@ Root@0..588 PathSegment@112..115 Ident@112..115 "Sub" Gt@115..116 ">" - FnArgList@116..132 + FnParamList@116..132 LParen@116..117 "(" - FnArg@117..123 + FnParam@117..123 Ident@117..120 "lhs" Colon@120..121 ":" WhiteSpace@121..122 " " @@ -112,7 +112,7 @@ Root@0..588 Ident@122..123 "T" Comma@123..124 "," WhiteSpace@124..125 " " - FnArg@125..131 + FnParam@125..131 Ident@125..128 "rhs" Colon@128..129 ":" WhiteSpace@129..130 " " @@ -200,13 +200,13 @@ Root@0..588 FnKw@215..217 "fn" WhiteSpace@217..218 " " Ident@218..221 "add" - FnArgList@221..237 + FnParamList@221..237 LParen@221..222 "(" - FnArg@222..226 + FnParam@222..226 SelfKw@222..226 "self" Comma@226..227 "," WhiteSpace@227..228 " " - FnArg@228..236 + FnParam@228..236 Ident@228..231 "rhs" Colon@231..232 ":" WhiteSpace@232..233 " " @@ -269,15 +269,15 @@ Root@0..588 PathSegment@308..319 Ident@308..319 "TokenStream" Gt@319..320 ">" - FnArgList@320..353 + FnParamList@320..353 LParen@320..321 "(" - FnArg@321..329 + FnParam@321..329 MutKw@321..324 "mut" WhiteSpace@324..325 " " SelfKw@325..329 "self" Comma@329..330 "," WhiteSpace@330..331 " " - FnArg@331..352 + FnParam@331..352 MutKw@331..334 "mut" WhiteSpace@334..335 " " Ident@335..341 "parser" @@ -363,15 +363,15 @@ Root@0..588 PathSegment@427..432 Ident@427..432 "Parse" Gt@432..433 ">" - FnArgList@433..489 + 
FnParamList@433..489 LParen@433..434 "(" - FnArg@434..442 + FnParam@434..442 MutKw@434..437 "mut" WhiteSpace@437..438 " " SelfKw@438..442 "self" Comma@442..443 "," WhiteSpace@443..444 " " - FnArg@444..456 + FnParam@444..456 MutKw@444..447 "mut" WhiteSpace@447..448 " " Ident@448..453 "scope" @@ -383,7 +383,7 @@ Root@0..588 Ident@455..456 "T" Comma@456..457 "," WhiteSpace@457..458 " " - FnArg@458..488 + FnParam@458..488 Ident@458..468 "checkpoint" Colon@468..469 ":" WhiteSpace@469..470 " " From d6e03d622fbf42df0f8f873712f313ff750c2144 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 11 Feb 2023 20:19:36 +0100 Subject: [PATCH 070/678] Add ast for `UseTree` --- crates/parser2/src/ast/use_tree.rs | 104 ++++++++++++++++++ crates/parser2/src/parser/use_tree.rs | 49 +++++---- .../test_files/syntax_node/items/use.snap | 18 ++- 3 files changed, 145 insertions(+), 26 deletions(-) create mode 100644 crates/parser2/src/ast/use_tree.rs diff --git a/crates/parser2/src/ast/use_tree.rs b/crates/parser2/src/ast/use_tree.rs new file mode 100644 index 0000000000..93a9ee228e --- /dev/null +++ b/crates/parser2/src/ast/use_tree.rs @@ -0,0 +1,104 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; + +use crate::{SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A use tree. + /// `Foo::Foo2::{Bar::*, Baz::{x, y}}` + pub struct UseTree, + SK::UseTree, +} +impl UseTree { + /// Returns the path of this use tree. + /// `Foo::Foo2` in `Foo::Foo2::{Bar::*, Baz::{x, y}}` + /// + /// NOTE: If the tree root is started with `{}`, then this method will + /// return `None`. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the children of this use tree. + /// + /// `Bar::*` and `Baz::{x, y}` in `Foo::Foo2::{Bar::*, Baz::{x, y}}`. + pub fn children(&self) -> Option { + support::child(self.syntax()) + } + + //// Returns the alias of this use tree. + /// `Bar` in `Foo as Bar;` + pub fn alias(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct UseTreeList, + SK::UseTreeList, + IntoIterator, +} + +ast_node! { + pub struct UsePath, + SK::UsePath, + IntoIterator, +} + +ast_node! { + pub struct UsePathSegment, + SK::UsePathSegment, +} +impl UsePathSegment { + pub fn kind(&self) -> Option { + match self.syntax().first_child_or_token() { + Some(node) => match node.kind() { + SK::SelfKw => Some(UsePathSegmentKind::SelfPath(node.into_token().unwrap())), + SK::Ident => Some(UsePathSegmentKind::Ident(node.into_token().unwrap())), + SK::Star => Some(UsePathSegmentKind::Wildcard(node.into_token().unwrap())), + _ => None, + }, + _ => None, + } + } + + pub fn ident(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + pub fn self_token(&self) -> Option { + support::token(self.syntax(), SK::SelfKw) + } + + pub fn wildcard(&self) -> Option { + support::token(self.syntax(), SK::Star) + } +} + +ast_node! { + pub struct UseTreeAlias, + SK::UseTreeRename, +} +impl UseTreeAlias { + //// Returns `Some` if the alias is specified as an ident. + pub fn ident(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns `Some` if the alias is specified as `_`. + pub fn underscore(&self) -> Option { + support::token(self.syntax(), SK::Underscore) + } +} + +/// A path segment in a use tree. +pub enum UsePathSegmentKind { + /// `self` + SelfPath(SyntaxToken), + /// `foo` + Ident(SyntaxToken), + /// `*` + /// This is only allowed in the last segment of a path. 
+ Wildcard(SyntaxToken), +} diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs index 6be45d11a7..b04f4f8c5d 100644 --- a/crates/parser2/src/parser/use_tree.rs +++ b/crates/parser2/src/parser/use_tree.rs @@ -1,3 +1,5 @@ +use std::{cell::Cell, rc::Rc}; + use crate::SyntaxKind; use super::{define_scope, token_stream::TokenStream, Parser}; @@ -10,36 +12,38 @@ define_scope! { impl super::Parse for UseTreeScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - match parser.current_kind() { - Some(SyntaxKind::LBrace) => { - parser.parse(UseTreeListScope::default(), None); - return; - } - Some(SyntaxKind::Star) => { - parser.bump(); - return; - } - _ => {} + if let Some(SyntaxKind::LBrace) = parser.current_kind() { + parser.parse(UseTreeListScope::default(), None); + return; } - parser.parse(UsePathScope::default(), None); + let use_path_scope = UsePathScope::default(); + parser.parse(use_path_scope.clone(), None); + let has_wildcard = use_path_scope.has_wildcard.get(); - if !parser.bump_if(SyntaxKind::Colon2) { + if parser.current_kind() == Some(SyntaxKind::AsKw) { + if has_wildcard { + parser.error_and_recover("cant use `as` with wildcard", None); + } if parser.current_kind() == Some(SyntaxKind::AsKw) { parser.parse(UseTreeRenameScope::default(), None); } return; } + if !parser.bump_if(SyntaxKind::Colon2) { + return; + } match parser.current_kind() { - Some(SyntaxKind::LBrace) => { - parser.parse(UseTreeListScope::default(), None); - } - Some(SyntaxKind::Star) => { - parser.bump(); + Some(SyntaxKind::LBrace) if !has_wildcard => { + if has_wildcard { + parser.error_and_recover("can't use `*` with `{}`", None); + } else { + parser.parse(UseTreeListScope::default(), None); + } } _ => { - parser.error_and_recover("expected identifier or `self`", None); + parser.error_and_recover("expected identifier, `*` or `self`", None); } }; } @@ -74,7 +78,7 @@ impl super::Parse for UseTreeListScope { } define_scope! 
{ - UsePathScope, + UsePathScope{ has_wildcard: Rc>}, UsePath, Inheritance(Colon2) } @@ -90,7 +94,12 @@ impl super::Parse for UsePathScope { }); if is_path_segment { parser.bump_expected(SyntaxKind::Colon2); + self.has_wildcard + .set(parser.current_kind() == Some(SyntaxKind::Star)); parser.parse(UsePathSegmentScope::default(), None); + if self.has_wildcard.get() { + break; + } } else { break; } @@ -136,5 +145,5 @@ impl super::Parse for UseTreeRenameScope { fn is_use_path_segment(kind: SyntaxKind) -> bool { use SyntaxKind::*; - matches!(kind, Ident | SelfKw) + matches!(kind, Ident | SelfKw | Star) } diff --git a/crates/parser2/test_files/syntax_node/items/use.snap b/crates/parser2/test_files/syntax_node/items/use.snap index 26517d5ba7..4d5ab8d839 100644 --- a/crates/parser2/test_files/syntax_node/items/use.snap +++ b/crates/parser2/test_files/syntax_node/items/use.snap @@ -1,6 +1,7 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/use.fe --- Root@0..278 ItemList@0..278 @@ -33,11 +34,12 @@ Root@0..278 UseKw@30..33 "use" WhiteSpace@33..34 " " UseTree@34..40 - UsePath@34..37 + UsePath@34..40 UsePathSegment@34..37 Ident@34..37 "Foo" - Colon2@37..39 "::" - Star@39..40 "*" + Colon2@37..39 "::" + UsePathSegment@39..40 + Star@39..40 "*" Newline@40..41 "\n" Use@41..61 UseKw@41..44 "use" @@ -189,7 +191,9 @@ Root@0..278 Comma@198..199 "," WhiteSpace@199..200 " " UseTree@200..201 - Star@200..201 "*" + UsePath@200..201 + UsePathSegment@200..201 + Star@200..201 "*" RBrace@201..202 "}" Newline@202..204 "\n\n" Use@204..272 @@ -253,5 +257,7 @@ Root@0..278 UseKw@273..276 "use" WhiteSpace@276..277 " " UseTree@277..278 - Star@277..278 "*" + UsePath@277..278 + UsePathSegment@277..278 + Star@277..278 "*" From 2b6c379d82d517ebfdde6286dd304a9f81c0ad02 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 11 Feb 2023 20:19:45 +0100 Subject: [PATCH 071/678] Add ast for items --- crates/parser2/src/ast/attr.rs | 6 +- crates/parser2/src/ast/expr.rs | 11 + crates/parser2/src/ast/item.rs | 704 ++++++++++++++++++++++++++++++++ crates/parser2/src/ast/mod.rs | 7 +- crates/parser2/src/ast/param.rs | 92 +++++ crates/parser2/src/ast/stmt.rs | 2 +- crates/parser2/src/ast/type_.rs | 3 +- 7 files changed, 817 insertions(+), 8 deletions(-) create mode 100644 crates/parser2/src/ast/item.rs diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs index ddcbe954c0..31a55da931 100644 --- a/crates/parser2/src/ast/attr.rs +++ b/crates/parser2/src/ast/attr.rs @@ -102,7 +102,7 @@ ast_node! { } impl DocCommentAttr { /// Returns the underlying token of the doc comment, which includes `///`. 
- pub fn text(&self) -> Option { + pub fn doc(&self) -> Option { support::token(self.syntax(), SK::DocComment) } } @@ -150,8 +150,8 @@ mod tests { let attr_list = parse_attr_list(source); for (i, attr) in attr_list.doc_attrs().enumerate() { match i { - 0 => assert_eq!(attr.text().unwrap().text(), "/// Doc1"), - 1 => assert_eq!(attr.text().unwrap().text(), "/// Doc2"), + 0 => assert_eq!(attr.doc().unwrap().text(), "/// Doc1"), + 1 => assert_eq!(attr.doc().unwrap().text(), "/// Doc2"), _ => unreachable!(), } } diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index 3395f2652b..42130652b2 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -581,6 +581,17 @@ mod tests { .unwrap() } + #[test] + fn block_expr() { + let source = r#"{ + let a = 1 + let b = a + 2 + return b + }"#; + let block_expr: BlockExpr = parse_expr(source); + assert_eq!(block_expr.stmts().count(), 3); + } + #[test] fn bin_expr() { let bin_expr: BinExpr = parse_expr("1 + 2"); diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs new file mode 100644 index 0000000000..63a3f23d89 --- /dev/null +++ b/crates/parser2/src/ast/item.rs @@ -0,0 +1,704 @@ +use super::ast_node; +use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; + +use rowan::ast::{support, AstNode}; + +ast_node! { + /// The top-level node of the AST tree. + pub struct Root, + SK::Root, +} +impl Root { + pub fn items(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A list of items in a module. + pub struct ItemList, + SK::ItemList, + IntoIterator +} + +ast_node! { + /// A single item in a module. + /// Use `[Item::kind]` to get the specific type of item. + pub struct Item, + SK::Fn + | SK::Struct + | SK::Contract + | SK::Enum + | SK::TypeAlias + | SK::Impl + | SK::Trait + | SK::ImplTrait + | SK::Const + | SK::Use + | SK::Extern, +} +impl Item { + pub fn kind(&self) -> ItemKind { + match self.syntax().kind() { + SK::Fn => ItemKind::Fn(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Struct => ItemKind::Struct(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Contract => ItemKind::Contract(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Enum => ItemKind::Enum(AstNode::cast(self.syntax().clone()).unwrap()), + SK::TypeAlias => ItemKind::TypeAlias(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Impl => ItemKind::Impl(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Trait => ItemKind::Trait(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ImplTrait => ItemKind::ImplTrait(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Const => ItemKind::Const(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Use => ItemKind::Use(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Extern => ItemKind::Extern(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// `pub fn foo(_ x: T, from u: U) -> T where T: Trait2 { ... }` + pub struct Fn, + SK::Fn, +} +impl super::GenericParamsOwner for Fn {} +impl super::WhereClauseOwner for Fn {} +impl super::AttrListOwner for Fn {} +impl super::ItemModifierOwner for Fn {} +impl Fn { + /// Returns the name of the function. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the function's parameter list. + pub fn params(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the function's return type. 
+ pub fn ret_ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the function's body. + pub fn body(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct Struct, + SK::Struct, +} +impl super::GenericParamsOwner for Struct {} +impl super::WhereClauseOwner for Struct {} +impl super::AttrListOwner for Struct {} +impl super::ItemModifierOwner for Struct {} +impl Struct { + /// Returns the name of the struct. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the struct's field def list. + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct Contract, + SK::Contract, +} +impl super::AttrListOwner for Contract {} +impl super::ItemModifierOwner for Contract {} +impl Contract { + /// Returns the name of the contract. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the contract's field def list. + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct Enum, + SK::Enum, +} +impl super::GenericParamsOwner for Enum {} +impl super::WhereClauseOwner for Enum {} +impl super::AttrListOwner for Enum {} +impl super::ItemModifierOwner for Enum {} +impl Enum { + /// Returns the name of the enum. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the enum's variant def list. + pub fn variants(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `type Foo = Bar` + pub struct TypeAlias, + SK::TypeAlias, +} +impl super::GenericParamsOwner for TypeAlias {} +impl super::WhereClauseOwner for TypeAlias {} +impl super::AttrListOwner for TypeAlias {} +impl super::ItemModifierOwner for TypeAlias {} +impl TypeAlias { + /// Returns the name of the type alias. + /// `Foo` in `type Foo = Bar` + pub fn alias(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type alias's type. + /// `Bar` in `type Foo = Bar` + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `trait Foo<..> where .. { .. }` + pub struct Trait, + SK::Trait, +} +impl super::GenericParamsOwner for Trait {} +impl super::WhereClauseOwner for Trait {} +impl super::AttrListOwner for Trait {} +impl super::ItemModifierOwner for Trait {} +impl Trait { + /// Returns the name of the trait. + /// `Foo` in `trait Foo<..> where .. { .. }` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the trait's item list. + /// `{ .. }` in `trait Foo<..> where .. { .. }` + /// NOTE: Currently only supports `fn` items. + pub fn item_list(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `impl Foo::Bar where .. { .. }` + pub struct Impl, + SK::Impl, +} +impl super::GenericParamsOwner for Impl {} +impl super::WhereClauseOwner for Impl {} +impl super::AttrListOwner for Impl {} +impl super::ItemModifierOwner for Impl {} +impl Impl { + /// Returns the type of the impl. + /// `Foo::Bar` in `impl Foo::Bar where .. { .. }` + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the impl item list. + /// `{ .. }` in `impl Foo::Bar where .. { .. }` + /// NOTE: Currently only supports `fn` items. + pub fn item_list(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `impl Foo for Bar { .. 
}` + pub struct ImplTrait, + SK::ImplTrait, +} +impl super::GenericParamsOwner for ImplTrait {} +impl super::WhereClauseOwner for ImplTrait {} +impl super::AttrListOwner for ImplTrait {} +impl super::ItemModifierOwner for ImplTrait {} +impl ImplTrait { + /// Returns the trait of the impl. + /// `Foo` in `impl Foo for Bar { .. }` + pub fn trait_(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the type of the impl. + /// `Bar` in `impl Foo for Bar { .. }` + pub fn ty(&self) -> Option { + support::children(self.syntax()).nth(1) + } + + /// Returns the trait impl item list. + /// `{ .. }` in `impl Foo for Bar { .. }` + /// NOTE: Currently only supports `fn` items. + pub fn item_list(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `const FOO: u32 = 42;` + pub struct Const, + SK::Const, +} +impl super::AttrListOwner for Const {} +impl Const { + /// Returns the name of the const. + /// `FOO` in `const FOO: u32 = 42;` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type of the const. + /// `u32` in `const FOO: u32 = 42;` + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the value of the const. + /// `42` in `const FOO: u32 = 42;` + pub fn value(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `use foo::{bar, Baz::*}` + pub struct Use, + SK::Use, +} +impl super::AttrListOwner for Use {} +impl Use { + /// Returns the use tree. + /// `foo::{bar, Baz::*}` in `use foo::{bar, Baz::*}` + pub fn use_tree(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `extern { .. }` + pub struct Extern, + SK::Extern, +} +impl super::AttrListOwner for Extern {} +impl Extern { + /// Returns the item list. + /// NOTE: Currently only supports `fn` items. + pub fn extern_block(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct RecordFieldDefList, + SK::RecordFieldDefList, + IntoIterator +} +ast_node! { + pub struct RecordFieldDef, + SK::RecordFieldDef, +} +impl RecordFieldDef { + /// Returns the pub keyword if exists. + pub fn pub_kw(&self) -> Option { + support::token(self.syntax(), SK::PubKw) + } + + /// Returns the name of the field. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type of the field. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct EnumVariantDefList, + SK::VariantDefList, + IntoIterator +} + +ast_node! { + /// `Foo(i32, u32)` + pub struct EnumVariantDef, + SK::VariantDef, +} +impl EnumVariantDef { + /// Returns the name of the variant. + /// `Foo` in `Foo(i32, u32)` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type of the variant. + /// `(i32, u32)` in `Foo(i32, u32)` + /// Currently only tuple variants are supported. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct TraitItemList, + SK::TraitItemList, + IntoIterator, +} + +ast_node! { + pub struct ImplItemList, + SK::ImplItemList, + IntoIterator, +} + +ast_node! { + pub struct ImplTraitItemList, + SK::ImplTraitItemList, + IntoIterator, +} + +ast_node! { + pub struct ExternItemList, + SK::ExternItemList, + IntoIterator, +} + +ast_node! { + /// A modifier on an item. 
+ /// `pub unsafe` + pub struct ItemModifier, + SK::ItemModifier, +} +impl ItemModifier { + pub fn pub_kw(&self) -> Option { + support::token(self.syntax(), SK::PubKw) + } + + pub fn unsafe_kw(&self) -> Option { + support::token(self.syntax(), SK::UnsafeKw) + } +} + +pub trait ItemModifierOwner: AstNode { + fn item_modifier(&self) -> Option { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum ItemKind { + Fn(Fn), + Struct(Struct), + Contract(Contract), + Enum(Enum), + TypeAlias(TypeAlias), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), + Const(Const), + Use(Use), + Extern(Extern), +} + +#[cfg(test)] +mod tests { + use crate::{ + ast::{prelude::*, ExprKind, TypeKind}, + lexer::Lexer, + parser::{ItemListScope, Parser}, + }; + + use super::*; + + fn parse_item(source: &str) -> T + where + T: TryFrom, + { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + + parser.parse(ItemListScope::default(), None); + let item_list = ItemList::cast(parser.finish().0).unwrap(); + let mut items = item_list.into_iter().collect::>(); + assert_eq!(items.len(), 1); + items.pop().unwrap().kind().try_into().unwrap() + } + + #[test] + fn func() { + let source = r#" + /// This is doc comment + #evm + pub unsafe fn foo(_ x: T, from u: U) -> (T, U) where T: Trait2 { return } + "#; + let func: Fn = parse_item(source); + + assert_eq!(func.name().unwrap().text(), "foo"); + assert_eq!(func.attr_list().unwrap().iter().count(), 2); + assert_eq!(func.generic_params().unwrap().iter().count(), 2); + assert!(func.where_clause().is_some()); + assert!(func.body().is_some()); + assert!(matches!(func.ret_ty().unwrap().kind(), TypeKind::Tuple(_))); + let modifier = func.item_modifier().unwrap(); + assert!(modifier.pub_kw().is_some()); + assert!(modifier.unsafe_kw().is_some()); + } + + #[test] + fn r#struct() { + let source = r#" + pub struct Foo where T: Trait2 { + pub x: T, + y: (U, i32), + } + "#; + let s: Struct = parse_item(source); + assert_eq!(s.name().unwrap().text(), "Foo"); + let mut count = 0; + for field in s.fields().unwrap() { + match count { + 0 => { + assert!(field.pub_kw().is_some()); + assert_eq!(field.name().unwrap().text(), "x"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Path(_))); + } + 1 => { + assert!(field.pub_kw().is_none()); + assert_eq!(field.name().unwrap().text(), "y"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Tuple(_))); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + fn contract() { + let source = r#" + pub contract Foo { + pub x: u32, + y: (i32, u32), + } + "#; + let c: Contract = parse_item(source); + assert_eq!(c.name().unwrap().text(), "Foo"); + let mut count = 0; + for field in c.fields().unwrap() { + match count { + 0 => { + assert!(field.pub_kw().is_some()); + assert_eq!(field.name().unwrap().text(), "x"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Path(_))); + } + 1 => { + assert!(field.pub_kw().is_none()); + assert_eq!(field.name().unwrap().text(), "y"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Tuple(_))); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + fn r#enum() { + let source = r#" + pub enum Foo where T: Trait2 { + Bar + Baz(T, U) + } + "#; + let e: Enum = parse_item(source); + assert_eq!(e.name().unwrap().text(), "Foo"); + + let mut count = 0; + for variant in e.variants().unwrap() { + match count { + 0 => { + 
assert_eq!(variant.name().unwrap().text(), "Bar"); + assert!(variant.ty().is_none()); + } + 1 => { + assert_eq!(variant.name().unwrap().text(), "Baz"); + assert!(matches!(variant.ty().unwrap().kind(), TypeKind::Tuple(_))); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + fn r#type() { + let source = r#" + type MyError where T: Debug = Error + "#; + let t: TypeAlias = parse_item(source); + assert_eq!(t.alias().unwrap().text(), "MyError"); + assert!(matches!(t.ty().unwrap().kind(), TypeKind::Path(_))); + } + + #[test] + fn r#impl() { + let source = r#" + impl Foo { + pub fn foo(self, t: T) -> T { return t } + pub fn bar(self) -> u32 { return 1 } + pub fn baz(mut self) { self.x = 1 } + } + "#; + let i: Impl = parse_item(source); + assert!(matches!(i.ty().unwrap().kind(), TypeKind::Path(_))); + assert_eq!(i.item_list().unwrap().iter().count(), 3); + } + + #[test] + fn r#trait() { + let source = r#" + pub trait Foo { + pub fn foo(self, t: T) -> T + pub fn default(self) -> u32 { return 1 } + } + "#; + let t: Trait = parse_item(source); + assert_eq!(t.name().unwrap().text(), "Foo"); + + let mut count = 0; + for f in t.item_list().unwrap() { + match count { + 0 => { + assert!(f.body().is_none()); + } + 1 => { + assert!(f.body().is_some()); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + fn impl_trait() { + let source = r#" + impl Trait::Foo for (i32) { + fn foo(self, _t: T) -> u32 { return 1 }; + }"#; + let i: ImplTrait = parse_item(source); + assert!(matches!(i.trait_().unwrap().kind(), TypeKind::Path(_))); + assert!(matches!(i.ty().unwrap().kind(), TypeKind::Tuple(_))); + assert!(i.item_list().unwrap().iter().count() == 1); + } + + #[test] + fn r#const() { + let source = r#" + pub const FOO: u32 = 1 + 1 + "#; + let c: Const = parse_item(source); + assert_eq!(c.name().unwrap().text(), "FOO"); + assert!(matches!(c.ty().unwrap().kind(), TypeKind::Path(_))); + assert!(matches!(c.value().unwrap().kind(), ExprKind::Bin(_))); + } + + #[test] + fn r#use() { + let source = r#" + use foo::bar::{bar::*, baz::{Baz, Baz2}} + "#; + let u: Use = parse_item(source); + let use_tree = u.use_tree().unwrap(); + let mut count = 0; + for segment in use_tree.path().unwrap() { + match count { + 0 => { + assert_eq!(segment.ident().unwrap().text(), "foo"); + } + 1 => { + assert_eq!(segment.ident().unwrap().text(), "bar"); + } + _ => unreachable!(), + } + count += 1; + } + + count = 0; + let children = use_tree.children().unwrap(); + for child in children { + match count { + 0 => { + let mut segments = child.path().unwrap().iter(); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "bar"); + assert!(segments.next().unwrap().wildcard().is_some()); + assert!(segments.next().is_none()); + assert!(child.children().is_none()); + } + 1 => { + let mut segments = child.path().unwrap().iter(); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "baz"); + assert!(child.children().unwrap().iter().count() == 2); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + + let source = r#" + use {foo as _foo, bar::Baz as _} + "#; + let u: Use = parse_item(source); + let use_tree = u.use_tree().unwrap(); + assert!(use_tree.path().is_none()); + let mut count = 0; + for child in use_tree.children().unwrap() { + match count { + 0 => { + let alias = child.alias().unwrap(); + assert_eq!(alias.ident().unwrap().text(), "_foo"); + } + 1 => { + let alias = child.alias().unwrap(); + assert!(alias.underscore().is_some()); + 
} + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + fn r#extern() { + let source = r#" + extern { + pub unsafe fn foo() + pub unsafe fn bar() + }"#; + let e: Extern = parse_item(source); + + for f in e.extern_block().unwrap() { + assert!(f.body().is_none()); + } + assert_eq!(e.extern_block().unwrap().iter().count(), 2); + } +} diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index f3a8782455..2bd6905c4d 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -7,6 +7,7 @@ pub mod pat; pub mod path; pub mod stmt; pub mod type_; +pub mod use_tree; pub use attr::*; pub use expr::*; @@ -17,13 +18,15 @@ pub use pat::*; pub use path::*; pub use stmt::*; pub use type_::*; +pub use use_tree::*; pub type AstChildren = rowan::ast::AstChildren; pub type SyntaxText = rowan::SyntaxText; pub mod prelude { - pub use super::AttrListOwner; - pub use super::{GenericArgsOwner, GenericParamsOwner, WhereClauseOwner}; + pub use super::{ + AttrListOwner, GenericArgsOwner, GenericParamsOwner, ItemModifierOwner, WhereClauseOwner, + }; } macro_rules! ast_node { diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 991716b550..343fb4caf9 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -3,6 +3,63 @@ use rowan::ast::{support, AstNode}; use super::ast_node; use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; +ast_node! { + /// A list of parameters. + /// `(self, a: u256, b: u256)` + pub struct FnParamList, + SK::FnParamList, + IntoIterator, +} + +ast_node! { + /// A single parameter. + /// `self` + /// `label a: u256` + pub struct FnParam, + SK::FnParam, +} +impl FnParam { + /// Returns the `mut` keyword if the parameter is mutable. + pub fn mut_token(&self) -> Option { + support::token(self.syntax(), SK::MutKw) + } + + /// Returns the `label` if the parameter is labeled. + /// `label` in `label a: u256`. + pub fn label(&self) -> Option { + self.syntax() + .children_with_tokens() + .find_map(|child| match child { + rowan::NodeOrToken::Token(token) => FnParamLabel::from_token(token), + _ => None, + }) + } + + /// Returns the name of the parameter. + /// `a` in `label a: u256`. + pub fn name(&self) -> Option { + let mut param_names = self.syntax().children_with_tokens().filter_map(|child| { + if let rowan::NodeOrToken::Token(token) = child { + FnParamName::from_token(token) + } else { + None + } + }); + + let first = param_names.next(); + match param_names.next() { + Some(second) => Some(second), + None => first, + } + } + + /// Returns the type of the parameter. + /// `u256` in `a: u256`. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + ast_node! { /// A list of generic parameters. /// `` @@ -245,6 +302,41 @@ pub trait WhereClauseOwner: AstNode { } } +pub enum FnParamLabel { + /// `label` in `label a: u256` + Ident(SyntaxToken), + /// `_` in `_ a: u256`. + Underscore(SyntaxToken), +} +impl FnParamLabel { + fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Ident => Some(FnParamLabel::Ident(token)), + SK::Underscore => Some(FnParamLabel::Underscore(token)), + _ => None, + } + } +} + +pub enum FnParamName { + /// `a` in `label a: u256` + Ident(SyntaxToken), + /// `self` parameter. + SelfParam(SyntaxToken), + /// `_` parameter. 
+ Underscore(SyntaxToken), +} +impl FnParamName { + fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Ident => Some(FnParamName::Ident(token)), + SK::SelfKw => Some(FnParamName::SelfParam(token)), + SK::Underscore => Some(FnParamName::Underscore(token)), + _ => None, + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs index 6f9f8c0465..771a850c30 100644 --- a/crates/parser2/src/ast/stmt.rs +++ b/crates/parser2/src/ast/stmt.rs @@ -90,7 +90,7 @@ impl AugAssignStmt { pub fn op(&self) -> Option { self.syntax() .children_with_tokens() - .find_map(|n| super::ArithBinOp::from_node_or_token(n)) + .find_map(super::ArithBinOp::from_node_or_token) } /// Returns the expression of the rhs of the assignment. diff --git a/crates/parser2/src/ast/type_.rs b/crates/parser2/src/ast/type_.rs index e1888225b5..f0dc28be76 100644 --- a/crates/parser2/src/ast/type_.rs +++ b/crates/parser2/src/ast/type_.rs @@ -116,8 +116,7 @@ pub enum TypeKind { #[cfg(test)] mod tests { use super::*; - use crate::ast::prelude::*; - use crate::{lexer::Lexer, parser}; + use crate::{ast::prelude::*, lexer::Lexer, parser}; fn parse_type(source: &str) -> T where From bc3e4ed8c3b3587f6d66e74de8b6fa5d4aaa78f1 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 26 Feb 2023 20:34:14 +0100 Subject: [PATCH 072/678] Add `wasm-bindgen-test` for ast tests --- crates/parser2/src/ast/attr.rs | 3 +++ crates/parser2/src/ast/expr.rs | 15 +++++++++++++++ crates/parser2/src/ast/item.rs | 29 +++++++++++++++++++++-------- crates/parser2/src/ast/param.rs | 6 ++++++ crates/parser2/src/ast/pat.rs | 9 +++++++++ crates/parser2/src/ast/path.rs | 4 ++++ crates/parser2/src/ast/stmt.rs | 14 +++++++++++--- crates/parser2/src/ast/type_.rs | 7 +++++++ 8 files changed, 76 insertions(+), 11 deletions(-) diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs index 31a55da931..ed667dee2d 100644 --- a/crates/parser2/src/ast/attr.rs +++ b/crates/parser2/src/ast/attr.rs @@ -130,6 +130,8 @@ mod tests { parser::{attr::AttrListScope, Parser}, }; + use wasm_bindgen_test::wasm_bindgen_test; + use super::*; fn parse_attr_list(source: &str) -> AttrList { @@ -140,6 +142,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn attr_list() { let source = r#" #foo diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index 42130652b2..fefe8eb9a2 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -567,6 +567,8 @@ mod tests { use super::*; use crate::{ast::*, lexer::Lexer, parser::Parser}; + use wasm_bindgen_test::wasm_bindgen_test; + fn parse_expr(source: &str) -> T where T: TryFrom, @@ -582,6 +584,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn block_expr() { let source = r#"{ let a = 1 @@ -593,6 +596,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn bin_expr() { let bin_expr: BinExpr = parse_expr("1 + 2"); assert!(matches!(bin_expr.lhs().unwrap().kind(), ExprKind::Lit(_))); @@ -610,6 +614,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn un_expr() { let un_expr: UnExpr = parse_expr("-1"); assert!(matches!(un_expr.op().unwrap(), UnOp::Minus(_))); @@ -617,6 +622,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn call_expr() { let call_expr: CallExpr = parse_expr("foo(1, label: 2, 3 + 4)"); @@ -654,6 +660,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn method_call_expr() { let method_call_expr: MethodCallExpr = parse_expr("foo.bar(1, label: 2, 3 + 4)"); @@ -694,6 +701,7 @@ mod tests { 
} #[test] + #[wasm_bindgen_test] fn record_init_expr() { let record_init_expr: RecordInitExpr = parse_expr("Foo { a: 1, b: 2, c: 3 }"); @@ -718,6 +726,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn field_expr() { let field_expr: FieldExpr = parse_expr("foo(1, 2).bar"); @@ -737,6 +746,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn tuple_expr() { let tuple_expr: TupleExpr = parse_expr("(1, 2, 3)"); @@ -751,6 +761,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn array_expr() { let array_expr: ArrayExpr = parse_expr("[1, 2, 3]"); @@ -765,6 +776,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn index_expr() { let index_expr: IndexExpr = parse_expr("foo[1]"); @@ -779,6 +791,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn array_rep_expr() { let array_rep_expr: ArrayRepExpr = parse_expr("[1; 2]"); @@ -793,6 +806,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn if_expr() { let if_expr: IfExpr = parse_expr("if true { 1 } else { 2 }"); assert!(matches!(if_expr.cond().unwrap().kind(), ExprKind::Lit(_))); @@ -825,6 +839,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn match_expr() { let source = r#" match foo { diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 63a3f23d89..8051575462 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -423,6 +423,8 @@ mod tests { use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + fn parse_item(source: &str) -> T where T: TryFrom, @@ -438,6 +440,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn func() { let source = r#" /// This is doc comment @@ -458,7 +461,8 @@ mod tests { } #[test] - fn r#struct() { + #[wasm_bindgen_test] + fn struct_() { let source = r#" pub struct Foo where T: Trait2 { pub x: T, @@ -488,6 +492,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn contract() { let source = r#" pub contract Foo { @@ -518,7 +523,8 @@ mod tests { } #[test] - fn r#enum() { + #[wasm_bindgen_test] + fn enum_() { let source = r#" pub enum Foo where T: Trait2 { Bar @@ -547,7 +553,8 @@ mod tests { } #[test] - fn r#type() { + #[wasm_bindgen_test] + fn type_() { let source = r#" type MyError where T: Debug = Error "#; @@ -557,7 +564,8 @@ mod tests { } #[test] - fn r#impl() { + #[wasm_bindgen_test] + fn impl_() { let source = r#" impl Foo { pub fn foo(self, t: T) -> T { return t } @@ -571,7 +579,8 @@ mod tests { } #[test] - fn r#trait() { + #[wasm_bindgen_test] + fn trait_() { let source = r#" pub trait Foo { pub fn foo(self, t: T) -> T @@ -598,6 +607,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn impl_trait() { let source = r#" impl Trait::Foo for (i32) { @@ -610,7 +620,8 @@ mod tests { } #[test] - fn r#const() { + #[wasm_bindgen_test] + fn const_() { let source = r#" pub const FOO: u32 = 1 + 1 "#; @@ -621,7 +632,8 @@ mod tests { } #[test] - fn r#use() { + #[wasm_bindgen_test] + fn use_() { let source = r#" use foo::bar::{bar::*, baz::{Baz, Baz2}} "#; @@ -688,7 +700,8 @@ mod tests { } #[test] - fn r#extern() { + #[wasm_bindgen_test] + fn extern_() { let source = r#" extern { pub unsafe fn foo() diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 343fb4caf9..a47ae10980 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -348,6 +348,9 @@ mod tests { Parser, }, }; + + use wasm_bindgen_test::wasm_bindgen_test; + fn parse_generic_params(source: &str) -> GenericParamList { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); @@ -370,6 +373,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] 
fn generic_param() { let source = r#", U, const N: usize>"#; let gp = parse_generic_params(source); @@ -424,6 +428,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn generic_arg() { let source = r#""#; let ga = parse_generic_arg(source); @@ -441,6 +446,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn where_clause() { let source = r#"where T: Trait + Trait2 diff --git a/crates/parser2/src/ast/pat.rs b/crates/parser2/src/ast/pat.rs index 33b03fad02..ec6379970d 100644 --- a/crates/parser2/src/ast/pat.rs +++ b/crates/parser2/src/ast/pat.rs @@ -172,6 +172,8 @@ pub enum PatKind { mod tests { use crate::{lexer::Lexer, parser::Parser}; + use wasm_bindgen_test::wasm_bindgen_test; + use super::*; fn parse_pat(source: &str) -> T @@ -189,16 +191,19 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn wildcard() { let _: WildCardPat = parse_pat("_"); } #[test] + #[wasm_bindgen_test] fn rest() { let _: RestPat = parse_pat(".."); } #[test] + #[wasm_bindgen_test] fn lit() { let _: LitPat = parse_pat("0x1"); let _: LitPat = parse_pat("true"); @@ -206,6 +211,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn tuple() { let source = r#"(Foo::Bar, true, ..)"#; let tuple_pat: TuplePat = parse_pat(source); @@ -224,6 +230,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn path_tuple() { let source = r#"Self::Bar(1, Foo::Bar)"#; let path_tuple_pat: PathTuplePat = parse_pat(source); @@ -246,6 +253,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn record() { let source = r#"Foo::Bar{a: 1, b: Foo::baz, c}"#; let record_pat: RecordPat = parse_pat(source); @@ -278,6 +286,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn or() { let source = r#"Foo::Int | Foo::Float | Foo::Str "#; let or_pat: OrPat = parse_pat(source); diff --git a/crates/parser2/src/ast/path.rs b/crates/parser2/src/ast/path.rs index baea1e3de0..47a59a7faa 100644 --- a/crates/parser2/src/ast/path.rs +++ b/crates/parser2/src/ast/path.rs @@ -47,6 +47,8 @@ mod tests { parser::{path::PathScope, Parser}, }; + use wasm_bindgen_test::wasm_bindgen_test; + fn parse_path(source: &str) -> Path { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); @@ -55,6 +57,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn path_ast() { let source = r#"self::Foo"#; let path = parse_path(source); @@ -66,6 +69,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn path_ast2() { let source = r#"Self::Dep"#; let path = parse_path(source); diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs index 771a850c30..ba1e276f17 100644 --- a/crates/parser2/src/ast/stmt.rs +++ b/crates/parser2/src/ast/stmt.rs @@ -229,6 +229,8 @@ mod tests { use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + fn parse_stmt(source: &str) -> T where T: TryFrom, @@ -244,6 +246,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn let_() { let let_stmt: LetStmt = parse_stmt("let x: i32 = 1"); @@ -261,6 +264,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn assign() { let assign_stmt: AssignStmt = parse_stmt(r#"Foo{x, y} = foo"#); assert!(matches!( @@ -271,6 +275,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn aug_assign() { let aug_assign_stmt: AugAssignStmt = parse_stmt("x += 1"); assert!(matches!( @@ -295,7 +300,8 @@ mod tests { } #[test] - fn r#for() { + #[wasm_bindgen_test] + fn for_() { let source = r#" for x in foo { bar @@ -309,7 +315,8 @@ mod tests { } #[test] - fn r#while() { + #[wasm_bindgen_test] + fn while_() { let source = r#" while { x } { bar @@ -323,7 +330,8 @@ mod tests { } #[test] - fn r#return() { + #[wasm_bindgen_test] + fn 
return_() { let ret_stmt: ReturnStmt = parse_stmt("return x"); assert!(ret_stmt.expr().is_some()); diff --git a/crates/parser2/src/ast/type_.rs b/crates/parser2/src/ast/type_.rs index f0dc28be76..0f8dc7f09e 100644 --- a/crates/parser2/src/ast/type_.rs +++ b/crates/parser2/src/ast/type_.rs @@ -118,6 +118,8 @@ mod tests { use super::*; use crate::{ast::prelude::*, lexer::Lexer, parser}; + use wasm_bindgen_test::wasm_bindgen_test; + fn parse_type(source: &str) -> T where T: TryFrom, @@ -133,6 +135,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn ptr_type() { let ptr_ty: PtrType = parse_type("*i32"); @@ -141,6 +144,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn path_type() { let path_ty: PathType = parse_type("Foo::Bar"); @@ -163,11 +167,13 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn self_type() { let _: SelfType = parse_type("Self"); } #[test] + #[wasm_bindgen_test] fn tuple_type() { let tuple_ty: TupleType = parse_type("((i32, u32), foo::Bar, *usize"); @@ -182,6 +188,7 @@ mod tests { } #[test] + #[wasm_bindgen_test] fn array_type() { let array_ty: ArrayType = parse_type("[(i32, u32); 1]"); From 6b6fa7efa8e1572b77d44aa90a01054e53075820 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 27 Feb 2023 17:10:29 +0100 Subject: [PATCH 073/678] Initialize `fe-hir` --- Cargo.lock | 259 +++++++++++++++++++++++++++++++++++++++++- crates/hir/Cargo.toml | 15 +++ crates/hir/src/lib.rs | 0 3 files changed, 270 insertions(+), 4 deletions(-) create mode 100644 crates/hir/Cargo.toml create mode 100644 crates/hir/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index b1cae935b8..4901678210 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,17 @@ # It is not intended for manual editing. version = 3 +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.8", + "once_cell", + "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.20" @@ -11,6 +22,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "arc-swap" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" + [[package]] name = "arrayvec" version = "0.7.2" @@ -299,6 +316,20 @@ dependencies = [ "itertools", ] +[[package]] +name = "crossbeam" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + [[package]] name = "crossbeam-channel" version = "0.5.6" @@ -333,6 +364,16 @@ dependencies = [ "scopeguard", ] +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.14" @@ -390,6 +431,19 @@ dependencies = [ "syn", ] +[[package]] +name = "dashmap" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +dependencies = [ + "cfg-if 1.0.0", + "hashbrown", + "lock_api", + "once_cell", + "parking_lot_core 0.9.7", +] + [[package]] name = 
"derive_more" version = "0.99.17" @@ -570,6 +624,16 @@ dependencies = [ "sha3", ] +[[package]] +name = "eyre" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +dependencies = [ + "indenter", + "once_cell", +] + [[package]] name = "fe" version = "0.20.0-alpha" @@ -608,7 +672,7 @@ dependencies = [ "insta", "num-bigint", "num-traits", - "parking_lot_core", + "parking_lot_core 0.8.0", "petgraph", "pretty_assertions", "rstest", @@ -721,6 +785,15 @@ dependencies = [ "vfs", ] +[[package]] +name = "fe-hir" +version = "0.20.0-alpha" +dependencies = [ + "fe-parser2", + "salsa-2022", + "tracing", +] + [[package]] name = "fe-library" version = "0.20.0-alpha" @@ -912,6 +985,18 @@ name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashlink" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa" +dependencies = [ + "hashbrown", +] [[package]] name = "heck" @@ -1291,7 +1376,17 @@ checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" dependencies = [ "instant", "lock_api", - "parking_lot_core", + "parking_lot_core 0.8.0", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.7", ] [[package]] @@ -1304,11 +1399,24 @@ dependencies = [ "cloudabi", "instant", "libc", - "redox_syscall", + "redox_syscall 0.1.57", "smallvec", "winapi", ] +[[package]] +name = "parking_lot_core" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "windows-sys", +] + [[package]] name = "petgraph" version = "0.6.2" @@ -1319,6 +1427,12 @@ dependencies = [ "indexmap", ] +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + [[package]] name = "plotters" version = "0.3.4" @@ -1568,6 +1682,15 @@ version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags", +] + [[package]] name = "regex" version = "1.7.1" @@ -1703,12 +1826,42 @@ dependencies = [ "lock_api", "log", "oorandom", - "parking_lot", + "parking_lot 0.11.1", "rustc-hash", "salsa-macros", "smallvec", ] +[[package]] +name = "salsa-2022" +version = "0.1.0" +source = "git+https://github.com/salsa-rs/salsa#ef7c0f12c8159e7025316e959c26f6278a576fa5" +dependencies = [ + "arc-swap", + "crossbeam", + "crossbeam-utils", + "dashmap", + "hashlink", + "indexmap", + "log", + "parking_lot 0.12.1", + "rustc-hash", + "salsa-2022-macros", + "smallvec", +] + +[[package]] +name = "salsa-2022-macros" 
+version = "0.1.0" +source = "git+https://github.com/salsa-rs/salsa#ef7c0f12c8159e7025316e959c26f6278a576fa5" +dependencies = [ + "eyre", + "heck 0.4.1", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "salsa-macros" version = "0.16.0" @@ -2023,6 +2176,38 @@ dependencies = [ "toml_datetime", ] +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if 1.0.0", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", +] + [[package]] name = "triehash" version = "0.8.4" @@ -2253,6 +2438,72 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" + [[package]] name = "wyz" version = "0.5.1" diff --git a/crates/hir/Cargo.toml 
b/crates/hir/Cargo.toml new file mode 100644 index 0000000000..f159765519 --- /dev/null +++ b/crates/hir/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "fe-hir" +version = "0.20.0-alpha" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides HIR definition and lowering for Fe lang." + +[dependencies] +tracing = "0.1" +# We may need to fix this to a specific version, +# but I want to keep up with the latest version until the new Fe implemeentation is merged into the master. +salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } +fe-parser2 = { path = "../parser2" } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs new file mode 100644 index 0000000000..e69de29bb2 From 406043b17d59575a321027759d7d2da26758c49a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 2 Mar 2023 12:24:05 +0100 Subject: [PATCH 074/678] Allow leading `mut` before `pat` --- crates/parser2/src/ast/pat.rs | 8 +++++++- crates/parser2/src/parser/pat.rs | 19 ++++++++++++------- crates/parser2/src/parser/stmt.rs | 1 - .../test_files/syntax_node/stmts/let.snap | 6 +++--- 4 files changed, 22 insertions(+), 12 deletions(-) diff --git a/crates/parser2/src/ast/pat.rs b/crates/parser2/src/ast/pat.rs index ec6379970d..2676057a3c 100644 --- a/crates/parser2/src/ast/pat.rs +++ b/crates/parser2/src/ast/pat.rs @@ -33,6 +33,11 @@ impl Pat { _ => unreachable!(), } } + + /// Returns the `mut` keyword if the patter is mutable. + pub fn mut_token(&self) -> Option { + support::token(self.syntax(), SK::MutKw) + } } ast_node! { @@ -255,7 +260,7 @@ mod tests { #[test] #[wasm_bindgen_test] fn record() { - let source = r#"Foo::Bar{a: 1, b: Foo::baz, c}"#; + let source = r#"Foo::Bar{a: 1, b: Foo::baz, mut c}"#; let record_pat: RecordPat = parse_pat(source); for (i, seg) in record_pat.path().unwrap().segments().enumerate() { @@ -279,6 +284,7 @@ mod tests { 2 => { assert!(field.name().is_none()); assert!(matches!(field.pat().unwrap().kind(), PatKind::Path(_))); + assert!(field.pat().unwrap().mut_token().is_some()); } _ => panic!("unexpected record pat"), } diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs index fb6c572c0f..dc76f2a842 100644 --- a/crates/parser2/src/parser/pat.rs +++ b/crates/parser2/src/parser/pat.rs @@ -7,13 +7,18 @@ use super::{define_scope, path::PathScope, token_stream::TokenStream, Parser}; pub fn parse_pat(parser: &mut Parser) -> bool { use SyntaxKind::*; - let (success, checkpoint) = match parser.current_kind() { - Some(Underscore) => parser.parse(WildCardPatScope::default(), None), - Some(Dot2) => parser.parse(RestPatScope::default(), None), - Some(LParen) => parser.parse(TuplePatScope::default(), None), - Some(kind) if is_lit(kind) => parser.parse(LitPatScope::default(), None), - _ => parser.parse(PathPatScope::default(), None), - }; + parser.bump_trivias(); + let checkpoint = parser.checkpoint(); + parser.bump_if(SyntaxKind::MutKw); + + let success = match parser.current_kind() { + Some(Underscore) => parser.parse(WildCardPatScope::default(), Some(checkpoint)), + Some(Dot2) => parser.parse(RestPatScope::default(), Some(checkpoint)), + Some(LParen) => parser.parse(TuplePatScope::default(), Some(checkpoint)), + Some(kind) if is_lit(kind) => parser.parse(LitPatScope::default(), Some(checkpoint)), + _ => parser.parse(PathPatScope::default(), Some(checkpoint)), + } + .0; if parser.current_kind() == Some(SyntaxKind::Pipe) { parser.parse(OrPatScope::default(), 
Some(checkpoint)).0 && success diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index 6e52215a60..d929a544a5 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -39,7 +39,6 @@ impl super::Parse for LetStmtScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LetKw); parser.set_newline_as_trivia(false); - parser.bump_if(SyntaxKind::MutKw); if !parse_pat(parser) { parser.error_and_recover("expected pattern", None); return; diff --git a/crates/parser2/test_files/syntax_node/stmts/let.snap b/crates/parser2/test_files/syntax_node/stmts/let.snap index d0bc0fe2de..e6b68254e2 100644 --- a/crates/parser2/test_files/syntax_node/stmts/let.snap +++ b/crates/parser2/test_files/syntax_node/stmts/let.snap @@ -49,9 +49,9 @@ Root@0..231 LetStmt@32..50 LetKw@32..35 "let" WhiteSpace@35..36 " " - MutKw@36..39 "mut" - WhiteSpace@39..40 " " - PathPat@40..41 + PathPat@36..41 + MutKw@36..39 "mut" + WhiteSpace@39..40 " " Path@40..41 PathSegment@40..41 Ident@40..41 "x" From 60905204e01541824cc9360e953446a05f3a42f3 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:27:43 +0100 Subject: [PATCH 075/678] Remove `assert` statement from the language --- crates/parser2/src/ast/expr.rs | 1 + crates/parser2/src/ast/item.rs | 4 +- crates/parser2/src/ast/mod.rs | 6 +- crates/parser2/src/ast/stmt.rs | 44 ++----------- crates/parser2/src/ast/{type_.rs => types.rs} | 0 crates/parser2/src/ast/use_tree.rs | 2 +- crates/parser2/src/parser/expr_atom.rs | 1 - crates/parser2/src/parser/item.rs | 11 +++- crates/parser2/src/parser/stmt.rs | 61 ++++++++++++------- crates/parser2/src/syntax_kind.rs | 5 -- .../test_files/syntax_node/stmts/assert.fe | 2 - .../test_files/syntax_node/stmts/assert.snap | 41 ------------- .../test_files/syntax_node/stmts/let.snap | 10 +-- 13 files changed, 61 insertions(+), 127 deletions(-) rename crates/parser2/src/ast/{type_.rs => types.rs} (100%) delete mode 100644 crates/parser2/test_files/syntax_node/stmts/assert.fe delete mode 100644 crates/parser2/test_files/syntax_node/stmts/assert.snap diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index fefe8eb9a2..579c5c9242 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -403,6 +403,7 @@ pub enum BinOp { Comp(CompBinOp), Logical(LogicalBinOp), } + impl BinOp { pub(super) fn from_node_or_token( node_or_token: rowan::NodeOrToken, diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 8051575462..453851c78a 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -200,7 +200,6 @@ ast_node! { impl super::GenericParamsOwner for Impl {} impl super::WhereClauseOwner for Impl {} impl super::AttrListOwner for Impl {} -impl super::ItemModifierOwner for Impl {} impl Impl { /// Returns the type of the impl. /// `Foo::Bar` in `impl Foo::Bar where .. { .. }` @@ -224,7 +223,6 @@ ast_node! { impl super::GenericParamsOwner for ImplTrait {} impl super::WhereClauseOwner for ImplTrait {} impl super::AttrListOwner for ImplTrait {} -impl super::ItemModifierOwner for ImplTrait {} impl ImplTrait { /// Returns the trait of the impl. /// `Foo` in `impl Foo for Bar { .. 
}` @@ -660,7 +658,7 @@ mod tests { 0 => { let mut segments = child.path().unwrap().iter(); assert_eq!(segments.next().unwrap().ident().unwrap().text(), "bar"); - assert!(segments.next().unwrap().wildcard().is_some()); + assert!(segments.next().unwrap().glob().is_some()); assert!(segments.next().is_none()); assert!(child.children().is_none()); } diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index 2bd6905c4d..53d2b06014 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -6,7 +6,7 @@ pub mod param; pub mod pat; pub mod path; pub mod stmt; -pub mod type_; +pub mod types; pub mod use_tree; pub use attr::*; @@ -17,16 +17,18 @@ pub use param::*; pub use pat::*; pub use path::*; pub use stmt::*; -pub use type_::*; +pub use types::*; pub use use_tree::*; pub type AstChildren = rowan::ast::AstChildren; pub type SyntaxText = rowan::SyntaxText; +pub type AstPtr = rowan::ast::AstPtr; pub mod prelude { pub use super::{ AttrListOwner, GenericArgsOwner, GenericParamsOwner, ItemModifierOwner, WhereClauseOwner, }; + pub use rowan::ast::AstNode; } macro_rules! ast_node { diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs index ba1e276f17..57b359cb62 100644 --- a/crates/parser2/src/ast/stmt.rs +++ b/crates/parser2/src/ast/stmt.rs @@ -1,7 +1,7 @@ use rowan::ast::{support, AstNode}; use super::ast_node; -use crate::SyntaxKind as SK; +use crate::{SyntaxKind as SK, SyntaxToken}; ast_node! { /// A statement. @@ -14,7 +14,6 @@ ast_node! { | SK::WhileStmt | SK::ContinueStmt | SK::BreakStmt - | SK::AssertStmt | SK::ReturnStmt | SK::ExprStmt } @@ -29,7 +28,6 @@ impl Stmt { SK::WhileStmt => StmtKind::While(AstNode::cast(self.syntax().clone()).unwrap()), SK::ContinueStmt => StmtKind::Continue(AstNode::cast(self.syntax().clone()).unwrap()), SK::BreakStmt => StmtKind::Break(AstNode::cast(self.syntax().clone()).unwrap()), - SK::AssertStmt => StmtKind::Assert(AstNode::cast(self.syntax().clone()).unwrap()), SK::ReturnStmt => StmtKind::Return(AstNode::cast(self.syntax().clone()).unwrap()), SK::ExprStmt => StmtKind::Expr(AstNode::cast(self.syntax().clone()).unwrap()), _ => unreachable!(), @@ -82,9 +80,9 @@ ast_node! { SK::AugAssignStmt, } impl AugAssignStmt { - /// Returns the pattern of the lhs of the assignment. - pub fn pat(&self) -> Option { - support::child(self.syntax()) + /// Returns the identifier of the lhs of the aug assignment. + pub fn ident(&self) -> Option { + support::token(self.syntax(), SK::Ident) } pub fn op(&self) -> Option { @@ -158,29 +156,6 @@ ast_node! { SK::BreakStmt } -ast_node! { - /// `assert cond` or - /// `assert cond, message` - pub struct AssertStmt, - SK::AssertStmt -} -impl AssertStmt { - /// Returns the condition of the assert statement. - pub fn cond(&self) -> Option { - support::child(self.syntax()) - } - - /// Returns the message of the assert statement. - pub fn message(&self) -> Option { - let mut exprs = support::children(self.syntax()); - let first = exprs.next(); - match exprs.next() { - Some(expr) => Some(expr), - None => first, - } - } -} - ast_node! 
{ /// `return` or /// `return expr` @@ -214,7 +189,6 @@ pub enum StmtKind { While(WhileStmt), Continue(ContinueStmt), Break(BreakStmt), - Assert(AssertStmt), Return(ReturnStmt), Expr(ExprStmt), } @@ -278,10 +252,7 @@ mod tests { #[wasm_bindgen_test] fn aug_assign() { let aug_assign_stmt: AugAssignStmt = parse_stmt("x += 1"); - assert!(matches!( - aug_assign_stmt.pat().unwrap().kind(), - PatKind::Path(_) - )); + assert!(matches!(aug_assign_stmt.ident().unwrap().text(), "x",)); assert!(matches!( aug_assign_stmt.op().unwrap(), crate::ast::ArithBinOp::Add(_) @@ -289,10 +260,7 @@ mod tests { let aug_assign_stmt: AugAssignStmt = parse_stmt("x <<= 1"); - assert!(matches!( - aug_assign_stmt.pat().unwrap().kind(), - PatKind::Path(_) - )); + assert!(matches!(aug_assign_stmt.ident().unwrap().text(), "x",)); assert!(matches!( aug_assign_stmt.op().unwrap(), crate::ast::ArithBinOp::LShift(_) diff --git a/crates/parser2/src/ast/type_.rs b/crates/parser2/src/ast/types.rs similarity index 100% rename from crates/parser2/src/ast/type_.rs rename to crates/parser2/src/ast/types.rs diff --git a/crates/parser2/src/ast/use_tree.rs b/crates/parser2/src/ast/use_tree.rs index 93a9ee228e..b0a57cdbf7 100644 --- a/crates/parser2/src/ast/use_tree.rs +++ b/crates/parser2/src/ast/use_tree.rs @@ -71,7 +71,7 @@ impl UsePathSegment { support::token(self.syntax(), SK::SelfKw) } - pub fn wildcard(&self) -> Option { + pub fn glob(&self) -> Option { support::token(self.syntax(), SK::Star) } } diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 71f6346cff..5c30183b51 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -53,7 +53,6 @@ define_scope! { WhileKw, ContinueKw, BreakKw, - AssertKw, ReturnKw ) } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index fd4ef86216..33a1e806d9 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -8,7 +8,7 @@ use super::{ func::FnDefScope, param::{parse_where_clause_opt, GenericParamListScope}, struct_::RecordFieldDefListScope, - token_stream::TokenStream, + token_stream::{LexicalToken, TokenStream}, type_::{parse_type, TupleTypeScope}, use_tree::UseTreeScope, Parser, @@ -58,8 +58,13 @@ impl super::Parse for ItemListScope { if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { parser.error("expected `fn` after `unsafe` keyword"); - } else if modifier.is_pub() && parser.current_kind() == Some(ExternKw) { - parser.error("`pub` can't be used for `extern` block"); + } else if modifier.is_pub() && matches!(parser.current_kind(), Some(ImplKw | ExternKw)) + { + let error_msg = format!( + "`pub` can't be used for `{}`", + parser.current_token().unwrap().text() + ); + parser.error(&error_msg); } match parser.current_kind() { diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index d929a544a5..68e97ee6d6 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -19,12 +19,14 @@ pub fn parse_stmt(parser: &mut Parser, checkpoint: Option parser.parse(WhileStmtScope::default(), checkpoint), Some(ContinueKw) => parser.parse(ContinueStmtScope::default(), checkpoint), Some(BreakKw) => parser.parse(BreakStmtScope::default(), checkpoint), - Some(AssertKw) => parser.parse(AssertStmtScope::default(), checkpoint), Some(ReturnKw) => parser.parse(ReturnStmtScope::default(), checkpoint), _ => { - let is_assign_stmt = - parser.dry_run(|parser| parser.parse(AssignStmtScope::default(), 
None).0); - if is_assign_stmt { + // 1. Try to parse the statement as an augmented assignment statement. + // 2. If 1. fails, try to parse the statement as an assignment statement. + // 3. If 2. fails, try to parse the statement as an expression statement. + if parser.dry_run(|parser| parser.parse(AugAssignStmtScope::default(), None).0) { + parser.parse(AugAssignStmtScope::default(), checkpoint) + } else if parser.dry_run(|parser| parser.parse(AssignStmtScope::default(), None).0) { parser.parse(AssignStmtScope::default(), checkpoint) } else { parser.parse(ExprStmtScope::default(), checkpoint) @@ -102,18 +104,6 @@ impl super::Parse for BreakStmtScope { } } -define_scope! { AssertStmtScope, AssertStmt, Inheritance } -impl super::Parse for AssertStmtScope { - fn parse(&mut self, parser: &mut Parser) { - parser.bump_expected(SyntaxKind::AssertKw); - parser.set_newline_as_trivia(false); - parse_expr(parser); - if parser.bump_if(SyntaxKind::Comma) { - parse_expr(parser); - } - } -} - define_scope! { ReturnStmtScope, ReturnStmt, Inheritance } impl super::Parse for ReturnStmtScope { fn parse(&mut self, parser: &mut Parser) { @@ -127,16 +117,26 @@ impl super::Parse for ReturnStmtScope { } } -define_scope! { AssignStmtScope, AssignStmt, Inheritance } -impl super::Parse for AssignStmtScope { +define_scope! { AugAssignStmtScope, AugAssignStmt, Inheritance } +impl super::Parse for AugAssignStmtScope { fn parse(&mut self, parser: &mut Parser) { - parser.with_recovery_tokens(parse_pat, &[SyntaxKind::Eq]); - parser.set_newline_as_trivia(false); + + parser.with_recovery_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expeced identifier for the assignment", + None, + ) + }, + &[SyntaxKind::Eq], + ); + parser.with_next_expected_tokens( |parser| { - if bump_aug_assign_op_opt(parser) { - self.set_kind(SyntaxKind::AugAssignStmt); + if !bump_aug_assign_op(parser) { + parser.error_and_recover("expected augmented assignment operator", None); } }, &[SyntaxKind::Eq], @@ -151,6 +151,21 @@ impl super::Parse for AssignStmtScope { } } +define_scope! { AssignStmtScope, AssignStmt, Inheritance } +impl super::Parse for AssignStmtScope { + fn parse(&mut self, parser: &mut Parser) { + parser.set_newline_as_trivia(false); + + parser.with_recovery_tokens(parse_pat, &[SyntaxKind::Eq]); + if !parser.bump_if(SyntaxKind::Eq) { + parser.error_and_recover("expected `=`", None); + return; + } + + parse_expr(parser); + } +} + define_scope! 
{ ExprStmtScope, ExprStmt, Inheritance } impl super::Parse for ExprStmtScope { fn parse(&mut self, parser: &mut Parser) { @@ -158,7 +173,7 @@ impl super::Parse for ExprStmtScope { } } -fn bump_aug_assign_op_opt(parser: &mut Parser) -> bool { +fn bump_aug_assign_op(parser: &mut Parser) -> bool { use SyntaxKind::*; match parser.current_kind() { Some(Pipe | Hat | Amp | Plus | Minus | Star | Slash | Percent | Star2) => { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index ea489c4e01..0917b15b41 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -176,9 +176,6 @@ pub enum SyntaxKind { /// `while` #[token("while")] WhileKw, - /// `assert` - #[token("assert")] - AssertKw, /// `pub` #[token("pub")] PubKw, @@ -297,8 +294,6 @@ pub enum SyntaxKind { /// `break` BreakStmt, - /// `assert x == 2` - AssertStmt, /// `return 1` ReturnStmt, /// `1` diff --git a/crates/parser2/test_files/syntax_node/stmts/assert.fe b/crates/parser2/test_files/syntax_node/stmts/assert.fe deleted file mode 100644 index 2e4d7b8c44..0000000000 --- a/crates/parser2/test_files/syntax_node/stmts/assert.fe +++ /dev/null @@ -1,2 +0,0 @@ -assert 0 < x -assert 0 < x, "`x` must be positive" \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/assert.snap b/crates/parser2/test_files/syntax_node/stmts/assert.snap deleted file mode 100644 index ca935998e2..0000000000 --- a/crates/parser2/test_files/syntax_node/stmts/assert.snap +++ /dev/null @@ -1,41 +0,0 @@ ---- -source: crates/parser2/tests/syntax_node.rs -expression: node -input_file: crates/parser2/test_files/syntax_node/stmts/assert.fe ---- -Root@0..49 - AssertStmt@0..12 - AssertKw@0..6 "assert" - WhiteSpace@6..7 " " - BinExpr@7..12 - LitExpr@7..8 - Lit@7..8 - Int@7..8 "0" - WhiteSpace@8..9 " " - Lt@9..10 "<" - WhiteSpace@10..11 " " - PathExpr@11..12 - Path@11..12 - PathSegment@11..12 - Ident@11..12 "x" - Newline@12..13 "\n" - AssertStmt@13..49 - AssertKw@13..19 "assert" - WhiteSpace@19..20 " " - BinExpr@20..25 - LitExpr@20..21 - Lit@20..21 - Int@20..21 "0" - WhiteSpace@21..22 " " - Lt@22..23 "<" - WhiteSpace@23..24 " " - PathExpr@24..25 - Path@24..25 - PathSegment@24..25 - Ident@24..25 "x" - Comma@25..26 "," - WhiteSpace@26..27 " " - LitExpr@27..49 - Lit@27..49 - String@27..49 "\"`x` must be positive\"" - diff --git a/crates/parser2/test_files/syntax_node/stmts/let.snap b/crates/parser2/test_files/syntax_node/stmts/let.snap index e6b68254e2..6ab5ad194a 100644 --- a/crates/parser2/test_files/syntax_node/stmts/let.snap +++ b/crates/parser2/test_files/syntax_node/stmts/let.snap @@ -69,10 +69,7 @@ Root@0..231 Int@49..50 "1" Newline@50..52 "\n\n" AugAssignStmt@52..62 - PathPat@52..53 - Path@52..53 - PathSegment@52..53 - Ident@52..53 "x" + Ident@52..53 "x" WhiteSpace@53..54 " " Plus@54..55 "+" Eq@55..56 "=" @@ -89,10 +86,7 @@ Root@0..231 Int@61..62 "1" Newline@62..63 "\n" AugAssignStmt@63..75 - PathPat@63..64 - Path@63..64 - PathSegment@63..64 - Ident@63..64 "y" + Ident@63..64 "y" WhiteSpace@64..65 " " LShift@65..67 Lt@65..66 "<" From 8f04adfc44f472585ea58ffa2faab16cf9aa1f05 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:35:08 +0100 Subject: [PATCH 076/678] Add HIR `item` def --- crates/hir/src/hir_def/item.rs | 206 +++++++++++++++++++++++++++++++++ 1 file changed, 206 insertions(+) create mode 100644 crates/hir/src/hir_def/item.rs diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs new file mode 100644 index 
0000000000..50c6099aa4 --- /dev/null +++ b/crates/hir/src/hir_def/item.rs @@ -0,0 +1,206 @@ +use fe_parser2::ast; + +use crate::span::HirOrigin; + +use super::{ + AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TypeId, WherePredicateId, +}; + +#[salsa::tracked] +pub struct Fn { + #[id] + pub name: super::IdentId, + pub generic_params: GenericParamListId, + pub where_predicate: WherePredicateId, + pub params: FnParamListId, + pub ret_ty: Option, + pub modifier: ItemModifier, + pub attributes: AttrListId, + pub body: Option, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Struct { + #[id] + pub name: super::IdentId, + + pub is_pub: bool, + pub generic_params: GenericParamListId, + pub where_predicate: WherePredicateId, + pub attributes: AttrListId, + pub fields: RecordFieldListId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Contract { + #[id] + pub name: super::IdentId, + + pub is_pub: bool, + pub attributes: AttrListId, + pub fields: RecordFieldListId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Enum { + #[id] + pub name: super::IdentId, + + pub is_pub: bool, + pub generic_params: GenericParamListId, + pub attributes: AttrListId, + pub where_predicate: WherePredicateId, + pub variants: EnumVariantListId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct TypeAlias { + #[id] + pub name: super::IdentId, + + pub is_pub: bool, + pub generic_params: GenericParamListId, + pub attributes: AttrListId, + pub where_predicate: WherePredicateId, + pub ty: TypeId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Impl { + #[id] + pub ty: super::TypeId, + + pub generic_params: GenericParamListId, + pub attributes: AttrListId, + pub where_predicate: WherePredicateId, + pub items: ImplItemListId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Trait { + #[id] + pub name: super::IdentId, + + pub generic_params: GenericParamListId, + pub attributes: AttrListId, + pub where_predicate: WherePredicateId, + pub items: TraitItemListId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct ImplTrait { + #[id] + pub trait_path: super::PathId, + #[id] + pub ty: TypeId, + + pub generic_params: GenericParamListId, + pub attributes: AttrListId, + pub where_predicate: WherePredicateId, + pub items: ImplTraitItemListId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Const { + #[id] + pub name: super::IdentId, + pub body: Body, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Use { + pub name: super::UseTreeId, + + pub(crate) origin: HirOrigin>, +} + +#[salsa::tracked] +pub struct Extern { + pub items: ExternItemListId, + + pub(crate) origin: HirOrigin>, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub enum ItemKind { + Fn(Fn), + Struct(Struct), + Contract(Contract), + Enum(Enum), + TypeAlias(TypeAlias), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), + Const(Const), + Use(Use), + Extern(Extern), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ItemId { + Ident(IdentId), + Ty(TypeId), + Ty2(TypeId, TypeId), + Extern(u32), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ItemModifier { + Pub, + Unsafe, + PubAndUnsafe, + None, +} + +#[salsa::interned] +pub struct RecordFieldListId { + #[return_ref] + fields: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct RecordField { + name: IdentId, + ty: TypeId, + is_pub: bool, +} 
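A usage sketch, not part of the patch itself: `#[salsa::tracked]` and `#[salsa::interned]` structs get a generated `new` constructor plus one accessor method per field (with `#[return_ref]` accessors handing back references), so downstream passes are expected to read the items defined above roughly as follows. This assumes the `HirDb` trait and the `IdentId` interned struct introduced by later commits in this series; `record_field_names` is an illustrative name, not an API from the patch.

// Hypothetical helper, shown only to illustrate the generated salsa accessors.
fn record_field_names(db: &dyn crate::HirDb, s: Struct) -> Vec<String> {
    s.fields(db)        // tracked-struct accessor -> RecordFieldListId
        .fields(db)     // `#[return_ref]` accessor on the interned list -> &Vec<RecordField>
        .iter()
        .map(|field| field.name.data(db)) // interned IdentId -> its underlying String
        .collect()
}
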
+ +#[salsa::interned] +pub struct EnumVariantListId { + #[return_ref] + variants: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct EnumVariant { + name: IdentId, + ty: TypeId, +} + +#[salsa::interned] +pub struct ImplItemListId { + #[return_ref] + items: Vec, +} + +pub type TraitItemListId = ImplItemListId; +pub type ImplTraitItemListId = ImplItemListId; +pub type ExternItemListId = ImplItemListId; From d883dd2bbed5a3b9185f9898c49507208aa0b1ff Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:35:31 +0100 Subject: [PATCH 077/678] Add `HIR `attr` def --- crates/hir/src/hir_def/attr.rs | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 crates/hir/src/hir_def/attr.rs diff --git a/crates/hir/src/hir_def/attr.rs b/crates/hir/src/hir_def/attr.rs new file mode 100644 index 0000000000..00d81ce349 --- /dev/null +++ b/crates/hir/src/hir_def/attr.rs @@ -0,0 +1,31 @@ +use super::IdentId; + +#[salsa::interned] +pub struct AttrListId { + #[return_ref] + attrs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Attr { + Normal(NormalAttr), + DocComment(DocCommentAttr), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct NormalAttr { + pub name: IdentId, + pub args: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct DocCommentAttr { + /// This is the text of the doc comment, excluding the `///` prefix. + pub text: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AttrArg { + key: IdentId, + value: IdentId, +} From 85fc0b8c38a59f9864413c5669a012f5f7832a77 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:35:39 +0100 Subject: [PATCH 078/678] Add HIR `body` def --- crates/hir/src/hir_def/body.rs | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 crates/hir/src/hir_def/body.rs diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs new file mode 100644 index 0000000000..8d5fbf6ff4 --- /dev/null +++ b/crates/hir/src/hir_def/body.rs @@ -0,0 +1,29 @@ +use cranelift_entity::{PrimaryMap, SecondaryMap}; +use fe_parser2::ast::{self, Stmt}; + +use crate::span::HirOrigin; + +use super::{Expr, ExprId, Pat, PatId, StmtId}; + +#[salsa::tracked] +pub struct Body { + #[id] + pub kind: BodyKind, + + pub stmts: PrimaryMap, + pub exprs: PrimaryMap, + pub pats: PrimaryMap, + + pub(crate) stmt_source_map: SecondaryMap>>, + pub(crate) expr_source_map: SecondaryMap>>, + pub(crate) pat_source_map: SecondaryMap>>, + pub(crate) ast: HirOrigin>, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum BodyKind { + /// This is a body appearing in a item, e.g., a function or const item. 
+    DefBlock(super::ItemKind),
+    /// This is a body appearing in array types or
+    NamelessConst,
+}

From a4c4fa523eec19f2c588e6fac0b107e40cc6bb46 Mon Sep 17 00:00:00 2001
From: Yoshitomo Nakanishi
Date: Thu, 16 Mar 2023 00:35:48 +0100
Subject: [PATCH 079/678] Add HIR `expr` def

---
 crates/hir/src/hir_def/expr.rs | 134 +++++++++++++++++++++++++++++++++
 1 file changed, 134 insertions(+)
 create mode 100644 crates/hir/src/hir_def/expr.rs

diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs
new file mode 100644
index 0000000000..d6e6f9a7f9
--- /dev/null
+++ b/crates/hir/src/hir_def/expr.rs
@@ -0,0 +1,134 @@
+use cranelift_entity::entity_impl;
+
+use super::{Body, IdentId, IntegerId, LitKind, PatId, PathId, StmtId};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Expr {
+    Lit(LitKind),
+    BlockExpr(Vec),
+    /// The first `ExprId` is the lhs, the second is the rhs.
+    ///
+    /// **NOTE:** The `AugAssign` statement is desugared to an `Assign` statement
+    /// and a `BinOp`.
+    Bin(ExprId, ExprId, BinOp),
+    Un(ExprId, UnOp),
+    /// The first `ExprId` is the callee, the second is the arguments.
+    Call(ExprId, Vec),
+    /// The first `ExprId` is the method receiver, the second is the method
+    /// name, the third is the arguments.
+    MethodCall(ExprId, IdentId, Vec),
+    Path(PathId),
+    /// The record construction expression.
+    /// The first `PathId` is the record type, the second is the record fields.
+    Record(PathId, Vec<(IdentId, ExprId)>),
+    Field(ExprId, FieldIndex),
+    Tuple(Vec),
+    /// The first `ExprId` is the indexed expression, the second is the index.
+    Index(ExprId, ExprId),
+    ArrayExpr(Vec),
+
+    /// The size of the rep should be the body instead of expression, because it
+    /// should be resolved as a constant expression.
+    ArrayRepExpr(ExprId, Body),
+
+    /// The first `ExprId` is the condition, the second is the then branch, the
+    /// third is the else branch.
+    If(ExprId, ExprId, Option),
+
+    /// The first `ExprId` is the scrutinee, the second is the arms.
+    Match(ExprId, Vec),
+
+    /// Represents an invalid expressions.
+    /// These expressions are used to represent errors in the AST.
+    Invalid,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ExprId(u32);
+entity_impl!(ExprId);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum FieldIndex {
+    /// The field is indexed by its name.
+    /// `field.foo`.
+    Ident(IdentId),
+    /// The field is indexed by its integer.
+    /// `field.0`.
+    Index(IntegerId),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum MatchArm {
+    /// The first `PatId` is the pattern, the second is
+    /// the arm body.
+ MatchArm(PatId, ExprId), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum BinOp { + Arith(ArithBinOp), + Comp(CompBinOp), + Logical(LogicalBinOp), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ArithBinOp { + /// `+` + Add, + /// `-` + Sub, + /// `*` + Mul, + /// `/` + Div, + /// `%` + Mod, + /// `**` + Pow, + /// `<<` + LShift, + /// `>>` + RShift, + /// `&` + BitAnd, + /// `|` + BitOr, + /// `^` + BitXor, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum CompBinOp { + /// `==` + Eq, + /// `!=` + NotEq, + /// `<` + Lt, + /// `<=` + LtEq, + /// `>` + Gt, + /// `>=` + GtEq, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum LogicalBinOp { + /// `&&` + And, + /// `||` + Or, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum UnOp { + /// `+` + Plus, + /// `-` + Minus, + /// `!` + Not, + /// `~` + BitNot, +} From acbf468f974fcc7c28b0f7ab92b016fc449ef7f7 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:36:00 +0100 Subject: [PATCH 080/678] Add HIR `params` def --- crates/hir/src/hir_def/params.rs | 87 ++++++++++++++++++++++++++++++++ 1 file changed, 87 insertions(+) create mode 100644 crates/hir/src/hir_def/params.rs diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs new file mode 100644 index 0000000000..9200f27d12 --- /dev/null +++ b/crates/hir/src/hir_def/params.rs @@ -0,0 +1,87 @@ +use crate::hir_def::TypeId; + +use super::{Expr, IdentId, PathId}; + +#[salsa::interned] +pub struct GenericArgListId { + #[return_ref] + pub args: Vec, +} + +#[salsa::interned] +pub struct WhereClauseId { + #[return_ref] + pub predicates: Vec, +} + +#[salsa::interned] +pub struct FnParamListId { + #[return_ref] + args: Vec, +} + +#[salsa::interned] +pub struct GenericParamListId { + #[return_ref] + pub params: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FnParam { + pub name: IdentId, + pub label: Option, + pub ty: TypeId, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct WherePredicateId { + pub ty: TypeId, + pub bound: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum GenericParam { + Type(TypeGenericParam), + Const(ConstGenericParam), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeGenericParam { + pub name: IdentId, + pub bounds: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ConstGenericParam { + pub name: IdentId, + pub expr: Expr, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum GenericArg { + Type(TypeGenericArg), + Const(ConstGenericArg), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeGenericArg { + pub path: PathId, + pub bounds: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ConstGenericArg { + pub expr: Expr, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeBound { + pub path: PathId, + pub generic_args: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct WherePredicate { + pub ty: TypeId, + pub bound: Vec, +} From 9fe4f53b0ccb2669cf19f417d8e7bb8c157904cc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:36:08 +0100 Subject: [PATCH 081/678] Add HIR `pat` def --- crates/hir/src/hir_def/pat.rs | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 crates/hir/src/hir_def/pat.rs diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs new file mode 100644 index 0000000000..7f565dc7de --- /dev/null +++ 
b/crates/hir/src/hir_def/pat.rs @@ -0,0 +1,27 @@ +use cranelift_entity::entity_impl; + +use super::{IdentId, LitKind, PathId}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Pat { + WildCard, + Rest, + Lit(LitKind), + Tuple(Vec), + Path(PathId), + PathTuple(PathId, Vec), + Record(PathId, Vec), + Or(PatId, PatId), + + Invalid, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct PatId(u32); +entity_impl!(PatId); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct RecordPatField { + pub label: Option, + pub pat: PatId, +} From 2cd506b9f32d20d34e28742fd8c7eabc2c39d847 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:36:30 +0100 Subject: [PATCH 082/678] Add HIR `stmt` def --- crates/hir/src/hir_def/stmt.rs | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 crates/hir/src/hir_def/stmt.rs diff --git a/crates/hir/src/hir_def/stmt.rs b/crates/hir/src/hir_def/stmt.rs new file mode 100644 index 0000000000..28e060df45 --- /dev/null +++ b/crates/hir/src/hir_def/stmt.rs @@ -0,0 +1,29 @@ +use cranelift_entity::entity_impl; + +use super::{ExprId, PatId, TypeId}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Stmt { + /// The `let` statement. The first `PatId` is the pattern for binding, the + /// second `Option` is the type annotation, and the third + /// `Option` is the expression for initialization. + Let(PatId, Option, Option), + /// The `Assign` statement. The first `PatId` is the pattern for binding, + /// and the second `ExprId` is the rhs value of the binding. + Assign(PatId, ExprId), + /// The first `PatId` is the pattern for binding which can be used in the + /// for-loop body. + /// + /// The second `ExprId` is the iterator expression. + /// + /// The third `ExprId` is the for-loop body. 
+ For(PatId, ExprId, ExprId), + Continue, + Break, + Return(Option), + Expr(ExprId), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct StmtId(u32); +entity_impl!(StmtId); From eebf7455035749dc71ae70d1088a16b68d661058 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:36:38 +0100 Subject: [PATCH 083/678] Add HIR `type` def --- crates/hir/src/hir_def/types.rs | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 crates/hir/src/hir_def/types.rs diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs new file mode 100644 index 0000000000..ab6005941f --- /dev/null +++ b/crates/hir/src/hir_def/types.rs @@ -0,0 +1,33 @@ +use super::{Body, GenericArg, PathId}; + +#[salsa::interned] +pub struct TypeId { + kind: TypeKind, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub enum TypeKind { + Ptr(TypeId), + Path(PathType), + SelfType, + Tuple(TupleType), + Array(ArrayType), + Invalid, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct PathType { + pub path: PathId, + pub args: Vec, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct TupleType { + pub elems: Vec, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct ArrayType { + pub elem_ty: TypeId, + pub len: Body, +} From cff88476163e1d962c3f4f4ae18e5fc0aa441898 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:36:49 +0100 Subject: [PATCH 084/678] Add HIR `use_tree` def --- crates/hir/src/hir_def/use_tree.rs | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 crates/hir/src/hir_def/use_tree.rs diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs new file mode 100644 index 0000000000..0435a5c857 --- /dev/null +++ b/crates/hir/src/hir_def/use_tree.rs @@ -0,0 +1,21 @@ +use super::{IdentId, PathId}; + +#[salsa::interned] +pub struct UseTreeId { + pub path: Option, + pub subtree: Vec, + pub alias: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum UsePath { + Path(PathId), + /// `*`. 
+ Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum UseTreeAlias { + Ident(IdentId), + Underscore, +} From 3e9fbacdb2495c87752bfa0d3c5e5e017aee3a24 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:37:31 +0100 Subject: [PATCH 085/678] Add Jar for HIR entities --- Cargo.lock | 9 ++++++ crates/hir/Cargo.toml | 3 ++ crates/hir/src/hir_def/mod.rs | 55 +++++++++++++++++++++++++++++++++++ crates/hir/src/lib.rs | 37 +++++++++++++++++++++++ 4 files changed, 104 insertions(+) create mode 100644 crates/hir/src/hir_def/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 4901678210..9d85f662c7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -280,6 +280,12 @@ dependencies = [ "libc", ] +[[package]] +name = "cranelift-entity" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a59bcbca89c3f1b70b93ab3cbba5e5e0cbf3e63dadb23c7525cb142e21a9d4c" + [[package]] name = "criterion" version = "0.3.6" @@ -789,7 +795,10 @@ dependencies = [ name = "fe-hir" version = "0.20.0-alpha" dependencies = [ + "cranelift-entity", + "derive_more", "fe-parser2", + "num-bigint", "salsa-2022", "tracing", ] diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index f159765519..2a6bdee99d 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -13,3 +13,6 @@ tracing = "0.1" # but I want to keep up with the latest version until the new Fe implemeentation is merged into the master. salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } fe-parser2 = { path = "../parser2" } +derive_more = "0.99" +cranelift-entity = "0.91" +num-bigint = "0.4.3" \ No newline at end of file diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs new file mode 100644 index 0000000000..ec628ce4d7 --- /dev/null +++ b/crates/hir/src/hir_def/mod.rs @@ -0,0 +1,55 @@ +pub mod attr; +pub mod body; +pub mod expr; +pub mod item; +pub mod params; +pub mod pat; +pub mod stmt; +pub mod types; +pub mod use_tree; + +pub use attr::*; +pub use body::*; +pub use expr::*; +pub use item::*; +pub use params::*; +pub use pat::*; +pub use stmt::*; +pub use types::*; +pub use use_tree::*; + +#[salsa::interned] +pub struct IdentId { + data: String, +} + +#[salsa::interned] +pub struct IntegerId { + data: IntegerId, +} + +#[salsa::interned] +pub struct StringId { + data: String, +} + +#[salsa::interned] +pub struct PathId { + segments: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum LitKind { + Int(IntegerId), + String(StringId), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum PathSegment { + /// `Normal Path` segment. + Ident(IdentId), + /// `Self` segment. + SelfTy, + /// `self` segment. + Self_, +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index e69de29bb2..ca90dc511d 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -0,0 +1,37 @@ +pub mod hir_def; +pub mod span; + +#[salsa::jar(db = Db)] +pub struct Jar( + // Tracked Hir items. + hir_def::Fn, + hir_def::Struct, + hir_def::Contract, + hir_def::Enum, + hir_def::TypeAlias, + hir_def::Impl, + hir_def::Trait, + hir_def::ImplTrait, + hir_def::Const, + hir_def::Use, + hir_def::Extern, + hir_def::Body, + // Interned structs. 
+ hir_def::IdentId, + hir_def::IntegerId, + hir_def::StringId, + hir_def::PathId, + hir_def::FnParamListId, + hir_def::AttrListId, + hir_def::WhereClauseId, + hir_def::GenericArgListId, + hir_def::GenericParamListId, + hir_def::RecordFieldListId, + hir_def::EnumVariantListId, + hir_def::ImplItemListId, + hir_def::TypeId, + hir_def::UseTreeId, +); + +pub trait Db: salsa::DbWithJar {} +impl Db for DB where DB: ?Sized + salsa::DbWithJar {} From bb328363c8463e2f09ddc815ef517e6e5a2d8695 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 00:42:20 +0100 Subject: [PATCH 086/678] Add `HirOrigin` type to track the HIR definition origin in the source code --- crates/hir/src/span/mod.rs | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 crates/hir/src/span/mod.rs diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs new file mode 100644 index 0000000000..6066f2d38e --- /dev/null +++ b/crates/hir/src/span/mod.rs @@ -0,0 +1,32 @@ +use fe_parser2::{ast::AstPtr, SyntaxNode}; + +/// This enum represents the origin of the HIR node. +/// The origin has three possible kinds. +/// 1. `Raw` is used for nodes that are created by the parser and not +/// 2. `Expanded` is used for nodes that are created by the compiler and not +/// 3. `Desugared` is used for nodes that are created by the compiler and not +// TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum HirOrigin +where + T: Send + Clone + PartialEq + Eq + std::fmt::Debug + std::hash::Hash, +{ + /// The HIR node is created by direct lowering from the corresponding AST. + Raw(T), + /// The HIR node is created by expanding attributes. + /// The `SyntaxNode` points to the callsite of the attribute. + Expanded(SyntaxNode), + /// The HIR node is the result of desugaring in the lower phase from AST to + /// HIR. e.g., `a += b` is desugared into `a = a + b`. + Desugared(DesugaredOrigin), +} + +/// This enum represents the origin of the HIR node which is desugared into +/// other HIR node kinds. +// TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum DesugaredOrigin { + /// The HIR node is the result of desugaring an augmented assignment + /// statement. 
+ AugAssign(AstPtr), +} From 7f8bee78d26d02a7373703fd9e4bd4c7c1a62b1e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 01:32:14 +0100 Subject: [PATCH 087/678] Add HIR `path` def --- crates/hir/src/hir_def/mod.rs | 17 ++--------------- crates/hir/src/hir_def/path.rs | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 15 deletions(-) create mode 100644 crates/hir/src/hir_def/path.rs diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index ec628ce4d7..d7bf1f1878 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -4,6 +4,7 @@ pub mod expr; pub mod item; pub mod params; pub mod pat; +pub mod path; pub mod stmt; pub mod types; pub mod use_tree; @@ -14,6 +15,7 @@ pub use expr::*; pub use item::*; pub use params::*; pub use pat::*; +pub use path::*; pub use stmt::*; pub use types::*; pub use use_tree::*; @@ -33,23 +35,8 @@ pub struct StringId { data: String, } -#[salsa::interned] -pub struct PathId { - segments: Vec, -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum LitKind { Int(IntegerId), String(StringId), } - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum PathSegment { - /// `Normal Path` segment. - Ident(IdentId), - /// `Self` segment. - SelfTy, - /// `self` segment. - Self_, -} diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs new file mode 100644 index 0000000000..80622a07d0 --- /dev/null +++ b/crates/hir/src/hir_def/path.rs @@ -0,0 +1,17 @@ +use super::IdentId; + +#[salsa::interned] +pub struct PathId { + segments: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum PathSegment { + /// `Normal Path` segment. + Ident(IdentId), + /// `Self` segment. + SelfTy, + /// `self` segment. + Self_, + Invalid, +} From 34f3944e77120d780cc6e56c356cb6b9a35a0b1f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 01:41:45 +0100 Subject: [PATCH 088/678] Add HIR lower for `Path` --- crates/hir/src/lib.rs | 7 ++++--- crates/hir/src/lower/body.rs | 1 + crates/hir/src/lower/mod.rs | 1 + crates/hir/src/lower/path.rs | 26 ++++++++++++++++++++++++++ 4 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 crates/hir/src/lower/body.rs create mode 100644 crates/hir/src/lower/mod.rs create mode 100644 crates/hir/src/lower/path.rs diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ca90dc511d..26ba501a01 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,7 +1,8 @@ pub mod hir_def; +pub mod lower; pub mod span; -#[salsa::jar(db = Db)] +#[salsa::jar(db = HirDefDb)] pub struct Jar( // Tracked Hir items. 
hir_def::Fn, @@ -33,5 +34,5 @@ pub struct Jar( hir_def::UseTreeId, ); -pub trait Db: salsa::DbWithJar {} -impl Db for DB where DB: ?Sized + salsa::DbWithJar {} +pub trait HirDefDb: salsa::DbWithJar {} +impl HirDefDb for DB where DB: ?Sized + salsa::DbWithJar {} diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/crates/hir/src/lower/body.rs @@ -0,0 +1 @@ + diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs new file mode 100644 index 0000000000..1fb8cc55c2 --- /dev/null +++ b/crates/hir/src/lower/mod.rs @@ -0,0 +1 @@ +mod path; diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs new file mode 100644 index 0000000000..0ad4074ce5 --- /dev/null +++ b/crates/hir/src/lower/path.rs @@ -0,0 +1,26 @@ +use fe_parser2::ast; + +use crate::{ + hir_def::{IdentId, PathId, PathSegment}, + HirDb, +}; + +impl PathId { + pub fn from_ast(db: &dyn HirDb, ast: ast::Path) -> Self { + let mut segments = Vec::new(); + for seg in ast.into_iter() { + let segment = if seg.is_self() { + PathSegment::Self_ + } else if seg.is_self_ty() { + PathSegment::SelfTy + } else if let Some(ident) = seg.ident() { + PathSegment::Ident(IdentId::new(db, ident.text().to_string())) + } else { + PathSegment::Invalid + }; + segments.push(segment); + } + + Self::new(db, segments) + } +} From b756cefe534e9636d369238b2dfbe09bab7bf37d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 03:04:27 +0100 Subject: [PATCH 089/678] Define `IngotId` and `FileId` --- crates/hir/src/lib.rs | 10 +++++---- crates/hir/src/span/mod.rs | 45 +++++++++++++++++++++++++++++++++++++- 2 files changed, 50 insertions(+), 5 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 26ba501a01..43ac052490 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2,7 +2,7 @@ pub mod hir_def; pub mod lower; pub mod span; -#[salsa::jar(db = HirDefDb)] +#[salsa::jar(db = HirDb)] pub struct Jar( // Tracked Hir items. hir_def::Fn, @@ -17,7 +17,6 @@ pub struct Jar( hir_def::Use, hir_def::Extern, hir_def::Body, - // Interned structs. hir_def::IdentId, hir_def::IntegerId, hir_def::StringId, @@ -32,7 +31,10 @@ pub struct Jar( hir_def::ImplItemListId, hir_def::TypeId, hir_def::UseTreeId, + // Interned structs. + span::IngotId, + span::FileId, ); -pub trait HirDefDb: salsa::DbWithJar {} -impl HirDefDb for DB where DB: ?Sized + salsa::DbWithJar {} +pub trait HirDb: salsa::DbWithJar {} +impl HirDb for DB where DB: ?Sized + salsa::DbWithJar {} diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 6066f2d38e..986e0e1e7e 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,5 +1,16 @@ +use std::path::PathBuf; + use fe_parser2::{ast::AstPtr, SyntaxNode}; +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct HirOrigin +where + T: Send + Clone + PartialEq + Eq + std::fmt::Debug + std::hash::Hash, +{ + pub file_id: FileId, + pub kind: HirOriginKind, +} + /// This enum represents the origin of the HIR node. /// The origin has three possible kinds. /// 1. `Raw` is used for nodes that are created by the parser and not @@ -7,7 +18,7 @@ use fe_parser2::{ast::AstPtr, SyntaxNode}; /// 3. `Desugared` is used for nodes that are created by the compiler and not // TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. 
#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum HirOrigin +pub enum HirOriginKind where T: Send + Clone + PartialEq + Eq + std::fmt::Debug + std::hash::Hash, { @@ -30,3 +41,35 @@ pub enum DesugaredOrigin { /// statement. AugAssign(AstPtr), } + +/// This enum represents the file +#[salsa::interned] +pub struct FileId { + /// A ingot id which the file belongs to. + ingot: IngotId, + /// A relative path from the ingot root. + path: PathBuf, +} + +#[salsa::interned] +pub struct IngotId { + /// A full path to the ingot root. + path: PathBuf, + kind: IngotKind, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum IngotKind { + /// A standalone ingot is a dummy ingot when the compiler is invoked + /// directly on a file. + StandAlone, + + /// A local ingot which is the current ingot being compiled. + Local, + + /// An external ingot which is depended on by the current ingot. + External, + + /// A std ingot. + Std, +} From edded5d832ae823203cb87e3f6083c95131e5ab3 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 03:41:16 +0100 Subject: [PATCH 090/678] Add HIR lower for `TypeId` --- crates/hir/src/hir_def/body.rs | 4 +++ crates/hir/src/hir_def/types.rs | 29 +++++----------- crates/hir/src/lower/body.rs | 12 +++++++ crates/hir/src/lower/mod.rs | 3 ++ crates/hir/src/lower/params.rs | 9 +++++ crates/hir/src/lower/path.rs | 7 +++- crates/hir/src/lower/types.rs | 59 +++++++++++++++++++++++++++++++++ 7 files changed, 101 insertions(+), 22 deletions(-) create mode 100644 crates/hir/src/lower/params.rs create mode 100644 crates/hir/src/lower/types.rs diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 8d5fbf6ff4..df37b48156 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -26,4 +26,8 @@ pub enum BodyKind { DefBlock(super::ItemKind), /// This is a body appearing in array types or NamelessConst, + + /// The body is invalid. + /// This is used to represent bodies that failed to parse. + Invalid, } diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index ab6005941f..69c10becfb 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -1,4 +1,4 @@ -use super::{Body, GenericArg, PathId}; +use super::{Body, GenericArgListId, PathId}; #[salsa::interned] pub struct TypeId { @@ -8,26 +8,13 @@ pub struct TypeId { #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeKind { Ptr(TypeId), - Path(PathType), + /// The `PathId` is the path to the type, the `Option` is the generic + /// arguments. + Path(PathId, Option), SelfType, - Tuple(TupleType), - Array(ArrayType), + /// The `Vec` contains the types of the tuple elements. + Tuple(Vec), + /// The first `TypeId` is the element type, the second `Body` is the length. 
+ Array(TypeId, Body), Invalid, } - -#[derive(Clone, PartialEq, Eq, Hash, Debug)] -pub struct PathType { - pub path: PathId, - pub args: Vec, -} - -#[derive(Clone, PartialEq, Eq, Hash, Debug)] -pub struct TupleType { - pub elems: Vec, -} - -#[derive(Clone, PartialEq, Eq, Hash, Debug)] -pub struct ArrayType { - pub elem_ty: TypeId, - pub len: Body, -} diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 8b13789179..0b4fb6e211 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -1 +1,13 @@ +use fe_parser2::ast; +use crate::{hir_def::Body, span::FileId, HirDb}; + +impl Body { + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::Expr, fid: FileId) -> Self { + todo!() + } + + pub(crate) fn invalid(db: &dyn HirDb, fid: FileId) -> Self { + todo!() + } +} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 1fb8cc55c2..734f9015fc 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1 +1,4 @@ +mod body; +mod params; mod path; +mod types; diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs new file mode 100644 index 0000000000..2e977bc86e --- /dev/null +++ b/crates/hir/src/lower/params.rs @@ -0,0 +1,9 @@ +use fe_parser2::ast; + +use crate::{hir_def::GenericArgListId, HirDb}; + +impl GenericArgListId { + pub fn from_ast(db: &dyn HirDb, ast: ast::GenericArgList) -> Self { + todo!() + } +} diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 0ad4074ce5..755078019d 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -6,7 +6,12 @@ use crate::{ }; impl PathId { - pub fn from_ast(db: &dyn HirDb, ast: ast::Path) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, ast: Option) -> Self { + // If the path is None, we return a path with a single invalid segment. 
+ let Some(ast) = ast else { + return Self::new(db, vec![PathSegment::Invalid]) + }; + let mut segments = Vec::new(); for seg in ast.into_iter() { let segment = if seg.is_self() { diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs new file mode 100644 index 0000000000..b896a22c1d --- /dev/null +++ b/crates/hir/src/lower/types.rs @@ -0,0 +1,59 @@ +use fe_parser2::ast::{self, prelude::*}; + +use crate::{ + hir_def::{Body, GenericArgListId, PathId, TypeId, TypeKind}, + span::FileId, + HirDb, +}; + +impl TypeId { + pub(crate) fn from_ast(db: &dyn HirDb, ast: Option, fid: FileId) -> Self { + let Some(ty) = ast else { + return TypeId::new(db, TypeKind::Invalid); + }; + + let kind = match ty.kind() { + ast::TypeKind::Ptr(ptr_type) => { + let inner = ptr_type.inner(); + let inner_id = TypeId::from_ast(db, inner, fid); + TypeKind::Ptr(inner_id) + } + + ast::TypeKind::Path(path_type) => { + let path = path_type.path(); + let path_id = PathId::from_ast(db, path); + if let Some(generic_args) = path_type.generic_args() { + let generic_args = GenericArgListId::from_ast(db, generic_args); + TypeKind::Path(path_id, generic_args.into()) + } else { + TypeKind::Path(path_id, None) + } + } + + ast::TypeKind::SelfType(_) => TypeKind::SelfType, + + ast::TypeKind::Tuple(tuple_type) => { + let mut elem_tys = Vec::new(); + for elem in tuple_type { + elem_tys.push(TypeId::from_ast(db, elem.into(), fid)); + } + TypeKind::Tuple(elem_tys) + } + + ast::TypeKind::Array(array_type) => { + let elem = array_type.elem_ty(); + let elem_ty_id = TypeId::from_ast(db, elem, fid); + + let body = if let Some(body) = array_type.len() { + Body::from_ast(db, body, fid) + } else { + Body::invalid(db, fid) + }; + + TypeKind::Array(elem_ty_id, body) + } + }; + + TypeId::new(db, kind) + } +} From 89c4ba832a233271231c4b94e62230278ac95daa Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 16:29:28 +0100 Subject: [PATCH 091/678] Add HIR lower for `params` --- crates/hir/src/hir_def/item.rs | 22 ++-- crates/hir/src/hir_def/mod.rs | 14 ++- crates/hir/src/hir_def/params.rs | 66 ++++++----- crates/hir/src/lower/body.rs | 2 +- crates/hir/src/lower/mod.rs | 18 +++ crates/hir/src/lower/params.rs | 194 ++++++++++++++++++++++++++++++- crates/hir/src/lower/types.rs | 12 +- crates/parser2/src/ast/param.rs | 2 +- 8 files changed, 280 insertions(+), 50 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 50c6099aa4..1cb6a3aa23 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -1,17 +1,19 @@ +// This is necessary because `salsa::tracked` structs generates a constructor +// that may take many arguments depending on the number of fields in the struct. 
+#![allow(clippy::too_many_arguments)] + use fe_parser2::ast; use crate::span::HirOrigin; -use super::{ - AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TypeId, WherePredicateId, -}; +use super::{AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TypeId, WhereClauseId}; #[salsa::tracked] pub struct Fn { #[id] pub name: super::IdentId, pub generic_params: GenericParamListId, - pub where_predicate: WherePredicateId, + pub where_clause: WhereClauseId, pub params: FnParamListId, pub ret_ty: Option, pub modifier: ItemModifier, @@ -28,7 +30,7 @@ pub struct Struct { pub is_pub: bool, pub generic_params: GenericParamListId, - pub where_predicate: WherePredicateId, + pub where_clause: WhereClauseId, pub attributes: AttrListId, pub fields: RecordFieldListId, @@ -55,7 +57,7 @@ pub struct Enum { pub is_pub: bool, pub generic_params: GenericParamListId, pub attributes: AttrListId, - pub where_predicate: WherePredicateId, + pub where_clause: WhereClauseId, pub variants: EnumVariantListId, pub(crate) origin: HirOrigin>, @@ -69,7 +71,7 @@ pub struct TypeAlias { pub is_pub: bool, pub generic_params: GenericParamListId, pub attributes: AttrListId, - pub where_predicate: WherePredicateId, + pub where_clause: WhereClauseId, pub ty: TypeId, pub(crate) origin: HirOrigin>, @@ -82,7 +84,7 @@ pub struct Impl { pub generic_params: GenericParamListId, pub attributes: AttrListId, - pub where_predicate: WherePredicateId, + pub where_clause: WhereClauseId, pub items: ImplItemListId, pub(crate) origin: HirOrigin>, @@ -95,7 +97,7 @@ pub struct Trait { pub generic_params: GenericParamListId, pub attributes: AttrListId, - pub where_predicate: WherePredicateId, + pub where_clause: WhereClauseId, pub items: TraitItemListId, pub(crate) origin: HirOrigin>, @@ -110,7 +112,7 @@ pub struct ImplTrait { pub generic_params: GenericParamListId, pub attributes: AttrListId, - pub where_predicate: WherePredicateId, + pub where_clause: WhereClauseId, pub items: ImplTraitItemListId, pub(crate) origin: HirOrigin>, diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index d7bf1f1878..763394e7ea 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -13,6 +13,7 @@ pub use attr::*; pub use body::*; pub use expr::*; pub use item::*; +use num_bigint::BigUint; pub use params::*; pub use pat::*; pub use path::*; @@ -20,14 +21,25 @@ pub use stmt::*; pub use types::*; pub use use_tree::*; +use crate::HirDb; + #[salsa::interned] pub struct IdentId { data: String, } +impl IdentId { + pub fn is_invalid(self, db: &dyn HirDb) -> bool { + self.data(db).is_empty() + } + + pub fn is_self(&self, db: &dyn HirDb) -> bool { + self.data(db) == "self" + } +} #[salsa::interned] pub struct IntegerId { - data: IntegerId, + data: BigUint, } #[salsa::interned] diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 9200f27d12..8682514137 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -1,6 +1,6 @@ use crate::hir_def::TypeId; -use super::{Expr, IdentId, PathId}; +use super::{Body, IdentId, PathId}; #[salsa::interned] pub struct GenericArgListId { @@ -9,9 +9,9 @@ pub struct GenericArgListId { } #[salsa::interned] -pub struct WhereClauseId { +pub struct GenericParamListId { #[return_ref] - pub predicates: Vec, + pub params: Vec, } #[salsa::interned] @@ -21,25 +21,12 @@ pub struct FnParamListId { } #[salsa::interned] -pub struct GenericParamListId { +pub struct WhereClauseId { #[return_ref] - pub params: Vec, -} - 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct FnParam { - pub name: IdentId, - pub label: Option, - pub ty: TypeId, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct WherePredicateId { - pub ty: TypeId, - pub bound: Vec, + pub predicates: Vec, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] pub enum GenericParam { Type(TypeGenericParam), Const(ConstGenericParam), @@ -54,10 +41,10 @@ pub struct TypeGenericParam { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ConstGenericParam { pub name: IdentId, - pub expr: Expr, + pub ty: TypeId, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] pub enum GenericArg { Type(TypeGenericArg), Const(ConstGenericArg), @@ -65,23 +52,48 @@ pub enum GenericArg { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeGenericArg { - pub path: PathId, + pub ty: TypeId, pub bounds: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ConstGenericArg { - pub expr: Expr, + pub body: Body, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TypeBound { - pub path: PathId, - pub generic_args: Vec, +pub struct FnParam { + pub is_mut: bool, + pub label: Option, + pub name: FnParamName, + pub ty: TypeId, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct WherePredicate { pub ty: TypeId, - pub bound: Vec, + pub bounds: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum FnParamLabel { + Ident(IdentId), + Underscore, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum FnParamName { + /// `self` parameter. + Self_, + Ident(IdentId), + Underscore, + Invalid, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeBound { + /// The path to the trait. + pub path: PathId, + /// The type arguments of the trait. 
+ pub generic_args: Option, } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 0b4fb6e211..b5b4e97af3 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -3,7 +3,7 @@ use fe_parser2::ast; use crate::{hir_def::Body, span::FileId, HirDb}; impl Body { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::Expr, fid: FileId) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Expr) -> Self { todo!() } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 734f9015fc..124e50f63d 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1,4 +1,22 @@ +use fe_parser2::SyntaxToken; + +use crate::{hir_def::IdentId, HirDb}; + mod body; mod params; mod path; mod types; + +impl IdentId { + fn from_token(db: &dyn HirDb, token: Option) -> Self { + if let Some(token) = token { + Self::new(db, token.text().to_string()) + } else { + Self::invalid(db) + } + } + + fn invalid(db: &dyn HirDb) -> Self { + Self::new(db, String::new()) + } +} diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 2e977bc86e..42a9347286 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -1,9 +1,195 @@ -use fe_parser2::ast; +use fe_parser2::ast::{self}; -use crate::{hir_def::GenericArgListId, HirDb}; +use crate::{ + hir_def::{params::*, Body, IdentId, PathId, TypeId}, + span::FileId, + HirDb, +}; impl GenericArgListId { - pub fn from_ast(db: &dyn HirDb, ast: ast::GenericArgList) -> Self { - todo!() + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericArgList) -> Self { + let args = ast + .into_iter() + .map(|arg| GenericArg::from_ast(db, fid, arg)) + .collect(); + Self::new(db, args) + } +} + +impl GenericParamListId { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericParamList) -> Self { + let params = ast + .into_iter() + .map(|param| GenericParam::from_ast(db, fid, param)) + .collect(); + Self::new(db, params) + } +} + +impl FnParamListId { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::FnParamList) -> Self { + let params = ast + .into_iter() + .map(|param| FnParam::from_ast(db, fid, param)) + .collect(); + Self::new(db, params) + } +} + +impl WhereClauseId { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::WhereClause) -> Self { + let predicates = ast + .into_iter() + .map(|pred| WherePredicate::from_ast(db, fid, pred)) + .collect(); + Self::new(db, predicates) + } +} + +impl TypeGenericParam { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeGenericParam) -> Self { + let name = IdentId::from_token(db, ast.name()); + let bounds = ast + .bounds() + .map(|bounds| { + bounds + .into_iter() + .map(|bound| TypeBound::from_ast(db, fid, bound)) + .collect() + }) + .unwrap_or_default(); + + Self { name, bounds } + } +} + +impl ConstGenericParam { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericParam) -> Self { + let name = IdentId::from_token(db, ast.name()); + let ty = TypeId::from_ast(db, fid, ast.ty()); + Self { name, ty } + } +} + +impl GenericArg { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericArg) -> Self { + match ast.kind() { + ast::GenericArgKind::Type(type_param) => { + TypeGenericArg::from_ast(db, fid, type_param).into() + } + ast::GenericArgKind::Const(const_param) => { + ConstGenericArg::from_ast(db, fid, const_param).into() + } + } + } +} + +impl TypeGenericArg { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeGenericArg) -> Self 
{ + let ty = TypeId::from_ast(db, fid, ast.ty()); + let bounds = ast + .bounds() + .map(|bounds| { + bounds + .into_iter() + .map(|bound| TypeBound::from_ast(db, fid, bound)) + .collect() + }) + .unwrap_or_default(); + Self { ty, bounds } + } +} + +impl ConstGenericArg { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericArg) -> Self { + let body = if let Some(expr) = ast.expr() { + Body::from_ast(db, fid, expr) + } else { + Body::invalid(db, fid) + }; + + Self { body } + } +} + +impl GenericParam { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericParam) -> Self { + match ast.kind() { + ast::GenericParamKind::Type(type_param) => { + TypeGenericParam::from_ast(db, fid, type_param).into() + } + ast::GenericParamKind::Const(const_param) => { + ConstGenericParam::from_ast(db, fid, const_param).into() + } + } + } +} + +impl FnParam { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::FnParam) -> Self { + let is_mut = ast.mut_token().is_some(); + let label = ast.label().map(|ast| FnParamLabel::from_ast(db, ast)); + let name = if let Some(name) = ast.name() { + FnParamName::from_ast(db, name) + } else { + FnParamName::Invalid + }; + let ty = TypeId::from_ast(db, fid, ast.ty()); + + Self { + is_mut, + label, + name, + ty, + } + } +} + +impl WherePredicate { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::WherePredicate) -> Self { + let ty = TypeId::from_ast(db, fid, ast.ty()); + let bounds = ast + .bounds() + .map(|bounds| { + bounds + .into_iter() + .map(|bound| TypeBound::from_ast(db, fid, bound)) + .collect() + }) + .unwrap_or_default(); + Self { ty, bounds } + } +} + +impl TypeBound { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeBound) -> Self { + let path = PathId::from_ast(db, ast.path()); + let generic_args = ast + .generic_args() + .map(|args| GenericArgListId::from_ast(db, fid, args)); + Self { path, generic_args } + } +} + +impl FnParamName { + fn from_ast(db: &dyn HirDb, ast: ast::FnParamName) -> Self { + match ast { + ast::FnParamName::Ident(name) => { + FnParamName::Ident(IdentId::from_token(db, name.into())) + } + ast::FnParamName::SelfParam(_) => FnParamName::Self_, + ast::FnParamName::Underscore(_) => FnParamName::Underscore, + } + } +} + +impl FnParamLabel { + fn from_ast(db: &dyn HirDb, ast: ast::FnParamLabel) -> Self { + match ast { + ast::FnParamLabel::Ident(name) => { + FnParamLabel::Ident(IdentId::from_token(db, name.into())) + } + ast::FnParamLabel::Underscore(_) => FnParamLabel::Underscore, + } } } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index b896a22c1d..d45f224e40 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -7,7 +7,7 @@ use crate::{ }; impl TypeId { - pub(crate) fn from_ast(db: &dyn HirDb, ast: Option, fid: FileId) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { let Some(ty) = ast else { return TypeId::new(db, TypeKind::Invalid); }; @@ -15,7 +15,7 @@ impl TypeId { let kind = match ty.kind() { ast::TypeKind::Ptr(ptr_type) => { let inner = ptr_type.inner(); - let inner_id = TypeId::from_ast(db, inner, fid); + let inner_id = TypeId::from_ast(db, fid, inner); TypeKind::Ptr(inner_id) } @@ -23,7 +23,7 @@ impl TypeId { let path = path_type.path(); let path_id = PathId::from_ast(db, path); if let Some(generic_args) = path_type.generic_args() { - let generic_args = GenericArgListId::from_ast(db, generic_args); + let generic_args = GenericArgListId::from_ast(db, fid, generic_args); TypeKind::Path(path_id, generic_args.into()) } 
else { TypeKind::Path(path_id, None) @@ -35,17 +35,17 @@ impl TypeId { ast::TypeKind::Tuple(tuple_type) => { let mut elem_tys = Vec::new(); for elem in tuple_type { - elem_tys.push(TypeId::from_ast(db, elem.into(), fid)); + elem_tys.push(TypeId::from_ast(db, fid, elem.into())); } TypeKind::Tuple(elem_tys) } ast::TypeKind::Array(array_type) => { let elem = array_type.elem_ty(); - let elem_ty_id = TypeId::from_ast(db, elem, fid); + let elem_ty_id = TypeId::from_ast(db, fid, elem); let body = if let Some(body) = array_type.len() { - Body::from_ast(db, body, fid) + Body::from_ast(db, fid, body) } else { Body::invalid(db, fid) }; diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index a47ae10980..2d758c0014 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -199,7 +199,7 @@ ast_node! { SK::TypeGenericArg, } impl TypeGenericArg { - pub fn type_(&self) -> Option { + pub fn ty(&self) -> Option { support::child(self.syntax()) } From e69c5c1b3aeb0ce165f604e00a751d37a07f640e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 16 Mar 2023 16:54:30 +0100 Subject: [PATCH 092/678] Add HIR lower for `attr` --- crates/hir/src/hir_def/attr.rs | 10 +++--- crates/hir/src/lower/attr.rs | 59 ++++++++++++++++++++++++++++++++++ crates/hir/src/lower/mod.rs | 1 + 3 files changed, 65 insertions(+), 5 deletions(-) create mode 100644 crates/hir/src/lower/attr.rs diff --git a/crates/hir/src/hir_def/attr.rs b/crates/hir/src/hir_def/attr.rs index 00d81ce349..345119265c 100644 --- a/crates/hir/src/hir_def/attr.rs +++ b/crates/hir/src/hir_def/attr.rs @@ -1,4 +1,4 @@ -use super::IdentId; +use super::{IdentId, StringId}; #[salsa::interned] pub struct AttrListId { @@ -6,7 +6,7 @@ pub struct AttrListId { attrs: Vec, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] pub enum Attr { Normal(NormalAttr), DocComment(DocCommentAttr), @@ -21,11 +21,11 @@ pub struct NormalAttr { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct DocCommentAttr { /// This is the text of the doc comment, excluding the `///` prefix. 
- pub text: String, + pub text: StringId, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct AttrArg { - key: IdentId, - value: IdentId, + pub key: IdentId, + pub value: IdentId, } diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs new file mode 100644 index 0000000000..9fd34794f1 --- /dev/null +++ b/crates/hir/src/lower/attr.rs @@ -0,0 +1,59 @@ +use crate::hir_def::{attr::*, IdentId, StringId}; +use fe_parser2::ast; + +use crate::HirDb; + +impl AttrListId { + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::AttrList) -> Self { + let attrs = ast + .into_iter() + .map(|attr| Attr::from_ast(db, attr)) + .collect(); + Self::new(db, attrs) + } +} + +impl Attr { + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::Attr) -> Self { + match ast.kind() { + ast::AttrKind::Normal(attr) => NormalAttr::from_ast(db, attr).into(), + ast::AttrKind::DocComment(attr) => DocCommentAttr::from_ast(db, attr).into(), + } + } +} + +impl NormalAttr { + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::NormalAttr) -> Self { + let name = IdentId::from_token(db, ast.name()); + let args = ast + .args() + .map(|args| { + args.into_iter() + .map(|arg| AttrArg::from_ast(db, arg)) + .collect() + }) + .unwrap_or_default(); + + Self { name, args } + } +} + +impl DocCommentAttr { + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::DocCommentAttr) -> Self { + let text = ast + .doc() + .map(|doc| doc.text()[3..].to_string()) + .unwrap_or_default(); + Self { + text: StringId::new(db, text), + } + } +} + +impl AttrArg { + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::AttrArg) -> Self { + let key = IdentId::from_token(db, ast.key()); + let value = IdentId::from_token(db, ast.value()); + Self { key, value } + } +} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 124e50f63d..10b4ad2632 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -2,6 +2,7 @@ use fe_parser2::SyntaxToken; use crate::{hir_def::IdentId, HirDb}; +mod attr; mod body; mod params; mod path; From e66c08d346d0070b193651cd8fadbb9a4ebc6580 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 17 Mar 2023 02:58:46 +0100 Subject: [PATCH 093/678] Introduce `MaybeInvalid` type --- crates/hir/src/hir_def/attr.rs | 8 ++--- crates/hir/src/hir_def/body.rs | 12 +++---- crates/hir/src/hir_def/expr.rs | 4 --- crates/hir/src/hir_def/item.rs | 53 ++++++++++++++--------------- crates/hir/src/hir_def/mod.rs | 12 ++++--- crates/hir/src/hir_def/params.rs | 19 +++++------ crates/hir/src/hir_def/pat.rs | 2 -- crates/hir/src/hir_def/path.rs | 5 +-- crates/hir/src/hir_def/types.rs | 11 +++--- crates/hir/src/lower/attr.rs | 6 ++-- crates/hir/src/lower/mod.rs | 33 +++++++++++++----- crates/hir/src/lower/params.rs | 28 ++++++++-------- crates/hir/src/lower/path.rs | 24 ++++++++------ crates/hir/src/lower/types.rs | 57 +++++++++++++++----------------- 14 files changed, 136 insertions(+), 138 deletions(-) diff --git a/crates/hir/src/hir_def/attr.rs b/crates/hir/src/hir_def/attr.rs index 345119265c..a9a84df099 100644 --- a/crates/hir/src/hir_def/attr.rs +++ b/crates/hir/src/hir_def/attr.rs @@ -1,4 +1,4 @@ -use super::{IdentId, StringId}; +use super::{IdentId, MaybeInvalid, StringId}; #[salsa::interned] pub struct AttrListId { @@ -14,7 +14,7 @@ pub enum Attr { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct NormalAttr { - pub name: IdentId, + pub name: MaybeInvalid, pub args: Vec, } @@ -26,6 +26,6 @@ pub struct DocCommentAttr { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct AttrArg { - pub 
key: IdentId, - pub value: IdentId, + pub key: MaybeInvalid, + pub value: MaybeInvalid, } diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index df37b48156..2cec41dd8d 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -3,16 +3,16 @@ use fe_parser2::ast::{self, Stmt}; use crate::span::HirOrigin; -use super::{Expr, ExprId, Pat, PatId, StmtId}; +use super::{Expr, ExprId, MaybeInvalid, Pat, PatId, StmtId}; #[salsa::tracked] pub struct Body { #[id] pub kind: BodyKind, - pub stmts: PrimaryMap, - pub exprs: PrimaryMap, - pub pats: PrimaryMap, + pub stmts: PrimaryMap>, + pub exprs: PrimaryMap>, + pub pats: PrimaryMap>, pub(crate) stmt_source_map: SecondaryMap>>, pub(crate) expr_source_map: SecondaryMap>>, @@ -26,8 +26,4 @@ pub enum BodyKind { DefBlock(super::ItemKind), /// This is a body appearing in array types or NamelessConst, - - /// The body is invalid. - /// This is used to represent bodies that failed to parse. - Invalid, } diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index d6e6f9a7f9..ca62d8bff4 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -37,10 +37,6 @@ pub enum Expr { /// The first `ExprId` is the scrutinee, the second is the arms. Match(ExprId, Vec), - - /// Represents an invalid expressions. - /// These expressions are used to represent errors in the AST. - Invalid, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 1cb6a3aa23..c4fea15815 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -6,19 +6,22 @@ use fe_parser2::ast; use crate::span::HirOrigin; -use super::{AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TypeId, WhereClauseId}; +use super::{ + AttrListId, Body, FnParamListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, + WhereClauseId, +}; #[salsa::tracked] pub struct Fn { #[id] - pub name: super::IdentId, - pub generic_params: GenericParamListId, - pub where_clause: WhereClauseId, - pub params: FnParamListId, - pub ret_ty: Option, + pub name: MaybeInvalid, + pub generic_params: Option, + pub where_clause: Option, + pub params: MaybeInvalid, + pub ret_ty: Option>, pub modifier: ItemModifier, pub attributes: AttrListId, - pub body: Option, + pub body: Option>, pub(crate) origin: HirOrigin>, } @@ -26,11 +29,11 @@ pub struct Fn { #[salsa::tracked] pub struct Struct { #[id] - pub name: super::IdentId, + pub name: MaybeInvalid, pub is_pub: bool, - pub generic_params: GenericParamListId, - pub where_clause: WhereClauseId, + pub generic_params: Option, + pub where_clause: Option, pub attributes: AttrListId, pub fields: RecordFieldListId, @@ -40,7 +43,7 @@ pub struct Struct { #[salsa::tracked] pub struct Contract { #[id] - pub name: super::IdentId, + pub name: MaybeInvalid, pub is_pub: bool, pub attributes: AttrListId, @@ -52,7 +55,7 @@ pub struct Contract { #[salsa::tracked] pub struct Enum { #[id] - pub name: super::IdentId, + pub name: MaybeInvalid, pub is_pub: bool, pub generic_params: GenericParamListId, @@ -66,13 +69,13 @@ pub struct Enum { #[salsa::tracked] pub struct TypeAlias { #[id] - pub name: super::IdentId, + pub name: MaybeInvalid, pub is_pub: bool, pub generic_params: GenericParamListId, pub attributes: AttrListId, pub where_clause: WhereClauseId, - pub ty: TypeId, + pub ty: MaybeInvalid, pub(crate) origin: HirOrigin>, } @@ -80,7 +83,7 @@ pub struct TypeAlias { #[salsa::tracked] pub struct Impl { #[id] - 
pub ty: super::TypeId, + pub ty: super::MaybeInvalid, pub generic_params: GenericParamListId, pub attributes: AttrListId, @@ -93,7 +96,7 @@ pub struct Impl { #[salsa::tracked] pub struct Trait { #[id] - pub name: super::IdentId, + pub name: MaybeInvalid, pub generic_params: GenericParamListId, pub attributes: AttrListId, @@ -108,7 +111,7 @@ pub struct ImplTrait { #[id] pub trait_path: super::PathId, #[id] - pub ty: TypeId, + pub ty: MaybeInvalid, pub generic_params: GenericParamListId, pub attributes: AttrListId, @@ -121,8 +124,8 @@ pub struct ImplTrait { #[salsa::tracked] pub struct Const { #[id] - pub name: super::IdentId, - pub body: Body, + pub name: MaybeInvalid, + pub body: MaybeInvalid, pub(crate) origin: HirOrigin>, } @@ -156,14 +159,6 @@ pub enum ItemKind { Extern(Extern), } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ItemId { - Ident(IdentId), - Ty(TypeId), - Ty2(TypeId, TypeId), - Extern(u32), -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum ItemModifier { Pub, @@ -181,7 +176,7 @@ pub struct RecordFieldListId { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RecordField { name: IdentId, - ty: TypeId, + ty: MaybeInvalid, is_pub: bool, } @@ -194,7 +189,7 @@ pub struct EnumVariantListId { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct EnumVariant { name: IdentId, - ty: TypeId, + ty: MaybeInvalid, } #[salsa::interned] diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 763394e7ea..eafd8b2d77 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -28,10 +28,6 @@ pub struct IdentId { data: String, } impl IdentId { - pub fn is_invalid(self, db: &dyn HirDb) -> bool { - self.data(db).is_empty() - } - pub fn is_self(&self, db: &dyn HirDb) -> bool { self.data(db) == "self" } @@ -52,3 +48,11 @@ pub enum LitKind { Int(IntegerId), String(StringId), } + +/// This enum is used to represent a type that may be invalid in terms of the +/// syntax. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum MaybeInvalid { + Valid(T), + Invalid, +} diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 8682514137..45527e4d00 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -1,6 +1,6 @@ use crate::hir_def::TypeId; -use super::{Body, IdentId, PathId}; +use super::{Body, IdentId, MaybeInvalid, PathId}; #[salsa::interned] pub struct GenericArgListId { @@ -34,14 +34,14 @@ pub enum GenericParam { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeGenericParam { - pub name: IdentId, + pub name: MaybeInvalid, pub bounds: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ConstGenericParam { - pub name: IdentId, - pub ty: TypeId, + pub name: MaybeInvalid, + pub ty: MaybeInvalid, } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] @@ -52,7 +52,7 @@ pub enum GenericArg { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeGenericArg { - pub ty: TypeId, + pub ty: MaybeInvalid, pub bounds: Vec, } @@ -65,13 +65,13 @@ pub struct ConstGenericArg { pub struct FnParam { pub is_mut: bool, pub label: Option, - pub name: FnParamName, - pub ty: TypeId, + pub name: MaybeInvalid, + pub ty: MaybeInvalid, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct WherePredicate { - pub ty: TypeId, + pub ty: MaybeInvalid, pub bounds: Vec, } @@ -87,13 +87,12 @@ pub enum FnParamName { Self_, Ident(IdentId), Underscore, - Invalid, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeBound { /// The path to the trait. - pub path: PathId, + pub path: MaybeInvalid, /// The type arguments of the trait. pub generic_args: Option, } diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs index 7f565dc7de..f0de3a1394 100644 --- a/crates/hir/src/hir_def/pat.rs +++ b/crates/hir/src/hir_def/pat.rs @@ -12,8 +12,6 @@ pub enum Pat { PathTuple(PathId, Vec), Record(PathId, Vec), Or(PatId, PatId), - - Invalid, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index 80622a07d0..02af42e32e 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -1,8 +1,10 @@ +use crate::hir_def::MaybeInvalid; + use super::IdentId; #[salsa::interned] pub struct PathId { - segments: Vec, + segments: Vec>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -13,5 +15,4 @@ pub enum PathSegment { SelfTy, /// `self` segment. Self_, - Invalid, } diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index 69c10becfb..ebf04f3e84 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -1,4 +1,4 @@ -use super::{Body, GenericArgListId, PathId}; +use super::{Body, GenericArgListId, MaybeInvalid, PathId}; #[salsa::interned] pub struct TypeId { @@ -7,14 +7,13 @@ pub struct TypeId { #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeKind { - Ptr(TypeId), + Ptr(MaybeInvalid), /// The `PathId` is the path to the type, the `Option` is the generic /// arguments. - Path(PathId, Option), + Path(MaybeInvalid, Option), SelfType, /// The `Vec` contains the types of the tuple elements. - Tuple(Vec), + Tuple(Vec>), /// The first `TypeId` is the element type, the second `Body` is the length. 
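// Illustrative sketch, not part of the patch: the type parameters of the new wrapper were
// stripped in this rendering. Restored, the definitions introduced here behave like the
// self-contained example below. A missing AST node lowers to `Invalid` through the
// `From<Option<T>>` conversion (hence the `.into()` calls in the lowering code), while
// list-like ids instead get `from_ast_opt` helpers in the follow-up patches that default
// to an empty list.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MaybeInvalid<T> {
    Valid(T),
    Invalid,
}

impl<T> From<Option<T>> for MaybeInvalid<T> {
    fn from(value: Option<T>) -> Self {
        match value {
            Some(value) => Self::Valid(value),
            None => Self::Invalid,
        }
    }
}

fn demo() {
    // A present AST node becomes `Valid`; a node that failed to parse becomes `Invalid`.
    let present: MaybeInvalid<&str> = Some("name").into();
    let missing: MaybeInvalid<&str> = None::<&str>.into();
    assert_eq!(present, MaybeInvalid::Valid("name"));
    assert_eq!(missing, MaybeInvalid::Invalid);
}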
- Array(TypeId, Body), - Invalid, + Array(MaybeInvalid, MaybeInvalid), } diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs index 9fd34794f1..57053a9c02 100644 --- a/crates/hir/src/lower/attr.rs +++ b/crates/hir/src/lower/attr.rs @@ -24,7 +24,7 @@ impl Attr { impl NormalAttr { pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::NormalAttr) -> Self { - let name = IdentId::from_token(db, ast.name()); + let name = IdentId::maybe_from_token(db, ast.name()); let args = ast .args() .map(|args| { @@ -52,8 +52,8 @@ impl DocCommentAttr { impl AttrArg { pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::AttrArg) -> Self { - let key = IdentId::from_token(db, ast.key()); - let value = IdentId::from_token(db, ast.value()); + let key = IdentId::maybe_from_token(db, ast.key()); + let value = IdentId::maybe_from_token(db, ast.value()); Self { key, value } } } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 10b4ad2632..c11a79f803 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1,6 +1,9 @@ use fe_parser2::SyntaxToken; -use crate::{hir_def::IdentId, HirDb}; +use crate::{ + hir_def::{IdentId, MaybeInvalid}, + HirDb, +}; mod attr; mod body; @@ -9,15 +12,27 @@ mod path; mod types; impl IdentId { - fn from_token(db: &dyn HirDb, token: Option) -> Self { - if let Some(token) = token { - Self::new(db, token.text().to_string()) - } else { - Self::invalid(db) - } + fn from_token(db: &dyn HirDb, token: SyntaxToken) -> Self { + Self::new(db, token.text().to_string()) + } + + fn maybe_from_token(db: &dyn HirDb, token: Option) -> MaybeInvalid { + token.map(|token| Self::from_token(db, token)).into() + } +} + +impl MaybeInvalid { + fn invalid() -> Self { + Self::Invalid } +} - fn invalid(db: &dyn HirDb) -> Self { - Self::new(db, String::new()) +impl From> for MaybeInvalid { + fn from(value: Option) -> Self { + if let Some(value) = value { + Self::Valid(value) + } else { + Self::Invalid + } } } diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 42a9347286..14ea141b56 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -44,11 +44,15 @@ impl WhereClauseId { .collect(); Self::new(db, predicates) } + + pub(crate) fn empty(db: &dyn HirDb, fic: FileId) -> Self { + Self::new(db, Vec::new()) + } } impl TypeGenericParam { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeGenericParam) -> Self { - let name = IdentId::from_token(db, ast.name()); + let name = IdentId::maybe_from_token(db, ast.name()); let bounds = ast .bounds() .map(|bounds| { @@ -65,8 +69,8 @@ impl TypeGenericParam { impl ConstGenericParam { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericParam) -> Self { - let name = IdentId::from_token(db, ast.name()); - let ty = TypeId::from_ast(db, fid, ast.ty()); + let name = IdentId::maybe_from_token(db, ast.name()); + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); Self { name, ty } } } @@ -86,7 +90,7 @@ impl GenericArg { impl TypeGenericArg { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeGenericArg) -> Self { - let ty = TypeId::from_ast(db, fid, ast.ty()); + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); let bounds = ast .bounds() .map(|bounds| { @@ -129,12 +133,8 @@ impl FnParam { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::FnParam) -> Self { let is_mut = ast.mut_token().is_some(); let label = ast.label().map(|ast| FnParamLabel::from_ast(db, ast)); - let name = if let Some(name) = ast.name() { - FnParamName::from_ast(db, name) - } else { - 
FnParamName::Invalid - }; - let ty = TypeId::from_ast(db, fid, ast.ty()); + let name = ast.name().map(|ast| FnParamName::from_ast(db, ast)).into(); + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); Self { is_mut, @@ -147,7 +147,7 @@ impl FnParam { impl WherePredicate { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::WherePredicate) -> Self { - let ty = TypeId::from_ast(db, fid, ast.ty()); + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); let bounds = ast .bounds() .map(|bounds| { @@ -163,7 +163,7 @@ impl WherePredicate { impl TypeBound { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeBound) -> Self { - let path = PathId::from_ast(db, ast.path()); + let path = ast.path().map(|ast| PathId::from_ast(db, ast)).into(); let generic_args = ast .generic_args() .map(|args| GenericArgListId::from_ast(db, fid, args)); @@ -186,9 +186,7 @@ impl FnParamName { impl FnParamLabel { fn from_ast(db: &dyn HirDb, ast: ast::FnParamLabel) -> Self { match ast { - ast::FnParamLabel::Ident(name) => { - FnParamLabel::Ident(IdentId::from_token(db, name.into())) - } + ast::FnParamLabel::Ident(name) => FnParamLabel::Ident(IdentId::from_token(db, name)), ast::FnParamLabel::Underscore(_) => FnParamLabel::Underscore, } } diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 755078019d..27362aadad 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -1,31 +1,33 @@ use fe_parser2::ast; use crate::{ - hir_def::{IdentId, PathId, PathSegment}, + hir_def::{IdentId, MaybeInvalid, PathId, PathSegment}, HirDb, }; impl PathId { - pub(crate) fn from_ast(db: &dyn HirDb, ast: Option) -> Self { - // If the path is None, we return a path with a single invalid segment. - let Some(ast) = ast else { - return Self::new(db, vec![PathSegment::Invalid]) - }; - + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::Path) -> Self { let mut segments = Vec::new(); for seg in ast.into_iter() { let segment = if seg.is_self() { - PathSegment::Self_ + MaybeInvalid::Valid(PathSegment::Self_) } else if seg.is_self_ty() { - PathSegment::SelfTy + MaybeInvalid::Valid(PathSegment::SelfTy) } else if let Some(ident) = seg.ident() { - PathSegment::Ident(IdentId::new(db, ident.text().to_string())) + MaybeInvalid::Valid(PathSegment::Ident(IdentId::new( + db, + ident.text().to_string(), + ))) } else { - PathSegment::Invalid + MaybeInvalid::invalid() }; segments.push(segment); } Self::new(db, segments) } + + pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: Option) -> MaybeInvalid { + ast.map(|ast| Self::from_ast(db, ast)).into() + } } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index d45f224e40..7a2a1e3f02 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -1,59 +1,54 @@ use fe_parser2::ast::{self, prelude::*}; use crate::{ - hir_def::{Body, GenericArgListId, PathId, TypeId, TypeKind}, + hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TypeId, TypeKind}, span::FileId, HirDb, }; impl TypeId { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { - let Some(ty) = ast else { - return TypeId::new(db, TypeKind::Invalid); - }; - - let kind = match ty.kind() { - ast::TypeKind::Ptr(ptr_type) => { - let inner = ptr_type.inner(); - let inner_id = TypeId::from_ast(db, fid, inner); - TypeKind::Ptr(inner_id) + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Type) -> Self { + let kind = match ast.kind() { + ast::TypeKind::Ptr(ty) => { + let inner = Self::maybe_from_ast(db, fid, ty.inner()); + 
TypeKind::Ptr(inner) } - ast::TypeKind::Path(path_type) => { - let path = path_type.path(); - let path_id = PathId::from_ast(db, path); - if let Some(generic_args) = path_type.generic_args() { + ast::TypeKind::Path(ty) => { + let path = PathId::maybe_from_ast(db, ty.path()).into(); + if let Some(generic_args) = ty.generic_args() { let generic_args = GenericArgListId::from_ast(db, fid, generic_args); - TypeKind::Path(path_id, generic_args.into()) + TypeKind::Path(path, generic_args.into()) } else { - TypeKind::Path(path_id, None) + TypeKind::Path(path, None) } } ast::TypeKind::SelfType(_) => TypeKind::SelfType, - ast::TypeKind::Tuple(tuple_type) => { + ast::TypeKind::Tuple(ty) => { let mut elem_tys = Vec::new(); - for elem in tuple_type { - elem_tys.push(TypeId::from_ast(db, fid, elem.into())); + for elem in ty { + elem_tys.push(Some(TypeId::from_ast(db, fid, elem)).into()); } TypeKind::Tuple(elem_tys) } - ast::TypeKind::Array(array_type) => { - let elem = array_type.elem_ty(); - let elem_ty_id = TypeId::from_ast(db, fid, elem); - - let body = if let Some(body) = array_type.len() { - Body::from_ast(db, fid, body) - } else { - Body::invalid(db, fid) - }; - - TypeKind::Array(elem_ty_id, body) + ast::TypeKind::Array(ty) => { + let elem_ty = Self::maybe_from_ast(db, fid, ty.elem_ty()); + let body = ty.len().map(|ast| Body::from_ast(db, fid, ast)).into(); + TypeKind::Array(elem_ty, body) } }; TypeId::new(db, kind) } + + pub(crate) fn maybe_from_ast( + db: &dyn HirDb, + fid: FileId, + ast: Option, + ) -> MaybeInvalid { + ast.map(|ast| Self::from_ast(db, fid, ast)).into() + } } From a0b297a947cdf98d585f368c11d106d183a94d3b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 17 Mar 2023 03:29:10 +0100 Subject: [PATCH 094/678] Add HIR lower for `Item` --- crates/hir/src/hir_def/body.rs | 8 +- crates/hir/src/hir_def/item.rs | 82 +++++----- crates/hir/src/hir_def/params.rs | 2 +- crates/hir/src/lower/attr.rs | 5 + crates/hir/src/lower/body.rs | 4 +- crates/hir/src/lower/item.rs | 270 +++++++++++++++++++++++++++++++ crates/hir/src/lower/mod.rs | 1 + crates/hir/src/lower/params.rs | 30 +++- crates/hir/src/lower/types.rs | 2 +- crates/hir/src/span/mod.rs | 25 ++- crates/parser2/src/ast/item.rs | 11 +- crates/parser2/src/ast/mod.rs | 3 + 12 files changed, 378 insertions(+), 65 deletions(-) create mode 100644 crates/hir/src/lower/item.rs diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 2cec41dd8d..616a65553b 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -14,10 +14,10 @@ pub struct Body { pub exprs: PrimaryMap>, pub pats: PrimaryMap>, - pub(crate) stmt_source_map: SecondaryMap>>, - pub(crate) expr_source_map: SecondaryMap>>, - pub(crate) pat_source_map: SecondaryMap>>, - pub(crate) ast: HirOrigin>, + pub(crate) stmt_source_map: SecondaryMap>, + pub(crate) expr_source_map: SecondaryMap>, + pub(crate) pat_source_map: SecondaryMap>, + pub(crate) ast: HirOrigin, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index c4fea15815..7e8bfd3d56 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -7,7 +7,7 @@ use fe_parser2::ast; use crate::span::HirOrigin; use super::{ - AttrListId, Body, FnParamListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, + AttrListId, FnParamListId, GenericArgListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, WhereClauseId, }; @@ -15,15 +15,15 @@ use super::{ pub struct Fn { #[id] pub 
name: MaybeInvalid, - pub generic_params: Option, - pub where_clause: Option, + + pub attributes: AttrListId, + pub generic_params: GenericParamListId, + pub where_clause: WhereClauseId, pub params: MaybeInvalid, - pub ret_ty: Option>, + pub ret_ty: Option, pub modifier: ItemModifier, - pub attributes: AttrListId, - pub body: Option>, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] @@ -31,13 +31,13 @@ pub struct Struct { #[id] pub name: MaybeInvalid, - pub is_pub: bool, - pub generic_params: Option, - pub where_clause: Option, pub attributes: AttrListId, + pub is_pub: bool, + pub generic_params: GenericParamListId, + pub where_clause: WhereClauseId, pub fields: RecordFieldListId, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] @@ -45,11 +45,11 @@ pub struct Contract { #[id] pub name: MaybeInvalid, - pub is_pub: bool, pub attributes: AttrListId, + pub is_pub: bool, pub fields: RecordFieldListId, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] @@ -57,13 +57,13 @@ pub struct Enum { #[id] pub name: MaybeInvalid, + pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, - pub attributes: AttrListId, pub where_clause: WhereClauseId, pub variants: EnumVariantListId, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] @@ -71,13 +71,13 @@ pub struct TypeAlias { #[id] pub name: MaybeInvalid, + pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, - pub attributes: AttrListId, pub where_clause: WhereClauseId, pub ty: MaybeInvalid, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] @@ -85,12 +85,10 @@ pub struct Impl { #[id] pub ty: super::MaybeInvalid, - pub generic_params: GenericParamListId, pub attributes: AttrListId, pub where_clause: WhereClauseId, - pub items: ImplItemListId, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] @@ -98,50 +96,46 @@ pub struct Trait { #[id] pub name: MaybeInvalid, - pub generic_params: GenericParamListId, pub attributes: AttrListId, + pub is_pub: bool, + pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub items: TraitItemListId, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] pub struct ImplTrait { + // TraitRef here. 
#[id] - pub trait_path: super::PathId, + pub trait_ref: MaybeInvalid, #[id] pub ty: MaybeInvalid, - pub generic_params: GenericParamListId, pub attributes: AttrListId, pub where_clause: WhereClauseId, - pub items: ImplTraitItemListId, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] pub struct Const { #[id] pub name: MaybeInvalid, - pub body: MaybeInvalid, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] pub struct Use { pub name: super::UseTreeId, - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[salsa::tracked] pub struct Extern { - pub items: ExternItemListId, - - pub(crate) origin: HirOrigin>, + pub(crate) origin: HirOrigin, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] @@ -167,35 +161,41 @@ pub enum ItemModifier { None, } +impl ItemModifier { + pub fn is_pub(self) -> bool { + matches!(self, ItemModifier::Pub | ItemModifier::PubAndUnsafe) + } +} + #[salsa::interned] pub struct RecordFieldListId { #[return_ref] - fields: Vec, + pub fields: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RecordField { - name: IdentId, - ty: MaybeInvalid, - is_pub: bool, + pub name: MaybeInvalid, + pub ty: MaybeInvalid, + pub is_pub: bool, } #[salsa::interned] pub struct EnumVariantListId { #[return_ref] - variants: Vec, + pub variants: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct EnumVariant { - name: IdentId, - ty: MaybeInvalid, + pub name: MaybeInvalid, + pub ty: Option, } #[salsa::interned] pub struct ImplItemListId { #[return_ref] - items: Vec, + pub items: Vec, } pub type TraitItemListId = ImplItemListId; diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 45527e4d00..7e3c2e4792 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -58,7 +58,7 @@ pub struct TypeGenericArg { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ConstGenericArg { - pub body: Body, + pub body: MaybeInvalid, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs index 57053a9c02..0be9d869cb 100644 --- a/crates/hir/src/lower/attr.rs +++ b/crates/hir/src/lower/attr.rs @@ -11,6 +11,11 @@ impl AttrListId { .collect(); Self::new(db, attrs) } + + pub fn from_ast_opt(db: &dyn HirDb, ast: Option) -> Self { + ast.map(|ast| Self::from_ast(db, ast)) + .unwrap_or_else(|| Self::new(db, vec![])) + } } impl Attr { diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index b5b4e97af3..24aadf775d 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -3,11 +3,11 @@ use fe_parser2::ast; use crate::{hir_def::Body, span::FileId, HirDb}; impl Body { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Expr) -> Self { + pub(crate) fn from_ast_expr(db: &dyn HirDb, fid: FileId, ast: ast::Expr) -> Self { todo!() } - pub(crate) fn invalid(db: &dyn HirDb, fid: FileId) -> Self { + pub(crate) fn from_ast_block(db: &dyn HirDb, fid: FileId, ast: ast::BlockExpr) -> Self { todo!() } } diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs new file mode 100644 index 0000000000..80b3515c5e --- /dev/null +++ b/crates/hir/src/lower/item.rs @@ -0,0 +1,270 @@ +use fe_parser2::ast::{self, prelude::*}; + +use crate::{ + hir_def::{ + item::*, AttrListId, FnParamListId, GenericParamListId, IdentId, TraitRef, TypeId, + UseTreeId, WhereClauseId, + }, + span::{FileId, HirOrigin}, + HirDb, +}; + +impl Fn { + 
pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Fn) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let generic_paramas = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let params = ast + .params() + .map(|params| FnParamListId::from_ast(db, fid, params)) + .into(); + let ret_ty = ast.ret_ty().map(|ty| TypeId::from_ast(db, fid, ty)); + let modifier = ItemModifier::from_ast(db, ast.modifier()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new( + db, + name, + attributes, + generic_paramas, + where_clause, + params, + ret_ty, + modifier, + origin, + ) + } +} + +impl Struct { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Struct) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); + let generic_paramas = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let fields = RecordFieldListId::from_ast_opt(db, fid, ast.fields()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new( + db, + name, + attributes, + is_pub, + generic_paramas, + where_clause, + fields, + origin, + ) + } +} + +impl Contract { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Contract) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); + let fields = RecordFieldListId::from_ast_opt(db, fid, ast.fields()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new(db, name, attributes, is_pub, fields, origin) + } +} + +impl Enum { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Enum) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); + let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let variants = EnumVariantListId::from_ast_opt(db, fid, ast.variants()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new( + db, + name, + attributes, + is_pub, + generic_params, + where_clause, + variants, + origin, + ) + } +} + +impl TypeAlias { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeAlias) -> Self { + let name = IdentId::maybe_from_token(db, ast.alias()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); + let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new( + db, + name, + attributes, + is_pub, + generic_params, + where_clause, + ty, + origin, + ) + } +} + +impl Impl { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Impl) -> Self { + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let generic_params = 
GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new(db, ty, attributes, generic_params, where_clause, origin) + } +} + +impl Trait { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Trait) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); + let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new( + db, + name, + attributes, + is_pub, + generic_params, + where_clause, + origin, + ) + } +} + +impl ImplTrait { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ImplTrait) -> Self { + let trait_ref = TraitRef::maybe_from_ast(db, fid, ast.trait_ref()); + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let origin = HirOrigin::raw(fid, &ast); + + Self::new( + db, + trait_ref, + ty, + attributes, + generic_params, + where_clause, + origin, + ) + } +} + +impl Const { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Const) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + + let origin = HirOrigin::raw(fid, &ast); + Self::new(db, name, origin) + } +} + +impl Use { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Use) -> Self { + let tree = UseTreeId::maybe_from_ast(db, ast.use_tree()); + let origin = HirOrigin::raw(fid, &ast); + Self::new(db, tree, origin) + } +} + +impl Extern { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Extern) -> Self { + let origin = HirOrigin::raw(fid, &ast); + + Self::new(db, origin) + } +} + +impl ItemModifier { + fn from_ast(db: &dyn HirDb, ast: Option) -> Self { + let Some(ast) = ast else { + return Self::None; + }; + + match (ast.pub_kw().is_some(), ast.unsafe_kw().is_some()) { + (true, true) => Self::PubAndUnsafe, + (true, false) => Self::Pub, + (false, true) => Self::Unsafe, + (false, false) => Self::None, + } + } +} + +impl RecordFieldListId { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::RecordFieldDefList) -> Self { + let fields = ast + .into_iter() + .map(|field| RecordField::from_ast(db, fid, field)) + .collect(); + Self::new(db, fields) + } + + fn from_ast_opt(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { + ast.map(|ast| Self::from_ast(db, fid, ast)) + .unwrap_or(Self::new(db, Vec::new())) + } +} + +impl RecordField { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::RecordFieldDef) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + let is_pub = ast.pub_kw().is_some(); + + Self { name, ty, is_pub } + } +} + +impl EnumVariantListId { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::EnumVariantDefList) -> Self { + let variants = ast + .into_iter() + .map(|variant| EnumVariant::from_ast(db, fid, variant)) + .collect(); + Self::new(db, variants) + } + + fn from_ast_opt(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { + ast.map(|ast| Self::from_ast(db, fid, ast)) + .unwrap_or(Self::new(db, 
Vec::new())) + } +} + +impl EnumVariant { + fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::EnumVariantDef) -> Self { + let name = IdentId::maybe_from_token(db, ast.name()); + let ty = ast.ty().map(|ty| TypeId::from_ast(db, fid, ty)); + + Self { name, ty } + } +} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index c11a79f803..b243857af3 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -7,6 +7,7 @@ use crate::{ mod attr; mod body; +mod item; mod params; mod path; mod types; diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 14ea141b56..f7674c52b3 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -14,6 +14,15 @@ impl GenericArgListId { .collect(); Self::new(db, args) } + + pub(crate) fn from_ast_opt( + db: &dyn HirDb, + fid: FileId, + ast: Option, + ) -> Self { + ast.map(|ast| Self::from_ast(db, fid, ast)) + .unwrap_or_else(|| Self::new(db, Vec::new())) + } } impl GenericParamListId { @@ -24,6 +33,15 @@ impl GenericParamListId { .collect(); Self::new(db, params) } + + pub(crate) fn from_ast_opt( + db: &dyn HirDb, + fid: FileId, + ast: Option, + ) -> Self { + ast.map(|ast| Self::from_ast(db, fid, ast)) + .unwrap_or_else(|| Self::new(db, Vec::new())) + } } impl FnParamListId { @@ -45,8 +63,9 @@ impl WhereClauseId { Self::new(db, predicates) } - pub(crate) fn empty(db: &dyn HirDb, fic: FileId) -> Self { - Self::new(db, Vec::new()) + pub(crate) fn from_ast_opt(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { + ast.map(|ast| Self::from_ast(db, fid, ast)) + .unwrap_or_else(|| Self::new(db, Vec::new())) } } @@ -107,10 +126,11 @@ impl TypeGenericArg { impl ConstGenericArg { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericArg) -> Self { let body = if let Some(expr) = ast.expr() { - Body::from_ast(db, fid, expr) + Some(Body::from_ast_expr(db, fid, expr)) } else { - Body::invalid(db, fid) - }; + None + } + .into(); Self { body } } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 7a2a1e3f02..177f9333d1 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -36,7 +36,7 @@ impl TypeId { ast::TypeKind::Array(ty) => { let elem_ty = Self::maybe_from_ast(db, fid, ty.elem_ty()); - let body = ty.len().map(|ast| Body::from_ast(db, fid, ast)).into(); + let body = ty.len().map(|ast| Body::from_ast_expr(db, fid, ast)).into(); TypeKind::Array(elem_ty, body) } }; diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 986e0e1e7e..5922ecdedb 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,16 +1,31 @@ use std::path::PathBuf; -use fe_parser2::{ast::AstPtr, SyntaxNode}; +use fe_parser2::{ + ast::{prelude::*, AstPtr}, + SyntaxNode, +}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HirOrigin where - T: Send + Clone + PartialEq + Eq + std::fmt::Debug + std::hash::Hash, + T: AstNode, { - pub file_id: FileId, + pub fid: FileId, pub kind: HirOriginKind, } +impl HirOrigin +where + T: AstNode, +{ + pub fn raw(fid: FileId, ast: &T) -> Self { + HirOrigin { + fid, + kind: HirOriginKind::Raw(AstPtr::new(ast)), + } + } +} + /// This enum represents the origin of the HIR node. /// The origin has three possible kinds. /// 1. 
`Raw` is used for nodes that are created by the parser and not @@ -20,10 +35,10 @@ where #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum HirOriginKind where - T: Send + Clone + PartialEq + Eq + std::fmt::Debug + std::hash::Hash, + T: AstNode, { /// The HIR node is created by direct lowering from the corresponding AST. - Raw(T), + Raw(AstPtr), /// The HIR node is created by expanding attributes. /// The `SyntaxNode` points to the callsite of the attribute. Expanded(SyntaxNode), diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 453851c78a..f98ab60858 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -197,7 +197,6 @@ ast_node! { pub struct Impl, SK::Impl, } -impl super::GenericParamsOwner for Impl {} impl super::WhereClauseOwner for Impl {} impl super::AttrListOwner for Impl {} impl Impl { @@ -220,13 +219,13 @@ ast_node! { pub struct ImplTrait, SK::ImplTrait, } -impl super::GenericParamsOwner for ImplTrait {} impl super::WhereClauseOwner for ImplTrait {} impl super::AttrListOwner for ImplTrait {} impl ImplTrait { /// Returns the trait of the impl. /// `Foo` in `impl Foo for Bar { .. }` - pub fn trait_(&self) -> Option { + // TODO: TraitRef. + pub fn trait_ref(&self) -> Option { support::child(self.syntax()) } @@ -391,7 +390,7 @@ impl ItemModifier { } pub trait ItemModifierOwner: AstNode { - fn item_modifier(&self) -> Option { + fn modifier(&self) -> Option { support::child(self.syntax()) } } @@ -453,7 +452,7 @@ mod tests { assert!(func.where_clause().is_some()); assert!(func.body().is_some()); assert!(matches!(func.ret_ty().unwrap().kind(), TypeKind::Tuple(_))); - let modifier = func.item_modifier().unwrap(); + let modifier = func.modifier().unwrap(); assert!(modifier.pub_kw().is_some()); assert!(modifier.unsafe_kw().is_some()); } @@ -612,7 +611,7 @@ mod tests { fn foo(self, _t: T) -> u32 { return 1 }; }"#; let i: ImplTrait = parse_item(source); - assert!(matches!(i.trait_().unwrap().kind(), TypeKind::Path(_))); + assert!(matches!(i.trait_ref().unwrap().kind(), TypeKind::Path(_))); assert!(matches!(i.ty().unwrap().kind(), TypeKind::Tuple(_))); assert!(i.item_list().unwrap().iter().count() == 1); } diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs index 53d2b06014..c5486f6083 100644 --- a/crates/parser2/src/ast/mod.rs +++ b/crates/parser2/src/ast/mod.rs @@ -23,6 +23,7 @@ pub use use_tree::*; pub type AstChildren = rowan::ast::AstChildren; pub type SyntaxText = rowan::SyntaxText; pub type AstPtr = rowan::ast::AstPtr; +pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr; pub mod prelude { pub use super::{ @@ -110,3 +111,5 @@ macro_rules! 
ast_node { } use ast_node; + +use crate::FeLang; From 47a68902d81e782d97ccec7ee852dba6e585385e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 18 Mar 2023 00:44:46 +0100 Subject: [PATCH 095/678] Add syntax to specify generic parameters in `impl` block --- crates/hir/src/hir_def/item.rs | 13 +- crates/hir/src/hir_def/params.rs | 1 - crates/hir/src/hir_def/types.rs | 8 +- crates/hir/src/hir_def/use_tree.rs | 24 +- crates/hir/src/lib.rs | 2 +- crates/hir/src/lower/mod.rs | 1 + crates/hir/src/lower/params.rs | 11 +- crates/hir/src/lower/types.rs | 26 +- crates/hir/src/lower/use_tree.rs | 60 ++ crates/parser2/src/ast/item.rs | 8 +- crates/parser2/src/ast/param.rs | 12 +- crates/parser2/src/ast/types.rs | 2 +- crates/parser2/src/ast/use_tree.rs | 4 +- crates/parser2/src/parser/func.rs | 20 +- crates/parser2/src/parser/item.rs | 75 +- crates/parser2/src/parser/param.rs | 30 +- crates/parser2/src/parser/stmt.rs | 2 +- crates/parser2/src/parser/struct_.rs | 10 +- crates/parser2/src/parser/type_.rs | 36 +- .../test_files/syntax_node/items/impl.fe | 4 +- .../test_files/syntax_node/items/impl.snap | 476 ++++++------ .../syntax_node/items/impl_trait.fe | 7 +- .../syntax_node/items/impl_trait.snap | 675 +++++++++--------- .../test_files/syntax_node/items/trait.fe | 4 +- .../test_files/syntax_node/items/trait.snap | 337 ++++----- 25 files changed, 979 insertions(+), 869 deletions(-) create mode 100644 crates/hir/src/lower/use_tree.rs diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 7e8bfd3d56..e4a4976aa3 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -4,11 +4,10 @@ use fe_parser2::ast; -use crate::span::HirOrigin; +use crate::{hir_def::TraitRef, span::HirOrigin}; use super::{ - AttrListId, FnParamListId, GenericArgListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, - WhereClauseId, + AttrListId, FnParamListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, WhereClauseId, }; #[salsa::tracked] @@ -86,6 +85,7 @@ pub struct Impl { pub ty: super::MaybeInvalid, pub attributes: AttrListId, + pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub(crate) origin: HirOrigin, @@ -106,13 +106,13 @@ pub struct Trait { #[salsa::tracked] pub struct ImplTrait { - // TraitRef here. #[id] - pub trait_ref: MaybeInvalid, + pub trait_ref: MaybeInvalid, #[id] pub ty: MaybeInvalid, pub attributes: AttrListId, + pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub(crate) origin: HirOrigin, @@ -128,7 +128,8 @@ pub struct Const { #[salsa::tracked] pub struct Use { - pub name: super::UseTreeId, + #[id] + pub tree: MaybeInvalid, pub(crate) origin: HirOrigin, } diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 7e3c2e4792..2c6f0d4192 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -53,7 +53,6 @@ pub enum GenericArg { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeGenericArg { pub ty: MaybeInvalid, - pub bounds: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index ebf04f3e84..1987baf5a1 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -10,10 +10,16 @@ pub enum TypeKind { Ptr(MaybeInvalid), /// The `PathId` is the path to the type, the `Option` is the generic /// arguments. 
- Path(MaybeInvalid, Option), + Path(MaybeInvalid, GenericArgListId), SelfType, /// The `Vec` contains the types of the tuple elements. Tuple(Vec>), /// The first `TypeId` is the element type, the second `Body` is the length. Array(MaybeInvalid, MaybeInvalid), } + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TraitRef { + pub path: MaybeInvalid, + pub generic_args: GenericArgListId, +} diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index 0435a5c857..c2790f04f1 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -1,15 +1,29 @@ -use super::{IdentId, PathId}; +use crate::hir_def::MaybeInvalid; + +use super::IdentId; #[salsa::interned] pub struct UseTreeId { - pub path: Option, + /// The base path of the use tree. + /// `Foo::Foo2` in `Foo::Foo2::{Bar::*, Baz::{x, y}}` + /// + /// NOTE: If the tree root is started with `{}`, then the `path` is `None`. + pub path: Vec>, + /// The subtree of the use tree. + /// + /// `Bar::*` and `Baz::{x, y}` in `Foo::Foo2::{Bar::*, Baz::{x, y}}`. pub subtree: Vec, - pub alias: Option, + + //// The alias of this use tree. + /// `Bar` in `Foo as Bar;` + pub alias: Option>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum UsePath { - Path(PathId), +pub enum UsePathSegment { + Ident(IdentId), + /// `self`, + SelfPath, /// `*`. Glob, } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 43ac052490..c0fef10c8b 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -16,6 +16,7 @@ pub struct Jar( hir_def::Const, hir_def::Use, hir_def::Extern, + // Interned structs. hir_def::Body, hir_def::IdentId, hir_def::IntegerId, @@ -31,7 +32,6 @@ pub struct Jar( hir_def::ImplItemListId, hir_def::TypeId, hir_def::UseTreeId, - // Interned structs. 
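// Illustrative sketch, not part of the patch: how the new `UseTreeId` shape decomposes the
// example from its doc comments, `Foo::Foo2::{Bar::*, Baz::{x, y}}`. Plain strings stand in
// for the interned segment ids and `"*"` stands in for the glob segment, so the mock below
// is self-contained.
#[derive(Debug)]
struct UseTreeSketch {
    path: Vec<&'static str>,      // base path, `Foo::Foo2`
    subtree: Vec<UseTreeSketch>,  // nested trees inside `{ ... }`
    alias: Option<&'static str>,  // `as <name>`, if any
}

fn example_use_tree() -> UseTreeSketch {
    UseTreeSketch {
        path: vec!["Foo", "Foo2"],
        subtree: vec![
            UseTreeSketch { path: vec!["Bar", "*"], subtree: vec![], alias: None },
            UseTreeSketch {
                path: vec!["Baz"],
                subtree: vec![
                    UseTreeSketch { path: vec!["x"], subtree: vec![], alias: None },
                    UseTreeSketch { path: vec!["y"], subtree: vec![], alias: None },
                ],
                alias: None,
            },
        ],
        alias: None,
    }
}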
span::IngotId, span::FileId, ); diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index b243857af3..6df58a1afb 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -11,6 +11,7 @@ mod item; mod params; mod path; mod types; +mod use_tree; impl IdentId { fn from_token(db: &dyn HirDb, token: SyntaxToken) -> Self { diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index f7674c52b3..bb0cd7fe7c 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -110,16 +110,7 @@ impl GenericArg { impl TypeGenericArg { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeGenericArg) -> Self { let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); - let bounds = ast - .bounds() - .map(|bounds| { - bounds - .into_iter() - .map(|bound| TypeBound::from_ast(db, fid, bound)) - .collect() - }) - .unwrap_or_default(); - Self { ty, bounds } + Self { ty } } } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 177f9333d1..25fb9c8058 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -1,7 +1,7 @@ use fe_parser2::ast::{self, prelude::*}; use crate::{ - hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TypeId, TypeKind}, + hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TraitRef, TypeId, TypeKind}, span::FileId, HirDb, }; @@ -16,12 +16,8 @@ impl TypeId { ast::TypeKind::Path(ty) => { let path = PathId::maybe_from_ast(db, ty.path()).into(); - if let Some(generic_args) = ty.generic_args() { - let generic_args = GenericArgListId::from_ast(db, fid, generic_args); - TypeKind::Path(path, generic_args.into()) - } else { - TypeKind::Path(path, None) - } + let generic_args = GenericArgListId::from_ast_opt(db, fid, ty.generic_args()); + TypeKind::Path(path, generic_args.into()) } ast::TypeKind::SelfType(_) => TypeKind::SelfType, @@ -52,3 +48,19 @@ impl TypeId { ast.map(|ast| Self::from_ast(db, fid, ast)).into() } } + +impl TraitRef { + pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::PathType) -> Self { + let path = PathId::maybe_from_ast(db, ast.path()).into(); + let generic_args = GenericArgListId::from_ast_opt(db, fid, ast.generic_args()); + Self { path, generic_args } + } + + pub(crate) fn maybe_from_ast( + db: &dyn HirDb, + fid: FileId, + ast: Option, + ) -> MaybeInvalid { + ast.map(|ast| Self::from_ast(db, fid, ast)).into() + } +} diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs new file mode 100644 index 0000000000..ca43f14e41 --- /dev/null +++ b/crates/hir/src/lower/use_tree.rs @@ -0,0 +1,60 @@ +use fe_parser2::ast; + +use crate::{ + hir_def::{use_tree::*, IdentId, MaybeInvalid}, + HirDb, +}; + +impl UseTreeId { + pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::UseTree) -> Self { + let path = if let Some(path) = ast.path() { + path.into_iter() + .map(|ast| UsePathSegment::maybe_from_ast(db, ast)) + .collect() + } else { + vec![] + }; + let subtree = if let Some(children) = ast.children() { + children + .into_iter() + .map(|ast| UseTreeId::from_ast(db, ast)) + .collect() + } else { + vec![] + }; + let alias = ast.alias().map(|ast| UseTreeAlias::maybe_from_ast(db, ast)); + + Self::new(db, path, subtree, alias) + } + + pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: Option) -> MaybeInvalid { + ast.map(|ast| Self::from_ast(db, ast)).into() + } +} + +impl UsePathSegment { + pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: ast::UsePathSegment) -> MaybeInvalid { + ast.kind() + .map(|kind| match kind { + 
ast::UsePathSegmentKind::Ident(ident) => { + Self::Ident(IdentId::from_token(db, ident)) + } + ast::UsePathSegmentKind::SelfPath(_) => Self::SelfPath, + ast::UsePathSegmentKind::Glob(_) => Self::Glob, + }) + .into() + } +} + +impl UseTreeAlias { + pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: ast::UseTreeAlias) -> MaybeInvalid { + if let Some(ident) = ast.ident() { + Some(Self::Ident(IdentId::from_token(db, ident))) + } else if ast.underscore().is_some() { + Some(Self::Underscore) + } else { + None + } + .into() + } +} diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index f98ab60858..3e80d82579 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -197,6 +197,7 @@ ast_node! { pub struct Impl, SK::Impl, } +impl super::GenericParamsOwner for Impl {} impl super::WhereClauseOwner for Impl {} impl super::AttrListOwner for Impl {} impl Impl { @@ -219,13 +220,13 @@ ast_node! { pub struct ImplTrait, SK::ImplTrait, } +impl super::GenericParamsOwner for ImplTrait {} impl super::WhereClauseOwner for ImplTrait {} impl super::AttrListOwner for ImplTrait {} impl ImplTrait { /// Returns the trait of the impl. /// `Foo` in `impl Foo for Bar { .. }` - // TODO: TraitRef. - pub fn trait_ref(&self) -> Option { + pub fn trait_ref(&self) -> Option { support::child(self.syntax()) } @@ -611,7 +612,8 @@ mod tests { fn foo(self, _t: T) -> u32 { return 1 }; }"#; let i: ImplTrait = parse_item(source); - assert!(matches!(i.trait_ref().unwrap().kind(), TypeKind::Path(_))); + assert!(i.generic_params().is_none()); + assert!(i.trait_ref().is_some()); assert!(matches!(i.ty().unwrap().kind(), TypeKind::Tuple(_))); assert!(i.item_list().unwrap().iter().count() == 1); } diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 2d758c0014..9bef957dbc 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -202,10 +202,6 @@ impl TypeGenericArg { pub fn ty(&self) -> Option { support::child(self.syntax()) } - - pub fn bounds(&self) -> Option { - support::child(self.syntax()) - } } ast_node! 
{ @@ -361,7 +357,7 @@ mod tests { fn parse_generic_arg(source: &str) -> GenericArgList { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); - parser.parse(GenericArgListScope::new(true), None); + parser.parse(GenericArgListScope::default(), None); GenericArgList::cast(parser.finish().0).unwrap() } @@ -430,15 +426,13 @@ mod tests { #[test] #[wasm_bindgen_test] fn generic_arg() { - let source = r#""#; + let source = r#""#; let ga = parse_generic_arg(source); let mut args = ga.iter(); - let GenericArgKind::Type(a1) = args.next().unwrap().kind() else { + let GenericArgKind::Type(_) = args.next().unwrap().kind() else { panic!("expected type arg"); }; - assert!(a1.bounds().is_some()); - let GenericArgKind::Const(a2) = args.next().unwrap().kind() else { panic!("expected const arg"); }; diff --git a/crates/parser2/src/ast/types.rs b/crates/parser2/src/ast/types.rs index 0f8dc7f09e..2eafe375da 100644 --- a/crates/parser2/src/ast/types.rs +++ b/crates/parser2/src/ast/types.rs @@ -126,7 +126,7 @@ mod tests { { let lexer = Lexer::new(source); let mut parser = parser::Parser::new(lexer); - parser::type_::parse_type(&mut parser, None, true); + parser::type_::parse_type(&mut parser, None); Type::cast(parser.finish().0) .unwrap() .kind() diff --git a/crates/parser2/src/ast/use_tree.rs b/crates/parser2/src/ast/use_tree.rs index b0a57cdbf7..56f3301dad 100644 --- a/crates/parser2/src/ast/use_tree.rs +++ b/crates/parser2/src/ast/use_tree.rs @@ -56,7 +56,7 @@ impl UsePathSegment { Some(node) => match node.kind() { SK::SelfKw => Some(UsePathSegmentKind::SelfPath(node.into_token().unwrap())), SK::Ident => Some(UsePathSegmentKind::Ident(node.into_token().unwrap())), - SK::Star => Some(UsePathSegmentKind::Wildcard(node.into_token().unwrap())), + SK::Star => Some(UsePathSegmentKind::Glob(node.into_token().unwrap())), _ => None, }, _ => None, @@ -100,5 +100,5 @@ pub enum UsePathSegmentKind { Ident(SyntaxToken), /// `*` /// This is only allowed in the last segment of a path. 
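// Illustrative sketch, not part of the patch: the segment kinds a use path produces, matching
// the accessor above and the `Wildcard` -> `Glob` rename in this hunk. Simplified stand-ins
// are used below; the real AST variants carry `SyntaxToken`s and the HIR ones interned ids.
#[derive(Debug, PartialEq)]
enum UseSegmentSketch {
    Ident(&'static str),
    SelfPath,
    Glob,
}

fn glob_use_segments() -> Vec<UseSegmentSketch> {
    // `use foo::bar::*` -- per the doc comment above, the glob may only be the last segment.
    vec![
        UseSegmentSketch::Ident("foo"),
        UseSegmentSketch::Ident("bar"),
        UseSegmentSketch::Glob,
    ]
}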
- Wildcard(SyntaxToken), + Glob(SyntaxToken), } diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 916ab804c6..fc03fed711 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -3,7 +3,7 @@ use crate::SyntaxKind; use super::{ define_scope, expr_atom::BlockExprScope, - param::{parse_where_clause_opt, FnParamListScope, GenericParamListScope}, + param::{parse_generic_params_opt, parse_where_clause_opt, FnParamListScope}, token_stream::TokenStream, type_::parse_type, Parser, @@ -54,11 +54,7 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { ); parser.with_next_expected_tokens( - |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }, + |parser| parse_generic_params_opt(parser), &[SyntaxKind::LParen], ); @@ -76,7 +72,7 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { parser.with_next_expected_tokens( |parser| { if parser.bump_if(SyntaxKind::Arrow) { - parse_type(parser, None, false); + parse_type(parser, None); } }, &[SyntaxKind::LBrace, SyntaxKind::WhereKw], @@ -103,11 +99,7 @@ fn parse_trait_fn_def_impl(parser: &mut Parser) { ); parser.with_next_expected_tokens( - |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }, + |parser| parse_generic_params_opt(parser), &[SyntaxKind::LParen], ); @@ -125,7 +117,7 @@ fn parse_trait_fn_def_impl(parser: &mut Parser) { parser.with_recovery_tokens( |parser| { if parser.bump_if(SyntaxKind::Arrow) { - parse_type(parser, None, false); + parse_type(parser, None); } }, &[SyntaxKind::LBrace, SyntaxKind::WhereKw], @@ -161,6 +153,6 @@ fn parse_extern_fn_def_impl(parser: &mut Parser) { ); if parser.bump_if(SyntaxKind::Arrow) { - parse_type(parser, None, false); + parse_type(parser, None); } } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 33a1e806d9..71e745b4f7 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -6,10 +6,10 @@ use super::{ attr, define_scope, expr::parse_expr, func::FnDefScope, - param::{parse_where_clause_opt, GenericParamListScope}, + param::{parse_generic_params_opt, parse_where_clause_opt}, struct_::RecordFieldDefListScope, token_stream::{LexicalToken, TokenStream}, - type_::{parse_type, TupleTypeScope}, + type_::{parse_type, PathTypeScope, TupleTypeScope}, use_tree::UseTreeScope, Parser, }; @@ -198,11 +198,7 @@ impl super::Parse for EnumScope { ); parser.with_next_expected_tokens( - |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }, + |parser| parse_generic_params_opt(parser), &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); @@ -270,11 +266,7 @@ impl super::Parse for TraitScope { ); parser.with_next_expected_tokens( - |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }, + |parser| parse_generic_params_opt(parser), &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); @@ -300,29 +292,44 @@ define_scope! 
{ ImplScope, Impl, Inheritance } impl super::Parse for ImplScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ImplKw); - - parser.with_next_expected_tokens( - |parser| { - parse_type(parser, None, true); - }, + parser.with_recovery_tokens( + |parser| parse_generic_params_opt(parser), &[SyntaxKind::LBrace, SyntaxKind::WhereKw, SyntaxKind::ForKw], ); - let is_impl_trait = parser.with_next_expected_tokens( - |parser| { - if parser.bump_if(SyntaxKind::ForKw) { - self.set_kind(SyntaxKind::ImplTrait); - parse_type(parser, None, true); - true - } else { - false - } - }, - &[SyntaxKind::LBrace, SyntaxKind::WhereKw], - ); + let is_impl_trait = parser.dry_run(|parser| { + parser.with_next_expected_tokens( + |parser| parse_type(parser, None), + &[SyntaxKind::LBrace, SyntaxKind::WhereKw, SyntaxKind::ForKw], + ); + parser.bump_if(SyntaxKind::ForKw) + }); - parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); + if is_impl_trait { + self.set_kind(SyntaxKind::ImplTrait); + parser.with_next_expected_tokens( + |parser| { + parser.parse(PathTypeScope::default(), None); + }, + &[SyntaxKind::ForKw], + ); + parser.bump_expected(SyntaxKind::ForKw); + parser.with_next_expected_tokens( + |parser| { + parse_type(parser, None); + }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + ); + } else { + parser.with_next_expected_tokens( + |parser| { + parse_type(parser, None); + }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + ) + } + parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { parser.error_and_recover("expected impl body", None); return; @@ -377,7 +384,7 @@ impl super::Parse for ConstScope { "expected type annotation for `const`", None, ); - parse_type(parser, None, false); + parse_type(parser, None); }, &[SyntaxKind::Eq], ); @@ -430,9 +437,7 @@ impl super::Parse for TypeAliasScope { parser.with_next_expected_tokens( |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } + parse_generic_params_opt(parser); }, &[SyntaxKind::Eq], ); @@ -442,7 +447,7 @@ impl super::Parse for TypeAliasScope { return; } - parse_type(parser, None, false); + parse_type(parser, None); } } diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index a46fb84ebd..6472c9bd21 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -76,7 +76,7 @@ impl super::Parse for FnParamScope { parser.bump_or_recover(SyntaxKind::Colon, "expected `:` after argument name", None); - parse_type(parser, None, false); + parse_type(parser, None); } } @@ -129,7 +129,7 @@ impl super::Parse for GenericParamScope { parser.error_and_recover("expected `:` after const parameter", None); return; } - parse_type(parser, None, false); + parse_type(parser, None); parser.set_newline_as_trivia(true); } else { @@ -177,13 +177,13 @@ impl super::Parse for TypeBoundScope { fn parse(&mut self, parser: &mut Parser) { parser.parse(PathScope::default(), None); if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericArgListScope::new(false), None); + parser.parse(GenericArgListScope::default(), None); } } } define_scope! 
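// Illustrative sketch, not part of the patch: what the reworked `ImplScope` above accepts,
// written in the style of the `parse_item` tests in `ast/item.rs`. The Fe snippets and
// assertions are assumptions for illustration, not copied from the updated test files. The
// scope now parses optional generic parameters right after `impl`, then dry-runs a type to
// see whether a following `for` should re-tag the node as `ImplTrait`.
#[test]
fn impl_with_generic_params_sketch() {
    // No `for` after the type: the node stays an `Impl`.
    let imp: Impl = parse_item(
        r#"impl<T> Foo<T> {
            fn f(self) -> u32 { return 1 }
        }"#,
    );
    assert!(imp.generic_params().is_some());
    assert!(matches!(imp.ty().unwrap().kind(), TypeKind::Path(_)));

    // With `for`, the same scope produces an `ImplTrait` carrying a trait ref.
    let impl_trait: ImplTrait = parse_item(
        r#"impl<T> MyTrait<T> for Foo<T> {
            fn f(self) -> u32 { return 1 }
        }"#,
    );
    assert!(impl_trait.generic_params().is_some());
    assert!(impl_trait.trait_ref().is_some());
}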
{ - pub(crate) GenericArgListScope{ allow_bounds: bool }, + pub(crate) GenericArgListScope, GenericArgList, Override(Gt, Comma) } @@ -195,9 +195,9 @@ impl super::Parse for GenericArgListScope { return; } - parser.parse(GenericArgScope::new(self.allow_bounds), None); + parser.parse(GenericArgScope::default(), None); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(GenericArgScope::new(self.allow_bounds), None); + parser.parse(GenericArgScope::default(), None); } parser.bump_or_recover(SyntaxKind::Gt, "expected closing `>`", None); @@ -205,7 +205,7 @@ impl super::Parse for GenericArgListScope { } define_scope! { - GenericArgScope{ allow_bounds: bool }, + GenericArgScope, TypeGenericArg, Inheritance } @@ -226,13 +226,9 @@ impl super::Parse for GenericArgScope { } _ => { - parse_type(parser, None, self.allow_bounds); + parse_type(parser, None); if parser.current_kind() == Some(SyntaxKind::Colon) { - if !self.allow_bounds { - parser.error_and_recover("type bounds are not allowed here", None); - } else { - parser.parse(TypeBoundListScope::default(), None); - } + parser.error_and_recover("type bounds are not allowed here", None); } } } @@ -303,7 +299,7 @@ impl super::Parse for WhereClauseScope { define_scope! { pub(crate) WherePredicateScope, WherePredicate, Inheritance } impl super::Parse for WherePredicateScope { fn parse(&mut self, parser: &mut Parser) { - parse_type(parser, None, false); + parse_type(parser, None); parser.set_newline_as_trivia(false); if parser.current_kind() == Some(SyntaxKind::Colon) { parser.parse(TypeBoundListScope::default(), None); @@ -323,3 +319,9 @@ pub(crate) fn parse_where_clause_opt(parser: &mut Parser) { } parser.set_newline_as_trivia(newline_as_trivia); } + +pub(crate) fn parse_generic_params_opt(parser: &mut Parser) { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::default(), None); + } +} diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs index 68e97ee6d6..8e53be1b7f 100644 --- a/crates/parser2/src/parser/stmt.rs +++ b/crates/parser2/src/parser/stmt.rs @@ -47,7 +47,7 @@ impl super::Parse for LetStmtScope { } if parser.current_kind() == Some(SyntaxKind::Colon) { parser.bump_expected(SyntaxKind::Colon); - parse_type(parser, None, false); + parse_type(parser, None); } if parser.bump_if(SyntaxKind::Eq) { diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 54bf08c2e0..e2fdf3cc3d 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -3,7 +3,7 @@ use crate::SyntaxKind; use super::{ attr::parse_attr_list, define_scope, - param::{parse_where_clause_opt, GenericParamListScope}, + param::{parse_generic_params_opt, parse_where_clause_opt}, token_stream::TokenStream, type_::parse_type, Parser, @@ -28,11 +28,7 @@ impl super::Parse for StructScope { ); parser.with_next_expected_tokens( - |parser| { - if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); - } - }, + |parser| parse_generic_params_opt(parser), &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); @@ -104,7 +100,7 @@ impl super::Parse for RecordFieldDefScope { ); if parser.bump_if(SyntaxKind::Colon) { parser.with_next_expected_tokens( - |parser| parse_type(parser, None, false), + |parser| parse_type(parser, None), &[SyntaxKind::Newline, SyntaxKind::RBrace], ); } else { diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index 427ed1fe6e..7c0c94b646 100644 --- 
a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -9,17 +9,13 @@ use super::{ Checkpoint, Parser, }; -pub fn parse_type( - parser: &mut Parser, - checkpoint: Option, - allow_bounds: bool, -) -> bool { +pub fn parse_type(parser: &mut Parser, checkpoint: Option) -> bool { match parser.current_kind() { - Some(SyntaxKind::Star) => parser.parse(PtrTypeScope::new(allow_bounds), checkpoint), + Some(SyntaxKind::Star) => parser.parse(PtrTypeScope::default(), checkpoint), Some(SyntaxKind::SelfTypeKw) => parser.parse(SelfTypeScope::new(), checkpoint), - Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::new(allow_bounds), checkpoint), - Some(SyntaxKind::LBracket) => parser.parse(ArrayTypeScope::new(allow_bounds), checkpoint), - _ => parser.parse(PathTypeScope::new(allow_bounds), checkpoint), + Some(SyntaxKind::LParen) => parser.parse(TupleTypeScope::default(), checkpoint), + Some(SyntaxKind::LBracket) => parser.parse(ArrayTypeScope::default(), checkpoint), + _ => parser.parse(PathTypeScope::default(), checkpoint), } .0 } @@ -34,16 +30,16 @@ pub(crate) fn is_type_start(kind: SyntaxKind) -> bool { } } -define_scope!(PtrTypeScope { allow_bounds: bool }, PtrType, Inheritance); +define_scope!(PtrTypeScope, PtrType, Inheritance); impl super::Parse for PtrTypeScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::Star); - parse_type(parser, None, self.allow_bounds); + parse_type(parser, None); } } -define_scope!(PathTypeScope { allow_bounds: bool }, PathType, Inheritance); +define_scope!(pub(crate) PathTypeScope , PathType, Inheritance); impl super::Parse for PathTypeScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); @@ -52,7 +48,7 @@ impl super::Parse for PathTypeScope { } if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericArgListScope::new(self.allow_bounds), None); + parser.parse(GenericArgListScope::default(), None); } } } @@ -65,7 +61,7 @@ impl super::Parse for SelfTypeScope { } } define_scope! { - pub(crate) TupleTypeScope{ allow_bounds: bool }, + pub(crate) TupleTypeScope, TupleType, Override( RParen, @@ -81,9 +77,9 @@ impl super::Parse for TupleTypeScope { } parser.set_newline_as_trivia(true); - parse_type(parser, None, self.allow_bounds); + parse_type(parser, None); while parser.bump_if(SyntaxKind::Comma) { - parse_type(parser, None, self.allow_bounds); + parse_type(parser, None); } if !parser.bump_if(SyntaxKind::RParen) { @@ -94,7 +90,7 @@ impl super::Parse for TupleTypeScope { } define_scope! 
{ - ArrayTypeScope{ allow_bounds: bool }, + ArrayTypeScope, ArrayType, Override(RBracket) } @@ -103,10 +99,8 @@ impl super::Parse for ArrayTypeScope { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::LBracket); - parser.with_next_expected_tokens( - |parser| parse_type(parser, None, self.allow_bounds), - &[SyntaxKind::SemiColon], - ); + parser + .with_next_expected_tokens(|parser| parse_type(parser, None), &[SyntaxKind::SemiColon]); if !parser.bump_if(SyntaxKind::SemiColon) { parser.error_and_recover("expected `;`", None); diff --git a/crates/parser2/test_files/syntax_node/items/impl.fe b/crates/parser2/test_files/syntax_node/items/impl.fe index 67c8e2b58c..db27e486f0 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.fe +++ b/crates/parser2/test_files/syntax_node/items/impl.fe @@ -1,4 +1,4 @@ -impl Foo::Bar { +impl Foo::Bar { pub fn add(self, rhs: Self) -> Self { Self { val: self.val + rhs.val @@ -6,7 +6,7 @@ impl Foo::Bar { } } -impl Foo +impl Foo where Foo: Clone { fn add>(self, rhs: U) diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 748be7f5a7..6955f625b3 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -3,240 +3,250 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/impl.fe --- -Root@0..266 - ItemList@0..266 - Impl@0..134 +Root@0..272 + ItemList@0..272 + Impl@0..137 ImplKw@0..4 "impl" - WhiteSpace@4..5 " " - PathType@5..21 - Path@5..13 - PathSegment@5..8 - Ident@5..8 "Foo" - Colon2@8..10 "::" - PathSegment@10..13 - Ident@10..13 "Bar" - GenericArgList@13..21 - Lt@13..14 "<" - TypeGenericArg@14..20 - PathType@14..15 - Path@14..15 - PathSegment@14..15 - Ident@14..15 "T" - TypeBoundList@15..20 - Colon@15..16 ":" - WhiteSpace@16..17 " " - TypeBound@17..20 - Path@17..20 - PathSegment@17..20 - Ident@17..20 "Add" - Gt@20..21 ">" - WhiteSpace@21..22 " " - ImplItemList@22..134 - LBrace@22..23 "{" - Newline@23..24 "\n" - WhiteSpace@24..28 " " - Fn@28..132 - ItemModifier@28..31 - PubKw@28..31 "pub" - WhiteSpace@31..32 " " - FnKw@32..34 "fn" + GenericParamList@4..12 + Lt@4..5 "<" + TypeGenericParam@5..11 + Ident@5..6 "T" + TypeBoundList@6..11 + Colon@6..7 ":" + WhiteSpace@7..8 " " + TypeBound@8..11 + Path@8..11 + PathSegment@8..11 + Ident@8..11 "Add" + Gt@11..12 ">" + WhiteSpace@12..13 " " + PathType@13..24 + Path@13..21 + PathSegment@13..16 + Ident@13..16 "Foo" + Colon2@16..18 "::" + PathSegment@18..21 + Ident@18..21 "Bar" + GenericArgList@21..24 + Lt@21..22 "<" + TypeGenericArg@22..23 + PathType@22..23 + Path@22..23 + PathSegment@22..23 + Ident@22..23 "T" + Gt@23..24 ">" + WhiteSpace@24..25 " " + ImplItemList@25..137 + LBrace@25..26 "{" + Newline@26..27 "\n" + WhiteSpace@27..31 " " + Fn@31..135 + ItemModifier@31..34 + PubKw@31..34 "pub" WhiteSpace@34..35 " " - Ident@35..38 "add" - FnParamList@38..55 - LParen@38..39 "(" - FnParam@39..43 - SelfKw@39..43 "self" - Comma@43..44 "," - WhiteSpace@44..45 " " - FnParam@45..54 - Ident@45..48 "rhs" - Colon@48..49 ":" - WhiteSpace@49..50 " " - SelfType@50..54 - SelfTypeKw@50..54 "Self" - RParen@54..55 ")" - WhiteSpace@55..56 " " - Arrow@56..58 "->" + FnKw@35..37 "fn" + WhiteSpace@37..38 " " + Ident@38..41 "add" + FnParamList@41..58 + LParen@41..42 "(" + FnParam@42..46 + SelfKw@42..46 "self" + Comma@46..47 "," + WhiteSpace@47..48 " " + FnParam@48..57 + Ident@48..51 "rhs" + Colon@51..52 ":" + 
WhiteSpace@52..53 " " + SelfType@53..57 + SelfTypeKw@53..57 "Self" + RParen@57..58 ")" WhiteSpace@58..59 " " - SelfType@59..63 - SelfTypeKw@59..63 "Self" - WhiteSpace@63..64 " " - BlockExpr@64..132 - LBrace@64..65 "{" - Newline@65..66 "\n" - WhiteSpace@66..74 " " - ExprStmt@74..126 - RecordInitExpr@74..126 - PathExpr@74..78 - Path@74..78 - PathSegment@74..78 - SelfTypeKw@74..78 "Self" - WhiteSpace@78..79 " " - RecordFieldList@79..126 - LBrace@79..80 "{" - Newline@80..81 "\n" - WhiteSpace@81..93 " " - RecordField@93..116 - Ident@93..96 "val" - Colon@96..97 ":" - WhiteSpace@97..98 " " - BinExpr@98..116 - FieldExpr@98..106 - PathExpr@98..102 - Path@98..102 - PathSegment@98..102 - SelfKw@98..102 "self" - Dot@102..103 "." - Ident@103..106 "val" - WhiteSpace@106..107 " " - Plus@107..108 "+" - WhiteSpace@108..109 " " - FieldExpr@109..116 - PathExpr@109..112 - Path@109..112 - PathSegment@109..112 - Ident@109..112 "rhs" - Dot@112..113 "." - Ident@113..116 "val" - Newline@116..117 "\n" - WhiteSpace@117..125 " " - RBrace@125..126 "}" - Newline@126..127 "\n" - WhiteSpace@127..131 " " - RBrace@131..132 "}" - Newline@132..133 "\n" - RBrace@133..134 "}" - Newline@134..136 "\n\n" - Impl@136..266 - ImplKw@136..140 "impl" - WhiteSpace@140..141 " " - PathType@141..147 - Path@141..144 - PathSegment@141..144 - Ident@141..144 "Foo" - GenericArgList@144..147 - Lt@144..145 "<" - TypeGenericArg@145..146 - PathType@145..146 - Path@145..146 - PathSegment@145..146 - Ident@145..146 "T" - Gt@146..147 ">" - WhiteSpace@147..148 " " - Newline@148..149 "\n" - WhereClause@149..169 - WhereKw@149..154 "where" - WhiteSpace@154..155 " " - WherePredicate@155..169 - PathType@155..161 - Path@155..158 - PathSegment@155..158 - Ident@155..158 "Foo" - GenericArgList@158..161 - Lt@158..159 "<" - TypeGenericArg@159..160 - PathType@159..160 - Path@159..160 - PathSegment@159..160 - Ident@159..160 "T" - Gt@160..161 ">" - TypeBoundList@161..168 - Colon@161..162 ":" - WhiteSpace@162..163 " " - TypeBound@163..168 - Path@163..168 - PathSegment@163..168 - Ident@163..168 "Clone" - Newline@168..169 "\n" - ImplItemList@169..266 - LBrace@169..170 "{" - Newline@170..171 "\n" - WhiteSpace@171..175 " " - Fn@175..264 - FnKw@175..177 "fn" - WhiteSpace@177..178 " " - Ident@178..181 "add" - GenericParamList@181..192 - Lt@181..182 "<" - TypeGenericParam@182..191 - Ident@182..183 "U" - TypeBoundList@183..191 - Colon@183..184 ":" - WhiteSpace@184..185 " " - TypeBound@185..191 - Path@185..188 - PathSegment@185..188 - Ident@185..188 "Add" - GenericArgList@188..191 - Lt@188..189 "<" - TypeGenericArg@189..190 - PathType@189..190 - Path@189..190 - PathSegment@189..190 - Ident@189..190 "T" - Gt@190..191 ">" - Gt@191..192 ">" - FnParamList@192..206 - LParen@192..193 "(" - FnParam@193..197 - SelfKw@193..197 "self" - Comma@197..198 "," - WhiteSpace@198..199 " " - FnParam@199..205 - Ident@199..202 "rhs" - Colon@202..203 ":" - WhiteSpace@203..204 " " - PathType@204..205 - Path@204..205 - PathSegment@204..205 - Ident@204..205 "U" - RParen@205..206 ")" - WhiteSpace@206..207 " " - Newline@207..208 "\n" - WhiteSpace@208..216 " " - WhereClause@216..230 - WhereKw@216..221 "where" - WhiteSpace@221..222 " " - WherePredicate@222..230 - PathType@222..223 - Path@222..223 - PathSegment@222..223 - Ident@222..223 "T" - TypeBoundList@223..229 - Colon@223..224 ":" - WhiteSpace@224..225 " " - TypeBound@225..229 - Path@225..229 - PathSegment@225..229 - Ident@225..229 "Copy" - Newline@229..230 "\n" - WhiteSpace@230..234 " " - BlockExpr@234..264 - LBrace@234..235 "{" - Newline@235..236 
"\n" - WhiteSpace@236..244 " " - ExprStmt@244..258 - ParenExpr@244..258 - LParen@244..245 "(" - BinExpr@245..257 - PathExpr@245..248 - Path@245..248 - PathSegment@245..248 - Ident@245..248 "rhs" - WhiteSpace@248..249 " " - Minus@249..250 "-" - WhiteSpace@250..251 " " - FieldExpr@251..257 - PathExpr@251..255 - Path@251..255 - PathSegment@251..255 - SelfKw@251..255 "self" - Dot@255..256 "." - Ident@256..257 "t" - RParen@257..258 ")" - Newline@258..259 "\n" - WhiteSpace@259..263 " " - RBrace@263..264 "}" - Newline@264..265 "\n" - RBrace@265..266 "}" + Arrow@59..61 "->" + WhiteSpace@61..62 " " + SelfType@62..66 + SelfTypeKw@62..66 "Self" + WhiteSpace@66..67 " " + BlockExpr@67..135 + LBrace@67..68 "{" + Newline@68..69 "\n" + WhiteSpace@69..77 " " + ExprStmt@77..129 + RecordInitExpr@77..129 + PathExpr@77..81 + Path@77..81 + PathSegment@77..81 + SelfTypeKw@77..81 "Self" + WhiteSpace@81..82 " " + RecordFieldList@82..129 + LBrace@82..83 "{" + Newline@83..84 "\n" + WhiteSpace@84..96 " " + RecordField@96..119 + Ident@96..99 "val" + Colon@99..100 ":" + WhiteSpace@100..101 " " + BinExpr@101..119 + FieldExpr@101..109 + PathExpr@101..105 + Path@101..105 + PathSegment@101..105 + SelfKw@101..105 "self" + Dot@105..106 "." + Ident@106..109 "val" + WhiteSpace@109..110 " " + Plus@110..111 "+" + WhiteSpace@111..112 " " + FieldExpr@112..119 + PathExpr@112..115 + Path@112..115 + PathSegment@112..115 + Ident@112..115 "rhs" + Dot@115..116 "." + Ident@116..119 "val" + Newline@119..120 "\n" + WhiteSpace@120..128 " " + RBrace@128..129 "}" + Newline@129..130 "\n" + WhiteSpace@130..134 " " + RBrace@134..135 "}" + Newline@135..136 "\n" + RBrace@136..137 "}" + Newline@137..139 "\n\n" + Impl@139..272 + ImplKw@139..143 "impl" + GenericParamList@143..146 + Lt@143..144 "<" + TypeGenericParam@144..145 + Ident@144..145 "T" + Gt@145..146 ">" + WhiteSpace@146..147 " " + PathType@147..153 + Path@147..150 + PathSegment@147..150 + Ident@147..150 "Foo" + GenericArgList@150..153 + Lt@150..151 "<" + TypeGenericArg@151..152 + PathType@151..152 + Path@151..152 + PathSegment@151..152 + Ident@151..152 "T" + Gt@152..153 ">" + WhiteSpace@153..154 " " + Newline@154..155 "\n" + WhereClause@155..175 + WhereKw@155..160 "where" + WhiteSpace@160..161 " " + WherePredicate@161..175 + PathType@161..167 + Path@161..164 + PathSegment@161..164 + Ident@161..164 "Foo" + GenericArgList@164..167 + Lt@164..165 "<" + TypeGenericArg@165..166 + PathType@165..166 + Path@165..166 + PathSegment@165..166 + Ident@165..166 "T" + Gt@166..167 ">" + TypeBoundList@167..174 + Colon@167..168 ":" + WhiteSpace@168..169 " " + TypeBound@169..174 + Path@169..174 + PathSegment@169..174 + Ident@169..174 "Clone" + Newline@174..175 "\n" + ImplItemList@175..272 + LBrace@175..176 "{" + Newline@176..177 "\n" + WhiteSpace@177..181 " " + Fn@181..270 + FnKw@181..183 "fn" + WhiteSpace@183..184 " " + Ident@184..187 "add" + GenericParamList@187..198 + Lt@187..188 "<" + TypeGenericParam@188..197 + Ident@188..189 "U" + TypeBoundList@189..197 + Colon@189..190 ":" + WhiteSpace@190..191 " " + TypeBound@191..197 + Path@191..194 + PathSegment@191..194 + Ident@191..194 "Add" + GenericArgList@194..197 + Lt@194..195 "<" + TypeGenericArg@195..196 + PathType@195..196 + Path@195..196 + PathSegment@195..196 + Ident@195..196 "T" + Gt@196..197 ">" + Gt@197..198 ">" + FnParamList@198..212 + LParen@198..199 "(" + FnParam@199..203 + SelfKw@199..203 "self" + Comma@203..204 "," + WhiteSpace@204..205 " " + FnParam@205..211 + Ident@205..208 "rhs" + Colon@208..209 ":" + WhiteSpace@209..210 " " + PathType@210..211 
+ Path@210..211 + PathSegment@210..211 + Ident@210..211 "U" + RParen@211..212 ")" + WhiteSpace@212..213 " " + Newline@213..214 "\n" + WhiteSpace@214..222 " " + WhereClause@222..236 + WhereKw@222..227 "where" + WhiteSpace@227..228 " " + WherePredicate@228..236 + PathType@228..229 + Path@228..229 + PathSegment@228..229 + Ident@228..229 "T" + TypeBoundList@229..235 + Colon@229..230 ":" + WhiteSpace@230..231 " " + TypeBound@231..235 + Path@231..235 + PathSegment@231..235 + Ident@231..235 "Copy" + Newline@235..236 "\n" + WhiteSpace@236..240 " " + BlockExpr@240..270 + LBrace@240..241 "{" + Newline@241..242 "\n" + WhiteSpace@242..250 " " + ExprStmt@250..264 + ParenExpr@250..264 + LParen@250..251 "(" + BinExpr@251..263 + PathExpr@251..254 + Path@251..254 + PathSegment@251..254 + Ident@251..254 "rhs" + WhiteSpace@254..255 " " + Minus@255..256 "-" + WhiteSpace@256..257 " " + FieldExpr@257..263 + PathExpr@257..261 + Path@257..261 + PathSegment@257..261 + SelfKw@257..261 "self" + Dot@261..262 "." + Ident@262..263 "t" + RParen@263..264 ")" + Newline@264..265 "\n" + WhiteSpace@265..269 " " + RBrace@269..270 "}" + Newline@270..271 "\n" + RBrace@271..272 "}" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.fe b/crates/parser2/test_files/syntax_node/items/impl_trait.fe index 1010c81148..99e2b4fd20 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.fe +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.fe @@ -1,10 +1,10 @@ -impl Trait for F { +impl Trait for F { fn foo() { return 1 } } -impl Trait for F +impl Trait for F where T: Clone U: Bar { @@ -13,7 +13,8 @@ where T: Clone } } -impl Trait for F +impl Trait for F +where U: Bar { fn foo>(t: T) { do_something(t) diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index 48686870b8..a482a10541 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -3,337 +3,362 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/impl_trait.fe --- -Root@0..317 - ItemList@0..317 - ImplTrait@0..64 +Root@0..335 + ItemList@0..335 + ImplTrait@0..67 ImplKw@0..4 "impl" - WhiteSpace@4..5 " " - PathType@5..13 - Path@5..10 - PathSegment@5..10 - Ident@5..10 "Trait" - GenericArgList@10..13 - Lt@10..11 "<" - TypeGenericArg@11..12 - PathType@11..12 - Path@11..12 - PathSegment@11..12 - Ident@11..12 "T" - Gt@12..13 ">" - WhiteSpace@13..14 " " - ForKw@14..17 "for" - WhiteSpace@17..18 " " - PathType@18..22 - Path@18..19 - PathSegment@18..19 - Ident@18..19 "F" - GenericArgList@19..22 - Lt@19..20 "<" - TypeGenericArg@20..21 - PathType@20..21 - Path@20..21 - PathSegment@20..21 - Ident@20..21 "T" - Gt@21..22 ">" - WhiteSpace@22..23 " " - ImplTraitItemList@23..64 - LBrace@23..24 "{" - Newline@24..25 "\n" - WhiteSpace@25..29 " " - Fn@29..62 - FnKw@29..31 "fn" - WhiteSpace@31..32 " " - Ident@32..35 "foo" - FnParamList@35..37 - LParen@35..36 "(" - RParen@36..37 ")" - WhiteSpace@37..38 " " - BlockExpr@38..62 - LBrace@38..39 "{" - Newline@39..40 "\n" - WhiteSpace@40..48 " " - ReturnStmt@48..56 - ReturnKw@48..54 "return" - WhiteSpace@54..55 " " - LitExpr@55..56 - Lit@55..56 - Int@55..56 "1" - Newline@56..57 "\n" - WhiteSpace@57..61 " " - RBrace@61..62 "}" - Newline@62..63 "\n" - RBrace@63..64 "}" - Newline@64..66 "\n\n" - ImplTrait@66..196 - ImplKw@66..70 "impl" - WhiteSpace@70..71 " " - PathType@71..82 - Path@71..76 - 
PathSegment@71..76 - Ident@71..76 "Trait" - GenericArgList@76..82 - Lt@76..77 "<" - TypeGenericArg@77..78 - PathType@77..78 - Path@77..78 - PathSegment@77..78 - Ident@77..78 "T" - Comma@78..79 "," - WhiteSpace@79..80 " " - TypeGenericArg@80..81 - PathType@80..81 - Path@80..81 - PathSegment@80..81 - Ident@80..81 "U" - Gt@81..82 ">" - WhiteSpace@82..83 " " - ForKw@83..86 "for" - WhiteSpace@86..87 " " - PathType@87..91 - Path@87..88 - PathSegment@87..88 - Ident@87..88 "F" - GenericArgList@88..91 - Lt@88..89 "<" + GenericParamList@4..7 + Lt@4..5 "<" + TypeGenericParam@5..6 + Ident@5..6 "T" + Gt@6..7 ">" + WhiteSpace@7..8 " " + PathType@8..16 + Path@8..13 + PathSegment@8..13 + Ident@8..13 "Trait" + GenericArgList@13..16 + Lt@13..14 "<" + TypeGenericArg@14..15 + PathType@14..15 + Path@14..15 + PathSegment@14..15 + Ident@14..15 "T" + Gt@15..16 ">" + WhiteSpace@16..17 " " + ForKw@17..20 "for" + WhiteSpace@20..21 " " + PathType@21..25 + Path@21..22 + PathSegment@21..22 + Ident@21..22 "F" + GenericArgList@22..25 + Lt@22..23 "<" + TypeGenericArg@23..24 + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + Gt@24..25 ">" + WhiteSpace@25..26 " " + ImplTraitItemList@26..67 + LBrace@26..27 "{" + Newline@27..28 "\n" + WhiteSpace@28..32 " " + Fn@32..65 + FnKw@32..34 "fn" + WhiteSpace@34..35 " " + Ident@35..38 "foo" + FnParamList@38..40 + LParen@38..39 "(" + RParen@39..40 ")" + WhiteSpace@40..41 " " + BlockExpr@41..65 + LBrace@41..42 "{" + Newline@42..43 "\n" + WhiteSpace@43..51 " " + ReturnStmt@51..59 + ReturnKw@51..57 "return" + WhiteSpace@57..58 " " + LitExpr@58..59 + Lit@58..59 + Int@58..59 "1" + Newline@59..60 "\n" + WhiteSpace@60..64 " " + RBrace@64..65 "}" + Newline@65..66 "\n" + RBrace@66..67 "}" + Newline@67..69 "\n\n" + ImplTrait@69..205 + ImplKw@69..73 "impl" + GenericParamList@73..79 + Lt@73..74 "<" + TypeGenericParam@74..75 + Ident@74..75 "T" + Comma@75..76 "," + WhiteSpace@76..77 " " + TypeGenericParam@77..78 + Ident@77..78 "U" + Gt@78..79 ">" + WhiteSpace@79..80 " " + PathType@80..91 + Path@80..85 + PathSegment@80..85 + Ident@80..85 "Trait" + GenericArgList@85..91 + Lt@85..86 "<" + TypeGenericArg@86..87 + PathType@86..87 + Path@86..87 + PathSegment@86..87 + Ident@86..87 "T" + Comma@87..88 "," + WhiteSpace@88..89 " " TypeGenericArg@89..90 PathType@89..90 Path@89..90 PathSegment@89..90 - Ident@89..90 "T" + Ident@89..90 "U" Gt@90..91 ">" WhiteSpace@91..92 " " - Newline@92..93 "\n" - WhereClause@93..121 - WhereKw@93..98 "where" - WhiteSpace@98..99 " " - WherePredicate@99..108 - PathType@99..100 - Path@99..100 - PathSegment@99..100 - Ident@99..100 "T" - TypeBoundList@100..107 - Colon@100..101 ":" - WhiteSpace@101..102 " " - TypeBound@102..107 - Path@102..107 - PathSegment@102..107 - Ident@102..107 "Clone" - Newline@107..108 "\n" - WhiteSpace@108..114 " " - WherePredicate@114..121 - PathType@114..115 - Path@114..115 - PathSegment@114..115 - Ident@114..115 "U" - TypeBoundList@115..120 - Colon@115..116 ":" - WhiteSpace@116..117 " " - TypeBound@117..120 - Path@117..120 - PathSegment@117..120 - Ident@117..120 "Bar" - Newline@120..121 "\n" - ImplTraitItemList@121..196 - LBrace@121..122 "{" - Newline@122..123 "\n" - WhiteSpace@123..127 " " - Fn@127..194 - FnKw@127..129 "fn" - WhiteSpace@129..130 " " - Ident@130..133 "foo" - GenericParamList@133..151 - Lt@133..134 "<" - TypeGenericParam@134..150 - Ident@134..135 "T" - TypeBoundList@135..150 - Colon@135..136 ":" - WhiteSpace@136..137 " " - TypeBound@137..150 - Path@137..147 - PathSegment@137..147 - Ident@137..147 "OtherTrait" - 
GenericArgList@147..150 - Lt@147..148 "<" - TypeGenericArg@148..149 - PathType@148..149 - Path@148..149 - PathSegment@148..149 - Ident@148..149 "U" - Gt@149..150 ">" - Gt@150..151 ">" - FnParamList@151..157 - LParen@151..152 "(" - FnParam@152..156 - Ident@152..153 "t" - Colon@153..154 ":" - WhiteSpace@154..155 " " - PathType@155..156 - Path@155..156 - PathSegment@155..156 - Ident@155..156 "T" - RParen@156..157 ")" - WhiteSpace@157..158 " " - BlockExpr@158..194 - LBrace@158..159 "{" - Newline@159..160 "\n" - WhiteSpace@160..168 " " - ExprStmt@168..188 - CallExpr@168..188 - PathExpr@168..180 - Path@168..180 - PathSegment@168..180 - Ident@168..180 "do_something" - GenericArgList@180..185 - Lt@180..181 "<" - TypeGenericArg@181..184 - PathType@181..184 - Path@181..184 - PathSegment@181..184 - Ident@181..184 "i32" - Gt@184..185 ">" - CallArgList@185..188 - LParen@185..186 "(" - CallArg@186..187 - PathExpr@186..187 - Path@186..187 - PathSegment@186..187 - Ident@186..187 "t" - RParen@187..188 ")" - Newline@188..189 "\n" - WhiteSpace@189..193 " " - RBrace@193..194 "}" - Newline@194..195 "\n" - RBrace@195..196 "}" - Newline@196..198 "\n\n" - ImplTrait@198..317 - ImplKw@198..202 "impl" - WhiteSpace@202..203 " " - PathType@203..226 - Path@203..208 - PathSegment@203..208 - Ident@203..208 "Trait" - GenericArgList@208..226 - Lt@208..209 "<" - TypeGenericArg@209..217 - PathType@209..210 - Path@209..210 - PathSegment@209..210 - Ident@209..210 "T" - TypeBoundList@210..217 - Colon@210..211 ":" - WhiteSpace@211..212 " " - TypeBound@212..217 - Path@212..217 - PathSegment@212..217 - Ident@212..217 "Clone" - Comma@217..218 "," - WhiteSpace@218..219 " " - TypeGenericArg@219..225 - PathType@219..220 - Path@219..220 - PathSegment@219..220 - Ident@219..220 "U" - TypeBoundList@220..225 - Colon@220..221 ":" - WhiteSpace@221..222 " " - TypeBound@222..225 - Path@222..225 - PathSegment@222..225 - Ident@222..225 "Bar" - Gt@225..226 ">" - WhiteSpace@226..227 " " - ForKw@227..230 "for" - WhiteSpace@230..231 " " - PathType@231..240 - Path@231..232 - PathSegment@231..232 - Ident@231..232 "F" - GenericArgList@232..240 - Lt@232..233 "<" - TypeGenericArg@233..239 - PathType@233..234 - Path@233..234 - PathSegment@233..234 - Ident@233..234 "U" - TypeBoundList@234..239 - Colon@234..235 ":" - WhiteSpace@235..236 " " - TypeBound@236..239 - Path@236..239 - PathSegment@236..239 - Ident@236..239 "Bar" - Gt@239..240 ">" + ForKw@92..95 "for" + WhiteSpace@95..96 " " + PathType@96..100 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "F" + GenericArgList@97..100 + Lt@97..98 "<" + TypeGenericArg@98..99 + PathType@98..99 + Path@98..99 + PathSegment@98..99 + Ident@98..99 "T" + Gt@99..100 ">" + WhiteSpace@100..101 " " + Newline@101..102 "\n" + WhereClause@102..130 + WhereKw@102..107 "where" + WhiteSpace@107..108 " " + WherePredicate@108..117 + PathType@108..109 + Path@108..109 + PathSegment@108..109 + Ident@108..109 "T" + TypeBoundList@109..116 + Colon@109..110 ":" + WhiteSpace@110..111 " " + TypeBound@111..116 + Path@111..116 + PathSegment@111..116 + Ident@111..116 "Clone" + Newline@116..117 "\n" + WhiteSpace@117..123 " " + WherePredicate@123..130 + PathType@123..124 + Path@123..124 + PathSegment@123..124 + Ident@123..124 "U" + TypeBoundList@124..129 + Colon@124..125 ":" + WhiteSpace@125..126 " " + TypeBound@126..129 + Path@126..129 + PathSegment@126..129 + Ident@126..129 "Bar" + Newline@129..130 "\n" + ImplTraitItemList@130..205 + LBrace@130..131 "{" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + Fn@136..203 + FnKw@136..138 "fn" + 
WhiteSpace@138..139 " " + Ident@139..142 "foo" + GenericParamList@142..160 + Lt@142..143 "<" + TypeGenericParam@143..159 + Ident@143..144 "T" + TypeBoundList@144..159 + Colon@144..145 ":" + WhiteSpace@145..146 " " + TypeBound@146..159 + Path@146..156 + PathSegment@146..156 + Ident@146..156 "OtherTrait" + GenericArgList@156..159 + Lt@156..157 "<" + TypeGenericArg@157..158 + PathType@157..158 + Path@157..158 + PathSegment@157..158 + Ident@157..158 "U" + Gt@158..159 ">" + Gt@159..160 ">" + FnParamList@160..166 + LParen@160..161 "(" + FnParam@161..165 + Ident@161..162 "t" + Colon@162..163 ":" + WhiteSpace@163..164 " " + PathType@164..165 + Path@164..165 + PathSegment@164..165 + Ident@164..165 "T" + RParen@165..166 ")" + WhiteSpace@166..167 " " + BlockExpr@167..203 + LBrace@167..168 "{" + Newline@168..169 "\n" + WhiteSpace@169..177 " " + ExprStmt@177..197 + CallExpr@177..197 + PathExpr@177..189 + Path@177..189 + PathSegment@177..189 + Ident@177..189 "do_something" + GenericArgList@189..194 + Lt@189..190 "<" + TypeGenericArg@190..193 + PathType@190..193 + Path@190..193 + PathSegment@190..193 + Ident@190..193 "i32" + Gt@193..194 ">" + CallArgList@194..197 + LParen@194..195 "(" + CallArg@195..196 + PathExpr@195..196 + Path@195..196 + PathSegment@195..196 + Ident@195..196 "t" + RParen@196..197 ")" + Newline@197..198 "\n" + WhiteSpace@198..202 " " + RBrace@202..203 "}" + Newline@203..204 "\n" + RBrace@204..205 "}" + Newline@205..207 "\n\n" + ImplTrait@207..335 + ImplKw@207..211 "impl" + GenericParamList@211..224 + Lt@211..212 "<" + TypeGenericParam@212..220 + Ident@212..213 "T" + TypeBoundList@213..220 + Colon@213..214 ":" + WhiteSpace@214..215 " " + TypeBound@215..220 + Path@215..220 + PathSegment@215..220 + Ident@215..220 "Clone" + Comma@220..221 "," + WhiteSpace@221..222 " " + TypeGenericParam@222..223 + Ident@222..223 "U" + Gt@223..224 ">" + WhiteSpace@224..225 " " + PathType@225..236 + Path@225..230 + PathSegment@225..230 + Ident@225..230 "Trait" + GenericArgList@230..236 + Lt@230..231 "<" + TypeGenericArg@231..232 + PathType@231..232 + Path@231..232 + PathSegment@231..232 + Ident@231..232 "T" + Comma@232..233 "," + WhiteSpace@233..234 " " + TypeGenericArg@234..235 + PathType@234..235 + Path@234..235 + PathSegment@234..235 + Ident@234..235 "U" + Gt@235..236 ">" + WhiteSpace@236..237 " " + ForKw@237..240 "for" WhiteSpace@240..241 " " - Newline@241..242 "\n" - ImplTraitItemList@242..317 - LBrace@242..243 "{" - Newline@243..244 "\n" - WhiteSpace@244..248 " " - Fn@248..315 - FnKw@248..250 "fn" - WhiteSpace@250..251 " " - Ident@251..254 "foo" - GenericParamList@254..272 - Lt@254..255 "<" - TypeGenericParam@255..271 - Ident@255..256 "T" - TypeBoundList@256..271 - Colon@256..257 ":" - WhiteSpace@257..258 " " - TypeBound@258..271 - Path@258..268 - PathSegment@258..268 - Ident@258..268 "OtherTrait" - GenericArgList@268..271 - Lt@268..269 "<" - TypeGenericArg@269..270 - PathType@269..270 - Path@269..270 - PathSegment@269..270 - Ident@269..270 "U" - Gt@270..271 ">" - Gt@271..272 ">" - FnParamList@272..278 - LParen@272..273 "(" - FnParam@273..277 - Ident@273..274 "t" - Colon@274..275 ":" - WhiteSpace@275..276 " " - PathType@276..277 - Path@276..277 - PathSegment@276..277 - Ident@276..277 "T" - RParen@277..278 ")" - WhiteSpace@278..279 " " - BlockExpr@279..315 - LBrace@279..280 "{" - Newline@280..281 "\n" - WhiteSpace@281..289 " " - ExprStmt@289..309 - CallExpr@289..309 - PathExpr@289..301 - Path@289..301 - PathSegment@289..301 - Ident@289..301 "do_something" - GenericArgList@301..306 - Lt@301..302 "<" - 
TypeGenericArg@302..305 - PathType@302..305 - Path@302..305 - PathSegment@302..305 - Ident@302..305 "i32" - Gt@305..306 ">" - CallArgList@306..309 - LParen@306..307 "(" - CallArg@307..308 - PathExpr@307..308 - Path@307..308 - PathSegment@307..308 - Ident@307..308 "t" - RParen@308..309 ")" - Newline@309..310 "\n" - WhiteSpace@310..314 " " - RBrace@314..315 "}" - Newline@315..316 "\n" - RBrace@316..317 "}" + PathType@241..245 + Path@241..242 + PathSegment@241..242 + Ident@241..242 "F" + GenericArgList@242..245 + Lt@242..243 "<" + TypeGenericArg@243..244 + PathType@243..244 + Path@243..244 + PathSegment@243..244 + Ident@243..244 "U" + Gt@244..245 ">" + WhiteSpace@245..246 " " + Newline@246..247 "\n" + WhereClause@247..260 + WhereKw@247..252 "where" + WhiteSpace@252..253 " " + WherePredicate@253..260 + PathType@253..254 + Path@253..254 + PathSegment@253..254 + Ident@253..254 "U" + TypeBoundList@254..259 + Colon@254..255 ":" + WhiteSpace@255..256 " " + TypeBound@256..259 + Path@256..259 + PathSegment@256..259 + Ident@256..259 "Bar" + Newline@259..260 "\n" + ImplTraitItemList@260..335 + LBrace@260..261 "{" + Newline@261..262 "\n" + WhiteSpace@262..266 " " + Fn@266..333 + FnKw@266..268 "fn" + WhiteSpace@268..269 " " + Ident@269..272 "foo" + GenericParamList@272..290 + Lt@272..273 "<" + TypeGenericParam@273..289 + Ident@273..274 "T" + TypeBoundList@274..289 + Colon@274..275 ":" + WhiteSpace@275..276 " " + TypeBound@276..289 + Path@276..286 + PathSegment@276..286 + Ident@276..286 "OtherTrait" + GenericArgList@286..289 + Lt@286..287 "<" + TypeGenericArg@287..288 + PathType@287..288 + Path@287..288 + PathSegment@287..288 + Ident@287..288 "U" + Gt@288..289 ">" + Gt@289..290 ">" + FnParamList@290..296 + LParen@290..291 "(" + FnParam@291..295 + Ident@291..292 "t" + Colon@292..293 ":" + WhiteSpace@293..294 " " + PathType@294..295 + Path@294..295 + PathSegment@294..295 + Ident@294..295 "T" + RParen@295..296 ")" + WhiteSpace@296..297 " " + BlockExpr@297..333 + LBrace@297..298 "{" + Newline@298..299 "\n" + WhiteSpace@299..307 " " + ExprStmt@307..327 + CallExpr@307..327 + PathExpr@307..319 + Path@307..319 + PathSegment@307..319 + Ident@307..319 "do_something" + GenericArgList@319..324 + Lt@319..320 "<" + TypeGenericArg@320..323 + PathType@320..323 + Path@320..323 + PathSegment@320..323 + Ident@320..323 "i32" + Gt@323..324 ">" + CallArgList@324..327 + LParen@324..325 "(" + CallArg@325..326 + PathExpr@325..326 + Path@325..326 + PathSegment@325..326 + Ident@325..326 "t" + RParen@326..327 ")" + Newline@327..328 "\n" + WhiteSpace@328..332 " " + RBrace@332..333 "}" + Newline@333..334 "\n" + RBrace@334..335 "}" diff --git a/crates/parser2/test_files/syntax_node/items/trait.fe b/crates/parser2/test_files/syntax_node/items/trait.fe index df77c21a11..23fa735198 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.fe +++ b/crates/parser2/test_files/syntax_node/items/trait.fe @@ -19,8 +19,8 @@ pub trait Parse { fn parse(mut self, mut parser: Parser) } -impl Parser - where S: Clone +impl Parser + where S: TokenStream + Clone { pub fn parse(mut self, mut scope: T, checkpoint: Option) -> (bool, Checkpoint) { (SyntaxNode::new_root(self.builder.finish()), self.errors) diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index 2bfa70cf7c..6433173821 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs 
expression: node input_file: crates/parser2/test_files/syntax_node/items/trait.fe --- -Root@0..588 - ItemList@0..588 +Root@0..592 + ItemList@0..592 Trait@0..15 TraitKw@0..5 "trait" WhiteSpace@5..6 " " @@ -299,169 +299,174 @@ Root@0..588 Newline@353..354 "\n" RBrace@354..355 "}" Newline@355..357 "\n\n" - Impl@357..588 + Impl@357..592 ImplKw@357..361 "impl" - WhiteSpace@361..362 " " - PathType@362..384 - Path@362..368 - PathSegment@362..368 - Ident@362..368 "Parser" - GenericArgList@368..384 - Lt@368..369 "<" - TypeGenericArg@369..383 - PathType@369..370 - Path@369..370 - PathSegment@369..370 - Ident@369..370 "S" - TypeBoundList@370..383 - Colon@370..371 ":" - WhiteSpace@371..372 " " - TypeBound@372..383 - Path@372..383 - PathSegment@372..383 - Ident@372..383 "TokenStream" - Gt@383..384 ">" - WhiteSpace@384..385 " " - Newline@385..386 "\n" - WhiteSpace@386..390 " " - WhereClause@390..405 - WhereKw@390..395 "where" - WhiteSpace@395..396 " " - WherePredicate@396..405 - PathType@396..397 - Path@396..397 - PathSegment@396..397 - Ident@396..397 "S" - TypeBoundList@397..404 - Colon@397..398 ":" - WhiteSpace@398..399 " " - TypeBound@399..404 - Path@399..404 - PathSegment@399..404 - Ident@399..404 "Clone" - Newline@404..405 "\n" - ImplItemList@405..588 - LBrace@405..406 "{" - Newline@406..407 "\n" - WhiteSpace@407..411 " " - Fn@411..586 - ItemModifier@411..414 - PubKw@411..414 "pub" - WhiteSpace@414..415 " " - FnKw@415..417 "fn" - WhiteSpace@417..418 " " - Ident@418..423 "parse" - GenericParamList@423..433 - Lt@423..424 "<" - TypeGenericParam@424..432 - Ident@424..425 "T" - TypeBoundList@425..432 - Colon@425..426 ":" - WhiteSpace@426..427 " " - TypeBound@427..432 - Path@427..432 - PathSegment@427..432 - Ident@427..432 "Parse" - Gt@432..433 ">" - FnParamList@433..489 - LParen@433..434 "(" - FnParam@434..442 - MutKw@434..437 "mut" - WhiteSpace@437..438 " " - SelfKw@438..442 "self" - Comma@442..443 "," - WhiteSpace@443..444 " " - FnParam@444..456 - MutKw@444..447 "mut" - WhiteSpace@447..448 " " - Ident@448..453 "scope" - Colon@453..454 ":" - WhiteSpace@454..455 " " - PathType@455..456 - Path@455..456 - PathSegment@455..456 - Ident@455..456 "T" - Comma@456..457 "," - WhiteSpace@457..458 " " - FnParam@458..488 - Ident@458..468 "checkpoint" - Colon@468..469 ":" - WhiteSpace@469..470 " " - PathType@470..488 - Path@470..476 - PathSegment@470..476 - Ident@470..476 "Option" - GenericArgList@476..488 - Lt@476..477 "<" - TypeGenericArg@477..487 - PathType@477..487 - Path@477..487 - PathSegment@477..487 - Ident@477..487 "Checkpoint" - Gt@487..488 ">" - RParen@488..489 ")" - WhiteSpace@489..490 " " - Arrow@490..492 "->" - WhiteSpace@492..493 " " - TupleType@493..511 - LParen@493..494 "(" - PathType@494..498 - Path@494..498 - PathSegment@494..498 - Ident@494..498 "bool" - Comma@498..499 "," - WhiteSpace@499..500 " " - PathType@500..510 - Path@500..510 - PathSegment@500..510 - Ident@500..510 "Checkpoint" - RParen@510..511 ")" - WhiteSpace@511..512 " " - BlockExpr@512..586 - LBrace@512..513 "{" - Newline@513..514 "\n" - WhiteSpace@514..522 " " - ExprStmt@522..580 - TupleExpr@522..580 - LParen@522..523 "(" - CallExpr@523..566 - PathExpr@523..543 - Path@523..543 - PathSegment@523..533 - Ident@523..533 "SyntaxNode" - Colon2@533..535 "::" - PathSegment@535..543 - Ident@535..543 "new_root" - CallArgList@543..566 - LParen@543..544 "(" - CallArg@544..565 - MethodCallExpr@544..565 - FieldExpr@544..556 - PathExpr@544..548 - Path@544..548 - PathSegment@544..548 - SelfKw@544..548 "self" - Dot@548..549 "." 
- Ident@549..556 "builder" - Dot@556..557 "." - Ident@557..563 "finish" - CallArgList@563..565 - LParen@563..564 "(" - RParen@564..565 ")" - RParen@565..566 ")" - Comma@566..567 "," - WhiteSpace@567..568 " " - FieldExpr@568..579 - PathExpr@568..572 - Path@568..572 - PathSegment@568..572 - SelfKw@568..572 "self" - Dot@572..573 "." - Ident@573..579 "errors" - RParen@579..580 ")" - Newline@580..581 "\n" - WhiteSpace@581..585 " " - RBrace@585..586 "}" - Newline@586..587 "\n" - RBrace@587..588 "}" + GenericParamList@361..364 + Lt@361..362 "<" + TypeGenericParam@362..363 + Ident@362..363 "S" + Gt@363..364 ">" + WhiteSpace@364..365 " " + PathType@365..374 + Path@365..371 + PathSegment@365..371 + Ident@365..371 "Parser" + GenericArgList@371..374 + Lt@371..372 "<" + TypeGenericArg@372..373 + PathType@372..373 + Path@372..373 + PathSegment@372..373 + Ident@372..373 "S" + Gt@373..374 ">" + WhiteSpace@374..375 " " + Newline@375..376 "\n" + WhiteSpace@376..380 " " + WhereClause@380..409 + WhereKw@380..385 "where" + WhiteSpace@385..386 " " + WherePredicate@386..409 + PathType@386..387 + Path@386..387 + PathSegment@386..387 + Ident@386..387 "S" + TypeBoundList@387..408 + Colon@387..388 ":" + WhiteSpace@388..389 " " + TypeBound@389..400 + Path@389..400 + PathSegment@389..400 + Ident@389..400 "TokenStream" + WhiteSpace@400..401 " " + Plus@401..402 "+" + WhiteSpace@402..403 " " + TypeBound@403..408 + Path@403..408 + PathSegment@403..408 + Ident@403..408 "Clone" + Newline@408..409 "\n" + ImplItemList@409..592 + LBrace@409..410 "{" + Newline@410..411 "\n" + WhiteSpace@411..415 " " + Fn@415..590 + ItemModifier@415..418 + PubKw@415..418 "pub" + WhiteSpace@418..419 " " + FnKw@419..421 "fn" + WhiteSpace@421..422 " " + Ident@422..427 "parse" + GenericParamList@427..437 + Lt@427..428 "<" + TypeGenericParam@428..436 + Ident@428..429 "T" + TypeBoundList@429..436 + Colon@429..430 ":" + WhiteSpace@430..431 " " + TypeBound@431..436 + Path@431..436 + PathSegment@431..436 + Ident@431..436 "Parse" + Gt@436..437 ">" + FnParamList@437..493 + LParen@437..438 "(" + FnParam@438..446 + MutKw@438..441 "mut" + WhiteSpace@441..442 " " + SelfKw@442..446 "self" + Comma@446..447 "," + WhiteSpace@447..448 " " + FnParam@448..460 + MutKw@448..451 "mut" + WhiteSpace@451..452 " " + Ident@452..457 "scope" + Colon@457..458 ":" + WhiteSpace@458..459 " " + PathType@459..460 + Path@459..460 + PathSegment@459..460 + Ident@459..460 "T" + Comma@460..461 "," + WhiteSpace@461..462 " " + FnParam@462..492 + Ident@462..472 "checkpoint" + Colon@472..473 ":" + WhiteSpace@473..474 " " + PathType@474..492 + Path@474..480 + PathSegment@474..480 + Ident@474..480 "Option" + GenericArgList@480..492 + Lt@480..481 "<" + TypeGenericArg@481..491 + PathType@481..491 + Path@481..491 + PathSegment@481..491 + Ident@481..491 "Checkpoint" + Gt@491..492 ">" + RParen@492..493 ")" + WhiteSpace@493..494 " " + Arrow@494..496 "->" + WhiteSpace@496..497 " " + TupleType@497..515 + LParen@497..498 "(" + PathType@498..502 + Path@498..502 + PathSegment@498..502 + Ident@498..502 "bool" + Comma@502..503 "," + WhiteSpace@503..504 " " + PathType@504..514 + Path@504..514 + PathSegment@504..514 + Ident@504..514 "Checkpoint" + RParen@514..515 ")" + WhiteSpace@515..516 " " + BlockExpr@516..590 + LBrace@516..517 "{" + Newline@517..518 "\n" + WhiteSpace@518..526 " " + ExprStmt@526..584 + TupleExpr@526..584 + LParen@526..527 "(" + CallExpr@527..570 + PathExpr@527..547 + Path@527..547 + PathSegment@527..537 + Ident@527..537 "SyntaxNode" + Colon2@537..539 "::" + PathSegment@539..547 + 
Ident@539..547 "new_root" + CallArgList@547..570 + LParen@547..548 "(" + CallArg@548..569 + MethodCallExpr@548..569 + FieldExpr@548..560 + PathExpr@548..552 + Path@548..552 + PathSegment@548..552 + SelfKw@548..552 "self" + Dot@552..553 "." + Ident@553..560 "builder" + Dot@560..561 "." + Ident@561..567 "finish" + CallArgList@567..569 + LParen@567..568 "(" + RParen@568..569 ")" + RParen@569..570 ")" + Comma@570..571 "," + WhiteSpace@571..572 " " + FieldExpr@572..583 + PathExpr@572..576 + Path@572..576 + PathSegment@572..576 + SelfKw@572..576 "self" + Dot@576..577 "." + Ident@577..583 "errors" + RParen@583..584 ")" + Newline@584..585 "\n" + WhiteSpace@585..589 " " + RBrace@589..590 "}" + Newline@590..591 "\n" + RBrace@591..592 "}" From 348d1e1c9cc915b6cfb932e3189ead2d3b79ac1a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 20 Mar 2023 02:43:19 +0100 Subject: [PATCH 096/678] Add HIR lower for `Pat` --- Cargo.lock | 1 + crates/hir/Cargo.toml | 3 +- crates/hir/src/hir_def/body.rs | 35 ++++++++++---- crates/hir/src/hir_def/mod.rs | 28 ++++++++++- crates/hir/src/hir_def/pat.rs | 12 ++--- crates/hir/src/lower/body.rs | 40 ++++++++++++++-- crates/hir/src/lower/mod.rs | 45 ++++++++++++----- crates/hir/src/lower/params.rs | 2 +- crates/hir/src/lower/pat.rs | 88 ++++++++++++++++++++++++++++++++++ crates/hir/src/lower/types.rs | 5 +- crates/hir/src/span/mod.rs | 14 ++++++ 11 files changed, 239 insertions(+), 34 deletions(-) create mode 100644 crates/hir/src/lower/pat.rs diff --git a/Cargo.lock b/Cargo.lock index 9d85f662c7..daaf4aabbd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -799,6 +799,7 @@ dependencies = [ "derive_more", "fe-parser2", "num-bigint", + "num-traits", "salsa-2022", "tracing", ] diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 2a6bdee99d..b6ca16e588 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -15,4 +15,5 @@ salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } fe-parser2 = { path = "../parser2" } derive_more = "0.99" cranelift-entity = "0.91" -num-bigint = "0.4.3" \ No newline at end of file +num-bigint = "0.4.3" +num-traits = "0.2.15" diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 616a65553b..045356e2cb 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -3,27 +3,46 @@ use fe_parser2::ast::{self, Stmt}; use crate::span::HirOrigin; -use super::{Expr, ExprId, MaybeInvalid, Pat, PatId, StmtId}; +use super::{Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, StmtId}; #[salsa::tracked] pub struct Body { #[id] pub kind: BodyKind, - pub stmts: PrimaryMap>, - pub exprs: PrimaryMap>, - pub pats: PrimaryMap>, + #[return_ref] + pub stmts: BodyNodeMap>, + #[return_ref] + pub exprs: BodyNodeMap>, + #[return_ref] + pub pats: BodyNodeMap>, + + #[return_ref] + pub(crate) stmt_source_map: BodySourceMap, + #[return_ref] + pub(crate) expr_source_map: BodySourceMap, + #[return_ref] + pub(crate) pat_source_map: BodySourceMap, - pub(crate) stmt_source_map: SecondaryMap>, - pub(crate) expr_source_map: SecondaryMap>, - pub(crate) pat_source_map: SecondaryMap>, pub(crate) ast: HirOrigin, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum BodyKind { /// This is a body appearing in a item, e.g., a function or const item. 
- DefBlock(super::ItemKind), + ItemBody(ItemKind), /// This is a body appearing in array types or NamelessConst, } + +impl From> for BodyKind { + fn from(item: Option) -> Self { + match item { + Some(item) => Self::ItemBody(item), + None => Self::NamelessConst, + } + } +} + +pub type BodyNodeMap = PrimaryMap; +pub type BodySourceMap = SecondaryMap>; diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index eafd8b2d77..857602a3b4 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -35,18 +35,22 @@ impl IdentId { #[salsa::interned] pub struct IntegerId { - data: BigUint, + #[return_ref] + pub data: BigUint, } #[salsa::interned] pub struct StringId { - data: String, + /// The text of the string literal, without the quotes. + #[return_ref] + pub data: String, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum LitKind { Int(IntegerId), String(StringId), + Bool(bool), } /// This enum is used to represent a type that may be invalid in terms of the @@ -56,3 +60,23 @@ pub enum MaybeInvalid { Valid(T), Invalid, } + +impl MaybeInvalid { + pub(crate) fn valid(t: T) -> Self { + Self::Valid(t) + } + + pub(crate) fn invalid() -> Self { + Self::Invalid + } +} + +impl From> for MaybeInvalid { + fn from(value: Option) -> Self { + if let Some(value) = value { + Self::Valid(value) + } else { + Self::Invalid + } + } +} diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs index f0de3a1394..8174859f47 100644 --- a/crates/hir/src/hir_def/pat.rs +++ b/crates/hir/src/hir_def/pat.rs @@ -1,16 +1,16 @@ use cranelift_entity::entity_impl; -use super::{IdentId, LitKind, PathId}; +use super::{IdentId, LitKind, MaybeInvalid, PathId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Pat { WildCard, Rest, - Lit(LitKind), + Lit(MaybeInvalid), Tuple(Vec), - Path(PathId), - PathTuple(PathId, Vec), - Record(PathId, Vec), + Path(MaybeInvalid), + PathTuple(MaybeInvalid, Vec), + Record(MaybeInvalid, Vec), Or(PatId, PatId), } @@ -20,6 +20,6 @@ entity_impl!(PatId); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RecordPatField { - pub label: Option, + pub label: MaybeInvalid, pub pat: PatId, } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 24aadf775d..ad3d52ee5b 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -1,13 +1,45 @@ +use cranelift_entity::{PrimaryMap, SecondaryMap}; use fe_parser2::ast; -use crate::{hir_def::Body, span::FileId, HirDb}; +use crate::{ + hir_def::{Body, Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, Stmt, StmtId}, + span::{FileId, HirOrigin}, + HirDb, +}; impl Body { - pub(crate) fn from_ast_expr(db: &dyn HirDb, fid: FileId, ast: ast::Expr) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + fid: FileId, + parent_item: Option, + ast: ast::Expr, + ) -> Self { todo!() } +} - pub(crate) fn from_ast_block(db: &dyn HirDb, fid: FileId, ast: ast::BlockExpr) -> Self { - todo!() +pub(super) struct BodyCtxt<'db> { + pub(super) stmts: PrimaryMap>, + pub(super) exprs: PrimaryMap>, + pub(super) pats: PrimaryMap>, + pub(super) db: &'db dyn HirDb, + + pub(super) stmt_source_map: SecondaryMap>, + pub(super) expr_source_map: SecondaryMap>, + pub(super) pat_source_map: SecondaryMap>, + + fid: FileId, +} +impl<'db> BodyCtxt<'db> { + pub(super) fn push_pat(&mut self, pat: Option, ast: &ast::Pat) -> PatId { + let pat_id = self.pats.push(pat.into()); + self.pat_source_map[pat_id] = HirOrigin::raw(self.fid, ast); + pat_id + } + + pub(super) fn push_missing_pat(&mut 
self) -> PatId { + let pat_id = self.pats.push(None.into()); + self.pat_source_map[pat_id] = HirOrigin::none(self.fid); + pat_id } } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 6df58a1afb..efe49178f3 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1,7 +1,9 @@ -use fe_parser2::SyntaxToken; +use fe_parser2::{ast, SyntaxToken}; +use num_bigint::BigUint; +use num_traits::Num; use crate::{ - hir_def::{IdentId, MaybeInvalid}, + hir_def::{IdentId, IntegerId, LitKind, MaybeInvalid, StringId}, HirDb, }; @@ -9,6 +11,7 @@ mod attr; mod body; mod item; mod params; +mod pat; mod path; mod types; mod use_tree; @@ -23,18 +26,38 @@ impl IdentId { } } -impl MaybeInvalid { - fn invalid() -> Self { - Self::Invalid +impl LitKind { + pub(super) fn from_ast(db: &dyn HirDb, ast: &ast::Lit) -> Self { + match ast.kind() { + ast::LitKind::Int(int) => Self::Int(IntegerId::from_ast(db, &int)), + ast::LitKind::String(string) => { + let text = string.token().text(); + Self::String(StringId::new(db, text[1..text.len() - 1].to_string())) + } + ast::LitKind::Bool(bool) => match bool.token().text() { + "true" => Self::Bool(true), + "false" => Self::Bool(false), + _ => unreachable!(), + }, + } } } -impl From> for MaybeInvalid { - fn from(value: Option) -> Self { - if let Some(value) = value { - Self::Valid(value) - } else { - Self::Invalid +impl IntegerId { + fn from_ast(db: &dyn HirDb, ast: &ast::LitInt) -> Self { + let text = ast.token().text(); + // Parser ensures that the text is valid pair with a radix and a number. + if text.len() < 2 { + return Self::new(db, BigUint::from_str_radix(&text, 10).unwrap()); } + + let int = match &text[0..2] { + "0x" | "0X" => BigUint::from_str_radix(&text[2..], 16).unwrap(), + "0o" | "0O" => BigUint::from_str_radix(&text[2..], 8).unwrap(), + "0b" | "0B" => BigUint::from_str_radix(&text[2..], 2).unwrap(), + _ => BigUint::from_str_radix(&text, 10).unwrap(), + }; + + Self::new(db, int) } } diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index bb0cd7fe7c..3d5ca96f91 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -117,7 +117,7 @@ impl TypeGenericArg { impl ConstGenericArg { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericArg) -> Self { let body = if let Some(expr) = ast.expr() { - Some(Body::from_ast_expr(db, fid, expr)) + Some(Body::from_ast(db, fid, None, expr)) } else { None } diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs new file mode 100644 index 0000000000..8a56a73a57 --- /dev/null +++ b/crates/hir/src/lower/pat.rs @@ -0,0 +1,88 @@ +use fe_parser2::ast; + +use crate::hir_def::{pat::*, IdentId, LitKind, PathId}; + +use super::body::BodyCtxt; + +impl Pat { + pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: &ast::Pat) -> PatId { + let pat = match &ast.kind() { + ast::PatKind::WildCard(_) => Pat::WildCard, + + ast::PatKind::Rest(_) => Pat::Rest, + + ast::PatKind::Lit(lit_pat) => { + let lit_kind = lit_pat + .lit() + .map(|lit| LitKind::from_ast(ctxt.db, &lit)) + .into(); + Pat::Lit(lit_kind) + } + + ast::PatKind::Tuple(tup) => { + let elems = match tup.elems() { + Some(elems) => elems + .iter() + .map(|pat| Pat::push_to_body(ctxt, &pat)) + .collect(), + None => vec![], + }; + Pat::Tuple(elems) + } + + ast::PatKind::Path(path) => { + let path = PathId::maybe_from_ast(ctxt.db, path.path()); + Pat::Path(path) + } + + ast::PatKind::PathTuple(path_tup) => { + let path = PathId::maybe_from_ast(ctxt.db, path_tup.path()); + 
let elems = match path_tup.elems() { + Some(elems) => elems + .iter() + .map(|pat| Pat::push_to_body(ctxt, &pat)) + .collect(), + None => vec![], + }; + Pat::PathTuple(path, elems) + } + + ast::PatKind::Record(record) => { + let path = PathId::maybe_from_ast(ctxt.db, record.path()); + let fields = match record.fields() { + Some(fields) => fields + .iter() + .map(|f| RecordPatField::from_ast(ctxt, &f)) + .collect(), + None => vec![], + }; + Pat::Record(path, fields) + } + + ast::PatKind::Or(or) => { + let lhs = or + .lhs() + .map(|pat| Pat::push_to_body(ctxt, &pat)) + .unwrap_or_else(|| ctxt.push_missing_pat()); + let rhs = or + .rhs() + .map(|pat| Pat::push_to_body(ctxt, &pat)) + .unwrap_or_else(|| ctxt.push_missing_pat()); + Pat::Or(lhs, rhs) + } + }; + + ctxt.push_pat(pat.into(), ast) + } +} + +impl RecordPatField { + fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: &ast::RecordPatField) -> RecordPatField { + let label = IdentId::maybe_from_token(ctxt.db, ast.name()); + let pat = ast + .pat() + .map(|pat| Pat::push_to_body(ctxt, &pat)) + .unwrap_or_else(|| ctxt.push_missing_pat()); + RecordPatField { label, pat } + } +} diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 25fb9c8058..6046625c0f 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -32,7 +32,10 @@ impl TypeId { ast::TypeKind::Array(ty) => { let elem_ty = Self::maybe_from_ast(db, fid, ty.elem_ty()); - let body = ty.len().map(|ast| Body::from_ast_expr(db, fid, ast)).into(); + let body = ty + .len() + .map(|ast| Body::from_ast(db, fid, None, ast)) + .into(); TypeKind::Array(elem_ty, body) } }; diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 5922ecdedb..517967ec21 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -24,6 +24,13 @@ where kind: HirOriginKind::Raw(AstPtr::new(ast)), } } + + pub fn none(file: FileId) -> Self { + HirOrigin { + fid: file, + kind: HirOriginKind::None, + } + } } /// This enum represents the origin of the HIR node. @@ -45,6 +52,13 @@ where /// The HIR node is the result of desugaring in the lower phase from AST to /// HIR. e.g., `a += b` is desugared into `a = a + b`. Desugared(DesugaredOrigin), + + /// The HIR node is created by the compiler and not directly from the AST. + /// This is only used with `Invalid` nodes that don't have a corresponding + /// AST node. + /// e.g., the RHS of `a + ` is represented as `Invalid` node but there is no + /// corresponding origin. 
+ None, } /// This enum represents the origin of the HIR node which is desugared into From 1f36b3e6399aa072cb347672f3dc9197dfabd3b2 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 20 Mar 2023 20:48:07 +0100 Subject: [PATCH 097/678] Add HIR lower for `Stmt` --- crates/hir/src/hir_def/expr.rs | 8 +-- crates/hir/src/hir_def/stmt.rs | 6 +- crates/hir/src/lower/body.rs | 41 ++++++++--- crates/hir/src/lower/expr.rs | 44 ++++++++++++ crates/hir/src/lower/mod.rs | 2 + crates/hir/src/lower/pat.rs | 33 +++++---- crates/hir/src/lower/path.rs | 19 +++-- crates/hir/src/lower/stmt.rs | 128 +++++++++++++++++++++++++++++++++ crates/hir/src/span/mod.rs | 54 +++++++++++--- crates/parser2/src/ast/expr.rs | 2 +- crates/parser2/src/ast/stmt.rs | 6 ++ 11 files changed, 300 insertions(+), 43 deletions(-) create mode 100644 crates/hir/src/lower/expr.rs create mode 100644 crates/hir/src/lower/stmt.rs diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index ca62d8bff4..07c86b72e4 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -1,6 +1,6 @@ use cranelift_entity::entity_impl; -use super::{Body, IdentId, IntegerId, LitKind, PatId, PathId, StmtId}; +use super::{Body, IdentId, IntegerId, LitKind, MaybeInvalid, PatId, PathId, StmtId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Expr { @@ -10,14 +10,14 @@ pub enum Expr { /// /// **NOTE:** The `AugAssign` statement is desugared to a `Assign` statement /// and a `BinOp`. - Bin(ExprId, ExprId, BinOp), + Bin(ExprId, ExprId, MaybeInvalid), Un(ExprId, UnOp), /// The first `ExprId` is the callee, the second is the arguments. Call(ExprId, Vec), /// The first `ExprId` is the method receiver, the second is the method /// name, the third is the arguments. MethodCall(ExprId, IdentId, Vec), - Path(PathId), + Path(MaybeInvalid), /// The record construction expression. /// The fist `PathId` is the record type, the second is the record fields. Record(PathId, Vec<(IdentId, ExprId)>), @@ -60,7 +60,7 @@ pub enum MatchArm { MatchArm(PatId, ExprId), } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] pub enum BinOp { Arith(ArithBinOp), Comp(CompBinOp), diff --git a/crates/hir/src/hir_def/stmt.rs b/crates/hir/src/hir_def/stmt.rs index 28e060df45..e96cd18f92 100644 --- a/crates/hir/src/hir_def/stmt.rs +++ b/crates/hir/src/hir_def/stmt.rs @@ -14,10 +14,14 @@ pub enum Stmt { /// The first `PatId` is the pattern for binding which can be used in the /// for-loop body. /// - /// The second `ExprId` is the iterator expression. + /// The second `ExprId` is the iterable expression. /// /// The third `ExprId` is the for-loop body. For(PatId, ExprId, ExprId), + + /// The first `ExprId` is the condition of the while-loop. + /// The second `ExprId` is the body of the while-loop. 
+ While(ExprId, ExprId), Continue, Break, Return(Option), diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index ad3d52ee5b..544a83acb5 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -3,7 +3,7 @@ use fe_parser2::ast; use crate::{ hir_def::{Body, Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, Stmt, StmtId}, - span::{FileId, HirOrigin}, + span::{FileId, HirOrigin, HirOriginKind}, HirDb, }; @@ -23,17 +23,40 @@ pub(super) struct BodyCtxt<'db> { pub(super) exprs: PrimaryMap>, pub(super) pats: PrimaryMap>, pub(super) db: &'db dyn HirDb, + pub(super) fid: FileId, - pub(super) stmt_source_map: SecondaryMap>, - pub(super) expr_source_map: SecondaryMap>, - pub(super) pat_source_map: SecondaryMap>, - - fid: FileId, + stmt_source_map: SecondaryMap>, + expr_source_map: SecondaryMap>, + pat_source_map: SecondaryMap>, } impl<'db> BodyCtxt<'db> { - pub(super) fn push_pat(&mut self, pat: Option, ast: &ast::Pat) -> PatId { - let pat_id = self.pats.push(pat.into()); - self.pat_source_map[pat_id] = HirOrigin::raw(self.fid, ast); + pub(super) fn push_expr(&mut self, expr: Expr, origin: HirOriginKind) -> ExprId { + let expr_id = self.exprs.push(Some(expr).into()); + self.expr_source_map[expr_id] = HirOrigin::new(self.fid, origin); + expr_id + } + + pub(super) fn push_missing_expr(&mut self) -> ExprId { + let expr_id = self.exprs.push(None.into()); + self.expr_source_map[expr_id] = HirOrigin::none(self.fid); + expr_id + } + + pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: HirOriginKind) -> StmtId { + let stmt_id = self.stmts.push(Some(stmt).into()); + self.stmt_source_map[stmt_id] = HirOrigin::new(self.fid, origin); + stmt_id + } + + pub(super) fn push_pat(&mut self, pat: Pat, origin: HirOriginKind) -> PatId { + let pat_id = self.pats.push(Some(pat).into()); + self.pat_source_map[pat_id] = HirOrigin::new(self.fid, origin); + pat_id + } + + pub(super) fn push_pat_opt(&mut self, pat: Pat, origin: HirOriginKind) -> PatId { + let pat_id = self.pats.push(Some(pat).into()); + self.pat_source_map[pat_id] = HirOrigin::new(self.fid, origin); pat_id } diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs new file mode 100644 index 0000000000..dfd08bd8e0 --- /dev/null +++ b/crates/hir/src/lower/expr.rs @@ -0,0 +1,44 @@ +use fe_parser2::ast; + +use crate::hir_def::expr::*; + +use super::body::BodyCtxt; + +impl Expr { + pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: ast::Expr) -> ExprId { + todo!() + } + + pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_>, ast: Option) -> ExprId { + todo!() + } +} + +impl BinOp { + pub fn from_ast(&self, ast: ast::BinOp) -> Self { + match ast { + ast::BinOp::Arith(arith) => ArithBinOp::from_ast(arith).into(), + _ => { + todo!() + } + } + } +} + +impl ArithBinOp { + pub(super) fn from_ast(ast: ast::ArithBinOp) -> Self { + match ast { + ast::ArithBinOp::Add(_) => ArithBinOp::Add, + ast::ArithBinOp::Sub(_) => ArithBinOp::Sub, + ast::ArithBinOp::Mul(_) => ArithBinOp::Mul, + ast::ArithBinOp::Div(_) => ArithBinOp::Div, + ast::ArithBinOp::Mod(_) => ArithBinOp::Mod, + ast::ArithBinOp::Pow(_) => ArithBinOp::Pow, + ast::ArithBinOp::LShift(_) => ArithBinOp::LShift, + ast::ArithBinOp::RShift(_) => ArithBinOp::RShift, + ast::ArithBinOp::BitAnd(_) => ArithBinOp::BitAnd, + ast::ArithBinOp::BitOr(_) => ArithBinOp::BitOr, + ast::ArithBinOp::BitXor(_) => ArithBinOp::BitXor, + } + } +} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index efe49178f3..134456d483 100644 --- 
a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -9,10 +9,12 @@ use crate::{ mod attr; mod body; +mod expr; mod item; mod params; mod pat; mod path; +mod stmt; mod types; mod use_tree; diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs index 8a56a73a57..9e383eb13c 100644 --- a/crates/hir/src/lower/pat.rs +++ b/crates/hir/src/lower/pat.rs @@ -1,11 +1,14 @@ use fe_parser2::ast; -use crate::hir_def::{pat::*, IdentId, LitKind, PathId}; +use crate::{ + hir_def::{pat::*, IdentId, LitKind, PathId}, + span::HirOriginKind, +}; use super::body::BodyCtxt; impl Pat { - pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: &ast::Pat) -> PatId { + pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: ast::Pat) -> PatId { let pat = match &ast.kind() { ast::PatKind::WildCard(_) => Pat::WildCard, @@ -23,7 +26,7 @@ impl Pat { let elems = match tup.elems() { Some(elems) => elems .iter() - .map(|pat| Pat::push_to_body(ctxt, &pat)) + .map(|pat| Pat::push_to_body(ctxt, pat)) .collect(), None => vec![], }; @@ -40,7 +43,7 @@ impl Pat { let elems = match path_tup.elems() { Some(elems) => elems .iter() - .map(|pat| Pat::push_to_body(ctxt, &pat)) + .map(|pat| Pat::push_to_body(ctxt, pat)) .collect(), None => vec![], }; @@ -60,19 +63,21 @@ impl Pat { } ast::PatKind::Or(or) => { - let lhs = or - .lhs() - .map(|pat| Pat::push_to_body(ctxt, &pat)) - .unwrap_or_else(|| ctxt.push_missing_pat()); - let rhs = or - .rhs() - .map(|pat| Pat::push_to_body(ctxt, &pat)) - .unwrap_or_else(|| ctxt.push_missing_pat()); + let lhs = Self::push_to_body_opt(ctxt, or.lhs()); + let rhs = Self::push_to_body_opt(ctxt, or.rhs()); Pat::Or(lhs, rhs) } }; - ctxt.push_pat(pat.into(), ast) + ctxt.push_pat(pat, HirOriginKind::raw(&ast)) + } + + pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_>, ast: Option) -> PatId { + if let Some(ast) = ast { + Pat::push_to_body(ctxt, ast) + } else { + ctxt.push_missing_pat() + } } } @@ -81,7 +86,7 @@ impl RecordPatField { let label = IdentId::maybe_from_token(ctxt.db, ast.name()); let pat = ast .pat() - .map(|pat| Pat::push_to_body(ctxt, &pat)) + .map(|pat| Pat::push_to_body(ctxt, pat)) .unwrap_or_else(|| ctxt.push_missing_pat()); RecordPatField { label, pat } } diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 27362aadad..92236b7d54 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -1,4 +1,4 @@ -use fe_parser2::ast; +use fe_parser2::{ast, SyntaxToken}; use crate::{ hir_def::{IdentId, MaybeInvalid, PathId, PathSegment}, @@ -10,17 +10,18 @@ impl PathId { let mut segments = Vec::new(); for seg in ast.into_iter() { let segment = if seg.is_self() { - MaybeInvalid::Valid(PathSegment::Self_) + Some(PathSegment::Self_) } else if seg.is_self_ty() { - MaybeInvalid::Valid(PathSegment::SelfTy) + Some(PathSegment::SelfTy) } else if let Some(ident) = seg.ident() { - MaybeInvalid::Valid(PathSegment::Ident(IdentId::new( + Some(PathSegment::Ident(IdentId::new( db, ident.text().to_string(), ))) } else { - MaybeInvalid::invalid() - }; + None + } + .into(); segments.push(segment); } @@ -30,4 +31,10 @@ impl PathId { pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: Option) -> MaybeInvalid { ast.map(|ast| Self::from_ast(db, ast)).into() } + + pub(super) fn from_ident(db: &dyn HirDb, ast: SyntaxToken) -> Self { + let ident_id = IdentId::new(db, ast.text().to_string()); + let seg = vec![MaybeInvalid::Valid(PathSegment::Ident(ident_id))]; + Self::new(db, seg) + } } diff --git a/crates/hir/src/lower/stmt.rs 
b/crates/hir/src/lower/stmt.rs new file mode 100644 index 0000000000..6887bf1995 --- /dev/null +++ b/crates/hir/src/lower/stmt.rs @@ -0,0 +1,128 @@ +use fe_parser2::ast::{self, prelude::*}; + +use crate::{ + hir_def::{stmt::*, ArithBinOp, Expr, Pat, PathId, TypeId}, + span::{AugAssignDesugared, HirOriginKind}, +}; + +use super::body::BodyCtxt; + +impl Stmt { + pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: ast::Stmt) -> StmtId { + let (stmt, origin_kind) = match ast.kind() { + ast::StmtKind::Let(let_) => { + let pat = Pat::push_to_body_opt(ctxt, let_.pat()); + let ty = let_ + .type_annotation() + .map(|ty| TypeId::from_ast(ctxt.db, ctxt.fid, ty)); + let init = let_ + .initializer() + .map(|init| Expr::push_to_body(ctxt, init)); + (Stmt::Let(pat, ty, init), HirOriginKind::raw(&ast)) + } + ast::StmtKind::Assign(assign) => { + let lhs = assign + .pat() + .map(|pat| Pat::push_to_body(ctxt, pat)) + .unwrap_or_else(|| ctxt.push_missing_pat()); + + let rhs = assign + .expr() + .map(|expr| Expr::push_to_body(ctxt, expr)) + .unwrap_or_else(|| ctxt.push_missing_expr()); + (Stmt::Assign(lhs, rhs), HirOriginKind::raw(&ast)) + } + + ast::StmtKind::AugAssign(aug_assign) => desugar_aug_assign(ctxt, &aug_assign), + + ast::StmtKind::For(for_) => { + let bind = Pat::push_to_body_opt(ctxt, for_.pat()); + let iter = Expr::push_to_body_opt(ctxt, for_.iterable()); + let body = Expr::push_to_body_opt( + ctxt, + for_.body() + .map(|body| ast::Expr::cast(body.syntax().clone())) + .flatten(), + ); + + (Stmt::For(bind, iter, body), HirOriginKind::raw(&ast)) + } + + ast::StmtKind::While(while_) => { + let cond = Expr::push_to_body_opt(ctxt, while_.cond()); + let body = Expr::push_to_body_opt( + ctxt, + while_ + .body() + .map(|body| ast::Expr::cast(body.syntax().clone())) + .flatten(), + ); + + (Stmt::While(cond, body), HirOriginKind::raw(&ast)) + } + + ast::StmtKind::Continue(_) => (Stmt::Continue, HirOriginKind::raw(&ast)), + + ast::StmtKind::Break(_) => (Stmt::Break, HirOriginKind::raw(&ast)), + + ast::StmtKind::Return(ret) => { + let expr = ret + .has_value() + .then(|| Expr::push_to_body_opt(ctxt, ret.expr())); + (Stmt::Return(expr), HirOriginKind::raw(&ast)) + } + + ast::StmtKind::Expr(expr) => { + let expr = Expr::push_to_body_opt(ctxt, expr.expr()); + (Stmt::Expr(expr), HirOriginKind::raw(&ast)) + } + }; + + ctxt.push_stmt(stmt, origin_kind) + } +} + +fn desugar_aug_assign( + ctxt: &mut BodyCtxt<'_>, + ast: &ast::AugAssignStmt, +) -> (Stmt, HirOriginKind) { + let lhs_ident = ast.ident(); + let path = lhs_ident + .clone() + .map(|ident| PathId::from_ident(ctxt.db, ident)); + + let lhs_origin: AugAssignDesugared = lhs_ident.clone().unwrap().text_range().into(); + let lhs_pat = if let Some(path) = path { + ctxt.push_pat( + Pat::Path(Some(path).into()), + HirOriginKind::desugared(lhs_origin.clone()), + ) + } else { + ctxt.push_missing_pat() + }; + + let binop_lhs = if let Some(path) = path { + ctxt.push_expr( + Expr::Path(Some(path).into()), + HirOriginKind::desugared(lhs_origin.clone()), + ) + } else { + ctxt.push_missing_expr() + }; + + let binop_rhs = ast + .expr() + .map(|expr| Expr::push_to_body(ctxt, expr)) + .unwrap_or_else(|| ctxt.push_missing_expr()); + + let binop = ast.op().map(|op| ArithBinOp::from_ast(op).into()).into(); + let expr = ctxt.push_expr( + Expr::Bin(binop_lhs, binop_rhs, binop), + HirOriginKind::desugared(AugAssignDesugared::stmt(ast)), + ); + + ( + Stmt::Assign(lhs_pat, expr), + HirOriginKind::desugared(AugAssignDesugared::stmt(ast)), + ) +} diff --git 
a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 517967ec21..ae03561852 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,8 +1,8 @@ use std::path::PathBuf; use fe_parser2::{ - ast::{prelude::*, AstPtr}, - SyntaxNode, + ast::{self, prelude::*, AstPtr, SyntaxNodePtr}, + TextRange, }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -18,14 +18,18 @@ impl HirOrigin where T: AstNode, { - pub fn raw(fid: FileId, ast: &T) -> Self { + pub(crate) fn new(fid: FileId, origin: HirOriginKind) -> Self { + HirOrigin { fid, kind: origin } + } + + pub(crate) fn raw(fid: FileId, ast: &T) -> Self { HirOrigin { fid, - kind: HirOriginKind::Raw(AstPtr::new(ast)), + kind: HirOriginKind::raw(ast), } } - pub fn none(file: FileId) -> Self { + pub(crate) fn none(file: FileId) -> Self { HirOrigin { fid: file, kind: HirOriginKind::None, @@ -48,7 +52,7 @@ where Raw(AstPtr), /// The HIR node is created by expanding attributes. /// The `SyntaxNode` points to the callsite of the attribute. - Expanded(SyntaxNode), + Expanded(SyntaxNodePtr), /// The HIR node is the result of desugaring in the lower phase from AST to /// HIR. e.g., `a += b` is desugared into `a = a + b`. Desugared(DesugaredOrigin), @@ -61,14 +65,48 @@ where None, } +impl HirOriginKind +where + T: AstNode, +{ + pub(crate) fn raw(ast: &T) -> Self { + Self::Raw(AstPtr::new(ast)) + } + + pub(crate) fn desugared(origin: impl Into) -> Self { + Self::Desugared(origin.into()) + } +} + /// This enum represents the origin of the HIR node which is desugared into /// other HIR node kinds. // TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] pub enum DesugaredOrigin { /// The HIR node is the result of desugaring an augmented assignment /// statement. - AugAssign(AstPtr), + AugAssign(AugAssignDesugared), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +pub enum AugAssignDesugared { + /// The HIR node is the result of desugaring an augmented assignment + /// statement. + Stmt(AstPtr), + /// The `TextRange` points to the LHS of the augmented assignment statement. + Lhs(TextRange), + /// The HIR node points to the RHS of the RHS of augmented assignment. + Rhs(AstPtr), +} + +impl AugAssignDesugared { + pub(crate) fn stmt(ast: &ast::AugAssignStmt) -> Self { + Self::Stmt(AstPtr::new(ast)) + } + + pub(crate) fn rhs(ast: &ast::Expr) -> Self { + Self::Rhs(AstPtr::new(ast)) + } } /// This enum represents the file diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index 579c5c9242..ea8b24b0c0 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -301,7 +301,7 @@ impl IfExpr { /// Returns the else if expression of the if expression. pub fn else_if(&self) -> Option { - support::child(self.syntax()) + self.syntax().children().skip(1).find_map(IfExpr::cast) } } diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs index 57b359cb62..b157d5f83b 100644 --- a/crates/parser2/src/ast/stmt.rs +++ b/crates/parser2/src/ast/stmt.rs @@ -167,6 +167,12 @@ impl ReturnStmt { pub fn expr(&self) -> Option { support::child(self.syntax()) } + + /// Returns `true` if there is an expression or `Error` node after `return` + /// keyword. + pub fn has_value(&self) -> bool { + self.syntax().children().count() > 1 + } } ast_node! 
{ From 39ee15bc13cf8e848969829ca4fcfc366ba85e6e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 21 Mar 2023 01:06:25 +0100 Subject: [PATCH 098/678] Add HIR lower for `Expr` --- crates/hir/src/hir_def/expr.rs | 38 ++- crates/hir/src/lower/body.rs | 20 +- crates/hir/src/lower/expr.rs | 258 ++++++++++++++++-- crates/hir/src/lower/mod.rs | 6 +- crates/hir/src/lower/params.rs | 2 +- crates/hir/src/lower/pat.rs | 2 +- crates/hir/src/lower/types.rs | 2 +- crates/parser2/src/ast/expr.rs | 67 ++--- crates/parser2/src/ast/lit.rs | 2 +- crates/parser2/src/parser/expr_atom.rs | 30 +- .../test_files/syntax_node/exprs/match.snap | 14 +- .../syntax_node/exprs/struct_init.snap | 21 +- .../test_files/syntax_node/items/impl.snap | 7 +- 13 files changed, 341 insertions(+), 128 deletions(-) diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index 07c86b72e4..96d95234bc 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -5,38 +5,39 @@ use super::{Body, IdentId, IntegerId, LitKind, MaybeInvalid, PatId, PathId, Stmt #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Expr { Lit(LitKind), - BlockExpr(Vec), + Block(Vec), /// The first `ExprId` is the lhs, the second is the rhs. /// /// **NOTE:** The `AugAssign` statement is desugared to a `Assign` statement /// and a `BinOp`. Bin(ExprId, ExprId, MaybeInvalid), - Un(ExprId, UnOp), + Un(ExprId, MaybeInvalid), /// The first `ExprId` is the callee, the second is the arguments. - Call(ExprId, Vec), + Call(ExprId, Vec), /// The first `ExprId` is the method receiver, the second is the method /// name, the third is the arguments. - MethodCall(ExprId, IdentId, Vec), + MethodCall(ExprId, MaybeInvalid, Vec), Path(MaybeInvalid), /// The record construction expression. /// The fist `PathId` is the record type, the second is the record fields. - Record(PathId, Vec<(IdentId, ExprId)>), - Field(ExprId, FieldIndex), + RecordInit(MaybeInvalid, Vec), + Field(ExprId, MaybeInvalid), Tuple(Vec), /// The first `ExprId` is the indexed expression, the second is the index. Index(ExprId, ExprId), - ArrayExpr(Vec), + Array(Vec), /// The size of the rep should be the body instead of expression, becuase it /// should be resolved as a contatnt expressison. - ArrayRepExpr(ExprId, Body), + ArrayRep(ExprId, MaybeInvalid), /// The first `ExprId` is the condition, the second is the then branch, the /// third is the else branch. + /// In case `else if`, the third is the lowered into `If` expression. If(ExprId, ExprId, Option), /// The first `ExprId` is the scrutinee, the second is the arms. - Match(ExprId, Vec), + Match(ExprId, MaybeInvalid>), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -54,10 +55,9 @@ pub enum FieldIndex { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum MatchArm { - /// The first `Part` is the pattern, the second is - /// the arm body. 
- MatchArm(PatId, ExprId), +pub struct MatchArm { + pub pat: PatId, + pub body: ExprId, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] @@ -128,3 +128,15 @@ pub enum UnOp { /// `~` BitNot, } + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct CallArg { + pub label: Option, + pub expr: ExprId, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct RecordField { + pub label: Option, + pub expr: ExprId, +} diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 544a83acb5..1cc2c36327 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -8,14 +8,18 @@ use crate::{ }; impl Body { - pub(crate) fn from_ast( + pub(crate) fn item_body_from_ast( db: &dyn HirDb, fid: FileId, - parent_item: Option, + parent_item: ItemKind, ast: ast::Expr, ) -> Self { todo!() } + + pub(crate) fn nameless_from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Expr) -> Self { + todo!() + } } pub(super) struct BodyCtxt<'db> { @@ -36,6 +40,12 @@ impl<'db> BodyCtxt<'db> { expr_id } + pub(super) fn push_invalid_expr(&mut self, origin: HirOriginKind) -> ExprId { + let expr_id = self.exprs.push(None.into()); + self.expr_source_map[expr_id] = HirOrigin::new(self.fid, origin); + expr_id + } + pub(super) fn push_missing_expr(&mut self) -> ExprId { let expr_id = self.exprs.push(None.into()); self.expr_source_map[expr_id] = HirOrigin::none(self.fid); @@ -54,12 +64,6 @@ impl<'db> BodyCtxt<'db> { pat_id } - pub(super) fn push_pat_opt(&mut self, pat: Pat, origin: HirOriginKind) -> PatId { - let pat_id = self.pats.push(Some(pat).into()); - self.pat_source_map[pat_id] = HirOrigin::new(self.fid, origin); - pat_id - } - pub(super) fn push_missing_pat(&mut self) -> PatId { let pat_id = self.pats.push(None.into()); self.pat_source_map[pat_id] = HirOrigin::none(self.fid); diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index dfd08bd8e0..124ec77330 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -1,26 +1,185 @@ -use fe_parser2::ast; +use fe_parser2::ast::{self, prelude::*}; -use crate::hir_def::expr::*; +use crate::{ + hir_def::{expr::*, Body, IdentId, IntegerId, LitKind, MaybeInvalid, Pat, PathId, Stmt}, + span::HirOriginKind, +}; use super::body::BodyCtxt; impl Expr { pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: ast::Expr) -> ExprId { - todo!() + let expr = match ast.kind() { + ast::ExprKind::Lit(lit) => { + if let Some(lit) = lit.lit() { + let lit = LitKind::from_ast(ctxt.db, lit); + Self::Lit(lit) + } else { + return ctxt.push_invalid_expr(HirOriginKind::raw(&ast)); + } + } + + ast::ExprKind::Block(block) => { + let mut stmts = vec![]; + for stmt in block.stmts() { + let stmt = Stmt::push_to_body(ctxt, stmt); + stmts.push(stmt); + } + Self::Block(stmts) + } + + ast::ExprKind::Bin(bin) => { + let lhs = Self::push_to_body_opt(ctxt, bin.lhs()); + let rhs = Self::push_to_body_opt(ctxt, bin.rhs()); + let op = bin.op().map(|op| BinOp::from_ast(op)).into(); + Self::Bin(lhs, rhs, op) + } + + ast::ExprKind::Un(un) => { + let expr = Self::push_to_body_opt(ctxt, un.expr()); + let op = un.op().map(|op| UnOp::from_ast(op)).into(); + Self::Un(expr, op) + } + + ast::ExprKind::Call(call) => { + let callee = Self::push_to_body_opt(ctxt, call.callee()); + let args = call + .args() + .map(|args| { + args.into_iter() + .map(|arg| CallArg::from_ast(ctxt, arg)) + .collect() + }) + .unwrap_or_default(); + Self::Call(callee, args) + } + + ast::ExprKind::MethodCall(method_call) => { + let receiver = 
Self::push_to_body_opt(ctxt, method_call.receiver()); + let method_name = IdentId::maybe_from_token(ctxt.db, method_call.method_name()); + let args = method_call + .args() + .map(|args| { + args.into_iter() + .map(|arg| CallArg::from_ast(ctxt, arg)) + .collect() + }) + .unwrap_or_default(); + Self::MethodCall(receiver, method_name, args) + } + + ast::ExprKind::Path(path) => { + let path = PathId::maybe_from_ast(ctxt.db, path.path()); + Self::Path(path) + } + + ast::ExprKind::RecordInit(record_init) => { + let path = PathId::maybe_from_ast(ctxt.db, record_init.path()); + let fields = record_init + .fields() + .map(|fields| { + fields + .into_iter() + .map(|field| RecordField::from_ast(ctxt, field)) + .collect() + }) + .unwrap_or_default(); + Self::RecordInit(path, fields) + } + + ast::ExprKind::Field(field) => { + let receiver = Self::push_to_body_opt(ctxt, field.receiver()); + let field = if let Some(name) = field.field_name() { + Some(FieldIndex::Ident(IdentId::from_token(ctxt.db, name))).into() + } else if let Some(num) = field.field_index() { + Some(FieldIndex::Index(IntegerId::from_ast(ctxt.db, num))).into() + } else { + None.into() + }; + Self::Field(receiver, field) + } + + ast::ExprKind::Index(index) => { + let indexed = Self::push_to_body_opt(ctxt, index.expr()); + let index = Self::push_to_body_opt(ctxt, index.index()); + Self::Index(indexed, index) + } + + ast::ExprKind::Tuple(tup) => { + let elems = tup + .elems() + .map(|elem| Self::push_to_body_opt(ctxt, elem)) + .collect(); + + Self::Tuple(elems) + } + + ast::ExprKind::Array(array) => { + let elems = array + .elems() + .map(|elem| Self::push_to_body_opt(ctxt, elem)) + .collect(); + Self::Array(elems) + } + + ast::ExprKind::ArrayRep(array_rep) => { + let val = Self::push_to_body_opt(ctxt, array_rep.val()); + let len = array_rep + .len() + .map(|ast| Body::nameless_from_ast(ctxt.db, ctxt.fid, ast)) + .into(); + Self::ArrayRep(val, len) + } + + ast::ExprKind::If(if_) => { + let cond = Self::push_to_body_opt(ctxt, if_.cond()); + let then = Expr::push_to_body_opt( + ctxt, + if_.then() + .map(|body| ast::Expr::cast(body.syntax().clone())) + .flatten(), + ); + let else_ = if_.else_().map(|ast| Self::push_to_body(ctxt, ast)); + Self::If(cond, then, else_) + } + + ast::ExprKind::Match(match_) => { + let scrutinee = Self::push_to_body_opt(ctxt, match_.scrutinee()); + let arm = match_ + .arms() + .map(|arms| { + arms.into_iter() + .map(|arm| MatchArm::from_ast(ctxt, arm)) + .collect() + }) + .into(); + + Self::Match(scrutinee, arm) + } + + ast::ExprKind::Paren(paren) => { + return Self::push_to_body_opt(ctxt, paren.expr()); + } + }; + + ctxt.push_expr(expr, HirOriginKind::raw(&ast)) } pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_>, ast: Option) -> ExprId { - todo!() + if let Some(ast) = ast { + Expr::push_to_body(ctxt, ast) + } else { + ctxt.push_missing_expr() + } } } impl BinOp { - pub fn from_ast(&self, ast: ast::BinOp) -> Self { + pub(super) fn from_ast(ast: ast::BinOp) -> Self { match ast { ast::BinOp::Arith(arith) => ArithBinOp::from_ast(arith).into(), - _ => { - todo!() - } + ast::BinOp::Comp(arith) => CompBinOp::from_ast(arith).into(), + ast::BinOp::Logical(arith) => LogicalBinOp::from_ast(arith).into(), } } } @@ -28,17 +187,78 @@ impl BinOp { impl ArithBinOp { pub(super) fn from_ast(ast: ast::ArithBinOp) -> Self { match ast { - ast::ArithBinOp::Add(_) => ArithBinOp::Add, - ast::ArithBinOp::Sub(_) => ArithBinOp::Sub, - ast::ArithBinOp::Mul(_) => ArithBinOp::Mul, - ast::ArithBinOp::Div(_) => ArithBinOp::Div, - 
ast::ArithBinOp::Mod(_) => ArithBinOp::Mod, - ast::ArithBinOp::Pow(_) => ArithBinOp::Pow, - ast::ArithBinOp::LShift(_) => ArithBinOp::LShift, - ast::ArithBinOp::RShift(_) => ArithBinOp::RShift, - ast::ArithBinOp::BitAnd(_) => ArithBinOp::BitAnd, - ast::ArithBinOp::BitOr(_) => ArithBinOp::BitOr, - ast::ArithBinOp::BitXor(_) => ArithBinOp::BitXor, + ast::ArithBinOp::Add(_) => Self::Add, + ast::ArithBinOp::Sub(_) => Self::Sub, + ast::ArithBinOp::Mul(_) => Self::Mul, + ast::ArithBinOp::Div(_) => Self::Div, + ast::ArithBinOp::Mod(_) => Self::Mod, + ast::ArithBinOp::Pow(_) => Self::Pow, + ast::ArithBinOp::LShift(_) => Self::LShift, + ast::ArithBinOp::RShift(_) => Self::RShift, + ast::ArithBinOp::BitAnd(_) => Self::BitAnd, + ast::ArithBinOp::BitOr(_) => Self::BitOr, + ast::ArithBinOp::BitXor(_) => Self::BitXor, + } + } +} + +impl CompBinOp { + pub(super) fn from_ast(ast: ast::CompBinOp) -> Self { + match ast { + ast::CompBinOp::Eq(_) => Self::Eq, + ast::CompBinOp::NotEq(_) => Self::NotEq, + ast::CompBinOp::Lt(_) => Self::Lt, + ast::CompBinOp::LtEq(_) => Self::LtEq, + ast::CompBinOp::Gt(_) => Self::Gt, + ast::CompBinOp::GtEq(_) => Self::GtEq, + } + } +} + +impl LogicalBinOp { + pub(super) fn from_ast(ast: ast::LogicalBinOp) -> Self { + match ast { + ast::LogicalBinOp::And(_) => Self::And, + ast::LogicalBinOp::Or(_) => Self::Or, } } } + +impl UnOp { + fn from_ast(ast: ast::UnOp) -> Self { + match ast { + ast::UnOp::Plus(_) => Self::Plus, + ast::UnOp::Minus(_) => Self::Minus, + ast::UnOp::Not(_) => Self::Not, + ast::UnOp::BitNot(_) => Self::BitNot, + } + } +} + +impl MatchArm { + fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: ast::MatchArm) -> Self { + let pat = Pat::push_to_body_opt(ctxt, ast.pat()); + let body = Expr::push_to_body_opt(ctxt, ast.body()); + Self { pat, body } + } +} + +impl CallArg { + fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: ast::CallArg) -> Self { + let label = ast.label().map(|label| IdentId::from_token(ctxt.db, label)); + let expr = Expr::push_to_body_opt(ctxt, ast.expr()); + Self { label, expr } + } + + fn from_ast_opt(ctxt: &mut BodyCtxt<'_>, ast: Option) -> MaybeInvalid { + ast.map(|ast| Self::from_ast(ctxt, ast)).into() + } +} + +impl RecordField { + fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: ast::RecordField) -> Self { + let label = ast.label().map(|label| IdentId::from_token(ctxt.db, label)); + let expr = Expr::push_to_body_opt(ctxt, ast.expr()); + Self { label, expr } + } +} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 134456d483..a6fe782c64 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -29,9 +29,9 @@ impl IdentId { } impl LitKind { - pub(super) fn from_ast(db: &dyn HirDb, ast: &ast::Lit) -> Self { + pub(super) fn from_ast(db: &dyn HirDb, ast: ast::Lit) -> Self { match ast.kind() { - ast::LitKind::Int(int) => Self::Int(IntegerId::from_ast(db, &int)), + ast::LitKind::Int(int) => Self::Int(IntegerId::from_ast(db, int)), ast::LitKind::String(string) => { let text = string.token().text(); Self::String(StringId::new(db, text[1..text.len() - 1].to_string())) @@ -46,7 +46,7 @@ impl LitKind { } impl IntegerId { - fn from_ast(db: &dyn HirDb, ast: &ast::LitInt) -> Self { + pub(super) fn from_ast(db: &dyn HirDb, ast: ast::LitInt) -> Self { let text = ast.token().text(); // Parser ensures that the text is valid pair with a radix and a number. 
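// (e.g. a radix-prefixed literal such as `0x1f` versus a plain decimal `42`)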
if text.len() < 2 { diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 3d5ca96f91..b4c85df289 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -117,7 +117,7 @@ impl TypeGenericArg { impl ConstGenericArg { fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericArg) -> Self { let body = if let Some(expr) = ast.expr() { - Some(Body::from_ast(db, fid, None, expr)) + Some(Body::nameless_from_ast(db, fid, expr)) } else { None } diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs index 9e383eb13c..761c86f338 100644 --- a/crates/hir/src/lower/pat.rs +++ b/crates/hir/src/lower/pat.rs @@ -17,7 +17,7 @@ impl Pat { ast::PatKind::Lit(lit_pat) => { let lit_kind = lit_pat .lit() - .map(|lit| LitKind::from_ast(ctxt.db, &lit)) + .map(|lit| LitKind::from_ast(ctxt.db, lit)) .into(); Pat::Lit(lit_kind) } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 6046625c0f..97cbf5d618 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -34,7 +34,7 @@ impl TypeId { let elem_ty = Self::maybe_from_ast(db, fid, ty.elem_ty()); let body = ty .len() - .map(|ast| Body::from_ast(db, fid, None, ast)) + .map(|ast| Body::nameless_from_ast(db, fid, ast)) .into(); TypeKind::Array(elem_ty, body) } diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index ea8b24b0c0..7dbec5af6c 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -1,6 +1,6 @@ use rowan::ast::{support, AstNode}; -use super::{ast_node, GenericArgsOwner}; +use super::{ast_node, GenericArgsOwner, LitInt}; use crate::{SyntaxKind as SK, SyntaxNode, SyntaxToken}; ast_node! { @@ -171,7 +171,7 @@ ast_node! { } impl RecordInitExpr { /// Returns the path of the record init expression. - pub fn path(&self) -> Option { + pub fn path(&self) -> Option { support::child(self.syntax()) } @@ -198,8 +198,8 @@ impl FieldExpr { } /// Returns the index number of the field. - pub fn field_index(&self) -> Option { - support::token(self.syntax(), SK::Int) + pub fn field_index(&self) -> Option { + support::token(self.syntax(), SK::Int).map(|it| LitInt { token: it }) } } @@ -224,12 +224,11 @@ ast_node! { /// `(expr1, expr2, ..)` pub struct TupleExpr, SK::TupleExpr, - IntoIterator, } impl TupleExpr { /// Returns the expressions in the tuple. - pub fn elems(&self) -> impl Iterator { - self.iter() + pub fn elems(&self) -> impl Iterator> { + self.syntax().children().map(|node| Expr::cast(node)) } } @@ -237,12 +236,12 @@ ast_node! { /// `[expr1, expr2, ..]` pub struct ArrayExpr, SK::ArrayExpr, - IntoIterator, } impl ArrayExpr { /// Returns the expressions in the array. - pub fn elems(&self) -> impl Iterator { - self.iter() + /// Returns the expressions in the tuple. + pub fn elems(&self) -> impl Iterator> { + self.syntax().children().map(|node| Expr::cast(node)) } } @@ -253,12 +252,12 @@ ast_node! { } impl ArrayRepExpr { /// Returns the expression being repeated. - pub fn expr(&self) -> Option { + pub fn val(&self) -> Option { support::child(self.syntax()) } /// Returns the size of the array. - pub fn size(&self) -> Option { + pub fn len(&self) -> Option { support::children(self.syntax()).nth(1) } } @@ -291,17 +290,8 @@ impl IfExpr { } /// Returns the else block of the if expression. - pub fn else_(&self) -> Option { - self.syntax() - .children() - .skip(1) - .filter_map(BlockExpr::cast) - .nth(1) - } - - /// Returns the else if expression of the if expression. 
- pub fn else_if(&self) -> Option { - self.syntax().children().skip(1).find_map(IfExpr::cast) + pub fn else_(&self) -> Option { + self.syntax().children().filter_map(Expr::cast).nth(2) } } @@ -336,6 +326,7 @@ impl ParenExpr { #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum ExprKind { + Lit(LitExpr), Block(BlockExpr), Bin(BinExpr), Un(UnExpr), @@ -348,7 +339,6 @@ pub enum ExprKind { Tuple(TupleExpr), Array(ArrayExpr), ArrayRep(ArrayRepExpr), - Lit(LitExpr), If(IfExpr), Match(MatchExpr), Paren(ParenExpr), @@ -366,7 +356,7 @@ ast_node! { } impl RecordField { /// Returns the name of the field. - pub fn name(&self) -> Option { + pub fn label(&self) -> Option { support::token(self.syntax(), SK::Ident) } @@ -710,15 +700,15 @@ mod tests { for (i, field) in record_init_expr.fields().unwrap().into_iter().enumerate() { match i { 0 => { - assert_eq!(field.name().unwrap().text(), "a"); + assert_eq!(field.label().unwrap().text(), "a"); assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) } 1 => { - assert_eq!(field.name().unwrap().text(), "b"); + assert_eq!(field.label().unwrap().text(), "b"); assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) } 2 => { - assert_eq!(field.name().unwrap().text(), "c"); + assert_eq!(field.label().unwrap().text(), "c"); assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) } _ => panic!("unexpected field"), @@ -743,7 +733,7 @@ mod tests { field_expr.receiver().unwrap().kind(), ExprKind::Tuple(_) )); - assert_eq!(field_expr.field_index().unwrap().text(), "1"); + assert_eq!(field_expr.field_index().unwrap().token().text(), "1"); } #[test] @@ -751,7 +741,7 @@ mod tests { fn tuple_expr() { let tuple_expr: TupleExpr = parse_expr("(1, 2, 3)"); - for (i, expr) in tuple_expr.elems().into_iter().enumerate() { + for (i, expr) in tuple_expr.elems().into_iter().flatten().enumerate() { match i { 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), @@ -766,7 +756,7 @@ mod tests { fn array_expr() { let array_expr: ArrayExpr = parse_expr("[1, 2, 3]"); - for (i, expr) in array_expr.elems().into_iter().enumerate() { + for (i, expr) in array_expr.elems().into_iter().flatten().enumerate() { match i { 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), @@ -797,11 +787,11 @@ mod tests { let array_rep_expr: ArrayRepExpr = parse_expr("[1; 2]"); assert!(matches!( - array_rep_expr.expr().unwrap().kind(), + array_rep_expr.val().unwrap().kind(), ExprKind::Lit(_) )); assert!(matches!( - array_rep_expr.size().unwrap().kind(), + array_rep_expr.len().unwrap().kind(), ExprKind::Lit(_) )); } @@ -812,8 +802,6 @@ mod tests { let if_expr: IfExpr = parse_expr("if true { 1 } else { 2 }"); assert!(matches!(if_expr.cond().unwrap().kind(), ExprKind::Lit(_))); assert!(if_expr.then().is_some()); - assert_ne!(if_expr.then().unwrap(), if_expr.else_().unwrap(),); - assert!(if_expr.else_if().is_none()); let if_expr: IfExpr = parse_expr("if { true } { return } else { continue }"); if let ExprKind::Block(stmts) = if_expr.cond().unwrap().kind() { @@ -828,15 +816,16 @@ mod tests { if_expr.then().unwrap().into_iter().next().unwrap().kind(), crate::ast::StmtKind::Return(_) ); + let ExprKind::Block(else_) = if_expr.else_().unwrap().kind() else { + panic!("expected block statement"); + }; matches!( - if_expr.else_().unwrap().into_iter().next().unwrap().kind(), + else_.into_iter().next().unwrap().kind(), 
crate::ast::StmtKind::Return(_) ); - assert!(if_expr.else_if().is_none()); let if_expr: IfExpr = parse_expr("if false { return } else if true { continue }"); - assert!(if_expr.else_().is_none()); - assert!(if_expr.else_if().is_some()); + assert!(matches!(if_expr.else_().unwrap().kind(), ExprKind::If(_))); } #[test] diff --git a/crates/parser2/src/ast/lit.rs b/crates/parser2/src/ast/lit.rs index 86e7a7c8db..c88fcbc643 100644 --- a/crates/parser2/src/ast/lit.rs +++ b/crates/parser2/src/ast/lit.rs @@ -22,7 +22,7 @@ impl Lit { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LitInt { - token: SyntaxToken, + pub(super) token: SyntaxToken, } impl LitInt { pub fn token(&self) -> &SyntaxToken { diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 5c30183b51..9e54d3c208 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -16,7 +16,7 @@ use super::{ pub(super) fn parse_expr_atom( parser: &mut Parser, - allow_struct_init: bool, + allow_record_init: bool, ) -> (bool, Checkpoint) { use SyntaxKind::*; match parser.current_kind() { @@ -27,13 +27,7 @@ pub(super) fn parse_expr_atom( Some(LBracket) => parser.parse(ArrayScope::default(), None), Some(kind) if lit::is_lit(kind) => parser.parse(LitExprScope::default(), None), Some(kind) if path::is_path_segment(kind) => { - let (success, checkpoint) = parser.parse(PathExprScope::default(), None); - if success && parser.current_kind() == Some(LBrace) && allow_struct_init { - let (success, _) = parser.parse(RecordInitExprScope::default(), Some(checkpoint)); - (success, checkpoint) - } else { - (success, checkpoint) - } + parser.parse(PathExprScope::new(allow_record_init), None) } _ => { parser.error_and_recover("expected expression", None); @@ -181,17 +175,17 @@ impl super::Parse for LitExprScope { } } -define_scope! { PathExprScope, PathExpr, Inheritance } +define_scope! { PathExprScope{ allow_record_init: bool }, PathExpr, Inheritance } impl super::Parse for PathExprScope { fn parse(&mut self, parser: &mut Parser) { - parser.parse(path::PathScope::default(), None); - } -} - -define_scope! { RecordInitExprScope, RecordInitExpr, Inheritance } -impl super::Parse for RecordInitExprScope { - fn parse(&mut self, parser: &mut Parser) { - parser.parse(RecordFieldListScope::default(), None); + parser.with_recovery_tokens( + |parser| parser.parse(path::PathScope::default(), None), + &[SyntaxKind::LBrace], + ); + if parser.current_kind() == Some(SyntaxKind::LBrace) && self.allow_record_init { + self.set_kind(SyntaxKind::RecordInitExpr); + parser.parse(RecordFieldListScope::default(), None); + } } } @@ -226,7 +220,7 @@ define_scope! 
{ RecordFieldScope, RecordField, Inheritance } impl super::Parse for RecordFieldScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); - parser.bump_or_recover(SyntaxKind::Ident, "expected identifier", None); + parser.bump_if(SyntaxKind::Ident); if parser.bump_if(SyntaxKind::Colon) { parse_expr(parser); diff --git a/crates/parser2/test_files/syntax_node/exprs/match.snap b/crates/parser2/test_files/syntax_node/exprs/match.snap index 206420bc23..1278de74d2 100644 --- a/crates/parser2/test_files/syntax_node/exprs/match.snap +++ b/crates/parser2/test_files/syntax_node/exprs/match.snap @@ -111,10 +111,9 @@ Root@0..516 ParenExpr@89..105 LParen@89..90 "(" RecordInitExpr@90..104 - PathExpr@90..91 - Path@90..91 - PathSegment@90..91 - Ident@90..91 "S" + Path@90..91 + PathSegment@90..91 + Ident@90..91 "S" WhiteSpace@91..92 " " RecordFieldList@92..104 LBrace@92..93 "{" @@ -502,10 +501,9 @@ Root@0..516 ParenExpr@437..463 LParen@437..438 "(" RecordInitExpr@438..462 - PathExpr@438..439 - Path@438..439 - PathSegment@438..439 - Ident@438..439 "S" + Path@438..439 + PathSegment@438..439 + Ident@438..439 "S" WhiteSpace@439..440 " " RecordFieldList@440..462 LBrace@440..441 "{" diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap index e22d6fd9f5..09a3f1b5d3 100644 --- a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap @@ -5,10 +5,9 @@ input_file: crates/parser2/test_files/syntax_node/exprs/struct_init.fe --- Root@0..40 RecordInitExpr@0..13 - PathExpr@0..6 - Path@0..6 - PathSegment@0..6 - Ident@0..6 "Struct" + Path@0..6 + PathSegment@0..6 + Ident@0..6 "Struct" WhiteSpace@6..7 " " RecordFieldList@7..13 LBrace@7..8 "{" @@ -21,10 +20,9 @@ Root@0..40 RBrace@12..13 "}" Newline@13..14 "\n" RecordInitExpr@14..31 - PathExpr@14..20 - Path@14..20 - PathSegment@14..20 - Ident@14..20 "Struct" + Path@14..20 + PathSegment@14..20 + Ident@14..20 "Struct" WhiteSpace@20..21 " " RecordFieldList@21..31 LBrace@21..22 "{" @@ -45,10 +43,9 @@ Root@0..40 RBrace@30..31 "}" Newline@31..32 "\n" RecordInitExpr@32..40 - PathExpr@32..37 - Path@32..37 - PathSegment@32..37 - Ident@32..37 "Empty" + Path@32..37 + PathSegment@32..37 + Ident@32..37 "Empty" WhiteSpace@37..38 " " RecordFieldList@38..40 LBrace@38..39 "{" diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 6955f625b3..5c8744d28d 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -72,10 +72,9 @@ Root@0..272 WhiteSpace@69..77 " " ExprStmt@77..129 RecordInitExpr@77..129 - PathExpr@77..81 - Path@77..81 - PathSegment@77..81 - SelfTypeKw@77..81 "Self" + Path@77..81 + PathSegment@77..81 + SelfTypeKw@77..81 "Self" WhiteSpace@81..82 " " RecordFieldList@82..129 LBrace@82..83 "{" From 780be9e6b1d2a03f9075c2d21e3ad6ac4aa0fea6 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 21 Mar 2023 02:50:41 +0100 Subject: [PATCH 099/678] Add HIR lower for `Body` --- crates/hir/src/hir_def/body.rs | 7 +++-- crates/hir/src/hir_def/mod.rs | 8 ++--- crates/hir/src/lower/body.rs | 53 +++++++++++++++++++++++++++------- crates/hir/src/span/mod.rs | 23 +++++++++++++++ 4 files changed, 72 insertions(+), 19 deletions(-) diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 045356e2cb..cfe05f2eeb 100644 --- 
a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -1,9 +1,9 @@ use cranelift_entity::{PrimaryMap, SecondaryMap}; -use fe_parser2::ast::{self, Stmt}; +use fe_parser2::ast::{self}; use crate::span::HirOrigin; -use super::{Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, StmtId}; +use super::{Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, Stmt, StmtId}; #[salsa::tracked] pub struct Body { @@ -22,8 +22,9 @@ pub struct Body { #[return_ref] pub(crate) expr_source_map: BodySourceMap, #[return_ref] - pub(crate) pat_source_map: BodySourceMap, + pub(crate) pat_source_map: BodySourceMap, + #[return_fer] pub(crate) ast: HirOrigin, } diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 857602a3b4..fe417486b6 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -61,12 +61,8 @@ pub enum MaybeInvalid { Invalid, } -impl MaybeInvalid { - pub(crate) fn valid(t: T) -> Self { - Self::Valid(t) - } - - pub(crate) fn invalid() -> Self { +impl Default for MaybeInvalid { + fn default() -> Self { Self::Invalid } } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 1cc2c36327..c81f4c8b0c 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -1,8 +1,10 @@ -use cranelift_entity::{PrimaryMap, SecondaryMap}; use fe_parser2::ast; use crate::{ - hir_def::{Body, Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, Stmt, StmtId}, + hir_def::{ + Body, BodyKind, BodyNodeMap, BodySourceMap, Expr, ExprId, ItemKind, MaybeInvalid, Pat, + PatId, Stmt, StmtId, + }, span::{FileId, HirOrigin, HirOriginKind}, HirDb, }; @@ -14,24 +16,28 @@ impl Body { parent_item: ItemKind, ast: ast::Expr, ) -> Self { - todo!() + let mut ctxt = BodyCtxt::new(db, fid); + Expr::push_to_body(&mut ctxt, ast.clone()); + ctxt.build(BodyKind::ItemBody(parent_item), HirOrigin::raw(fid, &ast)) } pub(crate) fn nameless_from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Expr) -> Self { - todo!() + let mut ctxt = BodyCtxt::new(db, fid); + Expr::push_to_body(&mut ctxt, ast.clone()); + ctxt.build(BodyKind::NamelessConst, HirOrigin::raw(fid, &ast)) } } pub(super) struct BodyCtxt<'db> { - pub(super) stmts: PrimaryMap>, - pub(super) exprs: PrimaryMap>, - pub(super) pats: PrimaryMap>, + pub(super) stmts: BodyNodeMap>, + pub(super) exprs: BodyNodeMap>, + pub(super) pats: BodyNodeMap>, pub(super) db: &'db dyn HirDb, pub(super) fid: FileId, - stmt_source_map: SecondaryMap>, - expr_source_map: SecondaryMap>, - pat_source_map: SecondaryMap>, + stmt_source_map: BodySourceMap, + expr_source_map: BodySourceMap, + pat_source_map: BodySourceMap, } impl<'db> BodyCtxt<'db> { pub(super) fn push_expr(&mut self, expr: Expr, origin: HirOriginKind) -> ExprId { @@ -69,4 +75,31 @@ impl<'db> BodyCtxt<'db> { self.pat_source_map[pat_id] = HirOrigin::none(self.fid); pat_id } + + fn new(db: &'db dyn HirDb, fid: FileId) -> Self { + Self { + stmts: BodyNodeMap::new(), + exprs: BodyNodeMap::new(), + pats: BodyNodeMap::new(), + db, + fid, + stmt_source_map: BodySourceMap::new(), + expr_source_map: BodySourceMap::new(), + pat_source_map: BodySourceMap::new(), + } + } + + fn build(self, kind: BodyKind, origin: HirOrigin) -> Body { + Body::new( + self.db, + kind, + self.stmts, + self.exprs, + self.pats, + self.stmt_source_map, + self.expr_source_map, + self.pat_source_map, + origin, + ) + } } diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index ae03561852..1973c545ce 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -37,6 
+37,18 @@ where } } +impl Default for HirOrigin +where + T: AstNode, +{ + fn default() -> Self { + HirOrigin { + fid: FileId::invalid(), + kind: HirOriginKind::None, + } + } +} + /// This enum represents the origin of the HIR node. /// The origin has three possible kinds. /// 1. `Raw` is used for nodes that are created by the parser and not @@ -118,6 +130,17 @@ pub struct FileId { path: PathBuf, } +impl FileId { + pub(crate) fn invalid() -> Self { + use salsa::Id; + Self(Id::from_u32(Id::MAX_U32 - 1)) + } + + pub(crate) fn is_invalid(self) -> bool { + self.0 == Self::invalid().0 + } +} + #[salsa::interned] pub struct IngotId { /// A full path to the ingot root. From e4d976966f4aafb0ce63e54447e635f1acb6de3d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 21 Mar 2023 20:53:39 +0100 Subject: [PATCH 100/678] Define input --- Cargo.lock | 17 +++-- crates/hir/Cargo.toml | 3 + crates/hir/src/input.rs | 65 ++++++++++++++++++ crates/hir/src/lib.rs | 6 +- crates/hir/src/lower/body.rs | 33 +++++----- crates/hir/src/lower/expr.rs | 2 +- crates/hir/src/lower/item.rs | 117 +++++++++++++++++---------------- crates/hir/src/lower/params.rs | 72 ++++++++++---------- crates/hir/src/lower/stmt.rs | 2 +- crates/hir/src/lower/types.rs | 26 ++++---- crates/hir/src/span/mod.rs | 69 +++++-------------- 11 files changed, 224 insertions(+), 188 deletions(-) create mode 100644 crates/hir/src/input.rs diff --git a/Cargo.lock b/Cargo.lock index daaf4aabbd..99e7cec344 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -143,9 +143,9 @@ checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" [[package]] name = "camino" -version = "1.1.2" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055" +checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2" [[package]] name = "cast" @@ -683,7 +683,7 @@ dependencies = [ "pretty_assertions", "rstest", "salsa", - "semver 1.0.16", + "semver 1.0.17", "smallvec", "smol_str", "strum", @@ -795,12 +795,15 @@ dependencies = [ name = "fe-hir" version = "0.20.0-alpha" dependencies = [ + "camino", "cranelift-entity", "derive_more", "fe-parser2", "num-bigint", "num-traits", "salsa-2022", + "semver 1.0.17", + "smol_str", "tracing", ] @@ -843,7 +846,7 @@ dependencies = [ "insta", "logos", "pretty_assertions", - "semver 1.0.16", + "semver 1.0.17", "serde", "smol_str", "unescape", @@ -1810,7 +1813,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.16", + "semver 1.0.17", ] [[package]] @@ -1940,9 +1943,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" [[package]] name = "semver-parser" diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index b6ca16e588..705b714957 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -17,3 +17,6 @@ derive_more = "0.99" cranelift-entity = "0.91" num-bigint = "0.4.3" num-traits = "0.2.15" +semver = "1.0.17" +camino = "1.1.4" +smol_str = "0.1.24" diff --git a/crates/hir/src/input.rs b/crates/hir/src/input.rs new file mode 100644 index 0000000000..79a7a64c47 --- /dev/null +++ 
b/crates/hir/src/input.rs @@ -0,0 +1,65 @@ +use camino::Utf8PathBuf; +use smol_str::SmolStr; + +/// An ingot is a collection of files which are compiled together. +/// Ingot can depend on other ingots. +#[salsa::input] +pub struct Ingot { + /// A path to the ingot root directory. + #[return_ref] + pub path: Utf8PathBuf, + + /// Specifies the kind of the ingot. + pub kind: IngotKind, + + /// A version of the ingot. + #[return_ref] + pub version: Version, + + /// A list of ingots which the current ingot depends on. + #[return_ref] + pub dependency: Vec, + + /// A list of files which the current ingot contains. + #[return_ref] + pub files: Vec, +} + +#[salsa::input(constructor = __new_priv)] +pub struct File { + /// A ingot id which the file belongs to. + pub ingot: Ingot, + + /// A path to the file from the ingot root directory. + #[return_ref] + pub path: Utf8PathBuf, + + #[return_ref] + pub text: String, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum IngotKind { + /// A standalone ingot is a dummy ingot when the compiler is invoked + /// directly on a file. + StandAlone, + + /// A local ingot which is the current ingot being compiled. + Local, + + /// An external ingot which is depended on by the current ingot. + External, + + /// A std ingot. + Std, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct IngotDependency { + /// The ingot may have a alias name from the original ingot name. + pub name: SmolStr, + /// An ingot which the current ingot depends on. + pub ingot: Ingot, +} + +pub type Version = semver::Version; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index c0fef10c8b..3c6908a2fd 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,9 +1,13 @@ pub mod hir_def; +pub mod input; pub mod lower; pub mod span; #[salsa::jar(db = HirDb)] pub struct Jar( + /// Inputs + input::Ingot, + input::File, // Tracked Hir items. 
hir_def::Fn, hir_def::Struct, @@ -32,8 +36,6 @@ pub struct Jar( hir_def::ImplItemListId, hir_def::TypeId, hir_def::UseTreeId, - span::IngotId, - span::FileId, ); pub trait HirDb: salsa::DbWithJar {} diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index c81f4c8b0c..02131565d6 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -5,26 +5,27 @@ use crate::{ Body, BodyKind, BodyNodeMap, BodySourceMap, Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, Stmt, StmtId, }, - span::{FileId, HirOrigin, HirOriginKind}, + input::File, + span::{HirOrigin, HirOriginKind}, HirDb, }; impl Body { pub(crate) fn item_body_from_ast( db: &dyn HirDb, - fid: FileId, + file: File, parent_item: ItemKind, ast: ast::Expr, ) -> Self { - let mut ctxt = BodyCtxt::new(db, fid); + let mut ctxt = BodyCtxt::new(db, file); Expr::push_to_body(&mut ctxt, ast.clone()); - ctxt.build(BodyKind::ItemBody(parent_item), HirOrigin::raw(fid, &ast)) + ctxt.build(BodyKind::ItemBody(parent_item), HirOrigin::raw(file, &ast)) } - pub(crate) fn nameless_from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Expr) -> Self { - let mut ctxt = BodyCtxt::new(db, fid); + pub(crate) fn nameless_from_ast(db: &dyn HirDb, file: File, ast: ast::Expr) -> Self { + let mut ctxt = BodyCtxt::new(db, file); Expr::push_to_body(&mut ctxt, ast.clone()); - ctxt.build(BodyKind::NamelessConst, HirOrigin::raw(fid, &ast)) + ctxt.build(BodyKind::NamelessConst, HirOrigin::raw(file, &ast)) } } @@ -33,7 +34,7 @@ pub(super) struct BodyCtxt<'db> { pub(super) exprs: BodyNodeMap>, pub(super) pats: BodyNodeMap>, pub(super) db: &'db dyn HirDb, - pub(super) fid: FileId, + pub(super) file: File, stmt_source_map: BodySourceMap, expr_source_map: BodySourceMap, @@ -42,47 +43,47 @@ pub(super) struct BodyCtxt<'db> { impl<'db> BodyCtxt<'db> { pub(super) fn push_expr(&mut self, expr: Expr, origin: HirOriginKind) -> ExprId { let expr_id = self.exprs.push(Some(expr).into()); - self.expr_source_map[expr_id] = HirOrigin::new(self.fid, origin); + self.expr_source_map[expr_id] = HirOrigin::new(self.file, origin); expr_id } pub(super) fn push_invalid_expr(&mut self, origin: HirOriginKind) -> ExprId { let expr_id = self.exprs.push(None.into()); - self.expr_source_map[expr_id] = HirOrigin::new(self.fid, origin); + self.expr_source_map[expr_id] = HirOrigin::new(self.file, origin); expr_id } pub(super) fn push_missing_expr(&mut self) -> ExprId { let expr_id = self.exprs.push(None.into()); - self.expr_source_map[expr_id] = HirOrigin::none(self.fid); + self.expr_source_map[expr_id] = HirOrigin::none(self.file); expr_id } pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: HirOriginKind) -> StmtId { let stmt_id = self.stmts.push(Some(stmt).into()); - self.stmt_source_map[stmt_id] = HirOrigin::new(self.fid, origin); + self.stmt_source_map[stmt_id] = HirOrigin::new(self.file, origin); stmt_id } pub(super) fn push_pat(&mut self, pat: Pat, origin: HirOriginKind) -> PatId { let pat_id = self.pats.push(Some(pat).into()); - self.pat_source_map[pat_id] = HirOrigin::new(self.fid, origin); + self.pat_source_map[pat_id] = HirOrigin::new(self.file, origin); pat_id } pub(super) fn push_missing_pat(&mut self) -> PatId { let pat_id = self.pats.push(None.into()); - self.pat_source_map[pat_id] = HirOrigin::none(self.fid); + self.pat_source_map[pat_id] = HirOrigin::none(self.file); pat_id } - fn new(db: &'db dyn HirDb, fid: FileId) -> Self { + fn new(db: &'db dyn HirDb, file: File) -> Self { Self { stmts: BodyNodeMap::new(), exprs: BodyNodeMap::new(), pats: 
BodyNodeMap::new(), db, - fid, + file, stmt_source_map: BodySourceMap::new(), expr_source_map: BodySourceMap::new(), pat_source_map: BodySourceMap::new(), diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index 124ec77330..6f70d56f7b 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -126,7 +126,7 @@ impl Expr { let val = Self::push_to_body_opt(ctxt, array_rep.val()); let len = array_rep .len() - .map(|ast| Body::nameless_from_ast(ctxt.db, ctxt.fid, ast)) + .map(|ast| Body::nameless_from_ast(ctxt.db, ctxt.file, ast)) .into(); Self::ArrayRep(val, len) } diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 80b3515c5e..87e5beb83a 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -5,24 +5,25 @@ use crate::{ item::*, AttrListId, FnParamListId, GenericParamListId, IdentId, TraitRef, TypeId, UseTreeId, WhereClauseId, }, - span::{FileId, HirOrigin}, + input::File, + span::HirOrigin, HirDb, }; impl Fn { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Fn) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Fn) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let generic_paramas = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); + let generic_paramas = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); let params = ast .params() - .map(|params| FnParamListId::from_ast(db, fid, params)) + .map(|params| FnParamListId::from_ast(db, file, params)) .into(); - let ret_ty = ast.ret_ty().map(|ty| TypeId::from_ast(db, fid, ty)); + let ret_ty = ast.ret_ty().map(|ty| TypeId::from_ast(db, file, ty)); let modifier = ItemModifier::from_ast(db, ast.modifier()); - let origin = HirOrigin::raw(fid, &ast); + let origin = HirOrigin::raw(file, &ast); Self::new( db, @@ -39,15 +40,15 @@ impl Fn { } impl Struct { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Struct) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Struct) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_paramas = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); - let fields = RecordFieldListId::from_ast_opt(db, fid, ast.fields()); - let origin = HirOrigin::raw(fid, &ast); + let generic_paramas = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); + let fields = RecordFieldListId::from_ast_opt(db, file, ast.fields()); + let origin = HirOrigin::raw(file, &ast); Self::new( db, @@ -63,28 +64,28 @@ impl Struct { } impl Contract { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Contract) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Contract) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let fields = RecordFieldListId::from_ast_opt(db, fid, ast.fields()); - let origin = 
HirOrigin::raw(fid, &ast); + let fields = RecordFieldListId::from_ast_opt(db, file, ast.fields()); + let origin = HirOrigin::raw(file, &ast); Self::new(db, name, attributes, is_pub, fields, origin) } } impl Enum { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Enum) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Enum) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); - let variants = EnumVariantListId::from_ast_opt(db, fid, ast.variants()); - let origin = HirOrigin::raw(fid, &ast); + let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); + let variants = EnumVariantListId::from_ast_opt(db, file, ast.variants()); + let origin = HirOrigin::raw(file, &ast); Self::new( db, @@ -100,15 +101,15 @@ impl Enum { } impl TypeAlias { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeAlias) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::TypeAlias) -> Self { let name = IdentId::maybe_from_token(db, ast.alias()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); - let origin = HirOrigin::raw(fid, &ast); + let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + let origin = HirOrigin::raw(file, &ast); Self::new( db, @@ -124,27 +125,27 @@ impl TypeAlias { } impl Impl { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Impl) -> Self { - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Impl) -> Self { + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); - let origin = HirOrigin::raw(fid, &ast); + let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); + let origin = HirOrigin::raw(file, &ast); Self::new(db, ty, attributes, generic_params, where_clause, origin) } } impl Trait { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Trait) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Trait) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); - let origin = HirOrigin::raw(fid, &ast); + let 
generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); + let origin = HirOrigin::raw(file, &ast); Self::new( db, @@ -159,14 +160,14 @@ impl Trait { } impl ImplTrait { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ImplTrait) -> Self { - let trait_ref = TraitRef::maybe_from_ast(db, fid, ast.trait_ref()); - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::ImplTrait) -> Self { + let trait_ref = TraitRef::maybe_from_ast(db, file, ast.trait_ref()); + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let generic_params = GenericParamListId::from_ast_opt(db, fid, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, fid, ast.where_clause()); - let origin = HirOrigin::raw(fid, &ast); + let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); + let origin = HirOrigin::raw(file, &ast); Self::new( db, @@ -181,25 +182,25 @@ impl ImplTrait { } impl Const { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Const) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Const) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); - let origin = HirOrigin::raw(fid, &ast); + let origin = HirOrigin::raw(file, &ast); Self::new(db, name, origin) } } impl Use { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Use) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Use) -> Self { let tree = UseTreeId::maybe_from_ast(db, ast.use_tree()); - let origin = HirOrigin::raw(fid, &ast); + let origin = HirOrigin::raw(file, &ast); Self::new(db, tree, origin) } } impl Extern { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Extern) -> Self { - let origin = HirOrigin::raw(fid, &ast); + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Extern) -> Self { + let origin = HirOrigin::raw(file, &ast); Self::new(db, origin) } @@ -221,24 +222,24 @@ impl ItemModifier { } impl RecordFieldListId { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::RecordFieldDefList) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::RecordFieldDefList) -> Self { let fields = ast .into_iter() - .map(|field| RecordField::from_ast(db, fid, field)) + .map(|field| RecordField::from_ast(db, file, field)) .collect(); Self::new(db, fields) } - fn from_ast_opt(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { - ast.map(|ast| Self::from_ast(db, fid, ast)) + fn from_ast_opt(db: &dyn HirDb, file: File, ast: Option) -> Self { + ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or(Self::new(db, Vec::new())) } } impl RecordField { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::RecordFieldDef) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::RecordFieldDef) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); let is_pub = ast.pub_kw().is_some(); Self { name, ty, is_pub } @@ -246,24 +247,24 @@ impl RecordField { } impl EnumVariantListId { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::EnumVariantDefList) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::EnumVariantDefList) -> Self { let variants = ast 
.into_iter() - .map(|variant| EnumVariant::from_ast(db, fid, variant)) + .map(|variant| EnumVariant::from_ast(db, file, variant)) .collect(); Self::new(db, variants) } - fn from_ast_opt(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { - ast.map(|ast| Self::from_ast(db, fid, ast)) + fn from_ast_opt(db: &dyn HirDb, file: File, ast: Option) -> Self { + ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or(Self::new(db, Vec::new())) } } impl EnumVariant { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::EnumVariantDef) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::EnumVariantDef) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); - let ty = ast.ty().map(|ty| TypeId::from_ast(db, fid, ty)); + let ty = ast.ty().map(|ty| TypeId::from_ast(db, file, ty)); Self { name, ty } } diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index b4c85df289..ec7dd0d047 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -2,82 +2,82 @@ use fe_parser2::ast::{self}; use crate::{ hir_def::{params::*, Body, IdentId, PathId, TypeId}, - span::FileId, + input::File, HirDb, }; impl GenericArgListId { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericArgList) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericArgList) -> Self { let args = ast .into_iter() - .map(|arg| GenericArg::from_ast(db, fid, arg)) + .map(|arg| GenericArg::from_ast(db, file, arg)) .collect(); Self::new(db, args) } pub(crate) fn from_ast_opt( db: &dyn HirDb, - fid: FileId, + file: File, ast: Option, ) -> Self { - ast.map(|ast| Self::from_ast(db, fid, ast)) + ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or_else(|| Self::new(db, Vec::new())) } } impl GenericParamListId { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericParamList) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericParamList) -> Self { let params = ast .into_iter() - .map(|param| GenericParam::from_ast(db, fid, param)) + .map(|param| GenericParam::from_ast(db, file, param)) .collect(); Self::new(db, params) } pub(crate) fn from_ast_opt( db: &dyn HirDb, - fid: FileId, + file: File, ast: Option, ) -> Self { - ast.map(|ast| Self::from_ast(db, fid, ast)) + ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or_else(|| Self::new(db, Vec::new())) } } impl FnParamListId { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::FnParamList) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::FnParamList) -> Self { let params = ast .into_iter() - .map(|param| FnParam::from_ast(db, fid, param)) + .map(|param| FnParam::from_ast(db, file, param)) .collect(); Self::new(db, params) } } impl WhereClauseId { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::WhereClause) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::WhereClause) -> Self { let predicates = ast .into_iter() - .map(|pred| WherePredicate::from_ast(db, fid, pred)) + .map(|pred| WherePredicate::from_ast(db, file, pred)) .collect(); Self::new(db, predicates) } - pub(crate) fn from_ast_opt(db: &dyn HirDb, fid: FileId, ast: Option) -> Self { - ast.map(|ast| Self::from_ast(db, fid, ast)) + pub(crate) fn from_ast_opt(db: &dyn HirDb, file: File, ast: Option) -> Self { + ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or_else(|| Self::new(db, Vec::new())) } } impl TypeGenericParam { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeGenericParam) -> Self { + fn 
from_ast(db: &dyn HirDb, file: File, ast: ast::TypeGenericParam) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let bounds = ast .bounds() .map(|bounds| { bounds .into_iter() - .map(|bound| TypeBound::from_ast(db, fid, bound)) + .map(|bound| TypeBound::from_ast(db, file, bound)) .collect() }) .unwrap_or_default(); @@ -87,37 +87,37 @@ impl TypeGenericParam { } impl ConstGenericParam { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericParam) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::ConstGenericParam) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); Self { name, ty } } } impl GenericArg { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericArg) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericArg) -> Self { match ast.kind() { ast::GenericArgKind::Type(type_param) => { - TypeGenericArg::from_ast(db, fid, type_param).into() + TypeGenericArg::from_ast(db, file, type_param).into() } ast::GenericArgKind::Const(const_param) => { - ConstGenericArg::from_ast(db, fid, const_param).into() + ConstGenericArg::from_ast(db, file, const_param).into() } } } } impl TypeGenericArg { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::TypeGenericArg) -> Self { - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + fn from_ast(db: &dyn HirDb, file: File, ast: ast::TypeGenericArg) -> Self { + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); Self { ty } } } impl ConstGenericArg { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::ConstGenericArg) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::ConstGenericArg) -> Self { let body = if let Some(expr) = ast.expr() { - Some(Body::nameless_from_ast(db, fid, expr)) + Some(Body::nameless_from_ast(db, file, expr)) } else { None } @@ -128,24 +128,24 @@ impl ConstGenericArg { } impl GenericParam { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::GenericParam) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericParam) -> Self { match ast.kind() { ast::GenericParamKind::Type(type_param) => { - TypeGenericParam::from_ast(db, fid, type_param).into() + TypeGenericParam::from_ast(db, file, type_param).into() } ast::GenericParamKind::Const(const_param) => { - ConstGenericParam::from_ast(db, fid, const_param).into() + ConstGenericParam::from_ast(db, file, const_param).into() } } } } impl FnParam { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::FnParam) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::FnParam) -> Self { let is_mut = ast.mut_token().is_some(); let label = ast.label().map(|ast| FnParamLabel::from_ast(db, ast)); let name = ast.name().map(|ast| FnParamName::from_ast(db, ast)).into(); - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); Self { is_mut, @@ -157,14 +157,14 @@ impl FnParam { } impl WherePredicate { - fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::WherePredicate) -> Self { - let ty = TypeId::maybe_from_ast(db, fid, ast.ty()); + fn from_ast(db: &dyn HirDb, file: File, ast: ast::WherePredicate) -> Self { + let ty = TypeId::maybe_from_ast(db, file, ast.ty()); let bounds = ast .bounds() .map(|bounds| { bounds .into_iter() - .map(|bound| TypeBound::from_ast(db, fid, bound)) + .map(|bound| TypeBound::from_ast(db, file, bound)) .collect() }) .unwrap_or_default(); @@ -173,11 +173,11 @@ impl WherePredicate { } impl TypeBound { - fn from_ast(db: 
&dyn HirDb, fid: FileId, ast: ast::TypeBound) -> Self { + fn from_ast(db: &dyn HirDb, file: File, ast: ast::TypeBound) -> Self { let path = ast.path().map(|ast| PathId::from_ast(db, ast)).into(); let generic_args = ast .generic_args() - .map(|args| GenericArgListId::from_ast(db, fid, args)); + .map(|args| GenericArgListId::from_ast(db, file, args)); Self { path, generic_args } } } diff --git a/crates/hir/src/lower/stmt.rs b/crates/hir/src/lower/stmt.rs index 6887bf1995..bf4a3ecd76 100644 --- a/crates/hir/src/lower/stmt.rs +++ b/crates/hir/src/lower/stmt.rs @@ -14,7 +14,7 @@ impl Stmt { let pat = Pat::push_to_body_opt(ctxt, let_.pat()); let ty = let_ .type_annotation() - .map(|ty| TypeId::from_ast(ctxt.db, ctxt.fid, ty)); + .map(|ty| TypeId::from_ast(ctxt.db, ctxt.file, ty)); let init = let_ .initializer() .map(|init| Expr::push_to_body(ctxt, init)); diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 97cbf5d618..784253f0e3 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -2,21 +2,21 @@ use fe_parser2::ast::{self, prelude::*}; use crate::{ hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TraitRef, TypeId, TypeKind}, - span::FileId, + input::File, HirDb, }; impl TypeId { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::Type) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Type) -> Self { let kind = match ast.kind() { ast::TypeKind::Ptr(ty) => { - let inner = Self::maybe_from_ast(db, fid, ty.inner()); + let inner = Self::maybe_from_ast(db, file, ty.inner()); TypeKind::Ptr(inner) } ast::TypeKind::Path(ty) => { let path = PathId::maybe_from_ast(db, ty.path()).into(); - let generic_args = GenericArgListId::from_ast_opt(db, fid, ty.generic_args()); + let generic_args = GenericArgListId::from_ast_opt(db, file, ty.generic_args()); TypeKind::Path(path, generic_args.into()) } @@ -25,16 +25,16 @@ impl TypeId { ast::TypeKind::Tuple(ty) => { let mut elem_tys = Vec::new(); for elem in ty { - elem_tys.push(Some(TypeId::from_ast(db, fid, elem)).into()); + elem_tys.push(Some(TypeId::from_ast(db, file, elem)).into()); } TypeKind::Tuple(elem_tys) } ast::TypeKind::Array(ty) => { - let elem_ty = Self::maybe_from_ast(db, fid, ty.elem_ty()); + let elem_ty = Self::maybe_from_ast(db, file, ty.elem_ty()); let body = ty .len() - .map(|ast| Body::nameless_from_ast(db, fid, ast)) + .map(|ast| Body::nameless_from_ast(db, file, ast)) .into(); TypeKind::Array(elem_ty, body) } @@ -45,25 +45,25 @@ impl TypeId { pub(crate) fn maybe_from_ast( db: &dyn HirDb, - fid: FileId, + file: File, ast: Option, ) -> MaybeInvalid { - ast.map(|ast| Self::from_ast(db, fid, ast)).into() + ast.map(|ast| Self::from_ast(db, file, ast)).into() } } impl TraitRef { - pub(crate) fn from_ast(db: &dyn HirDb, fid: FileId, ast: ast::PathType) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::PathType) -> Self { let path = PathId::maybe_from_ast(db, ast.path()).into(); - let generic_args = GenericArgListId::from_ast_opt(db, fid, ast.generic_args()); + let generic_args = GenericArgListId::from_ast_opt(db, file, ast.generic_args()); Self { path, generic_args } } pub(crate) fn maybe_from_ast( db: &dyn HirDb, - fid: FileId, + file: File, ast: Option, ) -> MaybeInvalid { - ast.map(|ast| Self::from_ast(db, fid, ast)).into() + ast.map(|ast| Self::from_ast(db, file, ast)).into() } } diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 1973c545ce..142d1f6b4b 100644 --- a/crates/hir/src/span/mod.rs +++ 
b/crates/hir/src/span/mod.rs @@ -1,16 +1,16 @@ -use std::path::PathBuf; - use fe_parser2::{ ast::{self, prelude::*, AstPtr, SyntaxNodePtr}, TextRange, }; +use crate::input::File; + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HirOrigin where T: AstNode, { - pub fid: FileId, + pub file: Option, pub kind: HirOriginKind, } @@ -18,20 +18,23 @@ impl HirOrigin where T: AstNode, { - pub(crate) fn new(fid: FileId, origin: HirOriginKind) -> Self { - HirOrigin { fid, kind: origin } + pub(crate) fn new(file: File, origin: HirOriginKind) -> Self { + HirOrigin { + file: Some(file), + kind: origin, + } } - pub(crate) fn raw(fid: FileId, ast: &T) -> Self { + pub(crate) fn raw(file: File, ast: &T) -> Self { HirOrigin { - fid, + file: Some(file), kind: HirOriginKind::raw(ast), } } - pub(crate) fn none(file: FileId) -> Self { + pub(crate) fn none(file: File) -> Self { HirOrigin { - fid: file, + file: Some(file), kind: HirOriginKind::None, } } @@ -41,9 +44,10 @@ impl Default for HirOrigin where T: AstNode, { + /// The `Default` implementation is necessary for fn default() -> Self { - HirOrigin { - fid: FileId::invalid(), + Self { + file: None, kind: HirOriginKind::None, } } @@ -120,46 +124,3 @@ impl AugAssignDesugared { Self::Rhs(AstPtr::new(ast)) } } - -/// This enum represents the file -#[salsa::interned] -pub struct FileId { - /// A ingot id which the file belongs to. - ingot: IngotId, - /// A relative path from the ingot root. - path: PathBuf, -} - -impl FileId { - pub(crate) fn invalid() -> Self { - use salsa::Id; - Self(Id::from_u32(Id::MAX_U32 - 1)) - } - - pub(crate) fn is_invalid(self) -> bool { - self.0 == Self::invalid().0 - } -} - -#[salsa::interned] -pub struct IngotId { - /// A full path to the ingot root. - path: PathBuf, - kind: IngotKind, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum IngotKind { - /// A standalone ingot is a dummy ingot when the compiler is invoked - /// directly on a file. - StandAlone, - - /// A local ingot which is the current ingot being compiled. - Local, - - /// An external ingot which is depended on by the current ingot. - External, - - /// A std ingot. - Std, -} From ce2df4a6916730d94fc5e8c617e20a5678232335 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 22 Mar 2023 00:05:42 +0100 Subject: [PATCH 101/678] Add Id types for more fine-grained reuse of salsa query results --- crates/hir/src/hir_def/body.rs | 24 +++----- crates/hir/src/hir_def/item.rs | 54 +++++++++++++++- crates/hir/src/lower/body.rs | 38 ++++++++---- crates/hir/src/lower/expr.rs | 4 +- crates/hir/src/lower/item.rs | 109 +++++++++++++++++++++++++++------ crates/hir/src/lower/params.rs | 2 +- crates/hir/src/lower/types.rs | 2 +- 7 files changed, 183 insertions(+), 50 deletions(-) diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index cfe05f2eeb..3213bffcf3 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -3,12 +3,12 @@ use fe_parser2::ast::{self}; use crate::span::HirOrigin; -use super::{Expr, ExprId, ItemKind, MaybeInvalid, Pat, PatId, Stmt, StmtId}; +use super::{Expr, ExprId, MaybeInvalid, Pat, PatId, Stmt, StmtId, TrackedItemId}; #[salsa::tracked] pub struct Body { #[id] - pub kind: BodyKind, + id: TrackedBodyId, #[return_ref] pub stmts: BodyNodeMap>, @@ -28,21 +28,11 @@ pub struct Body { pub(crate) ast: HirOrigin, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum BodyKind { - /// This is a body appearing in a item, e.g., a function or const item.
- ItemBody(ItemKind), - /// This is a body appearing in array types or - NamelessConst, -} - -impl From> for BodyKind { - fn from(item: Option) -> Self { - match item { - Some(item) => Self::ItemBody(item), - None => Self::NamelessConst, - } - } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TrackedBodyId { + ItemBody(Box), + NestedBody(Box), + NamelessBody, } pub type BodyNodeMap = PrimaryMap; diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index e4a4976aa3..c6ddc03a3c 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -13,6 +13,8 @@ use super::{ #[salsa::tracked] pub struct Fn { #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub attributes: AttrListId, @@ -28,6 +30,8 @@ pub struct Fn { #[salsa::tracked] pub struct Struct { #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub attributes: AttrListId, @@ -42,6 +46,8 @@ pub struct Struct { #[salsa::tracked] pub struct Contract { #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub attributes: AttrListId, @@ -54,6 +60,8 @@ pub struct Contract { #[salsa::tracked] pub struct Enum { #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub attributes: AttrListId, @@ -68,6 +76,8 @@ pub struct Enum { #[salsa::tracked] pub struct TypeAlias { #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub attributes: AttrListId, @@ -82,6 +92,8 @@ pub struct TypeAlias { #[salsa::tracked] pub struct Impl { #[id] + id: TrackedItemId, + pub ty: super::MaybeInvalid, pub attributes: AttrListId, @@ -94,6 +106,8 @@ pub struct Impl { #[salsa::tracked] pub struct Trait { #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub attributes: AttrListId, @@ -107,8 +121,9 @@ pub struct Trait { #[salsa::tracked] pub struct ImplTrait { #[id] + id: TrackedItemId, + pub trait_ref: MaybeInvalid, - #[id] pub ty: MaybeInvalid, pub attributes: AttrListId, @@ -121,6 +136,8 @@ pub struct ImplTrait { #[salsa::tracked] pub struct Const { #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub(crate) origin: HirOrigin, @@ -129,6 +146,8 @@ pub struct Const { #[salsa::tracked] pub struct Use { #[id] + id: TrackedItemId, + pub tree: MaybeInvalid, pub(crate) origin: HirOrigin, @@ -136,6 +155,9 @@ pub struct Use { #[salsa::tracked] pub struct Extern { + #[id] + id: TrackedItemId, + pub(crate) origin: HirOrigin, } @@ -202,3 +224,33 @@ pub struct ImplItemListId { pub type TraitItemListId = ImplItemListId; pub type ImplTraitItemListId = ImplItemListId; pub type ExternItemListId = ImplItemListId; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TrackedItemId { + Fn(MaybeInvalid), + Struct(MaybeInvalid), + Contract(MaybeInvalid), + Enum(MaybeInvalid), + TypeAlias(MaybeInvalid), + Impl(MaybeInvalid), + Trait(MaybeInvalid), + ImplTrait(MaybeInvalid, MaybeInvalid), + Const(MaybeInvalid), + Use(MaybeInvalid), + Extern, + Joined(Box, Box), +} + +impl TrackedItemId { + pub(crate) fn join(self, rhs: Self) -> Self { + Self::Joined(self.into(), rhs.into()) + } + + pub(crate) fn join_opt(self, rhs: Option) -> Self { + if let Some(rhs) = rhs { + self.join(rhs) + } else { + self + } + } +} diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 02131565d6..ecd7050bf4 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -2,8 +2,8 @@ use fe_parser2::ast; use crate::{ hir_def::{ - Body, BodyKind, BodyNodeMap, BodySourceMap, Expr, ExprId, ItemKind, MaybeInvalid, Pat, - PatId, Stmt, StmtId, + Body, BodyNodeMap, BodySourceMap, Expr, ExprId, MaybeInvalid, Pat, PatId, 
Stmt, StmtId, + TrackedBodyId, TrackedItemId, }, input::File, span::{HirOrigin, HirOriginKind}, @@ -14,18 +14,32 @@ impl Body { pub(crate) fn item_body_from_ast( db: &dyn HirDb, file: File, - parent_item: ItemKind, + parent_id: TrackedItemId, ast: ast::Expr, ) -> Self { - let mut ctxt = BodyCtxt::new(db, file); + let bid = TrackedBodyId::ItemBody(parent_id.into()); + let mut ctxt = BodyCtxt::new(db, file, bid); Expr::push_to_body(&mut ctxt, ast.clone()); - ctxt.build(BodyKind::ItemBody(parent_item), HirOrigin::raw(file, &ast)) + ctxt.build(HirOrigin::raw(file, &ast)) } - pub(crate) fn nameless_from_ast(db: &dyn HirDb, file: File, ast: ast::Expr) -> Self { - let mut ctxt = BodyCtxt::new(db, file); + pub(crate) fn nested_body_from_ast( + db: &dyn HirDb, + file: File, + bid: TrackedBodyId, + ast: ast::Expr, + ) -> Self { + let bid = TrackedBodyId::NestedBody(bid.into()); + let mut ctxt = BodyCtxt::new(db, file, bid); + Expr::push_to_body(&mut ctxt, ast.clone()); + ctxt.build(HirOrigin::raw(file, &ast)) + } + + pub(crate) fn nameless_body_from_ast(db: &dyn HirDb, file: File, ast: ast::Expr) -> Self { + let bid = TrackedBodyId::NamelessBody; + let mut ctxt = BodyCtxt::new(db, file, bid); Expr::push_to_body(&mut ctxt, ast.clone()); - ctxt.build(BodyKind::NamelessConst, HirOrigin::raw(file, &ast)) + ctxt.build(HirOrigin::raw(file, &ast)) } } @@ -35,6 +49,7 @@ pub(super) struct BodyCtxt<'db> { pub(super) pats: BodyNodeMap>, pub(super) db: &'db dyn HirDb, pub(super) file: File, + pub(super) bid: TrackedBodyId, stmt_source_map: BodySourceMap, expr_source_map: BodySourceMap, @@ -77,23 +92,24 @@ impl<'db> BodyCtxt<'db> { pat_id } - fn new(db: &'db dyn HirDb, file: File) -> Self { + fn new(db: &'db dyn HirDb, file: File, bid: TrackedBodyId) -> Self { Self { stmts: BodyNodeMap::new(), exprs: BodyNodeMap::new(), pats: BodyNodeMap::new(), db, file, + bid, stmt_source_map: BodySourceMap::new(), expr_source_map: BodySourceMap::new(), pat_source_map: BodySourceMap::new(), } } - fn build(self, kind: BodyKind, origin: HirOrigin) -> Body { + fn build(self, origin: HirOrigin) -> Body { Body::new( self.db, - kind, + self.bid, self.stmts, self.exprs, self.pats, diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index 6f70d56f7b..563f8f764f 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -126,7 +126,9 @@ impl Expr { let val = Self::push_to_body_opt(ctxt, array_rep.val()); let len = array_rep .len() - .map(|ast| Body::nameless_from_ast(ctxt.db, ctxt.file, ast)) + .map(|ast| { + Body::nested_body_from_ast(ctxt.db, ctxt.file, ctxt.bid.clone(), ast) + }) .into(); Self::ArrayRep(val, len) } diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 87e5beb83a..f401b586f8 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -11,8 +11,14 @@ use crate::{ }; impl Fn { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Fn) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Fn, + ) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); + let id = TrackedItemId::Fn(name).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let generic_paramas = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); @@ -27,6 +33,7 @@ impl Fn { Self::new( db, + id, name, attributes, generic_paramas, @@ -40,22 +47,29 @@ impl Fn { } impl Struct { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Struct) -> Self { + 
pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Struct, + ) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); + let id = TrackedItemId::Struct(name).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_paramas = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); let fields = RecordFieldListId::from_ast_opt(db, file, ast.fields()); let origin = HirOrigin::raw(file, &ast); Self::new( db, + id, name, attributes, is_pub, - generic_paramas, + generic_params, where_clause, fields, origin, @@ -64,21 +78,33 @@ impl Struct { } impl Contract { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Contract) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Contract, + ) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); + let id = TrackedItemId::Contract(name).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); let fields = RecordFieldListId::from_ast_opt(db, file, ast.fields()); let origin = HirOrigin::raw(file, &ast); - Self::new(db, name, attributes, is_pub, fields, origin) + Self::new(db, id, name, attributes, is_pub, fields, origin) } } impl Enum { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Enum) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Enum, + ) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); + let id = TrackedItemId::Enum(name).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); @@ -89,6 +115,7 @@ impl Enum { Self::new( db, + id, name, attributes, is_pub, @@ -101,8 +128,14 @@ impl Enum { } impl TypeAlias { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::TypeAlias) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::TypeAlias, + ) -> Self { let name = IdentId::maybe_from_token(db, ast.alias()); + let id = TrackedItemId::TypeAlias(name).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); @@ -113,6 +146,7 @@ impl TypeAlias { Self::new( db, + id, name, attributes, is_pub, @@ -125,21 +159,33 @@ impl TypeAlias { } impl Impl { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Impl) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Impl, + ) -> Self { let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + let id = TrackedItemId::Impl(ty).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); let origin = HirOrigin::raw(file, &ast); - Self::new(db, ty, attributes, generic_params, where_clause, origin) + Self::new(db, id, ty, attributes, generic_params, where_clause, origin) } } impl Trait { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Trait) -> Self { + pub(crate) fn 
from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Trait, + ) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); + let id = TrackedItemId::Trait(name).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); @@ -149,6 +195,7 @@ impl Trait { Self::new( db, + id, name, attributes, is_pub, @@ -160,9 +207,15 @@ impl Trait { } impl ImplTrait { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::ImplTrait) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::ImplTrait, + ) -> Self { let trait_ref = TraitRef::maybe_from_ast(db, file, ast.trait_ref()); let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + let id = TrackedItemId::ImplTrait(trait_ref, ty).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); @@ -171,6 +224,7 @@ impl ImplTrait { Self::new( db, + id, trait_ref, ty, attributes, @@ -182,27 +236,46 @@ impl ImplTrait { } impl Const { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Const) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Const, + ) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); + let id = TrackedItemId::Const(name).join_opt(parent_id); let origin = HirOrigin::raw(file, &ast); - Self::new(db, name, origin) + Self::new(db, id, name, origin) } } impl Use { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Use) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent_id: Option, + ast: ast::Use, + ) -> Self { let tree = UseTreeId::maybe_from_ast(db, ast.use_tree()); + let id = TrackedItemId::Use(tree).join_opt(parent_id); + let origin = HirOrigin::raw(file, &ast); - Self::new(db, tree, origin) + Self::new(db, id, tree, origin) } } impl Extern { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Extern) -> Self { + pub(crate) fn from_ast( + db: &dyn HirDb, + file: File, + parent: Option, + ast: ast::Extern, + ) -> Self { let origin = HirOrigin::raw(file, &ast); + let id = TrackedItemId::Extern.join_opt(parent); - Self::new(db, origin) + Self::new(db, id, origin) } } diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index ec7dd0d047..948b2ab482 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -117,7 +117,7 @@ impl TypeGenericArg { impl ConstGenericArg { fn from_ast(db: &dyn HirDb, file: File, ast: ast::ConstGenericArg) -> Self { let body = if let Some(expr) = ast.expr() { - Some(Body::nameless_from_ast(db, file, expr)) + Some(Body::nameless_body_from_ast(db, file, expr)) } else { None } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 784253f0e3..a16a242001 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -34,7 +34,7 @@ impl TypeId { let elem_ty = Self::maybe_from_ast(db, file, ty.elem_ty()); let body = ty .len() - .map(|ast| Body::nameless_from_ast(db, file, ast)) + .map(|ast| Body::nameless_body_from_ast(db, file, ast)) .into(); TypeKind::Array(elem_ty, body) } From 5d41d5de472b1ef69f8eef1b23c26217d1169371 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 22 Mar 2023 14:44:42 +0100 Subject: [PATCH 102/678] Add `mod` item --- crates/parser2/src/ast/item.rs | 55 ++++++++- crates/parser2/src/parser/item.rs | 40 
++++++- crates/parser2/src/syntax_kind.rs | 5 + .../test_files/syntax_node/items/mod.fe | 11 ++ .../test_files/syntax_node/items/mod.snap | 109 ++++++++++++++++++ 5 files changed, 217 insertions(+), 3 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/mod.fe create mode 100644 crates/parser2/test_files/syntax_node/items/mod.snap diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 3e80d82579..b3991a19c7 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -25,7 +25,8 @@ ast_node! { /// A single item in a module. /// Use `[Item::kind]` to get the specific type of item. pub struct Item, - SK::Fn + SK::Mod + | SK::Fn | SK::Struct | SK::Contract | SK::Enum @@ -40,6 +41,7 @@ ast_node! { impl Item { pub fn kind(&self) -> ItemKind { match self.syntax().kind() { + SK::Mod => ItemKind::Mod(AstNode::cast(self.syntax().clone()).unwrap()), SK::Fn => ItemKind::Fn(AstNode::cast(self.syntax().clone()).unwrap()), SK::Struct => ItemKind::Struct(AstNode::cast(self.syntax().clone()).unwrap()), SK::Contract => ItemKind::Contract(AstNode::cast(self.syntax().clone()).unwrap()), @@ -56,6 +58,24 @@ impl Item { } } +ast_node! { + pub struct Mod, + SK::Mod, +} +impl super::AttrListOwner for Mod {} +impl super::ItemModifierOwner for Mod {} +impl Mod { + /// Returns the name of the function. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the function's parameter list. + pub fn items(&self) -> Option { + support::child(self.syntax()) + } +} + ast_node! { /// `pub fn foo(_ x: T, from u: U) -> T where T: Trait2 { ... }` pub struct Fn, @@ -398,6 +418,7 @@ pub trait ItemModifierOwner: AstNode { #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum ItemKind { + Mod(Mod), Fn(Fn), Struct(Struct), Contract(Contract), @@ -437,6 +458,38 @@ mod tests { items.pop().unwrap().kind().try_into().unwrap() } + #[test] + #[wasm_bindgen_test] + fn mod_() { + let source = r" + pub mod foo { + pub fn bar() {} + pub struct Baz + } + "; + let mod_: Mod = parse_item(source); + assert_eq!(mod_.name().unwrap().text(), "foo"); + let mut i = 0; + for item in mod_.items().unwrap().into_iter() { + match i { + 0 => { + assert!(matches!(item.kind(), ItemKind::Fn(_))); + let func: Fn = item.kind().try_into().unwrap(); + assert_eq!(func.name().unwrap().text(), "bar"); + } + 1 => { + assert!(matches!(item.kind(), ItemKind::Struct(_))); + let struct_: Struct = item.kind().try_into().unwrap(); + assert_eq!(struct_.name().unwrap().text(), "Baz"); + } + _ => panic!(), + } + i += 1; + } + + assert_eq!(i, 2); + } + #[test] #[wasm_bindgen_test] fn func() { diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 71e745b4f7..f6c94690a6 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -16,9 +16,10 @@ use super::{ define_scope! 
{ #[doc(hidden)] - pub ItemListScope, + pub ItemListScope {inside_mod: bool}, ItemList, Override( + ModKw, FnKw, StructKw, ContractKw, @@ -39,9 +40,19 @@ impl super::Parse for ItemListScope { fn parse(&mut self, parser: &mut Parser) { use crate::SyntaxKind::*; + if self.inside_mod { + parser.bump_expected(LBrace); + } + loop { parser.set_newline_as_trivia(true); + if self.inside_mod && parser.bump_if(RBrace) { + break; + } if parser.current_kind().is_none() { + if self.inside_mod { + parser.error("expected `}` to close the module"); + } break; } @@ -68,6 +79,9 @@ impl super::Parse for ItemListScope { } match parser.current_kind() { + Some(ModKw) => { + parser.parse(ModScope::default(), checkpoint); + } Some(FnKw) => { parser.parse(FnScope::default(), checkpoint); } @@ -157,6 +171,28 @@ impl ModifierKind { } } +define_scope! { ModScope, Mod, Inheritance } +impl super::Parse for ModScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::ModKw); + parser.with_next_expected_tokens( + |parser| { + parser.bump_or_recover( + SyntaxKind::Ident, + "expected identifier for the module name", + None, + ) + }, + &[SyntaxKind::LBrace], + ); + if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(ItemListScope::new(true), None); + } else { + parser.error_and_recover("expected contract field definition", None); + } + } +} + define_scope! { ContractScope, Contract, Inheritance } impl super::Parse for ContractScope { fn parse(&mut self, parser: &mut Parser) { @@ -166,7 +202,7 @@ impl super::Parse for ContractScope { |parser| { parser.bump_or_recover( SyntaxKind::Ident, - "expected identifier for the struct name", + "expected identifier for the contract name", None, ) }, diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 0917b15b41..3605ab48a5 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -152,6 +152,9 @@ pub enum SyntaxKind { /// `fn` #[token("fn")] FnKw, + /// `mod` + #[token("mod")] + ModKw, /// `const` #[token("const")] ConstKw, @@ -329,6 +332,8 @@ pub enum SyntaxKind { MatchArmList, // Items. These are non-leaf nodes. + /// `mod s { .. }` + Mod, /// `fn foo(x: i32) -> i32 { .. }` Fn, /// `struct Foo { .. 
}` diff --git a/crates/parser2/test_files/syntax_node/items/mod.fe b/crates/parser2/test_files/syntax_node/items/mod.fe new file mode 100644 index 0000000000..5b810b9b8b --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/mod.fe @@ -0,0 +1,11 @@ +pub mod foo { + fn foo_foo(bar: i32, mut baz: u256) -> i32 { + 1 + } + + pub struct Foo {} +} + +pub mod bar { + pub struct Bar {} +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/mod.snap b/crates/parser2/test_files/syntax_node/items/mod.snap new file mode 100644 index 0000000000..4d3981a5fb --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/mod.snap @@ -0,0 +1,109 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/mod.fe +--- +Root@0..146 + ItemList@0..146 + Mod@0..107 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + ModKw@4..7 "mod" + WhiteSpace@7..8 " " + Ident@8..11 "foo" + WhiteSpace@11..12 " " + ItemList@12..107 + LBrace@12..13 "{" + Newline@13..14 "\n" + WhiteSpace@14..18 " " + Fn@18..78 + FnKw@18..20 "fn" + WhiteSpace@20..21 " " + Ident@21..28 "foo_foo" + FnParamList@28..53 + LParen@28..29 "(" + FnParam@29..37 + Ident@29..32 "bar" + Colon@32..33 ":" + WhiteSpace@33..34 " " + PathType@34..37 + Path@34..37 + PathSegment@34..37 + Ident@34..37 "i32" + Comma@37..38 "," + WhiteSpace@38..39 " " + FnParam@39..52 + MutKw@39..42 "mut" + WhiteSpace@42..43 " " + Ident@43..46 "baz" + Colon@46..47 ":" + WhiteSpace@47..48 " " + PathType@48..52 + Path@48..52 + PathSegment@48..52 + Ident@48..52 "u256" + RParen@52..53 ")" + WhiteSpace@53..54 " " + Arrow@54..56 "->" + WhiteSpace@56..57 " " + PathType@57..60 + Path@57..60 + PathSegment@57..60 + Ident@57..60 "i32" + WhiteSpace@60..61 " " + BlockExpr@61..78 + LBrace@61..62 "{" + Newline@62..63 "\n" + WhiteSpace@63..71 " " + ExprStmt@71..72 + LitExpr@71..72 + Lit@71..72 + Int@71..72 "1" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + RBrace@77..78 "}" + Newline@78..79 "\n" + WhiteSpace@79..83 " " + Newline@83..84 "\n" + WhiteSpace@84..88 " " + Struct@88..105 + ItemModifier@88..91 + PubKw@88..91 "pub" + WhiteSpace@91..92 " " + StructKw@92..98 "struct" + WhiteSpace@98..99 " " + Ident@99..102 "Foo" + WhiteSpace@102..103 " " + RecordFieldDefList@103..105 + LBrace@103..104 "{" + RBrace@104..105 "}" + Newline@105..106 "\n" + RBrace@106..107 "}" + Newline@107..109 "\n\n" + Mod@109..146 + ItemModifier@109..112 + PubKw@109..112 "pub" + WhiteSpace@112..113 " " + ModKw@113..116 "mod" + WhiteSpace@116..117 " " + Ident@117..120 "bar" + WhiteSpace@120..121 " " + ItemList@121..146 + LBrace@121..122 "{" + Newline@122..123 "\n" + WhiteSpace@123..127 " " + Struct@127..144 + ItemModifier@127..130 + PubKw@127..130 "pub" + WhiteSpace@130..131 " " + StructKw@131..137 "struct" + WhiteSpace@137..138 " " + Ident@138..141 "Bar" + WhiteSpace@141..142 " " + RecordFieldDefList@142..144 + LBrace@142..143 "{" + RBrace@143..144 "}" + Newline@144..145 "\n" + RBrace@145..146 "}" + From d59cf0066443ce6726df71b1b36cf44abfe60261 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 22 Mar 2023 21:05:14 +0100 Subject: [PATCH 103/678] Add `ExternFn` item --- crates/hir/src/hir_def/item.rs | 36 ++++++++++++++-------------------- crates/hir/src/lib.rs | 2 +- crates/hir/src/lower/item.rs | 21 ++++++++++++++------ 3 files changed, 31 insertions(+), 28 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index c6ddc03a3c..67c87e89ff 100644 --- 
a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -16,7 +16,6 @@ pub struct Fn { id: TrackedItemId, pub name: MaybeInvalid, - pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, @@ -28,12 +27,25 @@ pub struct Fn { } #[salsa::tracked] -pub struct Struct { +pub struct ExternFn { #[id] id: TrackedItemId, pub name: MaybeInvalid, + pub attributes: AttrListId, + pub params: MaybeInvalid, + pub ret_ty: Option, + pub modifier: ItemModifier, + + pub(crate) origin: HirOrigin, +} + +#[salsa::tracked] +pub struct Struct { + #[id] + id: TrackedItemId, + pub name: MaybeInvalid, pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, @@ -49,7 +61,6 @@ pub struct Contract { id: TrackedItemId, pub name: MaybeInvalid, - pub attributes: AttrListId, pub is_pub: bool, pub fields: RecordFieldListId, @@ -63,7 +74,6 @@ pub struct Enum { id: TrackedItemId, pub name: MaybeInvalid, - pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, @@ -79,7 +89,6 @@ pub struct TypeAlias { id: TrackedItemId, pub name: MaybeInvalid, - pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, @@ -95,11 +104,9 @@ pub struct Impl { id: TrackedItemId, pub ty: super::MaybeInvalid, - pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub(crate) origin: HirOrigin, } @@ -114,7 +121,6 @@ pub struct Trait { pub is_pub: bool, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub(crate) origin: HirOrigin, } @@ -125,11 +131,9 @@ pub struct ImplTrait { pub trait_ref: MaybeInvalid, pub ty: MaybeInvalid, - pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub(crate) origin: HirOrigin, } @@ -139,7 +143,6 @@ pub struct Const { id: TrackedItemId, pub name: MaybeInvalid, - pub(crate) origin: HirOrigin, } @@ -149,21 +152,13 @@ pub struct Use { id: TrackedItemId, pub tree: MaybeInvalid, - pub(crate) origin: HirOrigin, } -#[salsa::tracked] -pub struct Extern { - #[id] - id: TrackedItemId, - - pub(crate) origin: HirOrigin, -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] pub enum ItemKind { Fn(Fn), + ExternFn(ExternFn), Struct(Struct), Contract(Contract), Enum(Enum), @@ -173,7 +168,6 @@ pub enum ItemKind { ImplTrait(ImplTrait), Const(Const), Use(Use), - Extern(Extern), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 3c6908a2fd..3321623e3a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -10,6 +10,7 @@ pub struct Jar( input::File, // Tracked Hir items. hir_def::Fn, + hir_def::ExternFn, hir_def::Struct, hir_def::Contract, hir_def::Enum, @@ -19,7 +20,6 @@ pub struct Jar( hir_def::ImplTrait, hir_def::Const, hir_def::Use, - hir_def::Extern, // Interned structs. 
hir_def::Body, hir_def::IdentId, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index f401b586f8..71c7cd7d2d 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -21,7 +21,7 @@ impl Fn { let id = TrackedItemId::Fn(name).join_opt(parent_id); let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let generic_paramas = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); + let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); let params = ast .params() @@ -36,7 +36,7 @@ impl Fn { id, name, attributes, - generic_paramas, + generic_params, where_clause, params, ret_ty, @@ -265,17 +265,26 @@ impl Use { } } -impl Extern { +impl ExternFn { pub(crate) fn from_ast( db: &dyn HirDb, file: File, parent: Option, - ast: ast::Extern, + ast: ast::Fn, ) -> Self { - let origin = HirOrigin::raw(file, &ast); + let name = IdentId::maybe_from_token(db, ast.name()); let id = TrackedItemId::Extern.join_opt(parent); - Self::new(db, id, origin) + let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let params = ast + .params() + .map(|params| FnParamListId::from_ast(db, file, params)) + .into(); + let ret_ty = ast.ret_ty().map(|ty| TypeId::from_ast(db, file, ty)); + let modifier = ItemModifier::from_ast(db, ast.modifier()); + let origin = HirOrigin::raw(file, &ast); + + Self::new(db, id, name, attributes, params, ret_ty, modifier, origin) } } From abf41db7b45eee24ce6734a50f7e102ff5133f7a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 22 Mar 2023 21:11:28 +0100 Subject: [PATCH 104/678] Include body in items --- crates/hir/src/hir_def/item.rs | 5 ++++- crates/hir/src/lower/item.rs | 17 +++++++++++++++-- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 67c87e89ff..f6d4cc9cb0 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -7,7 +7,8 @@ use fe_parser2::ast; use crate::{hir_def::TraitRef, span::HirOrigin}; use super::{ - AttrListId, FnParamListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, WhereClauseId, + AttrListId, Body, FnParamListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, + WhereClauseId, }; #[salsa::tracked] @@ -22,6 +23,7 @@ pub struct Fn { pub params: MaybeInvalid, pub ret_ty: Option, pub modifier: ItemModifier, + pub body: Option, pub(crate) origin: HirOrigin, } @@ -143,6 +145,7 @@ pub struct Const { id: TrackedItemId, pub name: MaybeInvalid, + pub body: MaybeInvalid, pub(crate) origin: HirOrigin, } diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 71c7cd7d2d..2e1128ee2e 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -2,7 +2,7 @@ use fe_parser2::ast::{self, prelude::*}; use crate::{ hir_def::{ - item::*, AttrListId, FnParamListId, GenericParamListId, IdentId, TraitRef, TypeId, + item::*, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TraitRef, TypeId, UseTreeId, WhereClauseId, }, input::File, @@ -29,6 +29,14 @@ impl Fn { .into(); let ret_ty = ast.ret_ty().map(|ty| TypeId::from_ast(db, file, ty)); let modifier = ItemModifier::from_ast(db, ast.modifier()); + let body = ast.body().map(|body| { + Body::item_body_from_ast( + db, + file, + id.clone(), + ast::Expr::cast(body.syntax().clone()).unwrap(), + ) + }); let origin = HirOrigin::raw(file, &ast); Self::new( @@ -41,6 +49,7 @@ impl Fn { 
params, ret_ty, modifier, + body, origin, ) } @@ -244,9 +253,13 @@ impl Const { ) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let id = TrackedItemId::Const(name).join_opt(parent_id); + let body = ast + .value() + .map(|ast| Body::item_body_from_ast(db, file, id.clone(), ast)) + .into(); let origin = HirOrigin::raw(file, &ast); - Self::new(db, id, name, origin) + Self::new(db, id, name, body, origin) } } From c57a3b3f4fbad87d42873a427a15292e4759f446 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 22 Mar 2023 21:26:06 +0100 Subject: [PATCH 105/678] Separate `InputDb` from `HirDb` --- Cargo.lock | 14 +++++-- crates/common2/Cargo.toml | 16 ++++++++ crates/common2/src/input.rs | 65 ++++++++++++++++++++++++++++++++ crates/common2/src/lib.rs | 9 +++++ crates/hir/Cargo.toml | 7 ++-- crates/hir/src/hir_def/body.rs | 2 +- crates/hir/src/hir_def/item.rs | 2 +- crates/hir/src/input.rs | 64 ------------------------------- crates/hir/src/lib.rs | 3 -- crates/hir/src/lower/attr.rs | 2 +- crates/hir/src/lower/body.rs | 14 +++---- crates/hir/src/lower/expr.rs | 2 +- crates/hir/src/lower/item.rs | 38 +++++++++---------- crates/hir/src/lower/mod.rs | 2 +- crates/hir/src/lower/params.rs | 40 +++++++++++--------- crates/hir/src/lower/pat.rs | 2 +- crates/hir/src/lower/path.rs | 2 +- crates/hir/src/lower/stmt.rs | 2 +- crates/hir/src/lower/types.rs | 12 +++--- crates/hir/src/lower/use_tree.rs | 2 +- crates/hir/src/span/mod.rs | 12 +++--- 21 files changed, 173 insertions(+), 139 deletions(-) create mode 100644 crates/common2/Cargo.toml create mode 100644 crates/common2/src/input.rs create mode 100644 crates/common2/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 99e7cec344..d42060d1f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -726,6 +726,16 @@ dependencies = [ "tiny-keccak", ] +[[package]] +name = "fe-common2" +version = "0.20.0-alpha" +dependencies = [ + "camino", + "salsa-2022", + "semver 1.0.17", + "smol_str", +] + [[package]] name = "fe-compiler-test-utils" version = "0.20.0-alpha" @@ -795,15 +805,13 @@ dependencies = [ name = "fe-hir" version = "0.20.0-alpha" dependencies = [ - "camino", "cranelift-entity", "derive_more", + "fe-common2", "fe-parser2", "num-bigint", "num-traits", "salsa-2022", - "semver 1.0.17", - "smol_str", "tracing", ] diff --git a/crates/common2/Cargo.toml b/crates/common2/Cargo.toml new file mode 100644 index 0000000000..7b9bbb6e17 --- /dev/null +++ b/crates/common2/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "fe-common2" +version = "0.20.0-alpha" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides HIR definition and lowering for Fe lang." + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +semver = "1.0.17" +camino = "1.1.4" +smol_str = "0.1.24" +salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } diff --git a/crates/common2/src/input.rs b/crates/common2/src/input.rs new file mode 100644 index 0000000000..529ee51a98 --- /dev/null +++ b/crates/common2/src/input.rs @@ -0,0 +1,65 @@ +use camino::Utf8PathBuf; +use smol_str::SmolStr; + +/// An ingot is a collection of files which are compiled together. +/// Ingot can depend on other ingots. +#[salsa::input] +pub struct InputIngot { + /// A path to the ingot root directory. + #[return_ref] + pub path: Utf8PathBuf, + + /// Specifies the kind of the ingot. + pub kind: IngotKind, + + /// A version of the ingot. 
+ #[return_ref] + pub version: Version, + + /// A list of ingots which the current ingot depends on. + #[return_ref] + pub dependency: Vec, + + /// A list of files which the current ingot contains. + #[return_ref] + pub files: Vec, +} + +#[salsa::input(constructor = __new_priv)] +pub struct InputFile { + /// A ingot id which the file belongs to. + pub ingot: InputIngot, + + /// A path to the file from the ingot root directory. + #[return_ref] + pub path: Utf8PathBuf, + + #[return_ref] + pub text: String, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum IngotKind { + /// A standalone ingot is a dummy ingot when the compiler is invoked + /// directly on a file. + StandAlone, + + /// A local ingot which is the current ingot being compiled. + Local, + + /// An external ingot which is depended on by the current ingot. + External, + + /// A std ingot. + Std, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct IngotDependency { + /// The ingot may have a alias name from the original ingot name. + pub name: SmolStr, + /// An ingot which the current ingot depends on. + pub ingot: InputIngot, +} + +pub type Version = semver::Version; diff --git a/crates/common2/src/lib.rs b/crates/common2/src/lib.rs new file mode 100644 index 0000000000..b0d181b16f --- /dev/null +++ b/crates/common2/src/lib.rs @@ -0,0 +1,9 @@ +pub mod input; + +pub use input::{InputFile, InputIngot}; + +#[salsa::jar(db = InputDb)] +pub struct Jar(InputIngot, InputFile); + +pub trait InputDb: salsa::DbWithJar {} +impl InputDb for DB where DB: ?Sized + salsa::DbWithJar {} diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 705b714957..5fa4d8d448 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -12,11 +12,10 @@ tracing = "0.1" # We may need to fix this to a specific version, # but I want to keep up with the latest version until the new Fe implemeentation is merged into the master. salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } -fe-parser2 = { path = "../parser2" } derive_more = "0.99" cranelift-entity = "0.91" num-bigint = "0.4.3" num-traits = "0.2.15" -semver = "1.0.17" -camino = "1.1.4" -smol_str = "0.1.24" + +parser = { path = "../parser2", package = "fe-parser2" } +common = { path = "../common2", package = "fe-common2" } diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 3213bffcf3..7cdd7f0207 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -1,5 +1,5 @@ use cranelift_entity::{PrimaryMap, SecondaryMap}; -use fe_parser2::ast::{self}; +use parser::ast::{self}; use crate::span::HirOrigin; diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index f6d4cc9cb0..0827efb2d9 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -2,7 +2,7 @@ // that may take many arguments depending on the number of fields in the struct. #![allow(clippy::too_many_arguments)] -use fe_parser2::ast; +use parser::ast; use crate::{hir_def::TraitRef, span::HirOrigin}; diff --git a/crates/hir/src/input.rs b/crates/hir/src/input.rs index 79a7a64c47..8b13789179 100644 --- a/crates/hir/src/input.rs +++ b/crates/hir/src/input.rs @@ -1,65 +1 @@ -use camino::Utf8PathBuf; -use smol_str::SmolStr; -/// An ingot is a collection of files which are compiled together. -/// Ingot can depend on other ingots. -#[salsa::input] -pub struct Ingot { - /// A path to the ingot root directory. - #[return_ref] - pub path: Utf8PathBuf, - - /// Specifies the kind of the ingot. 
- pub kind: IngotKind, - - /// A version of the ingot. - #[return_ref] - pub version: Version, - - /// A list of ingots which the current ingot depends on. - #[return_ref] - pub dependency: Vec, - - /// A list of files which the current ingot contains. - #[return_ref] - pub files: Vec, -} - -#[salsa::input(constructor = __new_priv)] -pub struct File { - /// A ingot id which the file belongs to. - pub ingot: Ingot, - - /// A path to the file from the ingot root directory. - #[return_ref] - pub path: Utf8PathBuf, - - #[return_ref] - pub text: String, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum IngotKind { - /// A standalone ingot is a dummy ingot when the compiler is invoked - /// directly on a file. - StandAlone, - - /// A local ingot which is the current ingot being compiled. - Local, - - /// An external ingot which is depended on by the current ingot. - External, - - /// A std ingot. - Std, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct IngotDependency { - /// The ingot may have a alias name from the original ingot name. - pub name: SmolStr, - /// An ingot which the current ingot depends on. - pub ingot: Ingot, -} - -pub type Version = semver::Version; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 3321623e3a..05ffb54401 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -5,9 +5,6 @@ pub mod span; #[salsa::jar(db = HirDb)] pub struct Jar( - /// Inputs - input::Ingot, - input::File, // Tracked Hir items. hir_def::Fn, hir_def::ExternFn, diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs index 0be9d869cb..4001cc227e 100644 --- a/crates/hir/src/lower/attr.rs +++ b/crates/hir/src/lower/attr.rs @@ -1,5 +1,5 @@ use crate::hir_def::{attr::*, IdentId, StringId}; -use fe_parser2::ast; +use parser::ast; use crate::HirDb; diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index ecd7050bf4..0538d3e9f7 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -1,11 +1,11 @@ -use fe_parser2::ast; +use common::InputFile; +use parser::ast; use crate::{ hir_def::{ Body, BodyNodeMap, BodySourceMap, Expr, ExprId, MaybeInvalid, Pat, PatId, Stmt, StmtId, TrackedBodyId, TrackedItemId, }, - input::File, span::{HirOrigin, HirOriginKind}, HirDb, }; @@ -13,7 +13,7 @@ use crate::{ impl Body { pub(crate) fn item_body_from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: TrackedItemId, ast: ast::Expr, ) -> Self { @@ -25,7 +25,7 @@ impl Body { pub(crate) fn nested_body_from_ast( db: &dyn HirDb, - file: File, + file: InputFile, bid: TrackedBodyId, ast: ast::Expr, ) -> Self { @@ -35,7 +35,7 @@ impl Body { ctxt.build(HirOrigin::raw(file, &ast)) } - pub(crate) fn nameless_body_from_ast(db: &dyn HirDb, file: File, ast: ast::Expr) -> Self { + pub(crate) fn nameless_body_from_ast(db: &dyn HirDb, file: InputFile, ast: ast::Expr) -> Self { let bid = TrackedBodyId::NamelessBody; let mut ctxt = BodyCtxt::new(db, file, bid); Expr::push_to_body(&mut ctxt, ast.clone()); @@ -48,7 +48,7 @@ pub(super) struct BodyCtxt<'db> { pub(super) exprs: BodyNodeMap>, pub(super) pats: BodyNodeMap>, pub(super) db: &'db dyn HirDb, - pub(super) file: File, + pub(super) file: InputFile, pub(super) bid: TrackedBodyId, stmt_source_map: BodySourceMap, @@ -92,7 +92,7 @@ impl<'db> BodyCtxt<'db> { pat_id } - fn new(db: &'db dyn HirDb, file: File, bid: TrackedBodyId) -> Self { + fn new(db: &'db dyn HirDb, file: InputFile, bid: TrackedBodyId) -> Self { Self { stmts: BodyNodeMap::new(), exprs: 
BodyNodeMap::new(), diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index 563f8f764f..ac73838544 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -1,4 +1,4 @@ -use fe_parser2::ast::{self, prelude::*}; +use parser::ast::{self, prelude::*}; use crate::{ hir_def::{expr::*, Body, IdentId, IntegerId, LitKind, MaybeInvalid, Pat, PathId, Stmt}, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 2e1128ee2e..f2c766cfab 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -1,11 +1,11 @@ -use fe_parser2::ast::{self, prelude::*}; +use common::InputFile; +use parser::ast::{self, prelude::*}; use crate::{ hir_def::{ item::*, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TraitRef, TypeId, UseTreeId, WhereClauseId, }, - input::File, span::HirOrigin, HirDb, }; @@ -13,7 +13,7 @@ use crate::{ impl Fn { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Fn, ) -> Self { @@ -58,7 +58,7 @@ impl Fn { impl Struct { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Struct, ) -> Self { @@ -89,7 +89,7 @@ impl Struct { impl Contract { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Contract, ) -> Self { @@ -108,7 +108,7 @@ impl Contract { impl Enum { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Enum, ) -> Self { @@ -139,7 +139,7 @@ impl Enum { impl TypeAlias { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::TypeAlias, ) -> Self { @@ -170,7 +170,7 @@ impl TypeAlias { impl Impl { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Impl, ) -> Self { @@ -189,7 +189,7 @@ impl Impl { impl Trait { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Trait, ) -> Self { @@ -218,7 +218,7 @@ impl Trait { impl ImplTrait { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::ImplTrait, ) -> Self { @@ -247,7 +247,7 @@ impl ImplTrait { impl Const { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Const, ) -> Self { @@ -266,7 +266,7 @@ impl Const { impl Use { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent_id: Option, ast: ast::Use, ) -> Self { @@ -281,7 +281,7 @@ impl Use { impl ExternFn { pub(crate) fn from_ast( db: &dyn HirDb, - file: File, + file: InputFile, parent: Option, ast: ast::Fn, ) -> Self { @@ -317,7 +317,7 @@ impl ItemModifier { } impl RecordFieldListId { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::RecordFieldDefList) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::RecordFieldDefList) -> Self { let fields = ast .into_iter() .map(|field| RecordField::from_ast(db, file, field)) @@ -325,14 +325,14 @@ impl RecordFieldListId { Self::new(db, fields) } - fn from_ast_opt(db: &dyn HirDb, file: File, ast: Option) -> Self { + fn from_ast_opt(db: &dyn HirDb, file: InputFile, ast: Option) -> Self { ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or(Self::new(db, Vec::new())) } } impl RecordField { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::RecordFieldDef) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::RecordFieldDef) -> Self { let name = 
IdentId::maybe_from_token(db, ast.name()); let ty = TypeId::maybe_from_ast(db, file, ast.ty()); let is_pub = ast.pub_kw().is_some(); @@ -342,7 +342,7 @@ impl RecordField { } impl EnumVariantListId { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::EnumVariantDefList) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::EnumVariantDefList) -> Self { let variants = ast .into_iter() .map(|variant| EnumVariant::from_ast(db, file, variant)) @@ -350,14 +350,14 @@ impl EnumVariantListId { Self::new(db, variants) } - fn from_ast_opt(db: &dyn HirDb, file: File, ast: Option) -> Self { + fn from_ast_opt(db: &dyn HirDb, file: InputFile, ast: Option) -> Self { ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or(Self::new(db, Vec::new())) } } impl EnumVariant { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::EnumVariantDef) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::EnumVariantDef) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let ty = ast.ty().map(|ty| TypeId::from_ast(db, file, ty)); diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index a6fe782c64..fa3ba961a2 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1,6 +1,6 @@ -use fe_parser2::{ast, SyntaxToken}; use num_bigint::BigUint; use num_traits::Num; +use parser::{ast, SyntaxToken}; use crate::{ hir_def::{IdentId, IntegerId, LitKind, MaybeInvalid, StringId}, diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 948b2ab482..4c4b878c92 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -1,13 +1,13 @@ -use fe_parser2::ast::{self}; +use common::InputFile; +use parser::ast::{self}; use crate::{ hir_def::{params::*, Body, IdentId, PathId, TypeId}, - input::File, HirDb, }; impl GenericArgListId { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericArgList) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericArgList) -> Self { let args = ast .into_iter() .map(|arg| GenericArg::from_ast(db, file, arg)) @@ -17,7 +17,7 @@ impl GenericArgListId { pub(crate) fn from_ast_opt( db: &dyn HirDb, - file: File, + file: InputFile, ast: Option, ) -> Self { ast.map(|ast| Self::from_ast(db, file, ast)) @@ -26,7 +26,7 @@ impl GenericArgListId { } impl GenericParamListId { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericParamList) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericParamList) -> Self { let params = ast .into_iter() .map(|param| GenericParam::from_ast(db, file, param)) @@ -36,7 +36,7 @@ impl GenericParamListId { pub(crate) fn from_ast_opt( db: &dyn HirDb, - file: File, + file: InputFile, ast: Option, ) -> Self { ast.map(|ast| Self::from_ast(db, file, ast)) @@ -45,7 +45,7 @@ impl GenericParamListId { } impl FnParamListId { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::FnParamList) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::FnParamList) -> Self { let params = ast .into_iter() .map(|param| FnParam::from_ast(db, file, param)) @@ -55,7 +55,7 @@ impl FnParamListId { } impl WhereClauseId { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::WhereClause) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::WhereClause) -> Self { let predicates = ast .into_iter() .map(|pred| WherePredicate::from_ast(db, file, pred)) @@ -63,14 +63,18 @@ impl WhereClauseId { Self::new(db, predicates) } - pub(crate) fn 
from_ast_opt(db: &dyn HirDb, file: File, ast: Option) -> Self { + pub(crate) fn from_ast_opt( + db: &dyn HirDb, + file: InputFile, + ast: Option, + ) -> Self { ast.map(|ast| Self::from_ast(db, file, ast)) .unwrap_or_else(|| Self::new(db, Vec::new())) } } impl TypeGenericParam { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::TypeGenericParam) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::TypeGenericParam) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let bounds = ast .bounds() @@ -87,7 +91,7 @@ impl TypeGenericParam { } impl ConstGenericParam { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::ConstGenericParam) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::ConstGenericParam) -> Self { let name = IdentId::maybe_from_token(db, ast.name()); let ty = TypeId::maybe_from_ast(db, file, ast.ty()); Self { name, ty } @@ -95,7 +99,7 @@ impl ConstGenericParam { } impl GenericArg { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericArg) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericArg) -> Self { match ast.kind() { ast::GenericArgKind::Type(type_param) => { TypeGenericArg::from_ast(db, file, type_param).into() @@ -108,14 +112,14 @@ impl GenericArg { } impl TypeGenericArg { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::TypeGenericArg) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::TypeGenericArg) -> Self { let ty = TypeId::maybe_from_ast(db, file, ast.ty()); Self { ty } } } impl ConstGenericArg { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::ConstGenericArg) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::ConstGenericArg) -> Self { let body = if let Some(expr) = ast.expr() { Some(Body::nameless_body_from_ast(db, file, expr)) } else { @@ -128,7 +132,7 @@ impl ConstGenericArg { } impl GenericParam { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::GenericParam) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericParam) -> Self { match ast.kind() { ast::GenericParamKind::Type(type_param) => { TypeGenericParam::from_ast(db, file, type_param).into() @@ -141,7 +145,7 @@ impl GenericParam { } impl FnParam { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::FnParam) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::FnParam) -> Self { let is_mut = ast.mut_token().is_some(); let label = ast.label().map(|ast| FnParamLabel::from_ast(db, ast)); let name = ast.name().map(|ast| FnParamName::from_ast(db, ast)).into(); @@ -157,7 +161,7 @@ impl FnParam { } impl WherePredicate { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::WherePredicate) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::WherePredicate) -> Self { let ty = TypeId::maybe_from_ast(db, file, ast.ty()); let bounds = ast .bounds() @@ -173,7 +177,7 @@ impl WherePredicate { } impl TypeBound { - fn from_ast(db: &dyn HirDb, file: File, ast: ast::TypeBound) -> Self { + fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::TypeBound) -> Self { let path = ast.path().map(|ast| PathId::from_ast(db, ast)).into(); let generic_args = ast .generic_args() diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs index 761c86f338..11d48164d1 100644 --- a/crates/hir/src/lower/pat.rs +++ b/crates/hir/src/lower/pat.rs @@ -1,4 +1,4 @@ -use fe_parser2::ast; +use parser::ast; use crate::{ hir_def::{pat::*, IdentId, LitKind, PathId}, diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 92236b7d54..3502258225 100644 --- 
a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -1,4 +1,4 @@ -use fe_parser2::{ast, SyntaxToken}; +use parser::{ast, SyntaxToken}; use crate::{ hir_def::{IdentId, MaybeInvalid, PathId, PathSegment}, diff --git a/crates/hir/src/lower/stmt.rs b/crates/hir/src/lower/stmt.rs index bf4a3ecd76..d0d884fa05 100644 --- a/crates/hir/src/lower/stmt.rs +++ b/crates/hir/src/lower/stmt.rs @@ -1,4 +1,4 @@ -use fe_parser2::ast::{self, prelude::*}; +use parser::ast::{self, prelude::*}; use crate::{ hir_def::{stmt::*, ArithBinOp, Expr, Pat, PathId, TypeId}, diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index a16a242001..99e12e3be0 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -1,13 +1,13 @@ -use fe_parser2::ast::{self, prelude::*}; +use common::InputFile; +use parser::ast::{self, prelude::*}; use crate::{ hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TraitRef, TypeId, TypeKind}, - input::File, HirDb, }; impl TypeId { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::Type) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::Type) -> Self { let kind = match ast.kind() { ast::TypeKind::Ptr(ty) => { let inner = Self::maybe_from_ast(db, file, ty.inner()); @@ -45,7 +45,7 @@ impl TypeId { pub(crate) fn maybe_from_ast( db: &dyn HirDb, - file: File, + file: InputFile, ast: Option, ) -> MaybeInvalid { ast.map(|ast| Self::from_ast(db, file, ast)).into() @@ -53,7 +53,7 @@ impl TypeId { } impl TraitRef { - pub(crate) fn from_ast(db: &dyn HirDb, file: File, ast: ast::PathType) -> Self { + pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::PathType) -> Self { let path = PathId::maybe_from_ast(db, ast.path()).into(); let generic_args = GenericArgListId::from_ast_opt(db, file, ast.generic_args()); Self { path, generic_args } @@ -61,7 +61,7 @@ impl TraitRef { pub(crate) fn maybe_from_ast( db: &dyn HirDb, - file: File, + file: InputFile, ast: Option, ) -> MaybeInvalid { ast.map(|ast| Self::from_ast(db, file, ast)).into() diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index ca43f14e41..6b8602ce66 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -1,4 +1,4 @@ -use fe_parser2::ast; +use parser::ast; use crate::{ hir_def::{use_tree::*, IdentId, MaybeInvalid}, diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 142d1f6b4b..e8b5e58ecb 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,16 +1,16 @@ -use fe_parser2::{ +use parser::{ ast::{self, prelude::*, AstPtr, SyntaxNodePtr}, TextRange, }; -use crate::input::File; +use common::InputFile; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HirOrigin where T: AstNode, { - pub file: Option, + pub file: Option, pub kind: HirOriginKind, } @@ -18,21 +18,21 @@ impl HirOrigin where T: AstNode, { - pub(crate) fn new(file: File, origin: HirOriginKind) -> Self { + pub(crate) fn new(file: InputFile, origin: HirOriginKind) -> Self { HirOrigin { file: Some(file), kind: origin, } } - pub(crate) fn raw(file: File, ast: &T) -> Self { + pub(crate) fn raw(file: InputFile, ast: &T) -> Self { HirOrigin { file: Some(file), kind: HirOriginKind::raw(ast), } } - pub(crate) fn none(file: File) -> Self { + pub(crate) fn none(file: InputFile) -> Self { HirOrigin { file: Some(file), kind: HirOriginKind::None, From 73621d84179a9a91d43aea08f90576a313504df7 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 22 Mar 2023 22:52:40 
+0100 Subject: [PATCH 106/678] Add HIR `Mod` item --- crates/hir/src/hir_def/item.rs | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 0827efb2d9..f9946c34ca 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -11,6 +11,18 @@ use super::{ WhereClauseId, }; +#[salsa::tracked] +pub struct Mod { + #[id] + id: TrackedItemId, + + pub name: MaybeInvalid, + pub attributes: AttrListId, + pub is_pub: bool, + + pub(crate) origin: HirOrigin, +} + #[salsa::tracked] pub struct Fn { #[id] @@ -158,8 +170,9 @@ pub struct Use { pub(crate) origin: HirOrigin, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, PartialOrd, Ord)] pub enum ItemKind { + Module(Mod), Fn(Fn), ExternFn(ExternFn), Struct(Struct), @@ -224,6 +237,7 @@ pub type ExternItemListId = ImplItemListId; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TrackedItemId { + Mod(MaybeInvalid), Fn(MaybeInvalid), Struct(MaybeInvalid), Contract(MaybeInvalid), From bbafaf9db6619b8bd925cfd790a3a22bedb5d717 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 22 Mar 2023 22:53:11 +0100 Subject: [PATCH 107/678] Add `IngotModuleTree` and `ModuleItemTree` --- crates/hir/src/hir_def/item_tree.rs | 25 +++++++++++++++ crates/hir/src/hir_def/mod.rs | 6 ++++ crates/hir/src/hir_def/module_tree.rs | 44 +++++++++++++++++++++++++++ crates/hir/src/lib.rs | 9 ++++-- 4 files changed, 82 insertions(+), 2 deletions(-) create mode 100644 crates/hir/src/hir_def/item_tree.rs create mode 100644 crates/hir/src/hir_def/module_tree.rs diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs new file mode 100644 index 0000000000..77cf2a9f85 --- /dev/null +++ b/crates/hir/src/hir_def/item_tree.rs @@ -0,0 +1,25 @@ +use std::collections::BTreeMap; + +use common::{InputFile, InputIngot}; + +use crate::HirDb; + +use super::{IdentId, ItemKind, MaybeInvalid, Mod, ToplevelModuleId}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ModuleItemTree { + pub(crate) file: InputFile, + pub(crate) root_mod: Mod, + pub(crate) item_tree: BTreeMap, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct ItemTreeNode { + pub(crate) parent: Option, + pub(crate) children: BTreeMap, Vec>, +} + +#[salsa::tracked(return_ref)] +pub(crate) fn module_item_tree(db: &dyn HirDb, file: InputFile) -> ModuleItemTree { + todo!() +} diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index fe417486b6..6dfcffa01c 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -9,6 +9,9 @@ pub mod stmt; pub mod types; pub mod use_tree; +pub(crate) mod item_tree; +pub(crate) mod module_tree; + pub use attr::*; pub use body::*; pub use expr::*; @@ -21,6 +24,9 @@ pub use stmt::*; pub use types::*; pub use use_tree::*; +pub use item_tree::*; +pub use module_tree::*; + use crate::HirDb; #[salsa::interned] diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs new file mode 100644 index 0000000000..4a5c328e73 --- /dev/null +++ b/crates/hir/src/hir_def/module_tree.rs @@ -0,0 +1,44 @@ +use std::collections::BTreeMap; + +use common::{InputFile, InputIngot}; +use cranelift_entity::{entity_impl, PrimaryMap}; + +use crate::HirDb; + +use super::IdentId; + +/// This tree represents the structure of an ingot. +/// +/// This is used in later name resolution phase. 
+/// The tree is file contents agnostic, i.e., **only** depends on project +/// structure and crate dependency. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct IngotModuleTree { + pub(crate) root: ToplevelModuleId, + pub(crate) module_tree: PrimaryMap, + pub(crate) file_map: BTreeMap, + + pub(crate) ingot: InputIngot, +} + +/// A top level module that is one-to-one mapped to a file. +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct ToplevelModule { + /// A name of the module. + pub(crate) name: IdentId, + /// A file that this module is defined by. + pub(crate) file: InputFile, + /// A parent top level module. + pub(crate) parent: Option, + /// A list of child top level module. + pub(crate) children: BTreeMap>, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(crate) struct ToplevelModuleId(u32); +entity_impl!(ToplevelModuleId); + +#[salsa::tracked(return_ref)] +pub fn ingot_module_tree(db: &dyn HirDb, ingot: InputIngot) -> IngotModuleTree { + todo!() +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 05ffb54401..3bb3b3e4f5 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,3 +1,5 @@ +use common::InputDb; + pub mod hir_def; pub mod input; pub mod lower; @@ -6,6 +8,7 @@ pub mod span; #[salsa::jar(db = HirDb)] pub struct Jar( // Tracked Hir items. + hir_def::Mod, hir_def::Fn, hir_def::ExternFn, hir_def::Struct, @@ -33,7 +36,9 @@ pub struct Jar( hir_def::ImplItemListId, hir_def::TypeId, hir_def::UseTreeId, + hir_def::ingot_module_tree, + hir_def::module_item_tree, ); -pub trait HirDb: salsa::DbWithJar {} -impl HirDb for DB where DB: ?Sized + salsa::DbWithJar {} +pub trait HirDb: salsa::DbWithJar + InputDb {} +impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb {} From 54fe157c619834c6070cdc356ddfc7f513abd201 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 23 Mar 2023 00:08:42 +0100 Subject: [PATCH 108/678] Implement `IngotModuleTreeBuidler` --- Cargo.lock | 1 + crates/analyzer/src/db/queries/ingots.rs | 6 +- crates/common2/src/input.rs | 33 ++++- crates/common2/src/lib.rs | 4 + crates/hir/Cargo.toml | 1 + crates/hir/src/hir_def/module_tree.rs | 169 ++++++++++++++++++++++- crates/hir/src/lib.rs | 6 +- 7 files changed, 207 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d42060d1f3..ad279194b1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -805,6 +805,7 @@ dependencies = [ name = "fe-hir" version = "0.20.0-alpha" dependencies = [ + "camino", "cranelift-entity", "derive_more", "fe-common2", diff --git a/crates/analyzer/src/db/queries/ingots.rs b/crates/analyzer/src/db/queries/ingots.rs index 231f9673fe..fb3673a9ac 100644 --- a/crates/analyzer/src/db/queries/ingots.rs +++ b/crates/analyzer/src/db/queries/ingots.rs @@ -1,5 +1,7 @@ -use crate::namespace::items::{IngotId, IngotMode, ModuleId, ModuleSource}; -use crate::AnalyzerDb; +use crate::{ + namespace::items::{IngotId, IngotMode, ModuleId, ModuleSource}, + AnalyzerDb, +}; use fe_common::files::{SourceFileId, Utf8Path, Utf8PathBuf}; use indexmap::IndexSet; use std::rc::Rc; diff --git a/crates/common2/src/input.rs b/crates/common2/src/input.rs index 529ee51a98..bdf430260f 100644 --- a/crates/common2/src/input.rs +++ b/crates/common2/src/input.rs @@ -1,11 +1,15 @@ +use std::collections::BTreeSet; + use camino::Utf8PathBuf; use smol_str::SmolStr; +use crate::InputDb; + /// An ingot is a collection of files which are compiled together. /// Ingot can depend on other ingots. 
#[salsa::input] pub struct InputIngot { - /// A path to the ingot root directory. + /// An absolute path to the ingot root directory. #[return_ref] pub path: Utf8PathBuf, @@ -16,16 +20,18 @@ pub struct InputIngot { #[return_ref] pub version: Version, + pub root_file: InputFile, + /// A list of ingots which the current ingot depends on. #[return_ref] - pub dependency: Vec, + pub dependency: BTreeSet, /// A list of files which the current ingot contains. #[return_ref] - pub files: Vec, + pub files: BTreeSet, } -#[salsa::input(constructor = __new_priv)] +#[salsa::input] pub struct InputFile { /// A ingot id which the file belongs to. pub ingot: InputIngot, @@ -38,6 +44,12 @@ pub struct InputFile { pub text: String, } +impl InputFile { + pub fn abs_path(&self, db: &dyn InputDb) -> Utf8PathBuf { + self.ingot(db).path(db).join(&self.path(db)) + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum IngotKind { /// A standalone ingot is a dummy ingot when the compiler is invoked @@ -50,7 +62,7 @@ pub enum IngotKind { /// An external ingot which is depended on by the current ingot. External, - /// A std ingot. + /// Standard library ingot. Std, } @@ -62,4 +74,15 @@ pub struct IngotDependency { pub ingot: InputIngot, } +impl PartialOrd for IngotDependency { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.ingot.cmp(&other.ingot)) + } +} +impl Ord for IngotDependency { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.ingot.cmp(&other.ingot) + } +} + pub type Version = semver::Version; diff --git a/crates/common2/src/lib.rs b/crates/common2/src/lib.rs index b0d181b16f..f1c692a43a 100644 --- a/crates/common2/src/lib.rs +++ b/crates/common2/src/lib.rs @@ -7,3 +7,7 @@ pub struct Jar(InputIngot, InputFile); pub trait InputDb: salsa::DbWithJar {} impl InputDb for DB where DB: ?Sized + salsa::DbWithJar {} + +pub trait Upcast { + fn upcast(&self) -> &T; +} diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 5fa4d8d448..968216b9ec 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -16,6 +16,7 @@ derive_more = "0.99" cranelift-entity = "0.91" num-bigint = "0.4.3" num-traits = "0.2.15" +camino = "1.1.4" parser = { path = "../parser2", package = "fe-parser2" } common = { path = "../common2", package = "fe-common2" } diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 4a5c328e73..454d1b0a87 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -1,5 +1,9 @@ -use std::collections::BTreeMap; +use std::{ + collections::BTreeMap, + path::{Path, PathBuf}, +}; +use camino::{Utf8Path, Utf8PathBuf}; use common::{InputFile, InputIngot}; use cranelift_entity::{entity_impl, PrimaryMap}; @@ -8,10 +12,49 @@ use crate::HirDb; use super::IdentId; /// This tree represents the structure of an ingot. +/// Internal modules are not included in this tree, instead, they are included +/// in [`crate::item_tree::ModuleItemTree`]. /// /// This is used in later name resolution phase. /// The tree is file contents agnostic, i.e., **only** depends on project /// structure and crate dependency. +/// +/// +/// Example: +/// ``` +/// ingot/ +/// ├─ main.fe +/// ├─ mod1.fe +/// ├─ mod1/ +/// │ ├─ foo.fe +/// ├─ mod2.fe +/// ├─ mod2 +/// │ ├─ bar.fe +/// ├─ mod3 +/// │ ├─ baz.fe +/// ``` +/// The resulting tree would be like below. 
+/// ``` +/// +------+ +/// *---- | main |----* +/// | +------+ | +------+ +/// | | | baz | +/// | | +------+ +/// v v +/// +------+ +------+ +/// | mod2 | | mod1 | +/// +------+ +------+ +/// | | +/// | | +/// v v +/// +------+ +------+ +/// | bar | | foo | +/// +------+ +------+ +/// ``` +/// +/// **NOTE:** `mod3` is not included in the main tree because it doesn't have a corresponding file. +/// As a result, `baz` is represented as a "floating" node. +/// In this case, the tree is actually a forest. But we don't need to care about it. #[derive(Debug, Clone, PartialEq, Eq)] pub struct IngotModuleTree { pub(crate) root: ToplevelModuleId, @@ -28,17 +71,137 @@ pub(crate) struct ToplevelModule { pub(crate) name: IdentId, /// A file that this module is defined by. pub(crate) file: InputFile, - /// A parent top level module. + /// A parent of top level module. + /// This is `None` if 1. the module is a root module or 2. the module is a + /// "floating" module. pub(crate) parent: Option, /// A list of child top level module. pub(crate) children: BTreeMap>, } +impl ToplevelModule { + fn new(name: IdentId, file: InputFile) -> Self { + Self { + name, + file, + parent: None, + children: BTreeMap::new(), + } + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub(crate) struct ToplevelModuleId(u32); entity_impl!(ToplevelModuleId); +/// Returns a module tree of the given ingot. The resulted tree only includes +/// top level modules. This function only depends on an ingot structure and +/// external ingot dependency, and not depends on file contents. #[salsa::tracked(return_ref)] pub fn ingot_module_tree(db: &dyn HirDb, ingot: InputIngot) -> IngotModuleTree { - todo!() + IngotModuleTreeBuilder::new(db, ingot).build() +} + +struct IngotModuleTreeBuilder<'db> { + db: &'db dyn HirDb, + ingot: InputIngot, + module_tree: PrimaryMap, + file_map: BTreeMap, + path_map: BTreeMap<&'db Utf8Path, ToplevelModuleId>, +} + +impl<'db> IngotModuleTreeBuilder<'db> { + fn new(db: &'db dyn HirDb, ingot: InputIngot) -> Self { + Self { + db, + ingot, + module_tree: PrimaryMap::default(), + file_map: BTreeMap::default(), + path_map: BTreeMap::default(), + } + } + + fn build(mut self) -> IngotModuleTree { + self.set_modules(); + self.build_tree(); + + let root_file = self.ingot.root_file(self.db.upcast()); + let root = self.file_map[&root_file]; + IngotModuleTree { + root, + module_tree: self.module_tree, + file_map: self.file_map, + ingot: self.ingot, + } + } + + fn set_modules(&mut self) { + for &file in self.ingot.files(self.db.upcast()) { + let name = self.module_name(file); + + let module_id = self.module_tree.push(ToplevelModule::new(name, file)); + self.path_map.insert(file.path(self.db.upcast()), module_id); + self.file_map.insert(file, module_id); + } + } + + fn module_name(&self, file: InputFile) -> IdentId { + let path = file.path(self.db.upcast()); + let name = path.file_stem().unwrap(); + IdentId::new(self.db, name.to_string()) + } + + fn build_tree(&mut self) { + let root = self.ingot.root_file(self.db.upcast()); + + for &file in self.ingot.files(self.db.upcast()) { + // Ignore the root file because it has no parent. + if file == root { + continue; + } + + let file_path = file.path(self.db.upcast()); + let root_path = root.path(self.db.upcast()); + + // If the file is in the same directory as the root file, the file is a direct + // child of the root. 
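// A concrete reading of the convention assumed here, using the layout from the doc comment
// above: with the root at `main.fe`, the files `mod1.fe` and `mod2.fe` sit in the same
// directory as the root and become its direct children (the case handled just below).
// `mod1/foo.fe` and `mod2/bar.fe` find their parents through `parent_module`, which rewrites
// the containing directory into a sibling file name (`mod1/` -> `mod1.fe`). `mod3/baz.fe`
// looks for a `mod3.fe` that does not exist, so no branch is added and `baz` stays a
// floating node, as the doc comment describes.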
+ if file_path.parent() == root_path.parent() { + let root_mod = self.file_map[&root]; + let cur_mod = self.file_map[&file]; + self.add_branch(root_mod, cur_mod); + continue; + } + + assert!(file_path + .parent() + .unwrap() + .starts_with(root_path.parent().unwrap())); + + if let Some(parent_mod) = self.parent_module(file) { + let cur_mod = self.file_map[&file]; + self.add_branch(parent_mod, cur_mod); + } + } + } + + fn parent_module(&self, file: InputFile) -> Option { + let file_path = file.path(self.db.upcast()); + let file_dir = file_path.parent()?; + let parent_dir = file_dir.parent()?; + + let parent_mod_stem = file_dir.into_iter().next_back()?; + let parent_mod_path = parent_dir.join(parent_mod_stem).with_extension("fe"); + self.path_map.get(parent_mod_path.as_path()).copied() + } + + fn add_branch(&mut self, parent: ToplevelModuleId, child: ToplevelModuleId) { + let child_name = self.module_tree[child].name; + self.module_tree[parent] + .children + .entry(child_name) + .or_default() + .push(child); + + self.module_tree[child].parent = Some(parent); + } } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 3bb3b3e4f5..00a0f46bf3 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,4 +1,4 @@ -use common::InputDb; +use common::{InputDb, Upcast}; pub mod hir_def; pub mod input; @@ -40,5 +40,5 @@ pub struct Jar( hir_def::module_item_tree, ); -pub trait HirDb: salsa::DbWithJar + InputDb {} -impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb {} +pub trait HirDb: salsa::DbWithJar + InputDb + Upcast {} +impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} From 66afc53edb3bda40a3591de4bfe319f5a0d2e7ec Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 23 Mar 2023 16:49:15 +0100 Subject: [PATCH 109/678] Add tracked fucntion for parsing --- crates/hir/src/input.rs | 1 - crates/hir/src/lib.rs | 13 +++++++++++-- crates/parser2/src/ast/attr.rs | 2 +- crates/parser2/src/ast/expr.rs | 2 +- crates/parser2/src/ast/item.rs | 2 +- crates/parser2/src/ast/param.rs | 6 +++--- crates/parser2/src/ast/pat.rs | 2 +- crates/parser2/src/ast/path.rs | 2 +- crates/parser2/src/ast/stmt.rs | 2 +- crates/parser2/src/ast/types.rs | 2 +- crates/parser2/src/lib.rs | 7 ++++--- crates/parser2/src/parser/mod.rs | 15 +++++++++++---- crates/parser2/src/syntax_node.rs | 1 + crates/parser2/tests/test_runner.rs | 2 +- 14 files changed, 38 insertions(+), 21 deletions(-) delete mode 100644 crates/hir/src/input.rs diff --git a/crates/hir/src/input.rs b/crates/hir/src/input.rs deleted file mode 100644 index 8b13789179..0000000000 --- a/crates/hir/src/input.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 00a0f46bf3..863a89cbe5 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,7 +1,7 @@ -use common::{InputDb, Upcast}; +use common::{InputDb, InputFile, Upcast}; +use parser::GreenNode; pub mod hir_def; -pub mod input; pub mod lower; pub mod span; @@ -38,7 +38,16 @@ pub struct Jar( hir_def::UseTreeId, hir_def::ingot_module_tree, hir_def::module_item_tree, + parse_file, ); +#[salsa::tracked] +pub(crate) fn parse_file(db: &dyn HirDb, file: InputFile) -> GreenNode { + let text = file.text(db.upcast()); + // TODO: Register errors when we define the diagnostics API. 
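// A `GreenNode` is rowan's immutable, position-independent tree representation, so it is a
// natural per-file value for a tracked query to return: identical source text yields an
// identical green tree. The parse errors are deliberately discarded for now; as the TODO
// above says, they are meant to be surfaced later through a dedicated diagnostics API.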
+ let (node, _errs) = parser::parse_source_file(text); + node +} + pub trait HirDb: salsa::DbWithJar + InputDb + Upcast {} impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs index ed667dee2d..82942f6d26 100644 --- a/crates/parser2/src/ast/attr.rs +++ b/crates/parser2/src/ast/attr.rs @@ -138,7 +138,7 @@ mod tests { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); parser.parse(AttrListScope::default(), None); - AttrList::cast(parser.finish().0).unwrap() + AttrList::cast(parser.finish_to_node().0).unwrap() } #[test] diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index 7dbec5af6c..7fdeb7bf59 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -567,7 +567,7 @@ mod tests { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); crate::parser::expr::parse_expr(&mut parser); - Expr::cast(parser.finish().0) + Expr::cast(parser.finish_to_node().0) .unwrap() .kind() .try_into() diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index b3991a19c7..c7f2f6c969 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -452,7 +452,7 @@ mod tests { let mut parser = Parser::new(lexer); parser.parse(ItemListScope::default(), None); - let item_list = ItemList::cast(parser.finish().0).unwrap(); + let item_list = ItemList::cast(parser.finish_to_node().0).unwrap(); let mut items = item_list.into_iter().collect::>(); assert_eq!(items.len(), 1); items.pop().unwrap().kind().try_into().unwrap() diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 9bef957dbc..a4a2255e41 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -351,21 +351,21 @@ mod tests { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); parser.parse(GenericParamListScope::default(), None); - GenericParamList::cast(parser.finish().0).unwrap() + GenericParamList::cast(parser.finish_to_node().0).unwrap() } fn parse_generic_arg(source: &str) -> GenericArgList { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); parser.parse(GenericArgListScope::default(), None); - GenericArgList::cast(parser.finish().0).unwrap() + GenericArgList::cast(parser.finish_to_node().0).unwrap() } fn parse_where_clause(source: &str) -> WhereClause { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); parser.parse(WhereClauseScope::default(), None); - WhereClause::cast(parser.finish().0).unwrap() + WhereClause::cast(parser.finish_to_node().0).unwrap() } #[test] diff --git a/crates/parser2/src/ast/pat.rs b/crates/parser2/src/ast/pat.rs index 2676057a3c..80b2d35829 100644 --- a/crates/parser2/src/ast/pat.rs +++ b/crates/parser2/src/ast/pat.rs @@ -188,7 +188,7 @@ mod tests { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); crate::parser::pat::parse_pat(&mut parser); - Pat::cast(parser.finish().0) + Pat::cast(parser.finish_to_node().0) .unwrap() .kind() .try_into() diff --git a/crates/parser2/src/ast/path.rs b/crates/parser2/src/ast/path.rs index 47a59a7faa..f01c91d7fa 100644 --- a/crates/parser2/src/ast/path.rs +++ b/crates/parser2/src/ast/path.rs @@ -53,7 +53,7 @@ mod tests { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); parser.parse(PathScope::default(), None); - Path::cast(parser.finish().0).unwrap() + Path::cast(parser.finish_to_node().0).unwrap() } #[test] diff --git 
a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs index b157d5f83b..ce4b13231f 100644 --- a/crates/parser2/src/ast/stmt.rs +++ b/crates/parser2/src/ast/stmt.rs @@ -218,7 +218,7 @@ mod tests { let lexer = Lexer::new(source); let mut parser = Parser::new(lexer); crate::parser::stmt::parse_stmt(&mut parser, None); - Stmt::cast(parser.finish().0) + Stmt::cast(parser.finish_to_node().0) .unwrap() .kind() .try_into() diff --git a/crates/parser2/src/ast/types.rs b/crates/parser2/src/ast/types.rs index 2eafe375da..0f1d949e0a 100644 --- a/crates/parser2/src/ast/types.rs +++ b/crates/parser2/src/ast/types.rs @@ -127,7 +127,7 @@ mod tests { let lexer = Lexer::new(source); let mut parser = parser::Parser::new(lexer); parser::type_::parse_type(&mut parser, None); - Type::cast(parser.finish().0) + Type::cast(parser.finish_to_node().0) .unwrap() .kind() .try_into() diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index cdace9107f..5771df9a55 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -5,13 +5,13 @@ pub mod syntax_kind; pub mod syntax_node; pub use syntax_kind::SyntaxKind; -pub use syntax_node::{FeLang, SyntaxNode, SyntaxToken}; +pub use syntax_node::{FeLang, GreenNode, SyntaxNode, SyntaxToken}; use parser::RootScope; pub type TextRange = rowan::TextRange; -pub fn parse_source_file(text: &str) -> (SyntaxNode, Vec) { +pub fn parse_source_file(text: &str) -> (GreenNode, Vec) { let lexer = lexer::Lexer::new(text); let mut parser = parser::Parser::new(lexer); let checkpoint = parser.enter(RootScope::default(), None); @@ -19,7 +19,8 @@ pub fn parse_source_file(text: &str) -> (SyntaxNode, Vec) { parser.parse(parser::ItemListScope::default(), None); parser.leave(checkpoint); - parser.finish() + let (node, errs) = parser.finish(); + (node, errs) } /// An parse error which is accumulated in the [`parser::Parser`] while parsing. diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 8416e4ca8b..706ed5dfb9 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -4,7 +4,7 @@ pub(crate) use item::ItemListScope; use fxhash::{FxHashMap, FxHashSet}; -use crate::{syntax_node::SyntaxNode, ParseError, SyntaxKind, TextRange}; +use crate::{syntax_node::SyntaxNode, GreenNode, ParseError, SyntaxKind, TextRange}; use self::token_stream::{BackTrackableTokenStream, LexicalToken, TokenStream}; @@ -88,12 +88,19 @@ impl Parser { std::mem::replace(&mut self.is_newline_trivia, is_trivia) } - /// Finish the parsing and return the syntax tree. - pub fn finish(self) -> (SyntaxNode, Vec) { + /// Finish the parsing and return the GreeNode. + pub fn finish(self) -> (GreenNode, Vec) { debug_assert!(self.parents.is_empty()); debug_assert!(!self.is_dry_run()); - (SyntaxNode::new_root(self.builder.finish()), self.errors) + (self.builder.finish(), self.errors) + } + + /// Finish the parsing and return the SyntaxNode. + /// **NOTE:** This method is mainly used for testing. + pub fn finish_to_node(self) -> (SyntaxNode, Vec) { + let (green_node, errors) = self.finish(); + (SyntaxNode::new_root(green_node), errors) } /// Adds the `recovery_tokens` as a temporary recovery token set. 
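The `finish`/`finish_to_node` split above follows the usual rowan workflow: the green tree is the value worth storing (it is what the new `parse_file` query in `hir` caches), while a `SyntaxNode` cursor is rebuilt on demand for traversal. Below is a minimal sketch of that round trip, assuming the library is used under its package-derived crate name `fe_parser2` (the `hir` crate imports it under the alias `parser`); the helper name is illustrative, not part of the patch.

use fe_parser2::{
    ast::{self, prelude::*},
    parse_source_file, SyntaxNode,
};

fn parse_to_ast_root(text: &str) -> ast::Root {
    // The parser hands back the position-independent green tree plus any accumulated errors.
    let (green, errors) = parse_source_file(text);
    debug_assert!(errors.is_empty());
    // A `SyntaxNode` is a lightweight cursor over the green tree and can be recreated cheaply.
    let node = SyntaxNode::new_root(green);
    // Cast the untyped node into the typed AST layer for lowering.
    ast::Root::cast(node).expect("the parser always emits a root node")
}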
diff --git a/crates/parser2/src/syntax_node.rs b/crates/parser2/src/syntax_node.rs index 74b221c90c..b1011cd9d5 100644 --- a/crates/parser2/src/syntax_node.rs +++ b/crates/parser2/src/syntax_node.rs @@ -17,3 +17,4 @@ impl rowan::Language for FeLang { pub type SyntaxNode = rowan::SyntaxNode; pub type SyntaxToken = rowan::SyntaxToken; +pub type GreenNode = rowan::GreenNode; diff --git a/crates/parser2/tests/test_runner.rs b/crates/parser2/tests/test_runner.rs index bc84c5c261..6bbc56b8fd 100644 --- a/crates/parser2/tests/test_runner.rs +++ b/crates/parser2/tests/test_runner.rs @@ -87,7 +87,7 @@ impl TestRunner { (self.f)(&mut parser); parser.leave(checkpoint); - let (cst, errors) = parser.finish(); + let (cst, errors) = parser.finish_to_node(); for error in &errors { println!("{}@{:?}", error.msg, error.range); From 85f25a5c60fb9c5e871e0407e8cd38f5db8b736f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 24 Mar 2023 00:15:42 +0100 Subject: [PATCH 110/678] Implement `FileLowerCtxt` --- crates/hir/src/hir_def/item.rs | 60 ++-- crates/hir/src/hir_def/item_tree.rs | 32 +- crates/hir/src/hir_def/module_tree.rs | 35 +- crates/hir/src/lib.rs | 1 + crates/hir/src/lower/attr.rs | 36 +- crates/hir/src/lower/body.rs | 76 ++--- crates/hir/src/lower/expr.rs | 75 +++-- crates/hir/src/lower/item.rs | 458 +++++++++++++++++--------- crates/hir/src/lower/mod.rs | 98 +++++- crates/hir/src/lower/params.rs | 122 ++++--- crates/hir/src/lower/pat.rs | 36 +- crates/hir/src/lower/path.rs | 26 +- crates/hir/src/lower/stmt.rs | 24 +- crates/hir/src/lower/types.rs | 44 ++- crates/hir/src/lower/use_tree.rs | 40 ++- crates/hir/src/span/mod.rs | 4 - 16 files changed, 701 insertions(+), 466 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index f9946c34ca..117b6b1606 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -1,4 +1,5 @@ -// This is necessary because `salsa::tracked` structs generates a constructor +// This is necessary because `salsa::tracked` structs generates a +// constructor // that may take many arguments depending on the number of fields in the struct. #![allow(clippy::too_many_arguments)] @@ -11,6 +12,33 @@ use super::{ WhereClauseId, }; +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, PartialOrd, Ord)] +pub enum ItemKind { + TopMod(TopLevelMod), + Mod(Mod), + Fn(Fn), + ExternFn(ExternFn), + Struct(Struct), + Contract(Contract), + Enum(Enum), + TypeAlias(TypeAlias), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), + Const(Const), + Use(Use), + /// Body is not an `Item`, but this makes it easier to analyze items. + Body(Body), +} + +#[salsa::tracked] +pub struct TopLevelMod { + // No #[id] here, because `TopLevelMod` is always unique to a `InputFile` that is an argument + // of `module_item_tree`. 
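// (Roughly speaking, salsa-2022 derives a tracked struct's identity from the tracked
// function that created it plus its `#[id]` fields; since `module_item_tree` is keyed by the
// `InputFile` and creates exactly one `TopLevelMod`, that enclosing query already provides
// the identity.)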
+ pub name: IdentId, + pub(crate) origin: HirOrigin, +} + #[salsa::tracked] pub struct Mod { #[id] @@ -170,22 +198,6 @@ pub struct Use { pub(crate) origin: HirOrigin, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, PartialOrd, Ord)] -pub enum ItemKind { - Module(Mod), - Fn(Fn), - ExternFn(ExternFn), - Struct(Struct), - Contract(Contract), - Enum(Enum), - TypeAlias(TypeAlias), - Impl(Impl), - Trait(Trait), - ImplTrait(ImplTrait), - Const(Const), - Use(Use), -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum ItemModifier { Pub, @@ -196,7 +208,10 @@ pub enum ItemModifier { impl ItemModifier { pub fn is_pub(self) -> bool { - matches!(self, ItemModifier::Pub | ItemModifier::PubAndUnsafe) + match self { + ItemModifier::Pub | ItemModifier::PubAndUnsafe => true, + ItemModifier::Unsafe | ItemModifier::None => false, + } } } @@ -237,6 +252,7 @@ pub type ExternItemListId = ImplItemListId; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TrackedItemId { + TopLevelMod(IdentId), Mod(MaybeInvalid), Fn(MaybeInvalid), Struct(MaybeInvalid), @@ -256,12 +272,4 @@ impl TrackedItemId { pub(crate) fn join(self, rhs: Self) -> Self { Self::Joined(self.into(), rhs.into()) } - - pub(crate) fn join_opt(self, rhs: Option) -> Self { - if let Some(rhs) = rhs { - self.join(rhs) - } else { - self - } - } } diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs index 77cf2a9f85..0731784bb3 100644 --- a/crates/hir/src/hir_def/item_tree.rs +++ b/crates/hir/src/hir_def/item_tree.rs @@ -1,25 +1,41 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; -use common::{InputFile, InputIngot}; +use common::InputFile; +use parser::{ + ast::{self, prelude::*}, + SyntaxNode, +}; -use crate::HirDb; +use crate::{ + hir_def::{module_tree, TopLevelMod}, + lower, HirDb, +}; -use super::{IdentId, ItemKind, MaybeInvalid, Mod, ToplevelModuleId}; +use super::ItemKind; +/// This tree represents the item hierarchy inside a file. +/// The root node of the tree is the top level module, which corresponds to the +/// `module_tree::TopLevelModule`. #[derive(Debug, Clone, PartialEq, Eq)] pub struct ModuleItemTree { pub(crate) file: InputFile, - pub(crate) root_mod: Mod, + pub(crate) top_mod: TopLevelMod, pub(crate) item_tree: BTreeMap, } #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct ItemTreeNode { pub(crate) parent: Option, - pub(crate) children: BTreeMap, Vec>, + pub(crate) children: BTreeSet, } #[salsa::tracked(return_ref)] -pub(crate) fn module_item_tree(db: &dyn HirDb, file: InputFile) -> ModuleItemTree { - todo!() +pub fn module_item_tree(db: &dyn HirDb, file: InputFile) -> ModuleItemTree { + let node = SyntaxNode::new_root(crate::parse_file(db, file)); + let module_tree = module_tree::ingot_module_tree(db, file.ingot(db.upcast())); + + // This cast never fails even if the file content is empty. 
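// (The guarantee comes from `parse_source_file`, which always enters a `RootScope` around
// the item list, so even an empty file yields a root syntax node.)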
+ let ast_root = ast::Root::cast(node).unwrap(); + let top_mod_name = module_tree.module_name(file); + lower::lower_file(db, file, top_mod_name, ast_root) } diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 454d1b0a87..95ba4433a8 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -1,9 +1,6 @@ -use std::{ - collections::BTreeMap, - path::{Path, PathBuf}, -}; +use std::collections::BTreeMap; -use camino::{Utf8Path, Utf8PathBuf}; +use camino::Utf8Path; use common::{InputFile, InputIngot}; use cranelift_entity::{entity_impl, PrimaryMap}; @@ -64,6 +61,26 @@ pub struct IngotModuleTree { pub(crate) ingot: InputIngot, } +impl IngotModuleTree { + #[inline] + pub fn module_name(&self, file: InputFile) -> IdentId { + self.module_data(file).name + } + + fn module_data(&self, file: InputFile) -> &ToplevelModule { + let id = self.file_map[&file]; + &self.module_tree[id] + } +} + +/// Returns a module tree of the given ingot. The resulted tree only includes +/// top level modules. This function only depends on an ingot structure and +/// external ingot dependency, and not depends on file contents. +#[salsa::tracked(return_ref)] +pub fn ingot_module_tree(db: &dyn HirDb, ingot: InputIngot) -> IngotModuleTree { + IngotModuleTreeBuilder::new(db, ingot).build() +} + /// A top level module that is one-to-one mapped to a file. #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct ToplevelModule { @@ -94,14 +111,6 @@ impl ToplevelModule { pub(crate) struct ToplevelModuleId(u32); entity_impl!(ToplevelModuleId); -/// Returns a module tree of the given ingot. The resulted tree only includes -/// top level modules. This function only depends on an ingot structure and -/// external ingot dependency, and not depends on file contents. -#[salsa::tracked(return_ref)] -pub fn ingot_module_tree(db: &dyn HirDb, ingot: InputIngot) -> IngotModuleTree { - IngotModuleTreeBuilder::new(db, ingot).build() -} - struct IngotModuleTreeBuilder<'db> { db: &'db dyn HirDb, ingot: InputIngot, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 863a89cbe5..fb76458923 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -8,6 +8,7 @@ pub mod span; #[salsa::jar(db = HirDb)] pub struct Jar( // Tracked Hir items. 
+ hir_def::TopLevelMod, hir_def::Mod, hir_def::Fn, hir_def::ExternFn, diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs index 4001cc227e..9cc3efaf08 100644 --- a/crates/hir/src/lower/attr.rs +++ b/crates/hir/src/lower/attr.rs @@ -1,40 +1,40 @@ use crate::hir_def::{attr::*, IdentId, StringId}; use parser::ast; -use crate::HirDb; +use super::FileLowerCtxt; impl AttrListId { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::AttrList) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::AttrList) -> Self { let attrs = ast .into_iter() - .map(|attr| Attr::from_ast(db, attr)) + .map(|attr| Attr::lower_ast(ctxt, attr)) .collect(); - Self::new(db, attrs) + Self::new(ctxt.db, attrs) } - pub fn from_ast_opt(db: &dyn HirDb, ast: Option) -> Self { - ast.map(|ast| Self::from_ast(db, ast)) - .unwrap_or_else(|| Self::new(db, vec![])) + pub(super) fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::new(ctxt.db, vec![])) } } impl Attr { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::Attr) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Attr) -> Self { match ast.kind() { - ast::AttrKind::Normal(attr) => NormalAttr::from_ast(db, attr).into(), - ast::AttrKind::DocComment(attr) => DocCommentAttr::from_ast(db, attr).into(), + ast::AttrKind::Normal(attr) => NormalAttr::lower_ast(ctxt, attr).into(), + ast::AttrKind::DocComment(attr) => DocCommentAttr::lower_ast(ctxt, attr).into(), } } } impl NormalAttr { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::NormalAttr) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::NormalAttr) -> Self { + let name = IdentId::maybe_lower_token(ctxt, ast.name()); let args = ast .args() .map(|args| { args.into_iter() - .map(|arg| AttrArg::from_ast(db, arg)) + .map(|arg| AttrArg::lower_ast(ctxt, arg)) .collect() }) .unwrap_or_default(); @@ -44,21 +44,21 @@ impl NormalAttr { } impl DocCommentAttr { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::DocCommentAttr) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::DocCommentAttr) -> Self { let text = ast .doc() .map(|doc| doc.text()[3..].to_string()) .unwrap_or_default(); Self { - text: StringId::new(db, text), + text: StringId::new(ctxt.db, text), } } } impl AttrArg { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::AttrArg) -> Self { - let key = IdentId::maybe_from_token(db, ast.key()); - let value = IdentId::maybe_from_token(db, ast.value()); + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::AttrArg) -> Self { + let key = IdentId::maybe_lower_token(ctxt, ast.key()); + let value = IdentId::maybe_lower_token(ctxt, ast.value()); Self { key, value } } } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 0538d3e9f7..c79cc4777a 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -1,4 +1,3 @@ -use common::InputFile; use parser::ast; use crate::{ @@ -7,108 +6,108 @@ use crate::{ TrackedBodyId, TrackedItemId, }, span::{HirOrigin, HirOriginKind}, - HirDb, }; +use super::FileLowerCtxt; + impl Body { - pub(crate) fn item_body_from_ast( - db: &dyn HirDb, - file: InputFile, + pub(super) fn lower_ast( + f_ctxt: &mut FileLowerCtxt, parent_id: TrackedItemId, ast: ast::Expr, ) -> Self { let bid = TrackedBodyId::ItemBody(parent_id.into()); - let mut ctxt = BodyCtxt::new(db, file, bid); - 
Expr::push_to_body(&mut ctxt, ast.clone()); - ctxt.build(HirOrigin::raw(file, &ast)) + let mut ctxt = BodyCtxt::new(f_ctxt, bid); + Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast) } - pub(crate) fn nested_body_from_ast( - db: &dyn HirDb, - file: InputFile, + pub(super) fn lower_ast_nested( + f_ctxt: &mut FileLowerCtxt, bid: TrackedBodyId, ast: ast::Expr, ) -> Self { let bid = TrackedBodyId::NestedBody(bid.into()); - let mut ctxt = BodyCtxt::new(db, file, bid); - Expr::push_to_body(&mut ctxt, ast.clone()); - ctxt.build(HirOrigin::raw(file, &ast)) + let mut ctxt = BodyCtxt::new(f_ctxt, bid); + Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast) } - pub(crate) fn nameless_body_from_ast(db: &dyn HirDb, file: InputFile, ast: ast::Expr) -> Self { + pub(super) fn lower_ast_nameless(f_ctxt: &mut FileLowerCtxt<'_>, ast: ast::Expr) -> Self { let bid = TrackedBodyId::NamelessBody; - let mut ctxt = BodyCtxt::new(db, file, bid); - Expr::push_to_body(&mut ctxt, ast.clone()); - ctxt.build(HirOrigin::raw(file, &ast)) + let mut ctxt = BodyCtxt::new(f_ctxt, bid); + Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast) } } -pub(super) struct BodyCtxt<'db> { +pub(super) struct BodyCtxt<'ctxt, 'db> { + pub(super) f_ctxt: &'ctxt mut FileLowerCtxt<'db>, + pub(super) bid: TrackedBodyId, + pub(super) stmts: BodyNodeMap>, pub(super) exprs: BodyNodeMap>, pub(super) pats: BodyNodeMap>, - pub(super) db: &'db dyn HirDb, - pub(super) file: InputFile, - pub(super) bid: TrackedBodyId, stmt_source_map: BodySourceMap, expr_source_map: BodySourceMap, pat_source_map: BodySourceMap, } -impl<'db> BodyCtxt<'db> { +impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { pub(super) fn push_expr(&mut self, expr: Expr, origin: HirOriginKind) -> ExprId { let expr_id = self.exprs.push(Some(expr).into()); - self.expr_source_map[expr_id] = HirOrigin::new(self.file, origin); + self.expr_source_map[expr_id] = HirOrigin::new(self.f_ctxt.file, origin); expr_id } pub(super) fn push_invalid_expr(&mut self, origin: HirOriginKind) -> ExprId { let expr_id = self.exprs.push(None.into()); - self.expr_source_map[expr_id] = HirOrigin::new(self.file, origin); + self.expr_source_map[expr_id] = HirOrigin::new(self.f_ctxt.file, origin); expr_id } pub(super) fn push_missing_expr(&mut self) -> ExprId { let expr_id = self.exprs.push(None.into()); - self.expr_source_map[expr_id] = HirOrigin::none(self.file); + self.expr_source_map[expr_id] = HirOrigin::none(self.f_ctxt.file); expr_id } pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: HirOriginKind) -> StmtId { let stmt_id = self.stmts.push(Some(stmt).into()); - self.stmt_source_map[stmt_id] = HirOrigin::new(self.file, origin); + self.stmt_source_map[stmt_id] = HirOrigin::new(self.f_ctxt.file, origin); stmt_id } pub(super) fn push_pat(&mut self, pat: Pat, origin: HirOriginKind) -> PatId { let pat_id = self.pats.push(Some(pat).into()); - self.pat_source_map[pat_id] = HirOrigin::new(self.file, origin); + self.pat_source_map[pat_id] = HirOrigin::new(self.f_ctxt.file, origin); pat_id } pub(super) fn push_missing_pat(&mut self) -> PatId { let pat_id = self.pats.push(None.into()); - self.pat_source_map[pat_id] = HirOrigin::none(self.file); + self.pat_source_map[pat_id] = HirOrigin::none(self.f_ctxt.file); pat_id } - fn new(db: &'db dyn HirDb, file: InputFile, bid: TrackedBodyId) -> Self { + fn new(f_ctxt: &'ctxt mut FileLowerCtxt<'db>, bid: TrackedBodyId) -> Self { + f_ctxt.enter_scope(); Self { + f_ctxt: f_ctxt, + bid, stmts: BodyNodeMap::new(), exprs: BodyNodeMap::new(), pats: BodyNodeMap::new(), 
- db, - file, - bid, stmt_source_map: BodySourceMap::new(), expr_source_map: BodySourceMap::new(), pat_source_map: BodySourceMap::new(), } } - fn build(self, origin: HirOrigin) -> Body { - Body::new( - self.db, + fn build(self, ast: &ast::Expr) -> Body { + let origin = HirOrigin::raw(self.f_ctxt.file, ast); + let body = Body::new( + self.f_ctxt.db, self.bid, self.stmts, self.exprs, @@ -117,6 +116,9 @@ impl<'db> BodyCtxt<'db> { self.expr_source_map, self.pat_source_map, origin, - ) + ); + + self.f_ctxt.leave_scope(body); + body } } diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index ac73838544..cf95ad1335 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -1,18 +1,18 @@ use parser::ast::{self, prelude::*}; use crate::{ - hir_def::{expr::*, Body, IdentId, IntegerId, LitKind, MaybeInvalid, Pat, PathId, Stmt}, + hir_def::{expr::*, Body, IdentId, IntegerId, LitKind, Pat, PathId, Stmt}, span::HirOriginKind, }; use super::body::BodyCtxt; impl Expr { - pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: ast::Expr) -> ExprId { + pub(super) fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::Expr) -> ExprId { let expr = match ast.kind() { ast::ExprKind::Lit(lit) => { if let Some(lit) = lit.lit() { - let lit = LitKind::from_ast(ctxt.db, lit); + let lit = LitKind::lower_ast(ctxt.f_ctxt, lit); Self::Lit(lit) } else { return ctxt.push_invalid_expr(HirOriginKind::raw(&ast)); @@ -31,13 +31,13 @@ impl Expr { ast::ExprKind::Bin(bin) => { let lhs = Self::push_to_body_opt(ctxt, bin.lhs()); let rhs = Self::push_to_body_opt(ctxt, bin.rhs()); - let op = bin.op().map(|op| BinOp::from_ast(op)).into(); + let op = bin.op().map(|op| BinOp::lower_ast(op)).into(); Self::Bin(lhs, rhs, op) } ast::ExprKind::Un(un) => { let expr = Self::push_to_body_opt(ctxt, un.expr()); - let op = un.op().map(|op| UnOp::from_ast(op)).into(); + let op = un.op().map(|op| UnOp::lower_ast(op)).into(); Self::Un(expr, op) } @@ -47,7 +47,7 @@ impl Expr { .args() .map(|args| { args.into_iter() - .map(|arg| CallArg::from_ast(ctxt, arg)) + .map(|arg| CallArg::lower_ast(ctxt, arg)) .collect() }) .unwrap_or_default(); @@ -56,12 +56,13 @@ impl Expr { ast::ExprKind::MethodCall(method_call) => { let receiver = Self::push_to_body_opt(ctxt, method_call.receiver()); - let method_name = IdentId::maybe_from_token(ctxt.db, method_call.method_name()); + let method_name = + IdentId::maybe_lower_token(ctxt.f_ctxt, method_call.method_name()); let args = method_call .args() .map(|args| { args.into_iter() - .map(|arg| CallArg::from_ast(ctxt, arg)) + .map(|arg| CallArg::lower_ast(ctxt, arg)) .collect() }) .unwrap_or_default(); @@ -69,18 +70,18 @@ impl Expr { } ast::ExprKind::Path(path) => { - let path = PathId::maybe_from_ast(ctxt.db, path.path()); + let path = PathId::maybe_lower_ast(ctxt.f_ctxt, path.path()); Self::Path(path) } ast::ExprKind::RecordInit(record_init) => { - let path = PathId::maybe_from_ast(ctxt.db, record_init.path()); + let path = PathId::maybe_lower_ast(ctxt.f_ctxt, record_init.path()); let fields = record_init .fields() .map(|fields| { fields .into_iter() - .map(|field| RecordField::from_ast(ctxt, field)) + .map(|field| RecordField::lower_ast(ctxt, field)) .collect() }) .unwrap_or_default(); @@ -90,9 +91,9 @@ impl Expr { ast::ExprKind::Field(field) => { let receiver = Self::push_to_body_opt(ctxt, field.receiver()); let field = if let Some(name) = field.field_name() { - Some(FieldIndex::Ident(IdentId::from_token(ctxt.db, name))).into() + 
Some(FieldIndex::Ident(IdentId::lower_token(ctxt.f_ctxt, name))).into() } else if let Some(num) = field.field_index() { - Some(FieldIndex::Index(IntegerId::from_ast(ctxt.db, num))).into() + Some(FieldIndex::Index(IntegerId::lower_ast(ctxt.f_ctxt, num))).into() } else { None.into() }; @@ -126,9 +127,7 @@ impl Expr { let val = Self::push_to_body_opt(ctxt, array_rep.val()); let len = array_rep .len() - .map(|ast| { - Body::nested_body_from_ast(ctxt.db, ctxt.file, ctxt.bid.clone(), ast) - }) + .map(|ast| Body::lower_ast_nested(ctxt.f_ctxt, ctxt.bid.clone(), ast)) .into(); Self::ArrayRep(val, len) } @@ -141,7 +140,7 @@ impl Expr { .map(|body| ast::Expr::cast(body.syntax().clone())) .flatten(), ); - let else_ = if_.else_().map(|ast| Self::push_to_body(ctxt, ast)); + let else_ = if_.else_().map(|ast| Self::lower_ast(ctxt, ast)); Self::If(cond, then, else_) } @@ -151,7 +150,7 @@ impl Expr { .arms() .map(|arms| { arms.into_iter() - .map(|arm| MatchArm::from_ast(ctxt, arm)) + .map(|arm| MatchArm::lower_ast(ctxt, arm)) .collect() }) .into(); @@ -167,9 +166,9 @@ impl Expr { ctxt.push_expr(expr, HirOriginKind::raw(&ast)) } - pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_>, ast: Option) -> ExprId { + pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_, '_>, ast: Option) -> ExprId { if let Some(ast) = ast { - Expr::push_to_body(ctxt, ast) + Expr::lower_ast(ctxt, ast) } else { ctxt.push_missing_expr() } @@ -177,17 +176,17 @@ impl Expr { } impl BinOp { - pub(super) fn from_ast(ast: ast::BinOp) -> Self { + pub(super) fn lower_ast(ast: ast::BinOp) -> Self { match ast { - ast::BinOp::Arith(arith) => ArithBinOp::from_ast(arith).into(), - ast::BinOp::Comp(arith) => CompBinOp::from_ast(arith).into(), - ast::BinOp::Logical(arith) => LogicalBinOp::from_ast(arith).into(), + ast::BinOp::Arith(arith) => ArithBinOp::lower_ast(arith).into(), + ast::BinOp::Comp(arith) => CompBinOp::lower_ast(arith).into(), + ast::BinOp::Logical(arith) => LogicalBinOp::lower_ast(arith).into(), } } } impl ArithBinOp { - pub(super) fn from_ast(ast: ast::ArithBinOp) -> Self { + pub(super) fn lower_ast(ast: ast::ArithBinOp) -> Self { match ast { ast::ArithBinOp::Add(_) => Self::Add, ast::ArithBinOp::Sub(_) => Self::Sub, @@ -205,7 +204,7 @@ impl ArithBinOp { } impl CompBinOp { - pub(super) fn from_ast(ast: ast::CompBinOp) -> Self { + pub(super) fn lower_ast(ast: ast::CompBinOp) -> Self { match ast { ast::CompBinOp::Eq(_) => Self::Eq, ast::CompBinOp::NotEq(_) => Self::NotEq, @@ -218,7 +217,7 @@ impl CompBinOp { } impl LogicalBinOp { - pub(super) fn from_ast(ast: ast::LogicalBinOp) -> Self { + pub(super) fn lower_ast(ast: ast::LogicalBinOp) -> Self { match ast { ast::LogicalBinOp::And(_) => Self::And, ast::LogicalBinOp::Or(_) => Self::Or, @@ -227,7 +226,7 @@ impl LogicalBinOp { } impl UnOp { - fn from_ast(ast: ast::UnOp) -> Self { + fn lower_ast(ast: ast::UnOp) -> Self { match ast { ast::UnOp::Plus(_) => Self::Plus, ast::UnOp::Minus(_) => Self::Minus, @@ -238,28 +237,28 @@ impl UnOp { } impl MatchArm { - fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: ast::MatchArm) -> Self { - let pat = Pat::push_to_body_opt(ctxt, ast.pat()); + fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::MatchArm) -> Self { + let pat = Pat::lower_ast_opt(ctxt, ast.pat()); let body = Expr::push_to_body_opt(ctxt, ast.body()); Self { pat, body } } } impl CallArg { - fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: ast::CallArg) -> Self { - let label = ast.label().map(|label| IdentId::from_token(ctxt.db, label)); + fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::CallArg) 
-> Self { + let label = ast + .label() + .map(|label| IdentId::lower_token(ctxt.f_ctxt, label)); let expr = Expr::push_to_body_opt(ctxt, ast.expr()); Self { label, expr } } - - fn from_ast_opt(ctxt: &mut BodyCtxt<'_>, ast: Option) -> MaybeInvalid { - ast.map(|ast| Self::from_ast(ctxt, ast)).into() - } } impl RecordField { - fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: ast::RecordField) -> Self { - let label = ast.label().map(|label| IdentId::from_token(ctxt.db, label)); + fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::RecordField) -> Self { + let label = ast + .label() + .map(|label| IdentId::lower_token(ctxt.f_ctxt, label)); let expr = Expr::push_to_body_opt(ctxt, ast.expr()); Self { label, expr } } diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index f2c766cfab..eee640b479 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -1,4 +1,3 @@ -use common::InputFile; use parser::ast::{self, prelude::*}; use crate::{ @@ -7,40 +6,78 @@ use crate::{ UseTreeId, WhereClauseId, }, span::HirOrigin, - HirDb, }; +use super::FileLowerCtxt; + +impl TopLevelMod { + pub(crate) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, name: IdentId, ast: ast::Root) -> Self { + ctxt.enter_scope(); + + let id = TrackedItemId::TopLevelMod(name); + if let Some(items) = ast.items() { + lower_module_items(ctxt, id, items); + } + + let origin = HirOrigin::raw(ctxt.file, &ast); + let top_mod = Self::new(ctxt.db, name, origin); + ctxt.leave_scope(top_mod) + } +} + +impl Mod { + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, + ast: ast::Mod, + ) -> Self { + ctxt.enter_scope(); + + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Mod(name).join(parent_id); + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + if let Some(items) = ast.items() { + lower_module_items(ctxt, id.clone(), items); + } + + let origin = HirOrigin::raw(ctxt.file, &ast); + let mod_ = Self::new(ctxt.db, id, name, attributes, is_pub, origin); + ctxt.leave_scope(mod_) + } +} + impl Fn { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Fn, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let id = TrackedItemId::Fn(name).join_opt(parent_id); + ctxt.enter_scope(); + + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Fn(name).join(parent_id); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let params = ast .params() - .map(|params| FnParamListId::from_ast(db, file, params)) + .map(|params| FnParamListId::lower_ast(ctxt, params)) .into(); - let ret_ty = ast.ret_ty().map(|ty| TypeId::from_ast(db, file, ty)); - let modifier = ItemModifier::from_ast(db, ast.modifier()); + let ret_ty = ast.ret_ty().map(|ty| TypeId::lower_ast(ctxt, ty)); + let modifier = ItemModifier::lower_ast(ast.modifier()); let body = ast.body().map(|body| { - Body::item_body_from_ast( - db, - file, + 
Body::lower_ast( + ctxt, id.clone(), ast::Expr::cast(body.syntax().clone()).unwrap(), ) }); - let origin = HirOrigin::raw(file, &ast); + let origin = HirOrigin::raw(ctxt.file, &ast); - Self::new( - db, + let fn_ = Self::new( + ctxt.db, id, name, attributes, @@ -51,29 +88,31 @@ impl Fn { modifier, body, origin, - ) + ); + ctxt.leave_scope(fn_) } } impl Struct { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Struct, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let id = TrackedItemId::Struct(name).join_opt(parent_id); + ctxt.enter_scope(); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); - let fields = RecordFieldListId::from_ast_opt(db, file, ast.fields()); - let origin = HirOrigin::raw(file, &ast); + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Struct(name).join(parent_id); - Self::new( - db, + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); + let origin = HirOrigin::raw(ctxt.file, &ast); + + let struct_ = Self::new( + ctxt.db, id, name, attributes, @@ -82,48 +121,52 @@ impl Struct { where_clause, fields, origin, - ) + ); + ctxt.leave_scope(struct_) } } impl Contract { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Contract, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let id = TrackedItemId::Contract(name).join_opt(parent_id); + ctxt.enter_scope(); + + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Contract(name).join(parent_id); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let fields = RecordFieldListId::from_ast_opt(db, file, ast.fields()); - let origin = HirOrigin::raw(file, &ast); + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); + let origin = HirOrigin::raw(ctxt.file, &ast); - Self::new(db, id, name, attributes, is_pub, fields, origin) + let contract = Self::new(ctxt.db, id, name, attributes, is_pub, fields, origin); + ctxt.leave_scope(contract) } } impl Enum { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Enum, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let id = TrackedItemId::Enum(name).join_opt(parent_id); + ctxt.enter_scope(); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_params = GenericParamListId::from_ast_opt(db, file, 
ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); - let variants = EnumVariantListId::from_ast_opt(db, file, ast.variants()); - let origin = HirOrigin::raw(file, &ast); + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Enum(name).join(parent_id); - Self::new( - db, + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let variants = EnumVariantListId::lower_ast_opt(ctxt, ast.variants()); + let origin = HirOrigin::raw(ctxt.file, &ast); + + let enum_ = Self::new( + ctxt.db, id, name, attributes, @@ -132,29 +175,31 @@ impl Enum { where_clause, variants, origin, - ) + ); + ctxt.leave_scope(enum_) } } impl TypeAlias { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::TypeAlias, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.alias()); - let id = TrackedItemId::TypeAlias(name).join_opt(parent_id); + ctxt.enter_scope(); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); - let origin = HirOrigin::raw(file, &ast); + let name = IdentId::maybe_lower_token(ctxt, ast.alias()); + let id = TrackedItemId::TypeAlias(name).join(parent_id); - Self::new( - db, + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let origin = HirOrigin::raw(ctxt.file, &ast); + + let alias = Self::new( + ctxt.db, id, name, attributes, @@ -163,47 +208,71 @@ impl TypeAlias { where_clause, ty, origin, - ) + ); + ctxt.leave_scope(alias) } } impl Impl { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Impl, ) -> Self { - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); - let id = TrackedItemId::Impl(ty).join_opt(parent_id); + ctxt.enter_scope(); + + let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let id = TrackedItemId::Impl(ty).join(parent_id); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); - let origin = HirOrigin::raw(file, &ast); + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let origin = HirOrigin::raw(ctxt.file, &ast); - Self::new(db, id, ty, attributes, generic_params, where_clause, origin) + if let Some(item_list) = ast.item_list() { + for impl_item in 
item_list { + Fn::lower_ast(ctxt, id.clone(), impl_item); + } + } + + let impl_ = Self::new( + ctxt.db, + id, + ty, + attributes, + generic_params, + where_clause, + origin, + ); + ctxt.leave_scope(impl_) } } impl Trait { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Trait, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let id = TrackedItemId::Trait(name).join_opt(parent_id); + ctxt.enter_scope(); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let is_pub = ItemModifier::from_ast(db, ast.modifier()).is_pub(); - let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); - let origin = HirOrigin::raw(file, &ast); + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Trait(name).join(parent_id); - Self::new( - db, + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let origin = HirOrigin::raw(ctxt.file, &ast); + + if let Some(item_list) = ast.item_list() { + for impl_item in item_list { + Fn::lower_ast(ctxt, id.clone(), impl_item); + } + } + + let trait_ = Self::new( + ctxt.db, id, name, attributes, @@ -211,28 +280,37 @@ impl Trait { generic_params, where_clause, origin, - ) + ); + + ctxt.leave_scope(trait_) } } impl ImplTrait { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::ImplTrait, ) -> Self { - let trait_ref = TraitRef::maybe_from_ast(db, file, ast.trait_ref()); - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); - let id = TrackedItemId::ImplTrait(trait_ref, ty).join_opt(parent_id); + ctxt.enter_scope(); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); - let generic_params = GenericParamListId::from_ast_opt(db, file, ast.generic_params()); - let where_clause = WhereClauseId::from_ast_opt(db, file, ast.where_clause()); - let origin = HirOrigin::raw(file, &ast); + let trait_ref = TraitRef::maybe_lower_ast(ctxt, ast.trait_ref()); + let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let id = TrackedItemId::ImplTrait(trait_ref, ty).join(parent_id); - Self::new( - db, + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let origin = HirOrigin::raw(ctxt.file, &ast); + + if let Some(item_list) = ast.item_list() { + for impl_item in item_list { + Fn::lower_ast(ctxt, id.clone(), impl_item); + } + } + + let impl_trait = Self::new( + ctxt.db, id, trait_ref, ty, @@ -240,69 +318,74 @@ impl ImplTrait { generic_params, where_clause, origin, - ) + ); + ctxt.leave_scope(impl_trait) } } impl Const { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Const, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let id = TrackedItemId::Const(name).join_opt(parent_id); + 
ctxt.enter_scope(); + + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Const(name).join(parent_id); let body = ast .value() - .map(|ast| Body::item_body_from_ast(db, file, id.clone(), ast)) + .map(|ast| Body::lower_ast(ctxt, id.clone(), ast)) .into(); + let origin = HirOrigin::raw(ctxt.file, &ast); - let origin = HirOrigin::raw(file, &ast); - Self::new(db, id, name, body, origin) + let const_ = Self::new(ctxt.db, id, name, body, origin); + ctxt.leave_scope(const_) } } impl Use { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent_id: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, ast: ast::Use, ) -> Self { - let tree = UseTreeId::maybe_from_ast(db, ast.use_tree()); - let id = TrackedItemId::Use(tree).join_opt(parent_id); + ctxt.enter_scope(); + + let tree = UseTreeId::maybe_lower_ast(ctxt, ast.use_tree()); + let id = TrackedItemId::Use(tree).join(parent_id); - let origin = HirOrigin::raw(file, &ast); - Self::new(db, id, tree, origin) + let origin = HirOrigin::raw(ctxt.file, &ast); + Self::new(ctxt.db, id, tree, origin) } } impl ExternFn { - pub(crate) fn from_ast( - db: &dyn HirDb, - file: InputFile, - parent: Option, + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent: TrackedItemId, ast: ast::Fn, ) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let id = TrackedItemId::Extern.join_opt(parent); + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let id = TrackedItemId::Extern.join(parent); - let attributes = AttrListId::from_ast_opt(db, ast.attr_list()); + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let params = ast .params() - .map(|params| FnParamListId::from_ast(db, file, params)) + .map(|params| FnParamListId::lower_ast(ctxt, params)) .into(); - let ret_ty = ast.ret_ty().map(|ty| TypeId::from_ast(db, file, ty)); - let modifier = ItemModifier::from_ast(db, ast.modifier()); - let origin = HirOrigin::raw(file, &ast); + let ret_ty = ast.ret_ty().map(|ty| TypeId::lower_ast(ctxt, ty)); + let modifier = ItemModifier::lower_ast(ast.modifier()); + let origin = HirOrigin::raw(ctxt.file, &ast); - Self::new(db, id, name, attributes, params, ret_ty, modifier, origin) + Self::new( + ctxt.db, id, name, attributes, params, ret_ty, modifier, origin, + ) } } impl ItemModifier { - fn from_ast(db: &dyn HirDb, ast: Option) -> Self { + fn lower_ast(ast: Option) -> Self { let Some(ast) = ast else { return Self::None; }; @@ -317,24 +400,24 @@ impl ItemModifier { } impl RecordFieldListId { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::RecordFieldDefList) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::RecordFieldDefList) -> Self { let fields = ast .into_iter() - .map(|field| RecordField::from_ast(db, file, field)) + .map(|field| RecordField::lower_ast(ctxt, field)) .collect(); - Self::new(db, fields) + Self::new(ctxt.db, fields) } - fn from_ast_opt(db: &dyn HirDb, file: InputFile, ast: Option) -> Self { - ast.map(|ast| Self::from_ast(db, file, ast)) - .unwrap_or(Self::new(db, Vec::new())) + fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or(Self::new(ctxt.db, Vec::new())) } } impl RecordField { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::RecordFieldDef) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: 
ast::RecordFieldDef) -> Self { + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); let is_pub = ast.pub_kw().is_some(); Self { name, ty, is_pub } @@ -342,25 +425,72 @@ impl RecordField { } impl EnumVariantListId { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::EnumVariantDefList) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::EnumVariantDefList) -> Self { let variants = ast .into_iter() - .map(|variant| EnumVariant::from_ast(db, file, variant)) + .map(|variant| EnumVariant::lower_ast(ctxt, variant)) .collect(); - Self::new(db, variants) + Self::new(ctxt.db, variants) } - fn from_ast_opt(db: &dyn HirDb, file: InputFile, ast: Option) -> Self { - ast.map(|ast| Self::from_ast(db, file, ast)) - .unwrap_or(Self::new(db, Vec::new())) + fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or(Self::new(ctxt.db, Vec::new())) } } impl EnumVariant { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::EnumVariantDef) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let ty = ast.ty().map(|ty| TypeId::from_ast(db, file, ty)); + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::EnumVariantDef) -> Self { + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let ty = ast.ty().map(|ty| TypeId::lower_ast(ctxt, ty)); Self { name, ty } } } + +fn lower_module_items(ctxt: &mut FileLowerCtxt<'_>, id: TrackedItemId, items: ast::ItemList) { + for item in items { + match item.kind() { + ast::ItemKind::Mod(mod_) => { + Mod::lower_ast(ctxt, id.clone(), mod_); + } + ast::ItemKind::Fn(fn_) => { + Fn::lower_ast(ctxt, id.clone(), fn_); + } + ast::ItemKind::Struct(struct_) => { + Struct::lower_ast(ctxt, id.clone(), struct_); + } + ast::ItemKind::Contract(contract) => { + Contract::lower_ast(ctxt, id.clone(), contract); + } + ast::ItemKind::Enum(enum_) => { + Enum::lower_ast(ctxt, id.clone(), enum_); + } + ast::ItemKind::TypeAlias(alias) => { + TypeAlias::lower_ast(ctxt, id.clone(), alias); + } + ast::ItemKind::Impl(impl_) => { + Impl::lower_ast(ctxt, id.clone(), impl_); + } + ast::ItemKind::Trait(trait_) => { + Trait::lower_ast(ctxt, id.clone(), trait_); + } + ast::ItemKind::ImplTrait(impl_trait) => { + ImplTrait::lower_ast(ctxt, id.clone(), impl_trait); + } + ast::ItemKind::Const(const_) => { + Const::lower_ast(ctxt, id.clone(), const_); + } + ast::ItemKind::Use(use_) => { + Use::lower_ast(ctxt, id.clone(), use_); + } + ast::ItemKind::Extern(extern_) => { + if let Some(extern_block) = extern_.extern_block() { + for fn_ in extern_block { + ExternFn::lower_ast(ctxt, id.clone(), fn_); + } + } + } + } + } +} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index fa3ba961a2..c8dad0538b 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1,9 +1,15 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use common::InputFile; use num_bigint::BigUint; use num_traits::Num; use parser::{ast, SyntaxToken}; use crate::{ - hir_def::{IdentId, IntegerId, LitKind, MaybeInvalid, StringId}, + hir_def::{ + IdentId, IntegerId, ItemKind, ItemTreeNode, LitKind, MaybeInvalid, ModuleItemTree, + StringId, TopLevelMod, + }, HirDb, }; @@ -18,23 +24,93 @@ mod stmt; mod types; mod use_tree; +pub(super) fn lower_file( + db: &dyn HirDb, + file: InputFile, + top_mod_name: IdentId, + root_node: ast::Root, +) -> ModuleItemTree { + let mut ctxt = FileLowerCtxt::new(db, file); + let top_mod = TopLevelMod::lower_ast(&mut ctxt, 
top_mod_name, root_node); + ctxt.build(top_mod) +} + +pub struct FileLowerCtxt<'db> { + db: &'db dyn HirDb, + file: InputFile, + scope_stack: Vec>, + item_tree: BTreeMap, +} + +impl<'db> FileLowerCtxt<'db> { + pub(super) fn new(db: &'db dyn HirDb, file: InputFile) -> Self { + Self { + db, + file, + scope_stack: vec![], + item_tree: BTreeMap::new(), + } + } + + pub(super) fn build(self, top_mod: TopLevelMod) -> ModuleItemTree { + ModuleItemTree { + file: self.file, + top_mod, + item_tree: self.item_tree, + } + } + + /// Creates a new scope for an item. + fn enter_scope(&mut self) { + self.scope_stack.push(BTreeSet::default()); + } + + /// Leaves the current scope, `item` should be the generated item which owns + /// the scope. + fn leave_scope(&mut self, item: I) -> I + where + I: Into + Copy, + { + let item_kind = item.into(); + let item_scope = self.scope_stack.pop().unwrap(); + + for item in &item_scope { + self.item_tree.get_mut(&item).unwrap().parent = Some(item_kind); + } + + self.item_tree.insert( + item_kind, + ItemTreeNode { + parent: None, + children: item_scope, + }, + ); + + self.scope_stack.last_mut().unwrap().insert(item.into()); + item + } +} + impl IdentId { - fn from_token(db: &dyn HirDb, token: SyntaxToken) -> Self { - Self::new(db, token.text().to_string()) + fn lower_token(ctxt: &mut FileLowerCtxt<'_>, token: SyntaxToken) -> Self { + Self::new(ctxt.db, token.text().to_string()) } - fn maybe_from_token(db: &dyn HirDb, token: Option) -> MaybeInvalid { - token.map(|token| Self::from_token(db, token)).into() + fn maybe_lower_token( + ctxt: &mut FileLowerCtxt<'_>, + token: Option, + ) -> MaybeInvalid { + token.map(|token| Self::lower_token(ctxt, token)).into() } } impl LitKind { - pub(super) fn from_ast(db: &dyn HirDb, ast: ast::Lit) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Lit) -> Self { match ast.kind() { - ast::LitKind::Int(int) => Self::Int(IntegerId::from_ast(db, int)), + ast::LitKind::Int(int) => Self::Int(IntegerId::lower_ast(ctxt, int)), ast::LitKind::String(string) => { let text = string.token().text(); - Self::String(StringId::new(db, text[1..text.len() - 1].to_string())) + Self::String(StringId::new(ctxt.db, text[1..text.len() - 1].to_string())) } ast::LitKind::Bool(bool) => match bool.token().text() { "true" => Self::Bool(true), @@ -46,11 +122,11 @@ impl LitKind { } impl IntegerId { - pub(super) fn from_ast(db: &dyn HirDb, ast: ast::LitInt) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::LitInt) -> Self { let text = ast.token().text(); // Parser ensures that the text is valid pair with a radix and a number. 
if text.len() < 2 { - return Self::new(db, BigUint::from_str_radix(&text, 10).unwrap()); + return Self::new(ctxt.db, BigUint::from_str_radix(&text, 10).unwrap()); } let int = match &text[0..2] { @@ -60,6 +136,6 @@ impl IntegerId { _ => BigUint::from_str_radix(&text, 10).unwrap(), }; - Self::new(db, int) + Self::new(ctxt.db, int) } } diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 4c4b878c92..8daa562730 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -1,87 +1,82 @@ -use common::InputFile; use parser::ast::{self}; -use crate::{ - hir_def::{params::*, Body, IdentId, PathId, TypeId}, - HirDb, -}; +use crate::hir_def::{params::*, Body, IdentId, PathId, TypeId}; + +use super::FileLowerCtxt; impl GenericArgListId { - pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericArgList) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::GenericArgList) -> Self { let args = ast .into_iter() - .map(|arg| GenericArg::from_ast(db, file, arg)) + .map(|arg| GenericArg::lower_ast(ctxt, arg)) .collect(); - Self::new(db, args) + Self::new(ctxt.db, args) } - pub(crate) fn from_ast_opt( - db: &dyn HirDb, - file: InputFile, + pub(super) fn lower_ast_opt( + ctxt: &mut FileLowerCtxt<'_>, ast: Option, ) -> Self { - ast.map(|ast| Self::from_ast(db, file, ast)) - .unwrap_or_else(|| Self::new(db, Vec::new())) + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::new(ctxt.db, Vec::new())) } } impl GenericParamListId { - pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericParamList) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::GenericParamList) -> Self { let params = ast .into_iter() - .map(|param| GenericParam::from_ast(db, file, param)) + .map(|param| GenericParam::lower_ast(ctxt, param)) .collect(); - Self::new(db, params) + Self::new(ctxt.db, params) } - pub(crate) fn from_ast_opt( - db: &dyn HirDb, - file: InputFile, + pub(super) fn lower_ast_opt( + ctxt: &mut FileLowerCtxt<'_>, ast: Option, ) -> Self { - ast.map(|ast| Self::from_ast(db, file, ast)) - .unwrap_or_else(|| Self::new(db, Vec::new())) + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::new(ctxt.db, Vec::new())) } } impl FnParamListId { - pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::FnParamList) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamList) -> Self { let params = ast .into_iter() - .map(|param| FnParam::from_ast(db, file, param)) + .map(|param| FnParam::lower_ast(ctxt, param)) .collect(); - Self::new(db, params) + Self::new(ctxt.db, params) } } impl WhereClauseId { - pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::WhereClause) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::WhereClause) -> Self { let predicates = ast .into_iter() - .map(|pred| WherePredicate::from_ast(db, file, pred)) + .map(|pred| WherePredicate::lower_ast(ctxt, pred)) .collect(); - Self::new(db, predicates) + Self::new(ctxt.db, predicates) } - pub(crate) fn from_ast_opt( - db: &dyn HirDb, - file: InputFile, + pub(super) fn lower_ast_opt( + ctxt: &mut FileLowerCtxt<'_>, ast: Option, ) -> Self { - ast.map(|ast| Self::from_ast(db, file, ast)) - .unwrap_or_else(|| Self::new(db, Vec::new())) + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::new(ctxt.db, Vec::new())) } } impl TypeGenericParam { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::TypeGenericParam) 
-> Self { - let name = IdentId::maybe_from_token(db, ast.name()); + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeGenericParam) -> Self { + let name = IdentId::maybe_lower_token(ctxt, ast.name()); let bounds = ast .bounds() .map(|bounds| { bounds .into_iter() - .map(|bound| TypeBound::from_ast(db, file, bound)) + .map(|bound| TypeBound::lower_ast(ctxt, bound)) .collect() }) .unwrap_or_default(); @@ -91,37 +86,37 @@ impl TypeGenericParam { } impl ConstGenericParam { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::ConstGenericParam) -> Self { - let name = IdentId::maybe_from_token(db, ast.name()); - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::ConstGenericParam) -> Self { + let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); Self { name, ty } } } impl GenericArg { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericArg) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::GenericArg) -> Self { match ast.kind() { ast::GenericArgKind::Type(type_param) => { - TypeGenericArg::from_ast(db, file, type_param).into() + TypeGenericArg::lower_ast(ctxt, type_param).into() } ast::GenericArgKind::Const(const_param) => { - ConstGenericArg::from_ast(db, file, const_param).into() + ConstGenericArg::lower_ast(ctxt, const_param).into() } } } } impl TypeGenericArg { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::TypeGenericArg) -> Self { - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeGenericArg) -> Self { + let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); Self { ty } } } impl ConstGenericArg { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::ConstGenericArg) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::ConstGenericArg) -> Self { let body = if let Some(expr) = ast.expr() { - Some(Body::nameless_body_from_ast(db, file, expr)) + Some(Body::lower_ast_nameless(ctxt, expr)) } else { None } @@ -132,24 +127,27 @@ impl ConstGenericArg { } impl GenericParam { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::GenericParam) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::GenericParam) -> Self { match ast.kind() { ast::GenericParamKind::Type(type_param) => { - TypeGenericParam::from_ast(db, file, type_param).into() + TypeGenericParam::lower_ast(ctxt, type_param).into() } ast::GenericParamKind::Const(const_param) => { - ConstGenericParam::from_ast(db, file, const_param).into() + ConstGenericParam::lower_ast(ctxt, const_param).into() } } } } impl FnParam { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::FnParam) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParam) -> Self { let is_mut = ast.mut_token().is_some(); - let label = ast.label().map(|ast| FnParamLabel::from_ast(db, ast)); - let name = ast.name().map(|ast| FnParamName::from_ast(db, ast)).into(); - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + let label = ast.label().map(|ast| FnParamLabel::lower_ast(ctxt, ast)); + let name = ast + .name() + .map(|ast| FnParamName::lower_ast(ctxt, ast)) + .into(); + let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); Self { is_mut, @@ -161,14 +159,14 @@ impl FnParam { } impl WherePredicate { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::WherePredicate) -> Self { - let ty = TypeId::maybe_from_ast(db, file, ast.ty()); + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::WherePredicate) -> Self { + 
let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); let bounds = ast .bounds() .map(|bounds| { bounds .into_iter() - .map(|bound| TypeBound::from_ast(db, file, bound)) + .map(|bound| TypeBound::lower_ast(ctxt, bound)) .collect() }) .unwrap_or_default(); @@ -177,20 +175,20 @@ impl WherePredicate { } impl TypeBound { - fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::TypeBound) -> Self { - let path = ast.path().map(|ast| PathId::from_ast(db, ast)).into(); + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeBound) -> Self { + let path = ast.path().map(|ast| PathId::lower_ast(ctxt, ast)).into(); let generic_args = ast .generic_args() - .map(|args| GenericArgListId::from_ast(db, file, args)); + .map(|args| GenericArgListId::lower_ast(ctxt, args)); Self { path, generic_args } } } impl FnParamName { - fn from_ast(db: &dyn HirDb, ast: ast::FnParamName) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamName) -> Self { match ast { ast::FnParamName::Ident(name) => { - FnParamName::Ident(IdentId::from_token(db, name.into())) + FnParamName::Ident(IdentId::lower_token(ctxt, name.into())) } ast::FnParamName::SelfParam(_) => FnParamName::Self_, ast::FnParamName::Underscore(_) => FnParamName::Underscore, @@ -199,9 +197,9 @@ impl FnParamName { } impl FnParamLabel { - fn from_ast(db: &dyn HirDb, ast: ast::FnParamLabel) -> Self { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamLabel) -> Self { match ast { - ast::FnParamLabel::Ident(name) => FnParamLabel::Ident(IdentId::from_token(db, name)), + ast::FnParamLabel::Ident(name) => FnParamLabel::Ident(IdentId::lower_token(ctxt, name)), ast::FnParamLabel::Underscore(_) => FnParamLabel::Underscore, } } diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs index 11d48164d1..fb875f5a71 100644 --- a/crates/hir/src/lower/pat.rs +++ b/crates/hir/src/lower/pat.rs @@ -8,7 +8,7 @@ use crate::{ use super::body::BodyCtxt; impl Pat { - pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: ast::Pat) -> PatId { + pub(super) fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::Pat) -> PatId { let pat = match &ast.kind() { ast::PatKind::WildCard(_) => Pat::WildCard, @@ -17,45 +17,39 @@ impl Pat { ast::PatKind::Lit(lit_pat) => { let lit_kind = lit_pat .lit() - .map(|lit| LitKind::from_ast(ctxt.db, lit)) + .map(|lit| LitKind::lower_ast(ctxt.f_ctxt, lit)) .into(); Pat::Lit(lit_kind) } ast::PatKind::Tuple(tup) => { let elems = match tup.elems() { - Some(elems) => elems - .iter() - .map(|pat| Pat::push_to_body(ctxt, pat)) - .collect(), + Some(elems) => elems.iter().map(|pat| Pat::lower_ast(ctxt, pat)).collect(), None => vec![], }; Pat::Tuple(elems) } ast::PatKind::Path(path) => { - let path = PathId::maybe_from_ast(ctxt.db, path.path()); + let path = PathId::maybe_lower_ast(ctxt.f_ctxt, path.path()); Pat::Path(path) } ast::PatKind::PathTuple(path_tup) => { - let path = PathId::maybe_from_ast(ctxt.db, path_tup.path()); + let path = PathId::maybe_lower_ast(ctxt.f_ctxt, path_tup.path()); let elems = match path_tup.elems() { - Some(elems) => elems - .iter() - .map(|pat| Pat::push_to_body(ctxt, pat)) - .collect(), + Some(elems) => elems.iter().map(|pat| Pat::lower_ast(ctxt, pat)).collect(), None => vec![], }; Pat::PathTuple(path, elems) } ast::PatKind::Record(record) => { - let path = PathId::maybe_from_ast(ctxt.db, record.path()); + let path = PathId::maybe_lower_ast(ctxt.f_ctxt, record.path()); let fields = match record.fields() { Some(fields) => fields .iter() - .map(|f| RecordPatField::from_ast(ctxt, &f)) + .map(|f| 
RecordPatField::lower_ast(ctxt, &f)) .collect(), None => vec![], }; @@ -63,8 +57,8 @@ impl Pat { } ast::PatKind::Or(or) => { - let lhs = Self::push_to_body_opt(ctxt, or.lhs()); - let rhs = Self::push_to_body_opt(ctxt, or.rhs()); + let lhs = Self::lower_ast_opt(ctxt, or.lhs()); + let rhs = Self::lower_ast_opt(ctxt, or.rhs()); Pat::Or(lhs, rhs) } }; @@ -72,9 +66,9 @@ impl Pat { ctxt.push_pat(pat, HirOriginKind::raw(&ast)) } - pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_>, ast: Option) -> PatId { + pub(super) fn lower_ast_opt(ctxt: &mut BodyCtxt<'_, '_>, ast: Option) -> PatId { if let Some(ast) = ast { - Pat::push_to_body(ctxt, ast) + Pat::lower_ast(ctxt, ast) } else { ctxt.push_missing_pat() } @@ -82,11 +76,11 @@ impl Pat { } impl RecordPatField { - fn from_ast(ctxt: &mut BodyCtxt<'_>, ast: &ast::RecordPatField) -> RecordPatField { - let label = IdentId::maybe_from_token(ctxt.db, ast.name()); + fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: &ast::RecordPatField) -> RecordPatField { + let label = IdentId::maybe_lower_token(ctxt.f_ctxt, ast.name()); let pat = ast .pat() - .map(|pat| Pat::push_to_body(ctxt, pat)) + .map(|pat| Pat::lower_ast(ctxt, pat)) .unwrap_or_else(|| ctxt.push_missing_pat()); RecordPatField { label, pat } } diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 3502258225..4341a6ce6d 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -1,12 +1,11 @@ use parser::{ast, SyntaxToken}; -use crate::{ - hir_def::{IdentId, MaybeInvalid, PathId, PathSegment}, - HirDb, -}; +use crate::hir_def::{IdentId, MaybeInvalid, PathId, PathSegment}; + +use super::FileLowerCtxt; impl PathId { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::Path) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Path) -> Self { let mut segments = Vec::new(); for seg in ast.into_iter() { let segment = if seg.is_self() { @@ -15,7 +14,7 @@ impl PathId { Some(PathSegment::SelfTy) } else if let Some(ident) = seg.ident() { Some(PathSegment::Ident(IdentId::new( - db, + ctxt.db, ident.text().to_string(), ))) } else { @@ -25,16 +24,19 @@ impl PathId { segments.push(segment); } - Self::new(db, segments) + Self::new(ctxt.db, segments) } - pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: Option) -> MaybeInvalid { - ast.map(|ast| Self::from_ast(db, ast)).into() + pub(super) fn maybe_lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + ast: Option, + ) -> MaybeInvalid { + ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } - pub(super) fn from_ident(db: &dyn HirDb, ast: SyntaxToken) -> Self { - let ident_id = IdentId::new(db, ast.text().to_string()); + pub(super) fn from_ident(ctxt: &mut FileLowerCtxt<'_>, ast: SyntaxToken) -> Self { + let ident_id = IdentId::new(ctxt.db, ast.text().to_string()); let seg = vec![MaybeInvalid::Valid(PathSegment::Ident(ident_id))]; - Self::new(db, seg) + Self::new(ctxt.db, seg) } } diff --git a/crates/hir/src/lower/stmt.rs b/crates/hir/src/lower/stmt.rs index d0d884fa05..02552be0e0 100644 --- a/crates/hir/src/lower/stmt.rs +++ b/crates/hir/src/lower/stmt.rs @@ -8,27 +8,25 @@ use crate::{ use super::body::BodyCtxt; impl Stmt { - pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_>, ast: ast::Stmt) -> StmtId { + pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::Stmt) -> StmtId { let (stmt, origin_kind) = match ast.kind() { ast::StmtKind::Let(let_) => { - let pat = Pat::push_to_body_opt(ctxt, let_.pat()); + let pat = Pat::lower_ast_opt(ctxt, let_.pat()); let ty = let_ .type_annotation() - .map(|ty| 
TypeId::from_ast(ctxt.db, ctxt.file, ty)); - let init = let_ - .initializer() - .map(|init| Expr::push_to_body(ctxt, init)); + .map(|ty| TypeId::lower_ast(ctxt.f_ctxt, ty)); + let init = let_.initializer().map(|init| Expr::lower_ast(ctxt, init)); (Stmt::Let(pat, ty, init), HirOriginKind::raw(&ast)) } ast::StmtKind::Assign(assign) => { let lhs = assign .pat() - .map(|pat| Pat::push_to_body(ctxt, pat)) + .map(|pat| Pat::lower_ast(ctxt, pat)) .unwrap_or_else(|| ctxt.push_missing_pat()); let rhs = assign .expr() - .map(|expr| Expr::push_to_body(ctxt, expr)) + .map(|expr| Expr::lower_ast(ctxt, expr)) .unwrap_or_else(|| ctxt.push_missing_expr()); (Stmt::Assign(lhs, rhs), HirOriginKind::raw(&ast)) } @@ -36,7 +34,7 @@ impl Stmt { ast::StmtKind::AugAssign(aug_assign) => desugar_aug_assign(ctxt, &aug_assign), ast::StmtKind::For(for_) => { - let bind = Pat::push_to_body_opt(ctxt, for_.pat()); + let bind = Pat::lower_ast_opt(ctxt, for_.pat()); let iter = Expr::push_to_body_opt(ctxt, for_.iterable()); let body = Expr::push_to_body_opt( ctxt, @@ -83,13 +81,13 @@ impl Stmt { } fn desugar_aug_assign( - ctxt: &mut BodyCtxt<'_>, + ctxt: &mut BodyCtxt<'_, '_>, ast: &ast::AugAssignStmt, ) -> (Stmt, HirOriginKind) { let lhs_ident = ast.ident(); let path = lhs_ident .clone() - .map(|ident| PathId::from_ident(ctxt.db, ident)); + .map(|ident| PathId::from_ident(ctxt.f_ctxt, ident)); let lhs_origin: AugAssignDesugared = lhs_ident.clone().unwrap().text_range().into(); let lhs_pat = if let Some(path) = path { @@ -112,10 +110,10 @@ fn desugar_aug_assign( let binop_rhs = ast .expr() - .map(|expr| Expr::push_to_body(ctxt, expr)) + .map(|expr| Expr::lower_ast(ctxt, expr)) .unwrap_or_else(|| ctxt.push_missing_expr()); - let binop = ast.op().map(|op| ArithBinOp::from_ast(op).into()).into(); + let binop = ast.op().map(|op| ArithBinOp::lower_ast(op).into()).into(); let expr = ctxt.push_expr( Expr::Bin(binop_lhs, binop_rhs, binop), HirOriginKind::desugared(AugAssignDesugared::stmt(ast)), diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 99e12e3be0..6f62dda0aa 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -1,22 +1,20 @@ -use common::InputFile; use parser::ast::{self, prelude::*}; -use crate::{ - hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TraitRef, TypeId, TypeKind}, - HirDb, -}; +use crate::hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TraitRef, TypeId, TypeKind}; + +use super::FileLowerCtxt; impl TypeId { - pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::Type) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Type) -> Self { let kind = match ast.kind() { ast::TypeKind::Ptr(ty) => { - let inner = Self::maybe_from_ast(db, file, ty.inner()); + let inner = Self::maybe_lower_ast(ctxt, ty.inner()); TypeKind::Ptr(inner) } ast::TypeKind::Path(ty) => { - let path = PathId::maybe_from_ast(db, ty.path()).into(); - let generic_args = GenericArgListId::from_ast_opt(db, file, ty.generic_args()); + let path = PathId::maybe_lower_ast(ctxt, ty.path()).into(); + let generic_args = GenericArgListId::lower_ast_opt(ctxt, ty.generic_args()); TypeKind::Path(path, generic_args.into()) } @@ -25,45 +23,43 @@ impl TypeId { ast::TypeKind::Tuple(ty) => { let mut elem_tys = Vec::new(); for elem in ty { - elem_tys.push(Some(TypeId::from_ast(db, file, elem)).into()); + elem_tys.push(Some(TypeId::lower_ast(ctxt, elem)).into()); } TypeKind::Tuple(elem_tys) } ast::TypeKind::Array(ty) => { - let elem_ty = 
Self::maybe_from_ast(db, file, ty.elem_ty()); + let elem_ty = Self::maybe_lower_ast(ctxt, ty.elem_ty()); let body = ty .len() - .map(|ast| Body::nameless_body_from_ast(db, file, ast)) + .map(|ast| Body::lower_ast_nameless(ctxt, ast)) .into(); TypeKind::Array(elem_ty, body) } }; - TypeId::new(db, kind) + TypeId::new(ctxt.db, kind) } - pub(crate) fn maybe_from_ast( - db: &dyn HirDb, - file: InputFile, + pub(super) fn maybe_lower_ast( + ctxt: &mut FileLowerCtxt<'_>, ast: Option, ) -> MaybeInvalid { - ast.map(|ast| Self::from_ast(db, file, ast)).into() + ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } } impl TraitRef { - pub(crate) fn from_ast(db: &dyn HirDb, file: InputFile, ast: ast::PathType) -> Self { - let path = PathId::maybe_from_ast(db, ast.path()).into(); - let generic_args = GenericArgListId::from_ast_opt(db, file, ast.generic_args()); + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::PathType) -> Self { + let path = PathId::maybe_lower_ast(ctxt, ast.path()).into(); + let generic_args = GenericArgListId::lower_ast_opt(ctxt, ast.generic_args()); Self { path, generic_args } } - pub(crate) fn maybe_from_ast( - db: &dyn HirDb, - file: InputFile, + pub(super) fn maybe_lower_ast( + ctxt: &mut FileLowerCtxt<'_>, ast: Option, ) -> MaybeInvalid { - ast.map(|ast| Self::from_ast(db, file, ast)).into() + ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } } diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index 6b8602ce66..9de55ab288 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -1,15 +1,14 @@ use parser::ast; -use crate::{ - hir_def::{use_tree::*, IdentId, MaybeInvalid}, - HirDb, -}; +use crate::hir_def::{use_tree::*, IdentId, MaybeInvalid}; + +use super::FileLowerCtxt; impl UseTreeId { - pub(crate) fn from_ast(db: &dyn HirDb, ast: ast::UseTree) -> Self { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::UseTree) -> Self { let path = if let Some(path) = ast.path() { path.into_iter() - .map(|ast| UsePathSegment::maybe_from_ast(db, ast)) + .map(|ast| UsePathSegment::maybe_lower_ast(ctxt, ast)) .collect() } else { vec![] @@ -17,27 +16,35 @@ impl UseTreeId { let subtree = if let Some(children) = ast.children() { children .into_iter() - .map(|ast| UseTreeId::from_ast(db, ast)) + .map(|ast| UseTreeId::lower_ast(ctxt, ast)) .collect() } else { vec![] }; - let alias = ast.alias().map(|ast| UseTreeAlias::maybe_from_ast(db, ast)); + let alias = ast + .alias() + .map(|ast| UseTreeAlias::maybe_lower_ast(ctxt, ast)); - Self::new(db, path, subtree, alias) + Self::new(ctxt.db, path, subtree, alias) } - pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: Option) -> MaybeInvalid { - ast.map(|ast| Self::from_ast(db, ast)).into() + pub(super) fn maybe_lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + ast: Option, + ) -> MaybeInvalid { + ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } } impl UsePathSegment { - pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: ast::UsePathSegment) -> MaybeInvalid { + pub(super) fn maybe_lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + ast: ast::UsePathSegment, + ) -> MaybeInvalid { ast.kind() .map(|kind| match kind { ast::UsePathSegmentKind::Ident(ident) => { - Self::Ident(IdentId::from_token(db, ident)) + Self::Ident(IdentId::lower_token(ctxt, ident)) } ast::UsePathSegmentKind::SelfPath(_) => Self::SelfPath, ast::UsePathSegmentKind::Glob(_) => Self::Glob, @@ -47,9 +54,12 @@ impl UsePathSegment { } impl UseTreeAlias { - pub(crate) fn maybe_from_ast(db: &dyn HirDb, ast: 
ast::UseTreeAlias) -> MaybeInvalid { + pub(super) fn maybe_lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + ast: ast::UseTreeAlias, + ) -> MaybeInvalid { if let Some(ident) = ast.ident() { - Some(Self::Ident(IdentId::from_token(db, ident))) + Some(Self::Ident(IdentId::lower_token(ctxt, ident))) } else if ast.underscore().is_some() { Some(Self::Underscore) } else { diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index e8b5e58ecb..faba94b2ee 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -119,8 +119,4 @@ impl AugAssignDesugared { pub(crate) fn stmt(ast: &ast::AugAssignStmt) -> Self { Self::Stmt(AstPtr::new(ast)) } - - pub(crate) fn rhs(ast: &ast::Expr) -> Self { - Self::Rhs(AstPtr::new(ast)) - } } From c255b973b5a0812a6ab80629976ec2f9f4a44dd8 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 24 Mar 2023 00:29:25 +0100 Subject: [PATCH 111/678] Make clippy happy --- crates/analyzer/src/db/queries/module.rs | 43 +++++++++---------- crates/codegen/src/yul/runtime/data.rs | 12 +----- crates/common2/src/input.rs | 2 +- crates/hir/src/hir_def/body.rs | 7 ++- crates/hir/src/hir_def/module_tree.rs | 6 ++- crates/hir/src/lower/body.rs | 2 +- crates/hir/src/lower/expr.rs | 7 ++- crates/hir/src/lower/mod.rs | 6 +-- crates/hir/src/lower/params.rs | 14 +++--- crates/hir/src/lower/stmt.rs | 10 ++--- crates/hir/src/lower/types.rs | 6 +-- .../src/lower/pattern_match/decision_tree.rs | 1 - crates/parser2/src/ast/expr.rs | 8 ++-- crates/tests/src/features.rs | 7 +-- 14 files changed, 61 insertions(+), 70 deletions(-) diff --git a/crates/analyzer/src/db/queries/module.rs b/crates/analyzer/src/db/queries/module.rs index a4a10e15ad..a7947cba33 100644 --- a/crates/analyzer/src/db/queries/module.rs +++ b/crates/analyzer/src/db/queries/module.rs @@ -1,20 +1,25 @@ -use crate::context::{Analysis, AnalyzerContext, Constant, NamedThing}; -use crate::display::Displayable; -use crate::errors::{self, ConstEvalError, TypeError}; -use crate::namespace::items::{ - Contract, ContractId, Enum, Function, Impl, ImplId, Item, ModuleConstant, ModuleConstantId, - ModuleId, ModuleSource, Struct, StructId, Trait, TraitId, TypeAlias, TypeDef, +use crate::{ + context::{Analysis, AnalyzerContext, Constant, NamedThing}, + display::Displayable, + errors::{self, ConstEvalError, TypeError}, + namespace::{ + items::{ + Contract, ContractId, Enum, Function, Impl, ImplId, Item, ModuleConstant, + ModuleConstantId, ModuleId, ModuleSource, Struct, StructId, Trait, TraitId, TypeAlias, + TypeDef, + }, + scopes::ItemScope, + types::{self, TypeId}, + }, + traversal::{const_expr, expressions, types::type_desc}, + AnalyzerDb, }; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::{self, TypeId}; -use crate::traversal::{const_expr, expressions, types::type_desc}; -use crate::AnalyzerDb; -use fe_common::diagnostics::Label; -use fe_common::files::Utf8Path; -use fe_common::Span; +use fe_common::{diagnostics::Label, files::Utf8Path, Span}; use fe_parser::{ast, node::Node}; -use indexmap::indexmap; -use indexmap::map::{Entry, IndexMap}; +use indexmap::{ + indexmap, + map::{Entry, IndexMap}, +}; use smol_str::SmolStr; use std::rc::Rc; @@ -293,13 +298,7 @@ pub fn module_structs(db: &dyn AnalyzerDb, module: ModuleId) -> Rc<[StructId]> { module .all_items(db) .iter() - .chain( - module - .used_items(db) - .values() - .into_iter() - .map(|(_, item)| item), - ) + .chain(module.used_items(db).values().map(|(_, item)| item)) .filter_map(|item| match item { Item::Type(TypeDef::Struct(id)) => 
Some(*id), _ => None, diff --git a/crates/codegen/src/yul/runtime/data.rs b/crates/codegen/src/yul/runtime/data.rs index d04fc7b0f0..02509d7129 100644 --- a/crates/codegen/src/yul/runtime/data.rs +++ b/crates/codegen/src/yul/runtime/data.rs @@ -238,11 +238,7 @@ pub(super) fn make_aggregate_init( let ptr = YulVariable::new("ptr"); let field_num = inner_ty.aggregate_field_num(db.upcast()); - let iter_field_args = || { - (0..field_num) - .into_iter() - .map(|i| YulVariable::new(format! {"arg{i}"})) - }; + let iter_field_args = || (0..field_num).map(|i| YulVariable::new(format! {"arg{i}"})); let mut body = vec![]; for (idx, field_arg) in iter_field_args().enumerate() { @@ -303,11 +299,7 @@ pub(super) fn make_enum_init( let ptr = YulVariable::new("ptr"); let disc = YulVariable::new("disc"); let disc_ty = arg_tys[0]; - let enum_data = || { - (0..arg_tys.len() - 1) - .into_iter() - .map(|i| YulVariable::new(format! {"arg{i}"})) - }; + let enum_data = || (0..arg_tys.len() - 1).map(|i| YulVariable::new(format! {"arg{i}"})); let tuple_def = TupleDef { items: arg_tys diff --git a/crates/common2/src/input.rs b/crates/common2/src/input.rs index bdf430260f..fea7c03e63 100644 --- a/crates/common2/src/input.rs +++ b/crates/common2/src/input.rs @@ -46,7 +46,7 @@ pub struct InputFile { impl InputFile { pub fn abs_path(&self, db: &dyn InputDb) -> Utf8PathBuf { - self.ingot(db).path(db).join(&self.path(db)) + self.ingot(db).path(db).join(self.path(db)) } } diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 7cdd7f0207..bff572abce 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -1,5 +1,10 @@ +// This is necessary because `salsa::tracked` structs generates a +// constructor +// that may take many arguments depending on the number of fields in the struct. +#![allow(clippy::too_many_arguments)] + use cranelift_entity::{PrimaryMap, SecondaryMap}; -use parser::ast::{self}; +use parser::ast; use crate::span::HirOrigin; diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 95ba4433a8..63e6d0f84e 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -18,7 +18,7 @@ use super::IdentId; /// /// /// Example: -/// ``` +/// ```text /// ingot/ /// ├─ main.fe /// ├─ mod1.fe @@ -30,8 +30,10 @@ use super::IdentId; /// ├─ mod3 /// │ ├─ baz.fe /// ``` +/// /// The resulting tree would be like below. 
-/// ``` +/// +/// ```text /// +------+ /// *---- | main |----* /// | +------+ | +------+ diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index c79cc4777a..783c68caf5 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -93,7 +93,7 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { fn new(f_ctxt: &'ctxt mut FileLowerCtxt<'db>, bid: TrackedBodyId) -> Self { f_ctxt.enter_scope(); Self { - f_ctxt: f_ctxt, + f_ctxt, bid, stmts: BodyNodeMap::new(), exprs: BodyNodeMap::new(), diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index cf95ad1335..20cee729ba 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -31,13 +31,13 @@ impl Expr { ast::ExprKind::Bin(bin) => { let lhs = Self::push_to_body_opt(ctxt, bin.lhs()); let rhs = Self::push_to_body_opt(ctxt, bin.rhs()); - let op = bin.op().map(|op| BinOp::lower_ast(op)).into(); + let op = bin.op().map(BinOp::lower_ast).into(); Self::Bin(lhs, rhs, op) } ast::ExprKind::Un(un) => { let expr = Self::push_to_body_opt(ctxt, un.expr()); - let op = un.op().map(|op| UnOp::lower_ast(op)).into(); + let op = un.op().map(UnOp::lower_ast).into(); Self::Un(expr, op) } @@ -137,8 +137,7 @@ impl Expr { let then = Expr::push_to_body_opt( ctxt, if_.then() - .map(|body| ast::Expr::cast(body.syntax().clone())) - .flatten(), + .and_then(|body| ast::Expr::cast(body.syntax().clone())), ); let else_ = if_.else_().map(|ast| Self::lower_ast(ctxt, ast)); Self::If(cond, then, else_) diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index c8dad0538b..3755875aed 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -75,7 +75,7 @@ impl<'db> FileLowerCtxt<'db> { let item_scope = self.scope_stack.pop().unwrap(); for item in &item_scope { - self.item_tree.get_mut(&item).unwrap().parent = Some(item_kind); + self.item_tree.get_mut(item).unwrap().parent = Some(item_kind); } self.item_tree.insert( @@ -126,14 +126,14 @@ impl IntegerId { let text = ast.token().text(); // Parser ensures that the text is valid pair with a radix and a number. 
if text.len() < 2 { - return Self::new(ctxt.db, BigUint::from_str_radix(&text, 10).unwrap()); + return Self::new(ctxt.db, BigUint::from_str_radix(text, 10).unwrap()); } let int = match &text[0..2] { "0x" | "0X" => BigUint::from_str_radix(&text[2..], 16).unwrap(), "0o" | "0O" => BigUint::from_str_radix(&text[2..], 8).unwrap(), "0b" | "0B" => BigUint::from_str_radix(&text[2..], 2).unwrap(), - _ => BigUint::from_str_radix(&text, 10).unwrap(), + _ => BigUint::from_str_radix(text, 10).unwrap(), }; Self::new(ctxt.db, int) diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 8daa562730..8fa89f8442 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -115,12 +115,10 @@ impl TypeGenericArg { impl ConstGenericArg { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::ConstGenericArg) -> Self { - let body = if let Some(expr) = ast.expr() { - Some(Body::lower_ast_nameless(ctxt, expr)) - } else { - None - } - .into(); + let body = ast + .expr() + .map(|expr| Body::lower_ast_nameless(ctxt, expr)) + .into(); Self { body } } @@ -187,9 +185,7 @@ impl TypeBound { impl FnParamName { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamName) -> Self { match ast { - ast::FnParamName::Ident(name) => { - FnParamName::Ident(IdentId::lower_token(ctxt, name.into())) - } + ast::FnParamName::Ident(name) => FnParamName::Ident(IdentId::lower_token(ctxt, name)), ast::FnParamName::SelfParam(_) => FnParamName::Self_, ast::FnParamName::Underscore(_) => FnParamName::Underscore, } diff --git a/crates/hir/src/lower/stmt.rs b/crates/hir/src/lower/stmt.rs index 02552be0e0..e12a99ee45 100644 --- a/crates/hir/src/lower/stmt.rs +++ b/crates/hir/src/lower/stmt.rs @@ -39,8 +39,7 @@ impl Stmt { let body = Expr::push_to_body_opt( ctxt, for_.body() - .map(|body| ast::Expr::cast(body.syntax().clone())) - .flatten(), + .and_then(|body| ast::Expr::cast(body.syntax().clone())), ); (Stmt::For(bind, iter, body), HirOriginKind::raw(&ast)) @@ -52,8 +51,7 @@ impl Stmt { ctxt, while_ .body() - .map(|body| ast::Expr::cast(body.syntax().clone())) - .flatten(), + .and_then(|body| ast::Expr::cast(body.syntax().clone())), ); (Stmt::While(cond, body), HirOriginKind::raw(&ast)) @@ -89,7 +87,7 @@ fn desugar_aug_assign( .clone() .map(|ident| PathId::from_ident(ctxt.f_ctxt, ident)); - let lhs_origin: AugAssignDesugared = lhs_ident.clone().unwrap().text_range().into(); + let lhs_origin: AugAssignDesugared = lhs_ident.unwrap().text_range().into(); let lhs_pat = if let Some(path) = path { ctxt.push_pat( Pat::Path(Some(path).into()), @@ -102,7 +100,7 @@ fn desugar_aug_assign( let binop_lhs = if let Some(path) = path { ctxt.push_expr( Expr::Path(Some(path).into()), - HirOriginKind::desugared(lhs_origin.clone()), + HirOriginKind::desugared(lhs_origin), ) } else { ctxt.push_missing_expr() diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 6f62dda0aa..a4d0bdd304 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -13,9 +13,9 @@ impl TypeId { } ast::TypeKind::Path(ty) => { - let path = PathId::maybe_lower_ast(ctxt, ty.path()).into(); + let path = PathId::maybe_lower_ast(ctxt, ty.path()); let generic_args = GenericArgListId::lower_ast_opt(ctxt, ty.generic_args()); - TypeKind::Path(path, generic_args.into()) + TypeKind::Path(path, generic_args) } ast::TypeKind::SelfType(_) => TypeKind::SelfType, @@ -51,7 +51,7 @@ impl TypeId { impl TraitRef { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::PathType) -> Self { - let path = 
PathId::maybe_lower_ast(ctxt, ast.path()).into(); + let path = PathId::maybe_lower_ast(ctxt, ast.path()); let generic_args = GenericArgListId::lower_ast_opt(ctxt, ast.generic_args()); Self { path, generic_args } } diff --git a/crates/mir/src/lower/pattern_match/decision_tree.rs b/crates/mir/src/lower/pattern_match/decision_tree.rs index d7cb8805c9..af909fb554 100644 --- a/crates/mir/src/lower/pattern_match/decision_tree.rs +++ b/crates/mir/src/lower/pattern_match/decision_tree.rs @@ -177,7 +177,6 @@ impl Occurrence { fn phi_specialize(&self, db: &dyn AnalyzerDb, ctor: ConstructorKind) -> Vec { let arity = ctor.arity(db); (0..arity) - .into_iter() .map(|i| { let mut inner = self.0.clone(); inner.push(i); diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index 7fdeb7bf59..cacdc4d6c2 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -228,7 +228,7 @@ ast_node! { impl TupleExpr { /// Returns the expressions in the tuple. pub fn elems(&self) -> impl Iterator> { - self.syntax().children().map(|node| Expr::cast(node)) + self.syntax().children().map(Expr::cast) } } @@ -241,7 +241,7 @@ impl ArrayExpr { /// Returns the expressions in the array. /// Returns the expressions in the tuple. pub fn elems(&self) -> impl Iterator> { - self.syntax().children().map(|node| Expr::cast(node)) + self.syntax().children().map(Expr::cast) } } @@ -741,7 +741,7 @@ mod tests { fn tuple_expr() { let tuple_expr: TupleExpr = parse_expr("(1, 2, 3)"); - for (i, expr) in tuple_expr.elems().into_iter().flatten().enumerate() { + for (i, expr) in tuple_expr.elems().flatten().enumerate() { match i { 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), @@ -756,7 +756,7 @@ mod tests { fn array_expr() { let array_expr: ArrayExpr = parse_expr("[1, 2, 3]"); - for (i, expr) in array_expr.elems().into_iter().flatten().enumerate() { + for (i, expr) in array_expr.elems().flatten().enumerate() { match i { 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), diff --git a/crates/tests/src/features.rs b/crates/tests/src/features.rs index 35d4b585d6..7b1d367578 100644 --- a/crates/tests/src/features.rs +++ b/crates/tests/src/features.rs @@ -9,8 +9,9 @@ use rstest::rstest; use std::collections::BTreeMap; use fe_common::utils::keccak; -use fe_compiler_test_utils::*; -use fe_compiler_test_utils::{self as test_utils}; +use fe_compiler_test_utils::{ + *, {self as test_utils}, +}; const SOME_ADDRESS: &str = "2012301230123012301230123012301230123002"; @@ -1076,7 +1077,7 @@ fn sized_vals_in_sto() { let harness = deploy_contract(&mut executor, "sized_vals_in_sto.fe", "Foo", &[]); let num = uint_token(68); - let nums = uint_array_token(&(0..42).into_iter().collect::>()); + let nums = uint_array_token(&(0..42).collect::>()); let string = string_token("there are 26 protons in fe"); harness.test_function(&mut executor, "write_num", &[num.clone()], None); From 0ec662c40c51ae9eb3717e130c9090c8786cea83 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 24 Mar 2023 12:26:19 +0100 Subject: [PATCH 112/678] Rename `MaybeInvalid` to `Partial` --- crates/hir/src/hir_def/attr.rs | 8 ++-- crates/hir/src/hir_def/body.rs | 8 ++-- crates/hir/src/hir_def/expr.rs | 22 +++++----- crates/hir/src/hir_def/item.rs | 65 +++++++++++++++--------------- crates/hir/src/hir_def/mod.rs | 29 ++++++++----- crates/hir/src/hir_def/params.rs | 20 ++++----- crates/hir/src/hir_def/pat.rs | 12 +++--- 
crates/hir/src/hir_def/path.rs | 4 +- crates/hir/src/hir_def/types.rs | 12 +++--- crates/hir/src/hir_def/use_tree.rs | 6 +-- crates/hir/src/lower/attr.rs | 6 +-- crates/hir/src/lower/body.rs | 20 ++++----- crates/hir/src/lower/expr.rs | 6 +-- crates/hir/src/lower/item.rs | 34 ++++++++-------- crates/hir/src/lower/mod.rs | 8 ++-- crates/hir/src/lower/params.rs | 12 +++--- crates/hir/src/lower/pat.rs | 8 ++-- crates/hir/src/lower/path.rs | 8 ++-- crates/hir/src/lower/types.rs | 18 ++++----- crates/hir/src/lower/use_tree.rs | 18 ++++----- 20 files changed, 166 insertions(+), 158 deletions(-) diff --git a/crates/hir/src/hir_def/attr.rs b/crates/hir/src/hir_def/attr.rs index a9a84df099..ff71ec9752 100644 --- a/crates/hir/src/hir_def/attr.rs +++ b/crates/hir/src/hir_def/attr.rs @@ -1,4 +1,4 @@ -use super::{IdentId, MaybeInvalid, StringId}; +use super::{IdentId, Partial, StringId}; #[salsa::interned] pub struct AttrListId { @@ -14,7 +14,7 @@ pub enum Attr { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct NormalAttr { - pub name: MaybeInvalid, + pub name: Partial, pub args: Vec, } @@ -26,6 +26,6 @@ pub struct DocCommentAttr { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct AttrArg { - pub key: MaybeInvalid, - pub value: MaybeInvalid, + pub key: Partial, + pub value: Partial, } diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index bff572abce..a6823f76d1 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -8,7 +8,7 @@ use parser::ast; use crate::span::HirOrigin; -use super::{Expr, ExprId, MaybeInvalid, Pat, PatId, Stmt, StmtId, TrackedItemId}; +use super::{Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TrackedItemId}; #[salsa::tracked] pub struct Body { @@ -16,11 +16,11 @@ pub struct Body { id: TrackedBodyId, #[return_ref] - pub stmts: BodyNodeMap>, + pub stmts: BodyNodeMap>, #[return_ref] - pub exprs: BodyNodeMap>, + pub exprs: BodyNodeMap>, #[return_ref] - pub pats: BodyNodeMap>, + pub pats: BodyNodeMap>, #[return_ref] pub(crate) stmt_source_map: BodySourceMap, diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index 96d95234bc..8cb94de302 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -1,6 +1,6 @@ use cranelift_entity::entity_impl; -use super::{Body, IdentId, IntegerId, LitKind, MaybeInvalid, PatId, PathId, StmtId}; +use super::{Body, IdentId, IntegerId, LitKind, Partial, PatId, PathId, StmtId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Expr { @@ -10,26 +10,26 @@ pub enum Expr { /// /// **NOTE:** The `AugAssign` statement is desugared to a `Assign` statement /// and a `BinOp`. - Bin(ExprId, ExprId, MaybeInvalid), - Un(ExprId, MaybeInvalid), + Bin(ExprId, ExprId, Partial), + Un(ExprId, Partial), /// The first `ExprId` is the callee, the second is the arguments. Call(ExprId, Vec), /// The first `ExprId` is the method receiver, the second is the method /// name, the third is the arguments. - MethodCall(ExprId, MaybeInvalid, Vec), - Path(MaybeInvalid), + MethodCall(ExprId, Partial, Vec), + Path(Partial), /// The record construction expression. /// The fist `PathId` is the record type, the second is the record fields. - RecordInit(MaybeInvalid, Vec), - Field(ExprId, MaybeInvalid), + RecordInit(Partial, Vec), + Field(ExprId, Partial), Tuple(Vec), /// The first `ExprId` is the indexed expression, the second is the index. 
Index(ExprId, ExprId), Array(Vec), - /// The size of the rep should be the body instead of expression, becuase it - /// should be resolved as a contatnt expressison. - ArrayRep(ExprId, MaybeInvalid), + /// The size of the rep should be the body instead of expression, because it + /// should be resolved as a constant expression. + ArrayRep(ExprId, Partial), /// The first `ExprId` is the condition, the second is the then branch, the /// third is the else branch. @@ -37,7 +37,7 @@ pub enum Expr { If(ExprId, ExprId, Option), /// The first `ExprId` is the scrutinee, the second is the arms. - Match(ExprId, MaybeInvalid>), + Match(ExprId, Partial>), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 117b6b1606..ea3fef2d53 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -8,8 +8,7 @@ use parser::ast; use crate::{hir_def::TraitRef, span::HirOrigin}; use super::{ - AttrListId, Body, FnParamListId, GenericParamListId, IdentId, MaybeInvalid, TypeId, - WhereClauseId, + AttrListId, Body, FnParamListId, GenericParamListId, IdentId, Partial, TypeId, WhereClauseId, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, PartialOrd, Ord)] @@ -44,7 +43,7 @@ pub struct Mod { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, pub is_pub: bool, @@ -56,11 +55,11 @@ pub struct Fn { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub params: MaybeInvalid, + pub params: Partial, pub ret_ty: Option, pub modifier: ItemModifier, pub body: Option, @@ -73,9 +72,9 @@ pub struct ExternFn { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, - pub params: MaybeInvalid, + pub params: Partial, pub ret_ty: Option, pub modifier: ItemModifier, @@ -87,7 +86,7 @@ pub struct Struct { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, @@ -102,7 +101,7 @@ pub struct Contract { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, pub is_pub: bool, pub fields: RecordFieldListId, @@ -115,7 +114,7 @@ pub struct Enum { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, @@ -130,12 +129,12 @@ pub struct TypeAlias { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, pub is_pub: bool, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub ty: MaybeInvalid, + pub ty: Partial, pub(crate) origin: HirOrigin, } @@ -145,7 +144,7 @@ pub struct Impl { #[id] id: TrackedItemId, - pub ty: super::MaybeInvalid, + pub ty: super::Partial, pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, @@ -157,7 +156,7 @@ pub struct Trait { #[id] id: TrackedItemId, - pub name: MaybeInvalid, + pub name: Partial, pub attributes: AttrListId, pub is_pub: bool, @@ -171,8 +170,8 @@ pub struct ImplTrait { #[id] id: TrackedItemId, - pub trait_ref: MaybeInvalid, - pub ty: MaybeInvalid, + pub trait_ref: Partial, + pub ty: Partial, pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, @@ -184,8 +183,8 @@ 
pub struct Const { #[id] id: TrackedItemId, - pub name: MaybeInvalid, - pub body: MaybeInvalid, + pub name: Partial, + pub body: Partial, pub(crate) origin: HirOrigin, } @@ -194,7 +193,7 @@ pub struct Use { #[id] id: TrackedItemId, - pub tree: MaybeInvalid, + pub tree: Partial, pub(crate) origin: HirOrigin, } @@ -223,8 +222,8 @@ pub struct RecordFieldListId { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RecordField { - pub name: MaybeInvalid, - pub ty: MaybeInvalid, + pub name: Partial, + pub ty: Partial, pub is_pub: bool, } @@ -236,7 +235,7 @@ pub struct EnumVariantListId { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct EnumVariant { - pub name: MaybeInvalid, + pub name: Partial, pub ty: Option, } @@ -253,17 +252,17 @@ pub type ExternItemListId = ImplItemListId; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TrackedItemId { TopLevelMod(IdentId), - Mod(MaybeInvalid), - Fn(MaybeInvalid), - Struct(MaybeInvalid), - Contract(MaybeInvalid), - Enum(MaybeInvalid), - TypeAlias(MaybeInvalid), - Impl(MaybeInvalid), - Trait(MaybeInvalid), - ImplTrait(MaybeInvalid, MaybeInvalid), - Const(MaybeInvalid), - Use(MaybeInvalid), + Mod(Partial), + Fn(Partial), + Struct(Partial), + Contract(Partial), + Enum(Partial), + TypeAlias(Partial), + Impl(Partial), + Trait(Partial), + ImplTrait(Partial, Partial), + Const(Partial), + Use(Partial), Extern, Joined(Box, Box), } diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 6dfcffa01c..b69f5ae8c5 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -59,26 +59,35 @@ pub enum LitKind { Bool(bool), } -/// This enum is used to represent a type that may be invalid in terms of the -/// syntax. +/// `Partial is a type that explicitly indicates the possibility that an HIR +/// node cannot be generated due to syntax errors in the source file. +/// +/// If a node is `Partial::Absent`, it means that the corresponding AST either +/// does not exist or is erroneous. When a `Partial::Absent` is generated, the +/// relevant error is always generated by the parser, so in Analysis phases, it +/// can often be ignored. +/// +/// This type is clearly distinguished from `Option`. The +/// `Option` type is used to hold syntactically optional nodes, while +/// `Partial` means that a syntactically required element may be missing. 
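// Illustrative sketch, not part of the patch: how lowering code is expected to
// produce a value of the `Partial` enum declared just below. A syntactically
// required piece that the parser could not recover becomes `Partial::Absent`
// via the `From<Option<T>>` impl, while genuinely optional syntax keeps using
// `Option`. The helper name `lower_required_name` is hypothetical.
fn lower_required_name(parsed: Option<String>) -> Partial<String> {
    // `Some(name)` maps to `Partial::Present(name)`; `None` maps to
    // `Partial::Absent`, whose error was already reported by the parser.
    parsed.into()
}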
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum MaybeInvalid { - Valid(T), - Invalid, +pub enum Partial { + Present(T), + Absent, } -impl Default for MaybeInvalid { +impl Default for Partial { fn default() -> Self { - Self::Invalid + Self::Absent } } -impl From> for MaybeInvalid { +impl From> for Partial { fn from(value: Option) -> Self { if let Some(value) = value { - Self::Valid(value) + Self::Present(value) } else { - Self::Invalid + Self::Absent } } } diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 2c6f0d4192..91ee3f36a0 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -1,6 +1,6 @@ use crate::hir_def::TypeId; -use super::{Body, IdentId, MaybeInvalid, PathId}; +use super::{Body, IdentId, Partial, PathId}; #[salsa::interned] pub struct GenericArgListId { @@ -34,14 +34,14 @@ pub enum GenericParam { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeGenericParam { - pub name: MaybeInvalid, + pub name: Partial, pub bounds: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ConstGenericParam { - pub name: MaybeInvalid, - pub ty: MaybeInvalid, + pub name: Partial, + pub ty: Partial, } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] @@ -52,25 +52,25 @@ pub enum GenericArg { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeGenericArg { - pub ty: MaybeInvalid, + pub ty: Partial, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ConstGenericArg { - pub body: MaybeInvalid, + pub body: Partial, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct FnParam { pub is_mut: bool, pub label: Option, - pub name: MaybeInvalid, - pub ty: MaybeInvalid, + pub name: Partial, + pub ty: Partial, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct WherePredicate { - pub ty: MaybeInvalid, + pub ty: Partial, pub bounds: Vec, } @@ -91,7 +91,7 @@ pub enum FnParamName { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeBound { /// The path to the trait. - pub path: MaybeInvalid, + pub path: Partial, /// The type arguments of the trait. 
pub generic_args: Option, } diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs index 8174859f47..18c5669a51 100644 --- a/crates/hir/src/hir_def/pat.rs +++ b/crates/hir/src/hir_def/pat.rs @@ -1,16 +1,16 @@ use cranelift_entity::entity_impl; -use super::{IdentId, LitKind, MaybeInvalid, PathId}; +use super::{IdentId, LitKind, Partial, PathId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Pat { WildCard, Rest, - Lit(MaybeInvalid), + Lit(Partial), Tuple(Vec), - Path(MaybeInvalid), - PathTuple(MaybeInvalid, Vec), - Record(MaybeInvalid, Vec), + Path(Partial), + PathTuple(Partial, Vec), + Record(Partial, Vec), Or(PatId, PatId), } @@ -20,6 +20,6 @@ entity_impl!(PatId); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RecordPatField { - pub label: MaybeInvalid, + pub label: Partial, pub pat: PatId, } diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index 02af42e32e..657f318903 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -1,10 +1,10 @@ -use crate::hir_def::MaybeInvalid; +use crate::hir_def::Partial; use super::IdentId; #[salsa::interned] pub struct PathId { - segments: Vec>, + segments: Vec>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index 1987baf5a1..c4d59cbf3a 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -1,4 +1,4 @@ -use super::{Body, GenericArgListId, MaybeInvalid, PathId}; +use super::{Body, GenericArgListId, Partial, PathId}; #[salsa::interned] pub struct TypeId { @@ -7,19 +7,19 @@ pub struct TypeId { #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeKind { - Ptr(MaybeInvalid), + Ptr(Partial), /// The `PathId` is the path to the type, the `Option` is the generic /// arguments. - Path(MaybeInvalid, GenericArgListId), + Path(Partial, GenericArgListId), SelfType, /// The `Vec` contains the types of the tuple elements. - Tuple(Vec>), + Tuple(Vec>), /// The first `TypeId` is the element type, the second `Body` is the length. - Array(MaybeInvalid, MaybeInvalid), + Array(Partial, Partial), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct TraitRef { - pub path: MaybeInvalid, + pub path: Partial, pub generic_args: GenericArgListId, } diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index c2790f04f1..42f468c14c 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -1,4 +1,4 @@ -use crate::hir_def::MaybeInvalid; +use crate::hir_def::Partial; use super::IdentId; @@ -8,7 +8,7 @@ pub struct UseTreeId { /// `Foo::Foo2` in `Foo::Foo2::{Bar::*, Baz::{x, y}}` /// /// NOTE: If the tree root is started with `{}`, then the `path` is `None`. - pub path: Vec>, + pub path: Vec>, /// The subtree of the use tree. /// /// `Bar::*` and `Baz::{x, y}` in `Foo::Foo2::{Bar::*, Baz::{x, y}}`. @@ -16,7 +16,7 @@ pub struct UseTreeId { //// The alias of this use tree. 
/// `Bar` in `Foo as Bar;` - pub alias: Option>, + pub alias: Option>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs index 9cc3efaf08..87b0fe8652 100644 --- a/crates/hir/src/lower/attr.rs +++ b/crates/hir/src/lower/attr.rs @@ -29,7 +29,7 @@ impl Attr { impl NormalAttr { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::NormalAttr) -> Self { - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let args = ast .args() .map(|args| { @@ -57,8 +57,8 @@ impl DocCommentAttr { impl AttrArg { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::AttrArg) -> Self { - let key = IdentId::maybe_lower_token(ctxt, ast.key()); - let value = IdentId::maybe_lower_token(ctxt, ast.value()); + let key = IdentId::lower_token_partial(ctxt, ast.key()); + let value = IdentId::lower_token_partial(ctxt, ast.value()); Self { key, value } } } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 783c68caf5..71b00ac135 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::{ hir_def::{ - Body, BodyNodeMap, BodySourceMap, Expr, ExprId, MaybeInvalid, Pat, PatId, Stmt, StmtId, + Body, BodyNodeMap, BodySourceMap, Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TrackedBodyId, TrackedItemId, }, span::{HirOrigin, HirOriginKind}, @@ -45,9 +45,9 @@ pub(super) struct BodyCtxt<'ctxt, 'db> { pub(super) f_ctxt: &'ctxt mut FileLowerCtxt<'db>, pub(super) bid: TrackedBodyId, - pub(super) stmts: BodyNodeMap>, - pub(super) exprs: BodyNodeMap>, - pub(super) pats: BodyNodeMap>, + pub(super) stmts: BodyNodeMap>, + pub(super) exprs: BodyNodeMap>, + pub(super) pats: BodyNodeMap>, stmt_source_map: BodySourceMap, expr_source_map: BodySourceMap, @@ -55,37 +55,37 @@ pub(super) struct BodyCtxt<'ctxt, 'db> { } impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { pub(super) fn push_expr(&mut self, expr: Expr, origin: HirOriginKind) -> ExprId { - let expr_id = self.exprs.push(Some(expr).into()); + let expr_id = self.exprs.push(Partial::Present(expr)); self.expr_source_map[expr_id] = HirOrigin::new(self.f_ctxt.file, origin); expr_id } pub(super) fn push_invalid_expr(&mut self, origin: HirOriginKind) -> ExprId { - let expr_id = self.exprs.push(None.into()); + let expr_id = self.exprs.push(Partial::Absent); self.expr_source_map[expr_id] = HirOrigin::new(self.f_ctxt.file, origin); expr_id } pub(super) fn push_missing_expr(&mut self) -> ExprId { - let expr_id = self.exprs.push(None.into()); + let expr_id = self.exprs.push(Partial::Absent); self.expr_source_map[expr_id] = HirOrigin::none(self.f_ctxt.file); expr_id } pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: HirOriginKind) -> StmtId { - let stmt_id = self.stmts.push(Some(stmt).into()); + let stmt_id = self.stmts.push(Partial::Present(stmt)); self.stmt_source_map[stmt_id] = HirOrigin::new(self.f_ctxt.file, origin); stmt_id } pub(super) fn push_pat(&mut self, pat: Pat, origin: HirOriginKind) -> PatId { - let pat_id = self.pats.push(Some(pat).into()); + let pat_id = self.pats.push(Partial::Present(pat)); self.pat_source_map[pat_id] = HirOrigin::new(self.f_ctxt.file, origin); pat_id } pub(super) fn push_missing_pat(&mut self) -> PatId { - let pat_id = self.pats.push(None.into()); + let pat_id = self.pats.push(Partial::Absent); self.pat_source_map[pat_id] = HirOrigin::none(self.f_ctxt.file); pat_id } diff --git a/crates/hir/src/lower/expr.rs 
b/crates/hir/src/lower/expr.rs index 20cee729ba..ff2e05f5d6 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -57,7 +57,7 @@ impl Expr { ast::ExprKind::MethodCall(method_call) => { let receiver = Self::push_to_body_opt(ctxt, method_call.receiver()); let method_name = - IdentId::maybe_lower_token(ctxt.f_ctxt, method_call.method_name()); + IdentId::lower_token_partial(ctxt.f_ctxt, method_call.method_name()); let args = method_call .args() .map(|args| { @@ -70,12 +70,12 @@ impl Expr { } ast::ExprKind::Path(path) => { - let path = PathId::maybe_lower_ast(ctxt.f_ctxt, path.path()); + let path = PathId::lower_ast_partial(ctxt.f_ctxt, path.path()); Self::Path(path) } ast::ExprKind::RecordInit(record_init) => { - let path = PathId::maybe_lower_ast(ctxt.f_ctxt, record_init.path()); + let path = PathId::lower_ast_partial(ctxt.f_ctxt, record_init.path()); let fields = record_init .fields() .map(|fields| { diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index eee640b479..725d7eef70 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -33,7 +33,7 @@ impl Mod { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Mod(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); @@ -55,7 +55,7 @@ impl Fn { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Fn(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -101,7 +101,7 @@ impl Struct { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Struct(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -134,7 +134,7 @@ impl Contract { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Contract(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -155,7 +155,7 @@ impl Enum { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Enum(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -188,14 +188,14 @@ impl TypeAlias { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.alias()); + let name = IdentId::lower_token_partial(ctxt, ast.alias()); let id = TrackedItemId::TypeAlias(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let origin = HirOrigin::raw(ctxt.file, &ast); let alias = Self::new( @@ -221,7 +221,7 @@ impl Impl { ) -> Self { ctxt.enter_scope(); - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let ty = 
TypeId::lower_ast_partial(ctxt, ast.ty()); let id = TrackedItemId::Impl(ty).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -256,7 +256,7 @@ impl Trait { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Trait(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -294,8 +294,8 @@ impl ImplTrait { ) -> Self { ctxt.enter_scope(); - let trait_ref = TraitRef::maybe_lower_ast(ctxt, ast.trait_ref()); - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let trait_ref = TraitRef::lower_ast_partial(ctxt, ast.trait_ref()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let id = TrackedItemId::ImplTrait(trait_ref, ty).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -331,7 +331,7 @@ impl Const { ) -> Self { ctxt.enter_scope(); - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Const(name).join(parent_id); let body = ast .value() @@ -352,7 +352,7 @@ impl Use { ) -> Self { ctxt.enter_scope(); - let tree = UseTreeId::maybe_lower_ast(ctxt, ast.use_tree()); + let tree = UseTreeId::lower_ast_partial(ctxt, ast.use_tree()); let id = TrackedItemId::Use(tree).join(parent_id); let origin = HirOrigin::raw(ctxt.file, &ast); @@ -366,7 +366,7 @@ impl ExternFn { parent: TrackedItemId, ast: ast::Fn, ) -> Self { - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Extern.join(parent); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); @@ -416,8 +416,8 @@ impl RecordFieldListId { impl RecordField { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::RecordFieldDef) -> Self { - let name = IdentId::maybe_lower_token(ctxt, ast.name()); - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let is_pub = ast.pub_kw().is_some(); Self { name, ty, is_pub } @@ -441,7 +441,7 @@ impl EnumVariantListId { impl EnumVariant { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::EnumVariantDef) -> Self { - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let ty = ast.ty().map(|ty| TypeId::lower_ast(ctxt, ty)); Self { name, ty } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 3755875aed..8022c92b44 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -7,8 +7,8 @@ use parser::{ast, SyntaxToken}; use crate::{ hir_def::{ - IdentId, IntegerId, ItemKind, ItemTreeNode, LitKind, MaybeInvalid, ModuleItemTree, - StringId, TopLevelMod, + IdentId, IntegerId, ItemKind, ItemTreeNode, LitKind, ModuleItemTree, Partial, StringId, + TopLevelMod, }, HirDb, }; @@ -96,10 +96,10 @@ impl IdentId { Self::new(ctxt.db, token.text().to_string()) } - fn maybe_lower_token( + fn lower_token_partial( ctxt: &mut FileLowerCtxt<'_>, token: Option, - ) -> MaybeInvalid { + ) -> Partial { token.map(|token| Self::lower_token(ctxt, token)).into() } } diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 8fa89f8442..3064f99c0b 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -70,7 +70,7 @@ impl WhereClauseId { impl TypeGenericParam { fn 
lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeGenericParam) -> Self { - let name = IdentId::maybe_lower_token(ctxt, ast.name()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let bounds = ast .bounds() .map(|bounds| { @@ -87,8 +87,8 @@ impl TypeGenericParam { impl ConstGenericParam { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::ConstGenericParam) -> Self { - let name = IdentId::maybe_lower_token(ctxt, ast.name()); - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); Self { name, ty } } } @@ -108,7 +108,7 @@ impl GenericArg { impl TypeGenericArg { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeGenericArg) -> Self { - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); Self { ty } } } @@ -145,7 +145,7 @@ impl FnParam { .name() .map(|ast| FnParamName::lower_ast(ctxt, ast)) .into(); - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); Self { is_mut, @@ -158,7 +158,7 @@ impl FnParam { impl WherePredicate { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::WherePredicate) -> Self { - let ty = TypeId::maybe_lower_ast(ctxt, ast.ty()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let bounds = ast .bounds() .map(|bounds| { diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs index fb875f5a71..507ad7e336 100644 --- a/crates/hir/src/lower/pat.rs +++ b/crates/hir/src/lower/pat.rs @@ -31,12 +31,12 @@ impl Pat { } ast::PatKind::Path(path) => { - let path = PathId::maybe_lower_ast(ctxt.f_ctxt, path.path()); + let path = PathId::lower_ast_partial(ctxt.f_ctxt, path.path()); Pat::Path(path) } ast::PatKind::PathTuple(path_tup) => { - let path = PathId::maybe_lower_ast(ctxt.f_ctxt, path_tup.path()); + let path = PathId::lower_ast_partial(ctxt.f_ctxt, path_tup.path()); let elems = match path_tup.elems() { Some(elems) => elems.iter().map(|pat| Pat::lower_ast(ctxt, pat)).collect(), None => vec![], @@ -45,7 +45,7 @@ impl Pat { } ast::PatKind::Record(record) => { - let path = PathId::maybe_lower_ast(ctxt.f_ctxt, record.path()); + let path = PathId::lower_ast_partial(ctxt.f_ctxt, record.path()); let fields = match record.fields() { Some(fields) => fields .iter() @@ -77,7 +77,7 @@ impl Pat { impl RecordPatField { fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: &ast::RecordPatField) -> RecordPatField { - let label = IdentId::maybe_lower_token(ctxt.f_ctxt, ast.name()); + let label = IdentId::lower_token_partial(ctxt.f_ctxt, ast.name()); let pat = ast .pat() .map(|pat| Pat::lower_ast(ctxt, pat)) diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 4341a6ce6d..5428ee80a3 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -1,6 +1,6 @@ use parser::{ast, SyntaxToken}; -use crate::hir_def::{IdentId, MaybeInvalid, PathId, PathSegment}; +use crate::hir_def::{IdentId, Partial, PathId, PathSegment}; use super::FileLowerCtxt; @@ -27,16 +27,16 @@ impl PathId { Self::new(ctxt.db, segments) } - pub(super) fn maybe_lower_ast( + pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, ast: Option, - ) -> MaybeInvalid { + ) -> Partial { ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } pub(super) fn from_ident(ctxt: &mut FileLowerCtxt<'_>, ast: SyntaxToken) -> Self { let ident_id = IdentId::new(ctxt.db, ast.text().to_string()); - let seg = 
vec![MaybeInvalid::Valid(PathSegment::Ident(ident_id))]; + let seg = vec![Partial::Present(PathSegment::Ident(ident_id))]; Self::new(ctxt.db, seg) } } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index a4d0bdd304..a85af01dcc 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -1,6 +1,6 @@ use parser::ast::{self, prelude::*}; -use crate::hir_def::{Body, GenericArgListId, MaybeInvalid, PathId, TraitRef, TypeId, TypeKind}; +use crate::hir_def::{Body, GenericArgListId, Partial, PathId, TraitRef, TypeId, TypeKind}; use super::FileLowerCtxt; @@ -8,12 +8,12 @@ impl TypeId { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Type) -> Self { let kind = match ast.kind() { ast::TypeKind::Ptr(ty) => { - let inner = Self::maybe_lower_ast(ctxt, ty.inner()); + let inner = Self::lower_ast_partial(ctxt, ty.inner()); TypeKind::Ptr(inner) } ast::TypeKind::Path(ty) => { - let path = PathId::maybe_lower_ast(ctxt, ty.path()); + let path = PathId::lower_ast_partial(ctxt, ty.path()); let generic_args = GenericArgListId::lower_ast_opt(ctxt, ty.generic_args()); TypeKind::Path(path, generic_args) } @@ -29,7 +29,7 @@ impl TypeId { } ast::TypeKind::Array(ty) => { - let elem_ty = Self::maybe_lower_ast(ctxt, ty.elem_ty()); + let elem_ty = Self::lower_ast_partial(ctxt, ty.elem_ty()); let body = ty .len() .map(|ast| Body::lower_ast_nameless(ctxt, ast)) @@ -41,25 +41,25 @@ impl TypeId { TypeId::new(ctxt.db, kind) } - pub(super) fn maybe_lower_ast( + pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, ast: Option, - ) -> MaybeInvalid { + ) -> Partial { ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } } impl TraitRef { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::PathType) -> Self { - let path = PathId::maybe_lower_ast(ctxt, ast.path()); + let path = PathId::lower_ast_partial(ctxt, ast.path()); let generic_args = GenericArgListId::lower_ast_opt(ctxt, ast.generic_args()); Self { path, generic_args } } - pub(super) fn maybe_lower_ast( + pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, ast: Option, - ) -> MaybeInvalid { + ) -> Partial { ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } } diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index 9de55ab288..fbba3b7cc4 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -1,6 +1,6 @@ use parser::ast; -use crate::hir_def::{use_tree::*, IdentId, MaybeInvalid}; +use crate::hir_def::{use_tree::*, IdentId, Partial}; use super::FileLowerCtxt; @@ -8,7 +8,7 @@ impl UseTreeId { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::UseTree) -> Self { let path = if let Some(path) = ast.path() { path.into_iter() - .map(|ast| UsePathSegment::maybe_lower_ast(ctxt, ast)) + .map(|ast| UsePathSegment::lower_ast_partial(ctxt, ast)) .collect() } else { vec![] @@ -23,24 +23,24 @@ impl UseTreeId { }; let alias = ast .alias() - .map(|ast| UseTreeAlias::maybe_lower_ast(ctxt, ast)); + .map(|ast| UseTreeAlias::lower_ast_partial(ctxt, ast)); Self::new(ctxt.db, path, subtree, alias) } - pub(super) fn maybe_lower_ast( + pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, ast: Option, - ) -> MaybeInvalid { + ) -> Partial { ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } } impl UsePathSegment { - pub(super) fn maybe_lower_ast( + pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, ast: ast::UsePathSegment, - ) -> MaybeInvalid { + ) -> Partial { ast.kind() .map(|kind| match kind { 
ast::UsePathSegmentKind::Ident(ident) => { @@ -54,10 +54,10 @@ impl UsePathSegment { } impl UseTreeAlias { - pub(super) fn maybe_lower_ast( + pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, ast: ast::UseTreeAlias, - ) -> MaybeInvalid { + ) -> Partial { if let Some(ident) = ast.ident() { Some(Self::Ident(IdentId::lower_token(ctxt, ident))) } else if ast.underscore().is_some() { From ba4c3634fe5f8aade6f796dc972e41cf9a9602dc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 27 Mar 2023 18:15:26 +0200 Subject: [PATCH 113/678] Add `BodySourceMap` --- Cargo.lock | 1 + crates/hir/Cargo.toml | 1 + crates/hir/src/hir_def/body.rs | 85 ++++++++++++++++++++++++++++------ crates/hir/src/lower/body.rs | 53 ++++++++++----------- crates/hir/src/lower/expr.rs | 6 +-- crates/hir/src/lower/pat.rs | 4 +- crates/hir/src/lower/stmt.rs | 28 +++++------ crates/hir/src/span/mod.rs | 44 ++++++------------ 8 files changed, 133 insertions(+), 89 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ad279194b1..8e1791287a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -812,6 +812,7 @@ dependencies = [ "fe-parser2", "num-bigint", "num-traits", + "rustc-hash", "salsa-2022", "tracing", ] diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 968216b9ec..427c9cef0f 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -17,6 +17,7 @@ cranelift-entity = "0.91" num-bigint = "0.4.3" num-traits = "0.2.15" camino = "1.1.4" +rustc-hash = "1.1.0" parser = { path = "../parser2", package = "fe-parser2" } common = { path = "../common2", package = "fe-common2" } diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index a6823f76d1..9c830c0644 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -3,10 +3,13 @@ // that may take many arguments depending on the number of fields in the struct. 
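// Illustrative sketch, not part of the patch: the `SourceNodeMap` defined
// further down in this file stores both directions of the HIR <-> AST
// association, so diagnostics can walk from a HIR node to its origin while
// IDE-style queries can walk from an AST origin back to the HIR node. The
// helper name `round_trip` and the `ast::Expr`/`ExprId` instantiation are
// assumptions for illustration.
fn round_trip(map: &SourceNodeMap<ast::Expr, ExprId>, expr: ExprId) -> Option<ExprId> {
    // HIR -> source: indexed lookup in the `SecondaryMap` filled by `insert`.
    let origin = &map.node_to_source[expr];
    // Source -> HIR: hash lookup in the reverse `FxHashMap`.
    map.source_to_node.get(origin).copied()
}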
#![allow(clippy::too_many_arguments)] -use cranelift_entity::{PrimaryMap, SecondaryMap}; -use parser::ast; +use std::hash::Hash; -use crate::span::HirOrigin; +use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap}; +use parser::ast::{self, prelude::*}; +use rustc_hash::FxHashMap; + +use crate::span::{HirOrigin, LocalOrigin}; use super::{Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TrackedItemId}; @@ -16,18 +19,14 @@ pub struct Body { id: TrackedBodyId, #[return_ref] - pub stmts: BodyNodeMap>, + pub stmts: NodeStore>, #[return_ref] - pub exprs: BodyNodeMap>, + pub exprs: NodeStore>, #[return_ref] - pub pats: BodyNodeMap>, + pub pats: NodeStore>, #[return_ref] - pub(crate) stmt_source_map: BodySourceMap, - #[return_ref] - pub(crate) expr_source_map: BodySourceMap, - #[return_ref] - pub(crate) pat_source_map: BodySourceMap, + pub(crate) source_map: BodySourceMap, #[return_fer] pub(crate) ast: HirOrigin, @@ -40,5 +39,65 @@ pub enum TrackedBodyId { NamelessBody, } -pub type BodyNodeMap = PrimaryMap; -pub type BodySourceMap = SecondaryMap>; +pub type NodeStore = PrimaryMap; + +pub trait SourceAst: AstNode + Clone + Hash + PartialEq + Eq {} +impl SourceAst for T where T: AstNode + Clone + Hash + PartialEq + Eq {} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct BodySourceMap { + pub stmt_map: SourceNodeMap, + pub expr_map: SourceNodeMap, + pub pat_map: SourceNodeMap, +} + +#[derive(Clone, Debug)] +pub struct SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + pub node_to_source: SecondaryMap>, + pub source_to_node: FxHashMap, Node>, +} + +impl SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + pub(crate) fn insert(&mut self, node: Node, ast: LocalOrigin) { + self.node_to_source[node] = ast.clone(); + self.source_to_node.insert(ast, node); + } +} + +impl PartialEq for SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + fn eq(&self, other: &Self) -> bool { + self.node_to_source == other.node_to_source + } +} + +impl Eq for SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ +} + +impl Default for SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + fn default() -> Self { + Self { + source_to_node: FxHashMap::default(), + node_to_source: SecondaryMap::new(), + } + } +} diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 71b00ac135..e9bd26e5f0 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -2,10 +2,10 @@ use parser::ast; use crate::{ hir_def::{ - Body, BodyNodeMap, BodySourceMap, Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, + Body, BodySourceMap, Expr, ExprId, NodeStore, Partial, Pat, PatId, Stmt, StmtId, TrackedBodyId, TrackedItemId, }, - span::{HirOrigin, HirOriginKind}, + span::{HirOrigin, LocalOrigin}, }; use super::FileLowerCtxt; @@ -45,48 +45,49 @@ pub(super) struct BodyCtxt<'ctxt, 'db> { pub(super) f_ctxt: &'ctxt mut FileLowerCtxt<'db>, pub(super) bid: TrackedBodyId, - pub(super) stmts: BodyNodeMap>, - pub(super) exprs: BodyNodeMap>, - pub(super) pats: BodyNodeMap>, - - stmt_source_map: BodySourceMap, - expr_source_map: BodySourceMap, - pat_source_map: BodySourceMap, + pub(super) stmts: NodeStore>, + pub(super) exprs: NodeStore>, + pub(super) pats: NodeStore>, + pub(super) source_map: BodySourceMap, } + impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { - pub(super) fn push_expr(&mut self, expr: Expr, origin: HirOriginKind) -> ExprId { + pub(super) fn push_expr(&mut self, expr: Expr, origin: LocalOrigin) -> ExprId { let expr_id = 
self.exprs.push(Partial::Present(expr)); - self.expr_source_map[expr_id] = HirOrigin::new(self.f_ctxt.file, origin); + self.source_map.expr_map.insert(expr_id, origin); + expr_id } - pub(super) fn push_invalid_expr(&mut self, origin: HirOriginKind) -> ExprId { + pub(super) fn push_invalid_expr(&mut self, origin: LocalOrigin) -> ExprId { let expr_id = self.exprs.push(Partial::Absent); - self.expr_source_map[expr_id] = HirOrigin::new(self.f_ctxt.file, origin); + self.source_map.expr_map.insert(expr_id, origin); + expr_id } pub(super) fn push_missing_expr(&mut self) -> ExprId { let expr_id = self.exprs.push(Partial::Absent); - self.expr_source_map[expr_id] = HirOrigin::none(self.f_ctxt.file); + self.source_map.expr_map.insert(expr_id, LocalOrigin::None); expr_id } - pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: HirOriginKind) -> StmtId { + pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: LocalOrigin) -> StmtId { let stmt_id = self.stmts.push(Partial::Present(stmt)); - self.stmt_source_map[stmt_id] = HirOrigin::new(self.f_ctxt.file, origin); + self.source_map.stmt_map.insert(stmt_id, origin); + stmt_id } - pub(super) fn push_pat(&mut self, pat: Pat, origin: HirOriginKind) -> PatId { + pub(super) fn push_pat(&mut self, pat: Pat, origin: LocalOrigin) -> PatId { let pat_id = self.pats.push(Partial::Present(pat)); - self.pat_source_map[pat_id] = HirOrigin::new(self.f_ctxt.file, origin); + self.source_map.pat_map.insert(pat_id, origin); pat_id } pub(super) fn push_missing_pat(&mut self) -> PatId { let pat_id = self.pats.push(Partial::Absent); - self.pat_source_map[pat_id] = HirOrigin::none(self.f_ctxt.file); + self.source_map.pat_map.insert(pat_id, LocalOrigin::None); pat_id } @@ -95,12 +96,10 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { Self { f_ctxt, bid, - stmts: BodyNodeMap::new(), - exprs: BodyNodeMap::new(), - pats: BodyNodeMap::new(), - stmt_source_map: BodySourceMap::new(), - expr_source_map: BodySourceMap::new(), - pat_source_map: BodySourceMap::new(), + stmts: NodeStore::new(), + exprs: NodeStore::new(), + pats: NodeStore::new(), + source_map: BodySourceMap::default(), } } @@ -112,9 +111,7 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { self.stmts, self.exprs, self.pats, - self.stmt_source_map, - self.expr_source_map, - self.pat_source_map, + self.source_map, origin, ); diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index ff2e05f5d6..9bf039e3c2 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -2,7 +2,7 @@ use parser::ast::{self, prelude::*}; use crate::{ hir_def::{expr::*, Body, IdentId, IntegerId, LitKind, Pat, PathId, Stmt}, - span::HirOriginKind, + span::LocalOrigin, }; use super::body::BodyCtxt; @@ -15,7 +15,7 @@ impl Expr { let lit = LitKind::lower_ast(ctxt.f_ctxt, lit); Self::Lit(lit) } else { - return ctxt.push_invalid_expr(HirOriginKind::raw(&ast)); + return ctxt.push_invalid_expr(LocalOrigin::raw(&ast)); } } @@ -162,7 +162,7 @@ impl Expr { } }; - ctxt.push_expr(expr, HirOriginKind::raw(&ast)) + ctxt.push_expr(expr, LocalOrigin::raw(&ast)) } pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_, '_>, ast: Option) -> ExprId { diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs index 507ad7e336..5ee28a6b33 100644 --- a/crates/hir/src/lower/pat.rs +++ b/crates/hir/src/lower/pat.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::{ hir_def::{pat::*, IdentId, LitKind, PathId}, - span::HirOriginKind, + span::LocalOrigin, }; use super::body::BodyCtxt; @@ -63,7 +63,7 @@ impl Pat { } }; - ctxt.push_pat(pat, 
HirOriginKind::raw(&ast)) + ctxt.push_pat(pat, LocalOrigin::raw(&ast)) } pub(super) fn lower_ast_opt(ctxt: &mut BodyCtxt<'_, '_>, ast: Option) -> PatId { diff --git a/crates/hir/src/lower/stmt.rs b/crates/hir/src/lower/stmt.rs index e12a99ee45..26321f4e86 100644 --- a/crates/hir/src/lower/stmt.rs +++ b/crates/hir/src/lower/stmt.rs @@ -2,7 +2,7 @@ use parser::ast::{self, prelude::*}; use crate::{ hir_def::{stmt::*, ArithBinOp, Expr, Pat, PathId, TypeId}, - span::{AugAssignDesugared, HirOriginKind}, + span::{AugAssignDesugared, LocalOrigin}, }; use super::body::BodyCtxt; @@ -16,7 +16,7 @@ impl Stmt { .type_annotation() .map(|ty| TypeId::lower_ast(ctxt.f_ctxt, ty)); let init = let_.initializer().map(|init| Expr::lower_ast(ctxt, init)); - (Stmt::Let(pat, ty, init), HirOriginKind::raw(&ast)) + (Stmt::Let(pat, ty, init), LocalOrigin::raw(&ast)) } ast::StmtKind::Assign(assign) => { let lhs = assign @@ -28,7 +28,7 @@ impl Stmt { .expr() .map(|expr| Expr::lower_ast(ctxt, expr)) .unwrap_or_else(|| ctxt.push_missing_expr()); - (Stmt::Assign(lhs, rhs), HirOriginKind::raw(&ast)) + (Stmt::Assign(lhs, rhs), LocalOrigin::raw(&ast)) } ast::StmtKind::AugAssign(aug_assign) => desugar_aug_assign(ctxt, &aug_assign), @@ -42,7 +42,7 @@ impl Stmt { .and_then(|body| ast::Expr::cast(body.syntax().clone())), ); - (Stmt::For(bind, iter, body), HirOriginKind::raw(&ast)) + (Stmt::For(bind, iter, body), LocalOrigin::raw(&ast)) } ast::StmtKind::While(while_) => { @@ -54,23 +54,23 @@ impl Stmt { .and_then(|body| ast::Expr::cast(body.syntax().clone())), ); - (Stmt::While(cond, body), HirOriginKind::raw(&ast)) + (Stmt::While(cond, body), LocalOrigin::raw(&ast)) } - ast::StmtKind::Continue(_) => (Stmt::Continue, HirOriginKind::raw(&ast)), + ast::StmtKind::Continue(_) => (Stmt::Continue, LocalOrigin::raw(&ast)), - ast::StmtKind::Break(_) => (Stmt::Break, HirOriginKind::raw(&ast)), + ast::StmtKind::Break(_) => (Stmt::Break, LocalOrigin::raw(&ast)), ast::StmtKind::Return(ret) => { let expr = ret .has_value() .then(|| Expr::push_to_body_opt(ctxt, ret.expr())); - (Stmt::Return(expr), HirOriginKind::raw(&ast)) + (Stmt::Return(expr), LocalOrigin::raw(&ast)) } ast::StmtKind::Expr(expr) => { let expr = Expr::push_to_body_opt(ctxt, expr.expr()); - (Stmt::Expr(expr), HirOriginKind::raw(&ast)) + (Stmt::Expr(expr), LocalOrigin::raw(&ast)) } }; @@ -81,7 +81,7 @@ impl Stmt { fn desugar_aug_assign( ctxt: &mut BodyCtxt<'_, '_>, ast: &ast::AugAssignStmt, -) -> (Stmt, HirOriginKind) { +) -> (Stmt, LocalOrigin) { let lhs_ident = ast.ident(); let path = lhs_ident .clone() @@ -91,7 +91,7 @@ fn desugar_aug_assign( let lhs_pat = if let Some(path) = path { ctxt.push_pat( Pat::Path(Some(path).into()), - HirOriginKind::desugared(lhs_origin.clone()), + LocalOrigin::desugared(lhs_origin.clone()), ) } else { ctxt.push_missing_pat() @@ -100,7 +100,7 @@ fn desugar_aug_assign( let binop_lhs = if let Some(path) = path { ctxt.push_expr( Expr::Path(Some(path).into()), - HirOriginKind::desugared(lhs_origin), + LocalOrigin::desugared(lhs_origin), ) } else { ctxt.push_missing_expr() @@ -114,11 +114,11 @@ fn desugar_aug_assign( let binop = ast.op().map(|op| ArithBinOp::lower_ast(op).into()).into(); let expr = ctxt.push_expr( Expr::Bin(binop_lhs, binop_rhs, binop), - HirOriginKind::desugared(AugAssignDesugared::stmt(ast)), + LocalOrigin::desugared(AugAssignDesugared::stmt(ast)), ); ( Stmt::Assign(lhs_pat, expr), - HirOriginKind::desugared(AugAssignDesugared::stmt(ast)), + LocalOrigin::desugared(AugAssignDesugared::stmt(ast)), ) } diff --git 
a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index faba94b2ee..558a1cee45 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -11,14 +11,14 @@ where T: AstNode, { pub file: Option, - pub kind: HirOriginKind, + pub kind: LocalOrigin, } impl HirOrigin where T: AstNode, { - pub(crate) fn new(file: InputFile, origin: HirOriginKind) -> Self { + pub(crate) fn new(file: InputFile, origin: LocalOrigin) -> Self { HirOrigin { file: Some(file), kind: origin, @@ -26,41 +26,18 @@ where } pub(crate) fn raw(file: InputFile, ast: &T) -> Self { - HirOrigin { - file: Some(file), - kind: HirOriginKind::raw(ast), - } - } - - pub(crate) fn none(file: InputFile) -> Self { - HirOrigin { - file: Some(file), - kind: HirOriginKind::None, - } + Self::new(file, LocalOrigin::raw(ast)) } } -impl Default for HirOrigin -where - T: AstNode, -{ - /// The `Default` implemntation is necessary for - fn default() -> Self { - Self { - file: None, - kind: HirOriginKind::None, - } - } -} - -/// This enum represents the origin of the HIR node. +/// This enum represents the origin of the HIR node is a file. /// The origin has three possible kinds. /// 1. `Raw` is used for nodes that are created by the parser and not /// 2. `Expanded` is used for nodes that are created by the compiler and not /// 3. `Desugared` is used for nodes that are created by the compiler and not // TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum HirOriginKind +pub enum LocalOrigin where T: AstNode, { @@ -81,7 +58,7 @@ where None, } -impl HirOriginKind +impl LocalOrigin where T: AstNode, { @@ -94,6 +71,15 @@ where } } +impl Default for LocalOrigin +where + T: AstNode, +{ + fn default() -> Self { + Self::None + } +} + /// This enum represents the origin of the HIR node which is desugared into /// other HIR node kinds. // TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. From 2e0482e6bcb71b3e9aa473e0cc0723615fc64bdc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 27 Mar 2023 23:00:22 +0200 Subject: [PATCH 114/678] Add `SpannedHirDb` --- crates/hir/src/hir_def/item.rs | 19 +++++++++ crates/hir/src/span/jar.rs | 77 ++++++++++++++++++++++++++++++++++ crates/hir/src/span/mod.rs | 2 + 3 files changed, 98 insertions(+) create mode 100644 crates/hir/src/span/jar.rs diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index ea3fef2d53..a52464a384 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -35,6 +35,8 @@ pub struct TopLevelMod { // No #[id] here, because `TopLevelMod` is always unique to a `InputFile` that is an argument // of `module_item_tree`. 
pub name: IdentId, + + #[return_ref] pub(crate) origin: HirOrigin, } @@ -47,6 +49,7 @@ pub struct Mod { pub attributes: AttrListId, pub is_pub: bool, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -64,6 +67,7 @@ pub struct Fn { pub modifier: ItemModifier, pub body: Option, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -78,6 +82,7 @@ pub struct ExternFn { pub ret_ty: Option, pub modifier: ItemModifier, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -93,6 +98,7 @@ pub struct Struct { pub where_clause: WhereClauseId, pub fields: RecordFieldListId, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -106,6 +112,7 @@ pub struct Contract { pub is_pub: bool, pub fields: RecordFieldListId, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -121,6 +128,7 @@ pub struct Enum { pub where_clause: WhereClauseId, pub variants: EnumVariantListId, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -136,6 +144,7 @@ pub struct TypeAlias { pub where_clause: WhereClauseId, pub ty: Partial, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -148,6 +157,8 @@ pub struct Impl { pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, + + #[return_ref] pub(crate) origin: HirOrigin, } @@ -162,6 +173,8 @@ pub struct Trait { pub is_pub: bool, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, + + #[return_ref] pub(crate) origin: HirOrigin, } @@ -175,6 +188,8 @@ pub struct ImplTrait { pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, + + #[return_ref] pub(crate) origin: HirOrigin, } @@ -185,6 +200,8 @@ pub struct Const { pub name: Partial, pub body: Partial, + + #[return_ref] pub(crate) origin: HirOrigin, } @@ -194,6 +211,8 @@ pub struct Use { id: TrackedItemId, pub tree: Partial, + + #[return_ref] pub(crate) origin: HirOrigin, } diff --git a/crates/hir/src/span/jar.rs b/crates/hir/src/span/jar.rs new file mode 100644 index 0000000000..1f73edd451 --- /dev/null +++ b/crates/hir/src/span/jar.rs @@ -0,0 +1,77 @@ +use common::Upcast; +use parser::ast; + +use crate::{ + hir_def::{ + Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, + TypeAlias, Use, + }, + HirDb, +}; + +use super::HirOrigin; + +#[salsa::jar(db = SpannedHirDb)] +pub struct SpanJar(); + +/// `SpannedHirDb` is a feature gate for extracting span-dependent information +/// from HIR Items. All code that requires [`SpannedHirDb`] is considered to +/// invalidate the cache in salsa when a revision is updated. +/// Therefore, implementations relying on `SpannedHirDb` are prohibited in all +/// Analysis phases. +/// +/// SpanDb is mainly used to inject information about [`SyntaxNode`] to generate +/// [`FullDiagnostic`] from [`DiagnosticVoucher`]. 
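// Illustrative sketch, not part of the patch: the intended layering around the
// trait declared just below. Analysis passes should depend only on `HirDb` so
// their results stay cacheable in salsa; only the final reporting step takes a
// `&dyn SpannedHirDb` and resolves origins lazily. `render_fn_report` is a
// hypothetical function name.
fn render_fn_report(db: &dyn SpannedHirDb, item: Fn) {
    // Span-bearing information is fetched only here, at reporting time.
    let _origin = db.fn_ast(item);
}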
+pub trait SpannedHirDb: HirDb + salsa::DbWithJar + Upcast { + fn toplevel_ast(&self, item: TopLevelMod) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn mod_ast(&self, item: Mod) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn fn_ast(&self, item: Fn) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn extern_fn_ast(&self, item: ExternFn) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn struct_ast(&self, item: Struct) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn contract_ast(&self, item: Contract) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn enum_ast(&self, item: Enum) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn type_alias_ast(&self, item: TypeAlias) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn impl_ast(&self, item: Impl) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn trait_ast(&self, item: Trait) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn impl_trait_ast(&self, item: ImplTrait) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn const_ast(&self, item: Const) -> &HirOrigin { + item.origin(self.upcast()) + } + + fn use_ast(&self, item: Use) -> &HirOrigin { + item.origin(self.upcast()) + } +} diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 558a1cee45..7bf4617ba8 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -5,6 +5,8 @@ use parser::{ use common::InputFile; +pub mod jar; + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HirOrigin where From 8a0087924e588ad3ba9f21e091d3588a6970b1cf Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 27 Mar 2023 23:22:11 +0200 Subject: [PATCH 115/678] Define `CompleteDiagnostic` --- Cargo.lock | 1 + crates/common2/Cargo.toml | 1 + crates/common2/src/diagnostics.rs | 58 +++++++++++++++++++++++++++ crates/common2/src/lib.rs | 1 + crates/hir/Cargo.toml | 2 +- crates/hir/src/hir_def/mod.rs | 2 +- crates/hir/src/hir_def/module_tree.rs | 2 +- crates/hir/src/span/jar.rs | 6 ++- crates/hir/src/span/mod.rs | 6 +-- 9 files changed, 70 insertions(+), 9 deletions(-) create mode 100644 crates/common2/src/diagnostics.rs diff --git a/Cargo.lock b/Cargo.lock index 8e1791287a..5ff0704c0b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -731,6 +731,7 @@ name = "fe-common2" version = "0.20.0-alpha" dependencies = [ "camino", + "fe-parser2", "salsa-2022", "semver 1.0.17", "smol_str", diff --git a/crates/common2/Cargo.toml b/crates/common2/Cargo.toml index 7b9bbb6e17..03b0f333db 100644 --- a/crates/common2/Cargo.toml +++ b/crates/common2/Cargo.toml @@ -14,3 +14,4 @@ semver = "1.0.17" camino = "1.1.4" smol_str = "0.1.24" salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } +parser = { path = "../parser2", package = "fe-parser2" } diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs new file mode 100644 index 0000000000..cb3e1d4c4c --- /dev/null +++ b/crates/common2/src/diagnostics.rs @@ -0,0 +1,58 @@ +use parser::ast::SyntaxNodePtr; + +use crate::InputFile; + +pub struct CompleteDiagnostic { + pub severity: Severity, + pub message: String, + pub span: Span, + pub sub_diagnostics: Vec, + pub error_code: GlobalErrorCode, +} + +pub struct GlobalErrorCode { + pub pass: AnalysisPass, + pub local_code: u16, +} + +pub struct SubDiagnostic { + pub severity: Severity, + pub message: String, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Span { + pub file: InputFile, + pub node: SyntaxNodePtr, + pub origin: SpanOrigin, +} + +#[derive(Copy, Clone, 
PartialEq, Eq, Hash, Debug)] +pub enum Severity { + Error, + Warning, + Note, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SpanOrigin { + Raw, + Expanded, + Desugared, +} + +#[repr(u16)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum AnalysisPass { + Parse = 1, + NameResolution, + TyCheck, + + ExternalAnalysis(ExternalAnalysisKey) = u16::MAX, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ExternalAnalysisKey { + name: String, +} diff --git a/crates/common2/src/lib.rs b/crates/common2/src/lib.rs index f1c692a43a..f0e991e7e5 100644 --- a/crates/common2/src/lib.rs +++ b/crates/common2/src/lib.rs @@ -1,3 +1,4 @@ +pub mod diagnostics; pub mod input; pub use input::{InputFile, InputIngot}; diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 427c9cef0f..9eba42dc32 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -5,7 +5,7 @@ authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" repository = "https://github.com/ethereum/fe" -description = "Provides HIR definition and lowering for Fe lang." +description = "Provides HIR definition and lowering for Fe lang" [dependencies] tracing = "0.1" diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index b69f5ae8c5..adf13d249f 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -59,7 +59,7 @@ pub enum LitKind { Bool(bool), } -/// `Partial is a type that explicitly indicates the possibility that an HIR +/// `Partial` is a type that explicitly indicates the possibility that an HIR /// node cannot be generated due to syntax errors in the source file. /// /// If a node is `Partial::Absent`, it means that the corresponding AST either diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 63e6d0f84e..364fca0dfa 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -10,7 +10,7 @@ use super::IdentId; /// This tree represents the structure of an ingot. /// Internal modules are not included in this tree, instead, they are included -/// in [`crate::item_tree::ModuleItemTree`]. +/// in [ModuleItemTree](crate::hir_def::item_tree::ModuleItemTree). /// /// This is used in later name resolution phase. /// The tree is file contents agnostic, i.e., **only** depends on project diff --git a/crates/hir/src/span/jar.rs b/crates/hir/src/span/jar.rs index 1f73edd451..8681eb2512 100644 --- a/crates/hir/src/span/jar.rs +++ b/crates/hir/src/span/jar.rs @@ -20,8 +20,10 @@ pub struct SpanJar(); /// Therefore, implementations relying on `SpannedHirDb` are prohibited in all /// Analysis phases. /// -/// SpanDb is mainly used to inject information about [`SyntaxNode`] to generate -/// [`FullDiagnostic`] from [`DiagnosticVoucher`]. +/// SpanDb is mainly used to inject information about +/// [HirOrigin] to generate +/// [FullDiagnostic](crate::diagnostics::FullDiagnostic) from +/// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). 
pub trait SpannedHirDb: HirDb + salsa::DbWithJar + Upcast { fn toplevel_ast(&self, item: TopLevelMod) -> &HirOrigin { item.origin(self.upcast()) diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 7bf4617ba8..a28c68de44 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -12,7 +12,7 @@ pub struct HirOrigin where T: AstNode, { - pub file: Option, + pub file: InputFile, pub kind: LocalOrigin, } @@ -22,7 +22,7 @@ where { pub(crate) fn new(file: InputFile, origin: LocalOrigin) -> Self { HirOrigin { - file: Some(file), + file: file, kind: origin, } } @@ -37,7 +37,6 @@ where /// 1. `Raw` is used for nodes that are created by the parser and not /// 2. `Expanded` is used for nodes that are created by the compiler and not /// 3. `Desugared` is used for nodes that are created by the compiler and not -// TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum LocalOrigin where @@ -84,7 +83,6 @@ where /// This enum represents the origin of the HIR node which is desugared into /// other HIR node kinds. -// TODO: Change the visibility to `pub(crate)` when https://github.com/salsa-rs/salsa/issues/437 is resolved. #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] pub enum DesugaredOrigin { /// The HIR node is the result of desugaring an augmented assignment From 83c8306ed2d2cf575ec6e72d4a0728f6ea20c32f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 29 Mar 2023 18:33:37 +0200 Subject: [PATCH 116/678] Define `DiagnosticVoucher` --- crates/common2/src/diagnostics.rs | 9 ++++++--- crates/hir/src/diagnostics.rs | 33 +++++++++++++++++++++++++++++++ crates/hir/src/lib.rs | 1 + crates/hir/src/span/mod.rs | 5 +---- 4 files changed, 41 insertions(+), 7 deletions(-) create mode 100644 crates/hir/src/diagnostics.rs diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index cb3e1d4c4c..15505e6ba4 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -2,6 +2,7 @@ use parser::ast::SyntaxNodePtr; use crate::InputFile; +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CompleteDiagnostic { pub severity: Severity, pub message: String, @@ -10,32 +11,34 @@ pub struct CompleteDiagnostic { pub error_code: GlobalErrorCode, } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct GlobalErrorCode { pub pass: AnalysisPass, pub local_code: u16, } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct SubDiagnostic { pub severity: Severity, pub message: String, pub span: Span, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Span { pub file: InputFile, pub node: SyntaxNodePtr, pub origin: SpanOrigin, } -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Severity { Error, Warning, Note, } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum SpanOrigin { Raw, Expanded, diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs new file mode 100644 index 0000000000..f7d562900d --- /dev/null +++ b/crates/hir/src/diagnostics.rs @@ -0,0 +1,33 @@ +//! This module defines the diagnostics that can be accumulated inside salsa-db +//! with span-agnostic forms. All diagnostics accumulated in salsa-db should +//! implement [`DiagnosticVoucher`] which defines the conversion into +//! [`CompleteDiagnostics`]. 
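// Illustrative sketch, not part of the patch: how an analysis pass is expected
// to use the `DiagnosticVoucher` trait defined below. The voucher stores only
// HIR ids and messages, no spans, so accumulating it in salsa does not
// invalidate caches; the span is recovered lazily in `consume`. The type
// `UnresolvedNameError`, its fields, and the local error code are hypothetical,
// and `AnalysisPass`/`Fn` are assumed to be imported alongside the uses below.
struct UnresolvedNameError {
    func: Fn,        // the HIR item the error points at; no span is stored
    message: String,
}

impl DiagnosticVoucher for UnresolvedNameError {
    fn pass(&self) -> GlobalErrorCode {
        GlobalErrorCode {
            pass: AnalysisPass::NameResolution,
            local_code: 1,
        }
    }

    fn consume(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic {
        // The origin is looked up lazily here; assembling the final `Span` and
        // `CompleteDiagnostic` from it is elided in this sketch.
        let _origin = db.fn_ast(self.func);
        todo!("build a CompleteDiagnostic from `_origin` and `self.message`")
    }
}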
+ +use common::diagnostics::{CompleteDiagnostic, GlobalErrorCode}; + +use crate::span::jar::SpannedHirDb; + +/// All diagnostics accumulated in salsa-db should implement +/// [`DiagnosticVoucher`] which defines the conversion. +/// +/// All types that implements `DiagnosticVoucher` must NOT have a span +/// information which invalidates cache in salsa-db. Instead of it, the all +/// information is given by [`SpannedHirDB`] to allow evaluating span lazily. +/// +/// The utility structs for conversion from HIR-spanless types to nodes are +/// defined in [`crate::span`] module. +pub trait DiagnosticVoucher { + fn pass(&self) -> GlobalErrorCode; + /// Consumes voucher and makes a [`CompleteDiagnostic`]. + fn consume(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; +} + +impl DiagnosticVoucher for CompleteDiagnostic { + fn pass(&self) -> GlobalErrorCode { + self.error_code.clone() + } + + fn consume(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { + self + } +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index fb76458923..bfa6ff8889 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,6 +1,7 @@ use common::{InputDb, InputFile, Upcast}; use parser::GreenNode; +pub mod diagnostics; pub mod hir_def; pub mod lower; pub mod span; diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index a28c68de44..1cb1a71c33 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -21,10 +21,7 @@ where T: AstNode, { pub(crate) fn new(file: InputFile, origin: LocalOrigin) -> Self { - HirOrigin { - file: file, - kind: origin, - } + HirOrigin { file, kind: origin } } pub(crate) fn raw(file: InputFile, ast: &T) -> Self { From ad610827839f75c54c99ff1ac7e23fc0b16c47b0 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 30 Mar 2023 17:44:19 +0200 Subject: [PATCH 117/678] Add `ItemSpanState` --- crates/common2/src/diagnostics.rs | 18 ++-- crates/hir/src/diagnostics.rs | 2 +- crates/hir/src/hir_def/body.rs | 4 +- crates/hir/src/span/{jar.rs => db.rs} | 15 ++-- crates/hir/src/span/item.rs | 1 + crates/hir/src/span/mod.rs | 125 +++++++++++++++++++++++++- 6 files changed, 142 insertions(+), 23 deletions(-) rename crates/hir/src/span/{jar.rs => db.rs} (85%) create mode 100644 crates/hir/src/span/item.rs diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 15505e6ba4..025df904c1 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -1,4 +1,4 @@ -use parser::ast::SyntaxNodePtr; +use parser::SyntaxNode; use crate::InputFile; @@ -27,8 +27,13 @@ pub struct SubDiagnostic { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Span { pub file: InputFile, - pub node: SyntaxNodePtr, - pub origin: SpanOrigin, + pub node: SyntaxNode, +} + +impl Span { + pub fn new(file: InputFile, node: SyntaxNode) -> Self { + Self { file, node } + } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -38,13 +43,6 @@ pub enum Severity { Note, } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum SpanOrigin { - Raw, - Expanded, - Desugared, -} - #[repr(u16)] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum AnalysisPass { diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index f7d562900d..eb95e10fd6 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -5,7 +5,7 @@ use common::diagnostics::{CompleteDiagnostic, GlobalErrorCode}; -use crate::span::jar::SpannedHirDb; +use crate::span::db::SpannedHirDb; /// All diagnostics accumulated 
in salsa-db should implement /// [`DiagnosticVoucher`] which defines the conversion. diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 9c830c0644..ef0e633fb9 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -28,8 +28,8 @@ pub struct Body { #[return_ref] pub(crate) source_map: BodySourceMap, - #[return_fer] - pub(crate) ast: HirOrigin, + #[return_ref] + pub(crate) origin: HirOrigin, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/span/jar.rs b/crates/hir/src/span/db.rs similarity index 85% rename from crates/hir/src/span/jar.rs rename to crates/hir/src/span/db.rs index 8681eb2512..fd1f1bc8d0 100644 --- a/crates/hir/src/span/jar.rs +++ b/crates/hir/src/span/db.rs @@ -3,17 +3,14 @@ use parser::ast; use crate::{ hir_def::{ - Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, - TypeAlias, Use, + Body, Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, }, HirDb, }; use super::HirOrigin; -#[salsa::jar(db = SpannedHirDb)] -pub struct SpanJar(); - /// `SpannedHirDb` is a feature gate for extracting span-dependent information /// from HIR Items. All code that requires [`SpannedHirDb`] is considered to /// invalidate the cache in salsa when a revision is updated. @@ -22,9 +19,9 @@ pub struct SpanJar(); /// /// SpanDb is mainly used to inject information about /// [HirOrigin] to generate -/// [FullDiagnostic](crate::diagnostics::FullDiagnostic) from +/// [CompleteDiagnostic](common::diagnostics::CompleteDiagnostic) from /// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). -pub trait SpannedHirDb: HirDb + salsa::DbWithJar + Upcast { +pub trait SpannedHirDb: HirDb + Upcast { fn toplevel_ast(&self, item: TopLevelMod) -> &HirOrigin { item.origin(self.upcast()) } @@ -76,4 +73,8 @@ pub trait SpannedHirDb: HirDb + salsa::DbWithJar + Upcast { fn use_ast(&self, item: Use) -> &HirOrigin { item.origin(self.upcast()) } + + fn body_ast(&self, item: Body) -> &HirOrigin { + item.origin(self.upcast()) + } } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/crates/hir/src/span/item.rs @@ -0,0 +1 @@ + diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 1cb1a71c33..86b0b56f60 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,11 +1,16 @@ use parser::{ ast::{self, prelude::*, AstPtr, SyntaxNodePtr}, - TextRange, + SyntaxNode, TextRange, }; -use common::InputFile; +use common::{diagnostics::Span, InputFile}; -pub mod jar; +use crate::{hir_def::ItemKind, parse_file}; + +use self::db::SpannedHirDb; + +pub mod db; +pub mod item; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HirOrigin @@ -16,6 +21,19 @@ where pub kind: LocalOrigin, } +impl HirOrigin +where + T: AstNode, +{ + fn syntax_ptr(&self) -> Option { + match &self.kind { + LocalOrigin::Raw(ptr) => Some(ptr.syntax_node_ptr()), + LocalOrigin::Expanded(ptr) => Some(ptr.clone()), + _ => None, + } + } +} + impl HirOrigin where T: AstNode, @@ -103,3 +121,104 @@ impl AugAssignDesugared { Self::Stmt(AstPtr::new(ast)) } } + +/// The trait provides a way to extract [`Span`] from types which don't have a +/// span information directly. 
+pub trait SpanSeed { + fn span(self, db: &dyn SpannedHirDb) -> Span; +} + +struct ItemSpanState { + root: ItemKind, + transition: Vec>, +} + +type TransitionFn = dyn FnOnce(&SyntaxNode, &dyn SpannedHirDb) -> Option + 'static; + +impl SpanSeed for ItemSpanState { + fn span(self, db: &dyn SpannedHirDb) -> Span { + let (file, ptr) = match self.root { + ItemKind::TopMod(top_level_mod) => { + let ast = db.toplevel_ast(top_level_mod); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Mod(mod_) => { + let ast = db.mod_ast(mod_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Fn(fn_) => { + let ast = db.fn_ast(fn_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::ExternFn(extern_fn) => { + let ast = db.extern_fn_ast(extern_fn); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Struct(struct_) => { + let ast = db.struct_ast(struct_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Contract(contract) => { + let ast = db.contract_ast(contract); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Enum(enum_) => { + let ast = db.enum_ast(enum_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::TypeAlias(alias) => { + let ast = db.type_alias_ast(alias); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Impl(impl_) => { + let ast = db.impl_ast(impl_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Trait(trait_) => { + let ast = db.trait_ast(trait_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::ImplTrait(impl_trait) => { + let ast = db.impl_trait_ast(impl_trait); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Const(const_) => { + let ast = db.const_ast(const_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Use(use_) => { + let ast = db.use_ast(use_); + (ast.file, ast.syntax_ptr().unwrap()) + } + + ItemKind::Body(body) => { + let ast = db.body_ast(body); + (ast.file, ast.syntax_ptr().unwrap()) + } + }; + + let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let mut node = ptr.to_node(&root_node); + + for transition in self.transition { + node = match transition(&node, db) { + Some(next) => next, + None => break, + }; + } + + Span::new(file, node) + } +} From 094616b722429f13893432a64dc76fc99c6f11a0 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 2 Apr 2023 23:17:38 +0200 Subject: [PATCH 118/678] Define `SpanSeed` and `SpanTransitionChain` --- Cargo.lock | 1 + crates/common2/src/diagnostics.rs | 8 ++--- crates/hir/Cargo.toml | 1 + crates/hir/src/span/mod.rs | 53 +++++++++++++++++++++++++------ crates/hir/src/span/params.rs | 1 + crates/parser2/src/lib.rs | 4 +-- crates/parser2/src/syntax_node.rs | 2 ++ 7 files changed, 54 insertions(+), 16 deletions(-) create mode 100644 crates/hir/src/span/params.rs diff --git a/Cargo.lock b/Cargo.lock index 5ff0704c0b..d1c48c4775 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -815,6 +815,7 @@ dependencies = [ "num-traits", "rustc-hash", "salsa-2022", + "smallvec", "tracing", ] diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 025df904c1..3720466a6b 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -1,4 +1,4 @@ -use parser::SyntaxNode; +use parser::TextRange; use crate::InputFile; @@ -27,12 +27,12 @@ pub struct SubDiagnostic { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Span { pub file: InputFile, - pub node: SyntaxNode, + pub range: TextRange, } impl Span { - pub fn new(file: InputFile, node: SyntaxNode) -> Self { - Self { file, node } + 
pub fn new(file: InputFile, range: TextRange) -> Self { + Self { file, range } } } diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 9eba42dc32..665b63b95d 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -21,3 +21,4 @@ rustc-hash = "1.1.0" parser = { path = "../parser2", package = "fe-parser2" } common = { path = "../common2", package = "fe-common2" } +smallvec = "1.10.0" diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 86b0b56f60..4913a8587d 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,9 +1,11 @@ use parser::{ ast::{self, prelude::*, AstPtr, SyntaxNodePtr}, + syntax_node::NodeOrToken, SyntaxNode, TextRange, }; use common::{diagnostics::Span, InputFile}; +use smallvec::SmallVec; use crate::{hir_def::ItemKind, parse_file}; @@ -128,14 +130,30 @@ pub trait SpanSeed { fn span(self, db: &dyn SpannedHirDb) -> Span; } -struct ItemSpanState { +type TransitionFn = fn(SyntaxNode) -> Option; + +#[derive(Clone)] +struct SpanTransitionChain { root: ItemKind, - transition: Vec>, + chain: SmallVec<[TransitionFn; 4]>, } -type TransitionFn = dyn FnOnce(&SyntaxNode, &dyn SpannedHirDb) -> Option + 'static; +impl SpanTransitionChain { + fn new(item: ItemKind) -> Self { + Self { + root: item, + chain: SmallVec::new(), + } + } + + fn push_state(&self, transition: TransitionFn) -> Self { + let mut new_state = self.clone(); + new_state.chain.push(transition); + new_state + } +} -impl SpanSeed for ItemSpanState { +impl SpanSeed for SpanTransitionChain { fn span(self, db: &dyn SpannedHirDb) -> Span { let (file, ptr) = match self.root { ItemKind::TopMod(top_level_mod) => { @@ -212,13 +230,30 @@ impl SpanSeed for ItemSpanState { let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); let mut node = ptr.to_node(&root_node); - for transition in self.transition { - node = match transition(&node, db) { - Some(next) => next, - None => break, + for transition in self.chain { + node = match transition(node.clone()) { + Some(NodeOrToken::Node(node)) => node, + Some(NodeOrToken::Token(token)) => { + return Span::new(file, token.text_range()); + } + None => { + return Span::new(file, node.text_range()); + } }; } - Span::new(file, node) + Span::new(file, node.text_range()) } } + +macro_rules! 
impl_item_span_seed { + ($name:ident) => { + impl crate::span::SpanSeed for $name { + fn span(self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { + self.0.span(db) + } + } + }; +} + +use impl_item_span_seed; diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs new file mode 100644 index 0000000000..3981547806 --- /dev/null +++ b/crates/hir/src/span/params.rs @@ -0,0 +1 @@ +pub struct \ No newline at end of file diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index 5771df9a55..2931307a41 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -5,12 +5,10 @@ pub mod syntax_kind; pub mod syntax_node; pub use syntax_kind::SyntaxKind; -pub use syntax_node::{FeLang, GreenNode, SyntaxNode, SyntaxToken}; +pub use syntax_node::{FeLang, GreenNode, SyntaxNode, SyntaxToken, TextRange}; use parser::RootScope; -pub type TextRange = rowan::TextRange; - pub fn parse_source_file(text: &str) -> (GreenNode, Vec) { let lexer = lexer::Lexer::new(text); let mut parser = parser::Parser::new(lexer); diff --git a/crates/parser2/src/syntax_node.rs b/crates/parser2/src/syntax_node.rs index b1011cd9d5..70d47c4431 100644 --- a/crates/parser2/src/syntax_node.rs +++ b/crates/parser2/src/syntax_node.rs @@ -18,3 +18,5 @@ impl rowan::Language for FeLang { pub type SyntaxNode = rowan::SyntaxNode; pub type SyntaxToken = rowan::SyntaxToken; pub type GreenNode = rowan::GreenNode; +pub type TextRange = rowan::TextRange; +pub type NodeOrToken = rowan::NodeOrToken; From b191bebdbe35528ec56833cefd7ee7862b6f99e9 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 2 Apr 2023 23:18:37 +0200 Subject: [PATCH 119/678] Define lazy span for items --- crates/hir/src/hir_def/item.rs | 72 +++++++- crates/hir/src/span/attr.rs | 3 + crates/hir/src/span/item.rs | 285 ++++++++++++++++++++++++++++++++ crates/hir/src/span/mod.rs | 24 ++- crates/hir/src/span/params.rs | 12 +- crates/hir/src/span/path.rs | 3 + crates/hir/src/span/types.rs | 5 + crates/hir/src/span/use_tree.rs | 3 + 8 files changed, 398 insertions(+), 9 deletions(-) create mode 100644 crates/hir/src/span/attr.rs create mode 100644 crates/hir/src/span/path.rs create mode 100644 crates/hir/src/span/types.rs create mode 100644 crates/hir/src/span/use_tree.rs diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index a52464a384..5af1081abc 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -5,7 +5,17 @@ use parser::ast; -use crate::{hir_def::TraitRef, span::HirOrigin}; +use crate::{ + hir_def::TraitRef, + span::{ + item::{ + LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyExternFnSpan, LazyFnSpan, + LazyImplSpan, LazyImplTraitSpan, LazyStructSpan, LazyTopLevelModSpan, LazyTraitSpan, + LazyTypeAliasSpan, LazyUseSpan, + }, + HirOrigin, + }, +}; use super::{ AttrListId, Body, FnParamListId, GenericParamListId, IdentId, Partial, TypeId, WhereClauseId, @@ -39,6 +49,11 @@ pub struct TopLevelMod { #[return_ref] pub(crate) origin: HirOrigin, } +impl TopLevelMod { + pub fn lazy_span(self) -> LazyTopLevelModSpan { + LazyTopLevelModSpan::new(self) + } +} #[salsa::tracked] pub struct Mod { @@ -70,6 +85,11 @@ pub struct Fn { #[return_ref] pub(crate) origin: HirOrigin, } +impl Fn { + pub fn lazy_span(self) -> LazyFnSpan { + LazyFnSpan::new(self) + } +} #[salsa::tracked] pub struct ExternFn { @@ -85,6 +105,11 @@ pub struct ExternFn { #[return_ref] pub(crate) origin: HirOrigin, } +impl ExternFn { + pub fn lazy_span(self) -> LazyExternFnSpan { + 
LazyExternFnSpan::new(self) + } +} #[salsa::tracked] pub struct Struct { @@ -101,6 +126,11 @@ pub struct Struct { #[return_ref] pub(crate) origin: HirOrigin, } +impl Struct { + pub fn lazy_span(self) -> LazyStructSpan { + LazyStructSpan::new(self) + } +} #[salsa::tracked] pub struct Contract { @@ -115,6 +145,11 @@ pub struct Contract { #[return_ref] pub(crate) origin: HirOrigin, } +impl Contract { + pub fn lazy_span(self) -> LazyContractSpan { + LazyContractSpan::new(self) + } +} #[salsa::tracked] pub struct Enum { @@ -131,6 +166,11 @@ pub struct Enum { #[return_ref] pub(crate) origin: HirOrigin, } +impl Enum { + pub fn lazy_span(self) -> LazyEnumSpan { + LazyEnumSpan::new(self) + } +} #[salsa::tracked] pub struct TypeAlias { @@ -147,6 +187,11 @@ pub struct TypeAlias { #[return_ref] pub(crate) origin: HirOrigin, } +impl TypeAlias { + pub fn lazy_span(self) -> LazyTypeAliasSpan { + LazyTypeAliasSpan::new(self) + } +} #[salsa::tracked] pub struct Impl { @@ -161,6 +206,11 @@ pub struct Impl { #[return_ref] pub(crate) origin: HirOrigin, } +impl Impl { + pub fn lazy_span(self) -> LazyImplSpan { + LazyImplSpan::new(self) + } +} #[salsa::tracked] pub struct Trait { @@ -177,6 +227,11 @@ pub struct Trait { #[return_ref] pub(crate) origin: HirOrigin, } +impl Trait { + pub fn lazy_span(self) -> LazyTraitSpan { + LazyTraitSpan::new(self) + } +} #[salsa::tracked] pub struct ImplTrait { @@ -192,6 +247,11 @@ pub struct ImplTrait { #[return_ref] pub(crate) origin: HirOrigin, } +impl ImplTrait { + pub fn lazy_span(self) -> LazyImplTraitSpan { + LazyImplTraitSpan::new(self) + } +} #[salsa::tracked] pub struct Const { @@ -204,6 +264,11 @@ pub struct Const { #[return_ref] pub(crate) origin: HirOrigin, } +impl Const { + pub fn lazy_span(self) -> LazyConstSpan { + LazyConstSpan::new(self) + } +} #[salsa::tracked] pub struct Use { @@ -215,6 +280,11 @@ pub struct Use { #[return_ref] pub(crate) origin: HirOrigin, } +impl Use { + pub fn lazy_span(self) -> LazyUseSpan { + LazyUseSpan::new(self) + } +} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum ItemModifier { diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs new file mode 100644 index 0000000000..ff0c656500 --- /dev/null +++ b/crates/hir/src/span/attr.rs @@ -0,0 +1,3 @@ +use super::SpanTransitionChain; + +pub struct LazyAttrListSpan(pub(super) SpanTransitionChain); diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 8b13789179..e554d105d9 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -1 +1,286 @@ +use parser::{ast, ast::prelude::*, SyntaxNode}; +use crate::hir_def::{ + Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, + TypeAlias, Use, +}; + +use super::{ + attr::LazyAttrListSpan, + define_lazy_span_item, + params::{LazyFnParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, + types::{LazyPathTypeSpan, LazyTypeSpan}, + use_tree::LazyUseTreeSpan, + SpanTransitionChain, +}; + +define_lazy_span_item!(LazyTopLevelModSpan); +impl LazyTopLevelModSpan { + pub fn new(top_mod: TopLevelMod) -> Self { + Self(SpanTransitionChain::new(top_mod.into())) + } +} + +define_lazy_span_item!(LazyModSpan); +impl LazyModSpan { + pub fn new(mod_: Mod) -> Self { + Self(SpanTransitionChain::new(mod_.into())) + } + + span_impl_tokens!(ast::Mod, (name, name)); + span_impl_nodes!( + ast::Mod, + (attributes, attr_list, LazyAttrListSpan), + (modifier, modifier, LazyItemModifierSpan), + ); +} + +define_lazy_span_item!(LazyFnSpan); +impl LazyFnSpan { + pub fn 
new(fn_: Fn) -> Self { + Self(SpanTransitionChain::new(fn_.into())) + } + + span_impl_tokens!(ast::Fn, (name, name)); + span_impl_nodes!( + ast::Fn, + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + (params, params, LazyFnParamListSpan), + (ret_ty, ret_ty, LazyTypeSpan), + ); +} + +define_lazy_span_item!(LazyExternFnSpan); +impl LazyExternFnSpan { + pub fn new(fn_: ExternFn) -> Self { + Self(SpanTransitionChain::new(fn_.into())) + } + + span_impl_tokens!(ast::Fn, (name, name)); + span_impl_nodes!( + ast::Fn, + (attributes, attr_list, LazyAttrListSpan), + (modifier, modifier, LazyItemModifierSpan), + (params, params, LazyFnParamListSpan), + (ret_ty, ret_ty, LazyTypeSpan), + ); +} + +define_lazy_span_item!(LazyStructSpan); +impl LazyStructSpan { + pub fn new(struct_: Struct) -> Self { + Self(SpanTransitionChain::new(struct_.into())) + } + + span_impl_tokens!(ast::Struct, (name, name)); + span_impl_nodes!( + ast::Struct, + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + (fields, fields, LazyRecordFieldListSpan), + ); +} + +define_lazy_span_item!(LazyContractSpan); +impl LazyContractSpan { + pub fn new(contract: Contract) -> Self { + Self(SpanTransitionChain::new(contract.into())) + } + + span_impl_tokens!(ast::Contract, (name, name)); + span_impl_nodes!( + ast::Contract, + (attributes, attr_list, LazyAttrListSpan), + (modifier, modifier, LazyItemModifierSpan), + (fields, fields, LazyRecordFieldListSpan), + ); +} + +define_lazy_span_item!(LazyEnumSpan); +impl LazyEnumSpan { + pub fn new(enum_: Enum) -> Self { + Self(SpanTransitionChain::new(enum_.into())) + } + + span_impl_tokens!(ast::Enum, (name, name)); + span_impl_nodes!( + ast::Enum, + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + (variants, variants, LazyEnumVariantListSpan), + ); +} + +define_lazy_span_item!(LazyTypeAliasSpan); +impl LazyTypeAliasSpan { + pub fn new(alias: TypeAlias) -> Self { + Self(SpanTransitionChain::new(alias.into())) + } + + span_impl_tokens!(ast::TypeAlias, (alias, alias)); + span_impl_nodes!( + ast::TypeAlias, + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + (ty, ty, LazyTypeSpan) + ); +} + +define_lazy_span_item!(LazyImplSpan); +impl LazyImplSpan { + pub fn new(impl_: Impl) -> Self { + Self(SpanTransitionChain::new(impl_.into())) + } + + span_impl_nodes!( + ast::Impl, + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (target_ty, ty, LazyTypeSpan), + ); +} + +define_lazy_span_item!(LazyTraitSpan); +impl LazyTraitSpan { + pub fn new(trait_: Trait) -> Self { + Self(SpanTransitionChain::new(trait_.into())) + } + + span_impl_tokens!(ast::Trait, (name, name)); + span_impl_nodes!( + ast::Trait, + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, 
LazyItemModifierSpan), + ); +} + +define_lazy_span_item!(LazyImplTraitSpan); +impl LazyImplTraitSpan { + pub fn new(impl_trait: ImplTrait) -> Self { + Self(SpanTransitionChain::new(impl_trait.into())) + } + + span_impl_nodes!( + ast::ImplTrait, + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (trait_ref, trait_ref, LazyPathTypeSpan), + (ty, ty, LazyTypeSpan), + ); +} + +define_lazy_span_item!(LazyConstSpan); +impl LazyConstSpan { + pub fn new(const_: Const) -> Self { + Self(SpanTransitionChain::new(const_.into())) + } + + span_impl_tokens!(ast::Const, (name, name)); + span_impl_nodes!( + ast::Const, + (attributes, attr_list, LazyAttrListSpan), + (ty, ty, LazyTypeSpan), + ); +} + +define_lazy_span_item!(LazyUseSpan); +impl LazyUseSpan { + pub fn new(use_: Use) -> Self { + Self(SpanTransitionChain::new(use_.into())) + } + + span_impl_nodes!( + ast::Use, + (attributes, attr_list, LazyAttrListSpan), + (use_tree, use_tree, LazyUseTreeSpan), + ); +} + +define_lazy_span_item!(LazyRecordFieldListSpan); +impl LazyRecordFieldListSpan { + pub fn field(&self, idx: usize) -> LazyRecordFieldSpan { + let transition = move |node: SyntaxNode| { + ast::RecordFieldList::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + LazyRecordFieldSpan(self.0.push_state(std::sync::Arc::new(transition))) + } +} + +define_lazy_span_item!(LazyRecordFieldSpan); +impl LazyRecordFieldSpan { + span_impl_tokens!(ast::RecordFieldDef, (pub_kw, pub_kw), (name, name),); + span_impl_nodes!(ast::RecordFieldDef, (ty, ty, LazyTypeSpan)); +} + +define_lazy_span_item!(LazyEnumVariantListSpan); +impl LazyEnumVariantListSpan { + pub fn field(&self, idx: usize) -> LazyEnumVariantSpan { + let transition = move |node: SyntaxNode| { + ast::EnumVariantDefList::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + LazyEnumVariantSpan(self.0.push_state(std::sync::Arc::new(transition))) + } +} + +define_lazy_span_item!(LazyEnumVariantSpan); +impl LazyEnumVariantSpan { + span_impl_tokens!(ast::EnumVariantDef, (name, name)); + span_impl_nodes!(ast::EnumVariantDef, (ty, ty, LazyTypeSpan)); +} + +define_lazy_span_item!(LazyItemModifierSpan); +impl LazyItemModifierSpan { + span_impl_tokens!(ast::ItemModifier, (pub_kw, pub_kw), (unsafe_kw, unsafe_kw)); +} + +macro_rules! span_impl_tokens { + ($parent: ty, $(($name:ident, $getter:ident)),* $(,)*) => { + $( + pub fn $name(&self) -> crate::span::LazyTokenSpan { + let transition = |node: SyntaxNode| { + <$parent as AstNode>::cast(node) + .and_then(|n| n.$getter()) + .map(|n| n.into()) + }; + crate::span::LazyTokenSpan( + self.0.push_state(std::sync::Arc::new(transition)) + ) + } + )* + }; +} + +macro_rules! 
span_impl_nodes { + ($parent: ty, $(($name:ident, $getter:ident, $result:tt)),* $(,)*) => { + $( + pub fn $name(&self) -> $result { + let transition = |node: SyntaxNode| { + <$parent as AstNode>::cast(node) + .and_then(|f| f.$getter()) + .map(|n| n.syntax().clone().into()) + }; + $result(self.0.push_state(std::sync::Arc::new(transition))) + } + )* + }; +} + +use span_impl_nodes; +use span_impl_tokens; diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 4913a8587d..666538d019 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use parser::{ ast::{self, prelude::*, AstPtr, SyntaxNodePtr}, syntax_node::NodeOrToken, @@ -11,8 +13,13 @@ use crate::{hir_def::ItemKind, parse_file}; use self::db::SpannedHirDb; +pub mod attr; pub mod db; pub mod item; +pub mod params; +pub mod path; +pub mod types; +pub mod use_tree; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HirOrigin @@ -126,14 +133,14 @@ impl AugAssignDesugared { /// The trait provides a way to extract [`Span`] from types which don't have a /// span information directly. -pub trait SpanSeed { +pub trait LazySpan { fn span(self, db: &dyn SpannedHirDb) -> Span; } -type TransitionFn = fn(SyntaxNode) -> Option; +type TransitionFn = Arc Option>; #[derive(Clone)] -struct SpanTransitionChain { +pub(super) struct SpanTransitionChain { root: ItemKind, chain: SmallVec<[TransitionFn; 4]>, } @@ -153,7 +160,7 @@ impl SpanTransitionChain { } } -impl SpanSeed for SpanTransitionChain { +impl LazySpan for SpanTransitionChain { fn span(self, db: &dyn SpannedHirDb) -> Span { let (file, ptr) = match self.root { ItemKind::TopMod(top_level_mod) => { @@ -246,9 +253,11 @@ impl SpanSeed for SpanTransitionChain { } } -macro_rules! impl_item_span_seed { +macro_rules! define_lazy_span_item { ($name:ident) => { - impl crate::span::SpanSeed for $name { + #[derive(Clone)] + pub struct $name(pub(super) crate::span::SpanTransitionChain); + impl crate::span::LazySpan for $name { fn span(self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { self.0.span(db) } @@ -256,4 +265,5 @@ macro_rules! 
impl_item_span_seed { }; } -use impl_item_span_seed; +use define_lazy_span_item; +define_lazy_span_item!(LazyTokenSpan); diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 3981547806..94eec6efc6 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -1 +1,11 @@ -pub struct \ No newline at end of file +use super::SpanTransitionChain; + +pub struct LazyGenericParamListSpan(pub(super) SpanTransitionChain); + +pub struct LazyGenericArgListSpan(pub(super) SpanTransitionChain); + +pub struct LazyWhereClauseSpan(pub(super) SpanTransitionChain); + +pub struct TypeGenericParamListSpan(pub(super) SpanTransitionChain); + +pub struct LazyFnParamListSpan(pub(super) SpanTransitionChain); diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs new file mode 100644 index 0000000000..814ecb8a6f --- /dev/null +++ b/crates/hir/src/span/path.rs @@ -0,0 +1,3 @@ +use super::define_lazy_span_item; + +define_lazy_span_item!(LazyPathSpan); diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs new file mode 100644 index 0000000000..882ba0db93 --- /dev/null +++ b/crates/hir/src/span/types.rs @@ -0,0 +1,5 @@ +use super::define_lazy_span_item; + +define_lazy_span_item!(LazyTypeSpan); + +define_lazy_span_item!(LazyPathTypeSpan); diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs new file mode 100644 index 0000000000..013207cac2 --- /dev/null +++ b/crates/hir/src/span/use_tree.rs @@ -0,0 +1,3 @@ +use super::define_lazy_span_item; + +define_lazy_span_item!(LazyUseTreeSpan); From 77406aa7b2ec8a05140aa6634c6797d8b58ad83b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 4 Apr 2023 23:45:39 +0200 Subject: [PATCH 120/678] Add lazy span for Path --- crates/hir/src/span/path.rs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs index 814ecb8a6f..27cafcb9da 100644 --- a/crates/hir/src/span/path.rs +++ b/crates/hir/src/span/path.rs @@ -1,3 +1,20 @@ +use parser::{ + ast::{self, prelude::*}, + SyntaxNode, +}; + use super::define_lazy_span_item; define_lazy_span_item!(LazyPathSpan); +impl LazyPathSpan { + pub fn segment(&self, idx: usize) -> LazyPathSegmentSpan { + let transition = move |node: SyntaxNode| { + ast::RecordFieldList::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + LazyPathSegmentSpan(self.0.push_state(std::sync::Arc::new(transition))) + } +} + +define_lazy_span_item!(LazyPathSegmentSpan); From 34cb2e91b8838f95d896e80892df61791f328519 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 5 Apr 2023 00:12:15 +0200 Subject: [PATCH 121/678] Add lazy span for use tree --- crates/hir/src/diagnostics.rs | 2 +- crates/hir/src/span/item.rs | 36 +-------------------------- crates/hir/src/span/mod.rs | 37 +++++++++++++++++++++++++++- crates/hir/src/span/path.rs | 2 +- crates/hir/src/span/use_tree.rs | 43 ++++++++++++++++++++++++++++++++- 5 files changed, 81 insertions(+), 39 deletions(-) diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index eb95e10fd6..96a7e539ee 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -16,7 +16,7 @@ use crate::span::db::SpannedHirDb; /// /// The utility structs for conversion from HIR-spanless types to nodes are /// defined in [`crate::span`] module. 
-pub trait DiagnosticVoucher { +pub trait DiagnosticVoucher: Send { fn pass(&self) -> GlobalErrorCode; /// Consumes voucher and makes a [`CompleteDiagnostic`]. fn consume(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index e554d105d9..ebede38ac6 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -9,6 +9,7 @@ use super::{ attr::LazyAttrListSpan, define_lazy_span_item, params::{LazyFnParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, + span_impl_nodes, span_impl_tokens, types::{LazyPathTypeSpan, LazyTypeSpan}, use_tree::LazyUseTreeSpan, SpanTransitionChain, @@ -249,38 +250,3 @@ define_lazy_span_item!(LazyItemModifierSpan); impl LazyItemModifierSpan { span_impl_tokens!(ast::ItemModifier, (pub_kw, pub_kw), (unsafe_kw, unsafe_kw)); } - -macro_rules! span_impl_tokens { - ($parent: ty, $(($name:ident, $getter:ident)),* $(,)*) => { - $( - pub fn $name(&self) -> crate::span::LazyTokenSpan { - let transition = |node: SyntaxNode| { - <$parent as AstNode>::cast(node) - .and_then(|n| n.$getter()) - .map(|n| n.into()) - }; - crate::span::LazyTokenSpan( - self.0.push_state(std::sync::Arc::new(transition)) - ) - } - )* - }; -} - -macro_rules! span_impl_nodes { - ($parent: ty, $(($name:ident, $getter:ident, $result:tt)),* $(,)*) => { - $( - pub fn $name(&self) -> $result { - let transition = |node: SyntaxNode| { - <$parent as AstNode>::cast(node) - .and_then(|f| f.$getter()) - .map(|n| n.syntax().clone().into()) - }; - $result(self.0.push_state(std::sync::Arc::new(transition))) - } - )* - }; -} - -use span_impl_nodes; -use span_impl_tokens; diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 666538d019..56dccf20ca 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -253,6 +253,8 @@ impl LazySpan for SpanTransitionChain { } } +define_lazy_span_item!(LazyTokenSpan); + macro_rules! define_lazy_span_item { ($name:ident) => { #[derive(Clone)] @@ -265,5 +267,38 @@ macro_rules! define_lazy_span_item { }; } +macro_rules! span_impl_tokens { + ($parent: ty, $(($name:ident, $getter:ident)),* $(,)*) => { + $( + pub fn $name(&self) -> crate::span::LazyTokenSpan { + let transition = |node: SyntaxNode| { + <$parent as AstNode>::cast(node) + .and_then(|n| n.$getter()) + .map(|n| n.into()) + }; + crate::span::LazyTokenSpan( + self.0.push_state(std::sync::Arc::new(transition)) + ) + } + )* + }; +} + +macro_rules! 
span_impl_nodes { + ($parent: ty, $(($name:ident, $getter:ident, $result:tt)),* $(,)*) => { + $( + pub fn $name(&self) -> $result { + let transition = |node: parser::SyntaxNode| { + <$parent as AstNode>::cast(node) + .and_then(|f| f.$getter()) + .map(|n| n.syntax().clone().into()) + }; + $result(self.0.push_state(std::sync::Arc::new(transition))) + } + )* + }; +} + use define_lazy_span_item; -define_lazy_span_item!(LazyTokenSpan); +use span_impl_nodes; +use span_impl_tokens; diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs index 27cafcb9da..538bbd78dd 100644 --- a/crates/hir/src/span/path.rs +++ b/crates/hir/src/span/path.rs @@ -9,7 +9,7 @@ define_lazy_span_item!(LazyPathSpan); impl LazyPathSpan { pub fn segment(&self, idx: usize) -> LazyPathSegmentSpan { let transition = move |node: SyntaxNode| { - ast::RecordFieldList::cast(node) + ast::Path::cast(node) .and_then(|f| f.into_iter().nth(idx)) .map(|n| n.syntax().clone().into()) }; diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index 013207cac2..4852cc2855 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -1,3 +1,44 @@ -use super::define_lazy_span_item; +use parser::{ + ast::{self, prelude::*}, + SyntaxNode, +}; + +use super::{define_lazy_span_item, span_impl_nodes}; define_lazy_span_item!(LazyUseTreeSpan); +impl LazyUseTreeSpan { + span_impl_nodes!( + ast::UseTree, + (path, path, LazyUsePathSpan), + (subtree, children, LazySubUseTreeSpan), + (alias, alias, LazyUseTreeAliasSpan), + ); +} + +define_lazy_span_item!(LazyUsePathSpan); +impl LazyUsePathSpan { + pub fn segment(&self, idx: usize) -> LazyUsePathSegmentSpan { + let transition = move |node: SyntaxNode| { + ast::UsePath::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + LazyUsePathSegmentSpan(self.0.push_state(std::sync::Arc::new(transition))) + } +} + +define_lazy_span_item!(LazyUsePathSegmentSpan); + +define_lazy_span_item!(LazySubUseTreeSpan); +impl LazySubUseTreeSpan { + pub fn subtree(&self, idx: usize) -> LazyUseTreeSpan { + let transition = move |node: SyntaxNode| { + ast::UseTreeList::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + LazyUseTreeSpan(self.0.push_state(std::sync::Arc::new(transition))) + } +} + +define_lazy_span_item!(LazyUseTreeAliasSpan); From 8f2cccb7f692963117237e232a1754c9d6b3243a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 5 Apr 2023 15:41:02 +0200 Subject: [PATCH 122/678] Improve `define_lazy_span_item` --- crates/hir/src/span/attr.rs | 4 +- crates/hir/src/span/item.rs | 333 +++++++++++++++----------------- crates/hir/src/span/mod.rs | 94 +++++---- crates/hir/src/span/params.rs | 15 +- crates/hir/src/span/path.rs | 21 +- crates/hir/src/span/types.rs | 1 - crates/hir/src/span/use_tree.rs | 52 ++--- 7 files changed, 252 insertions(+), 268 deletions(-) diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index ff0c656500..a4a100cc3b 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -1,3 +1,3 @@ -use super::SpanTransitionChain; +use super::define_lazy_span_item; -pub struct LazyAttrListSpan(pub(super) SpanTransitionChain); +define_lazy_span_item!(LazyAttrListSpan); diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index ebede38ac6..e14d6b5cdb 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -9,244 +9,227 @@ use super::{ attr::LazyAttrListSpan, define_lazy_span_item, 
params::{LazyFnParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, - span_impl_nodes, span_impl_tokens, types::{LazyPathTypeSpan, LazyTypeSpan}, use_tree::LazyUseTreeSpan, - SpanTransitionChain, }; -define_lazy_span_item!(LazyTopLevelModSpan); -impl LazyTopLevelModSpan { - pub fn new(top_mod: TopLevelMod) -> Self { - Self(SpanTransitionChain::new(top_mod.into())) - } -} +define_lazy_span_item!(LazyTopLevelModSpan, ast::Root, new(TopLevelMod),); -define_lazy_span_item!(LazyModSpan); -impl LazyModSpan { - pub fn new(mod_: Mod) -> Self { - Self(SpanTransitionChain::new(mod_.into())) +define_lazy_span_item!( + LazyModSpan, + ast::Mod, + new(Mod), + @token + { + (name, name), } - - span_impl_tokens!(ast::Mod, (name, name)); - span_impl_nodes!( - ast::Mod, + @node + { (attributes, attr_list, LazyAttrListSpan), (modifier, modifier, LazyItemModifierSpan), - ); -} - -define_lazy_span_item!(LazyFnSpan); -impl LazyFnSpan { - pub fn new(fn_: Fn) -> Self { - Self(SpanTransitionChain::new(fn_.into())) } - - span_impl_tokens!(ast::Fn, (name, name)); - span_impl_nodes!( - ast::Fn, +); + +define_lazy_span_item!( + LazyFnSpan, + ast::Fn, + new(Fn), + @token { + (name, name), + } + @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), (params, params, LazyFnParamListSpan), (ret_ty, ret_ty, LazyTypeSpan), - ); -} - -define_lazy_span_item!(LazyExternFnSpan); -impl LazyExternFnSpan { - pub fn new(fn_: ExternFn) -> Self { - Self(SpanTransitionChain::new(fn_.into())) } - - span_impl_tokens!(ast::Fn, (name, name)); - span_impl_nodes!( - ast::Fn, +); + +define_lazy_span_item!( + LazyExternFnSpan, + ast::Fn, + new(ExternFn), + @token { + (name, name), + } + @node { (attributes, attr_list, LazyAttrListSpan), (modifier, modifier, LazyItemModifierSpan), (params, params, LazyFnParamListSpan), (ret_ty, ret_ty, LazyTypeSpan), - ); -} - -define_lazy_span_item!(LazyStructSpan); -impl LazyStructSpan { - pub fn new(struct_: Struct) -> Self { - Self(SpanTransitionChain::new(struct_.into())) } - - span_impl_tokens!(ast::Struct, (name, name)); - span_impl_nodes!( - ast::Struct, +); + +define_lazy_span_item!( + LazyStructSpan, + ast::Struct, + new(Struct), + @token { + (name, name), + } + @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), (fields, fields, LazyRecordFieldListSpan), - ); -} - -define_lazy_span_item!(LazyContractSpan); -impl LazyContractSpan { - pub fn new(contract: Contract) -> Self { - Self(SpanTransitionChain::new(contract.into())) } - - span_impl_tokens!(ast::Contract, (name, name)); - span_impl_nodes!( - ast::Contract, +); + +define_lazy_span_item!( + LazyContractSpan, + ast::Contract, + new(Contract), + @token { + (name, name), + } + @node { (attributes, attr_list, LazyAttrListSpan), (modifier, modifier, LazyItemModifierSpan), (fields, fields, LazyRecordFieldListSpan), - ); -} - -define_lazy_span_item!(LazyEnumSpan); -impl LazyEnumSpan { - pub fn new(enum_: Enum) -> Self { - Self(SpanTransitionChain::new(enum_.into())) } - - span_impl_tokens!(ast::Enum, (name, name)); - span_impl_nodes!( - ast::Enum, +); + +define_lazy_span_item!( + LazyEnumSpan, + ast::Enum, + new(Enum), + @token { + (name, name), + } + @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, 
LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), (variants, variants, LazyEnumVariantListSpan), - ); -} - -define_lazy_span_item!(LazyTypeAliasSpan); -impl LazyTypeAliasSpan { - pub fn new(alias: TypeAlias) -> Self { - Self(SpanTransitionChain::new(alias.into())) } - - span_impl_tokens!(ast::TypeAlias, (alias, alias)); - span_impl_nodes!( - ast::TypeAlias, +); + +define_lazy_span_item!( + LazyTypeAliasSpan, + ast::TypeAlias, + new(TypeAlias), + @token { + (alias, alias), + } + @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), - (ty, ty, LazyTypeSpan) - ); -} - -define_lazy_span_item!(LazyImplSpan); -impl LazyImplSpan { - pub fn new(impl_: Impl) -> Self { - Self(SpanTransitionChain::new(impl_.into())) + (ty, ty, LazyTypeSpan), } +); - span_impl_nodes!( - ast::Impl, +define_lazy_span_item!( + LazyImplSpan, + ast::Impl, + new(Impl), + @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (target_ty, ty, LazyTypeSpan), - ); -} - -define_lazy_span_item!(LazyTraitSpan); -impl LazyTraitSpan { - pub fn new(trait_: Trait) -> Self { - Self(SpanTransitionChain::new(trait_.into())) } - - span_impl_tokens!(ast::Trait, (name, name)); - span_impl_nodes!( - ast::Trait, +); +define_lazy_span_item!( + LazyTraitSpan, + ast::Trait, + new(Trait), + @token { + (name, name), + } + @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), - ); -} - -define_lazy_span_item!(LazyImplTraitSpan); -impl LazyImplTraitSpan { - pub fn new(impl_trait: ImplTrait) -> Self { - Self(SpanTransitionChain::new(impl_trait.into())) } +); - span_impl_nodes!( - ast::ImplTrait, +define_lazy_span_item!( + LazyImplTraitSpan, + ast::ImplTrait, + new(ImplTrait), + @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (trait_ref, trait_ref, LazyPathTypeSpan), (ty, ty, LazyTypeSpan), - ); -} - -define_lazy_span_item!(LazyConstSpan); -impl LazyConstSpan { - pub fn new(const_: Const) -> Self { - Self(SpanTransitionChain::new(const_.into())) } - - span_impl_tokens!(ast::Const, (name, name)); - span_impl_nodes!( - ast::Const, +); + +define_lazy_span_item!( + LazyConstSpan, + ast::Const, + new(Const), + @token { + (name, name), + } + @node { (attributes, attr_list, LazyAttrListSpan), (ty, ty, LazyTypeSpan), - ); -} - -define_lazy_span_item!(LazyUseSpan); -impl LazyUseSpan { - pub fn new(use_: Use) -> Self { - Self(SpanTransitionChain::new(use_.into())) } +); - span_impl_nodes!( - ast::Use, +define_lazy_span_item!( + LazyUseSpan, + ast::Use, + new(Use), + @node { (attributes, attr_list, LazyAttrListSpan), (use_tree, use_tree, LazyUseTreeSpan), - ); -} - -define_lazy_span_item!(LazyRecordFieldListSpan); -impl LazyRecordFieldListSpan { - pub fn field(&self, idx: usize) -> LazyRecordFieldSpan { - let transition = move |node: SyntaxNode| { - ast::RecordFieldList::cast(node) - .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into()) - }; - LazyRecordFieldSpan(self.0.push_state(std::sync::Arc::new(transition))) - } -} - 
-define_lazy_span_item!(LazyRecordFieldSpan); -impl LazyRecordFieldSpan { - span_impl_tokens!(ast::RecordFieldDef, (pub_kw, pub_kw), (name, name),); - span_impl_nodes!(ast::RecordFieldDef, (ty, ty, LazyTypeSpan)); -} - -define_lazy_span_item!(LazyEnumVariantListSpan); -impl LazyEnumVariantListSpan { - pub fn field(&self, idx: usize) -> LazyEnumVariantSpan { - let transition = move |node: SyntaxNode| { - ast::EnumVariantDefList::cast(node) - .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into()) - }; - LazyEnumVariantSpan(self.0.push_state(std::sync::Arc::new(transition))) - } -} - -define_lazy_span_item!(LazyEnumVariantSpan); -impl LazyEnumVariantSpan { - span_impl_tokens!(ast::EnumVariantDef, (name, name)); - span_impl_nodes!(ast::EnumVariantDef, (ty, ty, LazyTypeSpan)); -} - -define_lazy_span_item!(LazyItemModifierSpan); -impl LazyItemModifierSpan { - span_impl_tokens!(ast::ItemModifier, (pub_kw, pub_kw), (unsafe_kw, unsafe_kw)); -} + } +); + +define_lazy_span_item!( + LazyRecordFieldListSpan, + ast::RecordFieldList, + @idx { + (field, LazyRecordFieldListSpan), + } +); + +define_lazy_span_item!( + LazyRecordFieldSpan, + ast::RecordFieldDef, + @token { + (pub_kw, pub_kw), + (name, name), + } + @node { + (ty, ty, LazyTypeSpan), + } +); + +define_lazy_span_item!( + LazyEnumVariantListSpan, + ast::EnumVariantDefList, + @idx { + (variant, LazyEnumVariantSpan), + } +); + +define_lazy_span_item!( + LazyEnumVariantSpan, + ast::EnumVariantDef, + @token { + (name, name), + } + @node { + (ty, ty, LazyTypeSpan), + } +); + +define_lazy_span_item!( + LazyItemModifierSpan, + ast::ItemModifier, + @token { + (pub_kw, pub_kw), + (unsafe_kw, unsafe_kw), + } +); diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 56dccf20ca..155429c2af 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -256,9 +256,67 @@ impl LazySpan for SpanTransitionChain { define_lazy_span_item!(LazyTokenSpan); macro_rules! define_lazy_span_item { - ($name:ident) => { + ( + $name:ident + $(, + $sk_node: ty + $(, + $(new($hir_ty:ty),)? + $(@token {$(($name_token:ident, $getter_token:ident),)*})? + $(@node {$(($name_node:ident, $getter_node:ident, $result:tt),)*})? + $(@idx { $(($name_iter:ident, $result_iter:tt),)*})? + $(,)? + )? + )? + ) => { #[derive(Clone)] pub struct $name(pub(super) crate::span::SpanTransitionChain); + $( + $( + impl $name { + + $(pub fn new(hir: $hir_ty) -> Self { + Self(crate::span::SpanTransitionChain::new(hir.into())) + })? + + $($( + pub fn $name_token(&self) -> crate::span::LazyTokenSpan { + let transition = |node: SyntaxNode| { + <$sk_node as AstNode>::cast(node) + .and_then(|n| n.$getter_token()) + .map(|n| n.into()) + }; + crate::span::LazyTokenSpan( + self.0.push_state(std::sync::Arc::new(transition)) + ) + } + )*)? + + $($( + pub fn $name_node(&self) -> $result{ + let transition = |node: parser::SyntaxNode| { + <$sk_node as AstNode>::cast(node) + .and_then(|f| f.$getter_node()) + .map(|n| n.syntax().clone().into()) + }; + $result(self.0.push_state(std::sync::Arc::new(transition))) + } + )*)? + + $($( + + pub fn $name_iter(&self, idx: usize) -> $result_iter { + let transition = move |node: parser::SyntaxNode| { + <$sk_node as AstNode>::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + $result_iter(self.0.push_state(std::sync::Arc::new(transition))) + } + )*)? + })?)? 
+ + impl crate::span::LazySpan for $name { fn span(self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { self.0.span(db) @@ -267,38 +325,4 @@ macro_rules! define_lazy_span_item { }; } -macro_rules! span_impl_tokens { - ($parent: ty, $(($name:ident, $getter:ident)),* $(,)*) => { - $( - pub fn $name(&self) -> crate::span::LazyTokenSpan { - let transition = |node: SyntaxNode| { - <$parent as AstNode>::cast(node) - .and_then(|n| n.$getter()) - .map(|n| n.into()) - }; - crate::span::LazyTokenSpan( - self.0.push_state(std::sync::Arc::new(transition)) - ) - } - )* - }; -} - -macro_rules! span_impl_nodes { - ($parent: ty, $(($name:ident, $getter:ident, $result:tt)),* $(,)*) => { - $( - pub fn $name(&self) -> $result { - let transition = |node: parser::SyntaxNode| { - <$parent as AstNode>::cast(node) - .and_then(|f| f.$getter()) - .map(|n| n.syntax().clone().into()) - }; - $result(self.0.push_state(std::sync::Arc::new(transition))) - } - )* - }; -} - use define_lazy_span_item; -use span_impl_nodes; -use span_impl_tokens; diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 94eec6efc6..fedd42e2ba 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -1,11 +1,6 @@ -use super::SpanTransitionChain; +use super::define_lazy_span_item; -pub struct LazyGenericParamListSpan(pub(super) SpanTransitionChain); - -pub struct LazyGenericArgListSpan(pub(super) SpanTransitionChain); - -pub struct LazyWhereClauseSpan(pub(super) SpanTransitionChain); - -pub struct TypeGenericParamListSpan(pub(super) SpanTransitionChain); - -pub struct LazyFnParamListSpan(pub(super) SpanTransitionChain); +define_lazy_span_item!(LazyFnParamListSpan); +define_lazy_span_item!(LazyGenericParamListSpan); +define_lazy_span_item!(LazyGenericArgListSpan); +define_lazy_span_item!(LazyWhereClauseSpan); diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs index 538bbd78dd..2b83b4aae7 100644 --- a/crates/hir/src/span/path.rs +++ b/crates/hir/src/span/path.rs @@ -1,20 +1,13 @@ -use parser::{ - ast::{self, prelude::*}, - SyntaxNode, -}; +use parser::ast::{self, prelude::*}; use super::define_lazy_span_item; -define_lazy_span_item!(LazyPathSpan); -impl LazyPathSpan { - pub fn segment(&self, idx: usize) -> LazyPathSegmentSpan { - let transition = move |node: SyntaxNode| { - ast::Path::cast(node) - .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into()) - }; - LazyPathSegmentSpan(self.0.push_state(std::sync::Arc::new(transition))) +define_lazy_span_item!( + LazyPathSpan, + ast::Path, + @idx { + (segment, LazyPathSegmentSpan), } -} +); define_lazy_span_item!(LazyPathSegmentSpan); diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs index 882ba0db93..8616cc71c0 100644 --- a/crates/hir/src/span/types.rs +++ b/crates/hir/src/span/types.rs @@ -1,5 +1,4 @@ use super::define_lazy_span_item; define_lazy_span_item!(LazyTypeSpan); - define_lazy_span_item!(LazyPathTypeSpan); diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index 4852cc2855..518b0a20f3 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -1,44 +1,34 @@ -use parser::{ - ast::{self, prelude::*}, - SyntaxNode, -}; +use parser::ast::{self, prelude::*}; -use super::{define_lazy_span_item, span_impl_nodes}; +use super::define_lazy_span_item; -define_lazy_span_item!(LazyUseTreeSpan); -impl LazyUseTreeSpan { - span_impl_nodes!( - ast::UseTree, +define_lazy_span_item!( + LazyUseTreeSpan, + ast::UseTree, + @node { 
(path, path, LazyUsePathSpan), (subtree, children, LazySubUseTreeSpan), (alias, alias, LazyUseTreeAliasSpan), - ); -} + } +); -define_lazy_span_item!(LazyUsePathSpan); -impl LazyUsePathSpan { - pub fn segment(&self, idx: usize) -> LazyUsePathSegmentSpan { - let transition = move |node: SyntaxNode| { - ast::UsePath::cast(node) - .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into()) - }; - LazyUsePathSegmentSpan(self.0.push_state(std::sync::Arc::new(transition))) +define_lazy_span_item!( + LazyUsePathSpan, + ast::UsePath, + @idx { + (segment, LazyUsePathSegmentSpan), } -} + +); define_lazy_span_item!(LazyUsePathSegmentSpan); -define_lazy_span_item!(LazySubUseTreeSpan); -impl LazySubUseTreeSpan { - pub fn subtree(&self, idx: usize) -> LazyUseTreeSpan { - let transition = move |node: SyntaxNode| { - ast::UseTreeList::cast(node) - .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into()) - }; - LazyUseTreeSpan(self.0.push_state(std::sync::Arc::new(transition))) +define_lazy_span_item!( + LazySubUseTreeSpan, + ast::UseTreeList, + @idx { + (segment, LazyUseTreeSpan), } -} +); define_lazy_span_item!(LazyUseTreeAliasSpan); From b0eb9474ba388f650c169f3529ca74376d768bfc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 5 Apr 2023 16:00:16 +0200 Subject: [PATCH 123/678] Add lazy span for params --- crates/hir/src/span/item.rs | 2 +- crates/hir/src/span/mod.rs | 5 +- crates/hir/src/span/params.rs | 86 +++++++++++++++++++++++++++++++-- crates/hir/src/span/path.rs | 2 +- crates/hir/src/span/use_tree.rs | 2 +- crates/parser2/src/ast/param.rs | 17 +++++++ 6 files changed, 105 insertions(+), 9 deletions(-) diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index e14d6b5cdb..da1d452db5 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -1,4 +1,4 @@ -use parser::{ast, ast::prelude::*, SyntaxNode}; +use parser::ast; use crate::hir_def::{ Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 155429c2af..f9e18b864c 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -281,7 +281,8 @@ macro_rules! define_lazy_span_item { $($( pub fn $name_token(&self) -> crate::span::LazyTokenSpan { - let transition = |node: SyntaxNode| { + use parser::ast::prelude::*; + let transition = |node: parser::SyntaxNode| { <$sk_node as AstNode>::cast(node) .and_then(|n| n.$getter_token()) .map(|n| n.into()) @@ -294,6 +295,7 @@ macro_rules! define_lazy_span_item { $($( pub fn $name_node(&self) -> $result{ + use parser::ast::prelude::*; let transition = |node: parser::SyntaxNode| { <$sk_node as AstNode>::cast(node) .and_then(|f| f.$getter_node()) @@ -306,6 +308,7 @@ macro_rules! 
define_lazy_span_item { $($( pub fn $name_iter(&self, idx: usize) -> $result_iter { + use parser::ast::prelude::*; let transition = move |node: parser::SyntaxNode| { <$sk_node as AstNode>::cast(node) .and_then(|f| f.into_iter().nth(idx)) diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index fedd42e2ba..13dd436043 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -1,6 +1,82 @@ -use super::define_lazy_span_item; +use parser::ast; -define_lazy_span_item!(LazyFnParamListSpan); -define_lazy_span_item!(LazyGenericParamListSpan); -define_lazy_span_item!(LazyGenericArgListSpan); -define_lazy_span_item!(LazyWhereClauseSpan); +use crate::span::path::LazyPathSpan; + +use super::{define_lazy_span_item, types::LazyTypeSpan}; + +define_lazy_span_item!( + LazyFnParamListSpan, + ast::FnParamList, + @idx { + (param, LazyFnParamSpan), + } +); + +define_lazy_span_item!( + LazyGenericParamListSpan, + ast::GenericParamList, + @idx { + (param, LazyGenericParamSpan), + } +); + +define_lazy_span_item!( + LazyGenericArgListSpan, + ast::GenericArgList, + @idx { + (param, LazyGenericArgParamSpan), + } + +); +define_lazy_span_item!( + LazyWhereClauseSpan, + ast::WhereClause, + @idx { + (predicate, LazyWherePredicateSpan), + } + +); + +define_lazy_span_item!( + LazyFnParamSpan, + ast::FnParam, + @token { + (mut_kw, mut_token), + } + @node { + (label, label, LazyFnParamLabelSpan), + (name, name, LazyFnParamNameSpan), + (ty, ty, LazyTypeSpan), + } +); + +define_lazy_span_item!(LazyFnParamLabelSpan); +define_lazy_span_item!(LazyFnParamNameSpan); +define_lazy_span_item!(LazyGenericParamSpan); +define_lazy_span_item!(LazyGenericArgParamSpan); + +define_lazy_span_item!( + LazyWherePredicateSpan, + ast::WherePredicate, + @node { + (ty, ty, LazyTypeSpan), + (bounds, bounds, LazyTypeBoundListSpan), + } +); + +define_lazy_span_item! 
{ + LazyTypeBoundListSpan, + ast::TypeBoundList, + @idx { + (bound, LazyTypeBoundSpan), + } +} + +define_lazy_span_item!( + LazyTypeBoundSpan, + ast::TypeBound, + @node { + (path, path, LazyPathSpan), + (generic_args, generic_args, LazyGenericArgListSpan), + } +); diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs index 2b83b4aae7..597e82cd9b 100644 --- a/crates/hir/src/span/path.rs +++ b/crates/hir/src/span/path.rs @@ -1,4 +1,4 @@ -use parser::ast::{self, prelude::*}; +use parser::ast; use super::define_lazy_span_item; diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index 518b0a20f3..a33e36e060 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -1,4 +1,4 @@ -use parser::ast::{self, prelude::*}; +use parser::ast; use super::define_lazy_span_item; diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index a4a2255e41..251d7eed88 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -305,6 +305,14 @@ pub enum FnParamLabel { Underscore(SyntaxToken), } impl FnParamLabel { + pub fn syntax(&self) -> SyntaxToken { + match self { + FnParamLabel::Ident(token) => token, + FnParamLabel::Underscore(token) => token, + } + .clone() + } + fn from_token(token: SyntaxToken) -> Option { match token.kind() { SK::Ident => Some(FnParamLabel::Ident(token)), @@ -323,6 +331,15 @@ pub enum FnParamName { Underscore(SyntaxToken), } impl FnParamName { + pub fn syntax(&self) -> SyntaxToken { + match self { + FnParamName::Ident(token) => token, + FnParamName::SelfParam(token) => token, + FnParamName::Underscore(token) => token, + } + .clone() + } + fn from_token(token: SyntaxToken) -> Option { match token.kind() { SK::Ident => Some(FnParamName::Ident(token)), From da2419080c4b158b002a0478982158b275073251 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 5 Apr 2023 16:11:18 +0200 Subject: [PATCH 124/678] Add lazy span for attribute --- crates/hir/src/span/attr.rs | 49 ++++++++++++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index a4a100cc3b..09ebb697b4 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -1,3 +1,50 @@ +use parser::ast::{self, prelude::*}; + use super::define_lazy_span_item; -define_lazy_span_item!(LazyAttrListSpan); +define_lazy_span_item!( + LazyAttrListSpan, + ast::AttrList, + @idx { + (attr, LazyAttrSpan), + } +); +impl LazyAttrListSpan { + pub fn normal_attr(&self, idx: usize) -> LazyNormalAttrSpan { + let transition = move |node: parser::SyntaxNode| { + ast::AttrList::cast(node) + .and_then(|f| f.normal_attrs().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + LazyNormalAttrSpan(self.0.push_state(std::sync::Arc::new(transition))) + } +} + +define_lazy_span_item!(LazyAttrSpan); +define_lazy_span_item!( + LazyNormalAttrSpan, + ast::NormalAttr, + @token { + (name, name), + } + @node { + (args, args, LazyAttrArgListSpan), + } +); + +define_lazy_span_item!( + LazyAttrArgListSpan, + ast::AttrArgList, + @idx { + (arg, LazyAttrArgSpan), + } +); + +define_lazy_span_item!( + LazyAttrArgSpan, + ast::AttrArg, + @token { + (key, key), + (value, value), + } +); From 353656f789ea40ec7e51963b81db87565429b6e4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 5 Apr 2023 17:29:42 +0200 Subject: [PATCH 125/678] Add lazy span for types --- crates/hir/src/hir_def/mod.rs | 2 +- crates/hir/src/span/item.rs | 4 +- 
crates/hir/src/span/types.rs | 84 ++++++++++++++++++++++++++++++++- crates/parser2/src/ast/types.rs | 15 ++++++ 4 files changed, 102 insertions(+), 3 deletions(-) diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index adf13d249f..43bbe3d95a 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -68,7 +68,7 @@ pub enum LitKind { /// can often be ignored. /// /// This type is clearly distinguished from `Option`. The -/// `Option` type is used to hold syntactically optional nodes, while +/// `Option` type is used to hold syntactically valid optional nodes, while /// `Partial` means that a syntactically required element may be missing. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Partial { diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index da1d452db5..3805f1ebdd 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -1,7 +1,7 @@ use parser::ast; use crate::hir_def::{ - Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, + Body, Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, TypeAlias, Use, }; @@ -186,6 +186,8 @@ define_lazy_span_item!( } ); +define_lazy_span_item!(LazyBodySpan, ast::Expr, new(Body),); + define_lazy_span_item!( LazyRecordFieldListSpan, ast::RecordFieldList, diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs index 8616cc71c0..6c72a5efad 100644 --- a/crates/hir/src/span/types.rs +++ b/crates/hir/src/span/types.rs @@ -1,4 +1,86 @@ +use parser::ast; + +use crate::span::{item::LazyBodySpan, params::LazyGenericArgListSpan, path::LazyPathSpan}; + use super::define_lazy_span_item; define_lazy_span_item!(LazyTypeSpan); -define_lazy_span_item!(LazyPathTypeSpan); +impl LazyTypeSpan { + /// Convert this [`LazyTypeSpan`] into a [`LazyPathTypeSpan`]. + /// + /// If the type that is pointed to by this is not a path type, the result + /// span will point to the same span of the original type. + pub fn into_path_type(self) -> LazyPathTypeSpan { + LazyPathTypeSpan(self.0) + } + + /// Convert this [`LazyTypeSpan`] into a [`LazyPtrTypeSpan`]. + /// + /// If the type that is pointed to by this is not a pointer type, the result + /// span will point to the same span of the original type. + pub fn into_ptr_type(self) -> LazyPtrTypeSpan { + LazyPtrTypeSpan(self.0) + } + + /// Convert this [`LazyTypeSpan`] into a [`LazyTupleTypeSpan`]. + /// + /// If the type that is pointed to by this is not a tuple type, the result + /// span will point to the same span of the original type. + pub fn into_tuple_type(self) -> LazyTupleTypeSpan { + LazyTupleTypeSpan(self.0) + } + + /// convert this [`LazyTypeSpan`] into a [`LazyArrayTypeSpan`]. + /// + /// If the type that is pointed to by this is not an array type, the result + /// span will point to the same span of the original type. + pub fn into_array_type(self) -> LazyArrayTypeSpan { + LazyArrayTypeSpan(self.0) + } +} + +define_lazy_span_item!( + LazyPtrTypeSpan, + ast::PtrType, + @token { + (star, star), + } + @node { + (ty, inner, LazyTypeSpan), + } +); + +define_lazy_span_item! 
+( + LazyPathTypeSpan, + ast::PathType, + @node { + (path, path, LazyPathSpan), + (generic_args, generic_args, LazyGenericArgListSpan), + } +); + +define_lazy_span_item!( + LazyTupleTypeSpan, + ast::TupleType, + @token { + (l_paren, l_paren), + (r_paren, r_paren), + } + @idx { + (elem_ty, LazyTypeSpan), + } +); + +define_lazy_span_item!( + LazyArrayTypeSpan, + ast::ArrayType, + @token { + (l_bracket, l_bracket), + (r_bracket, r_bracket), + } + @node { + (elem, elem_ty, LazyTypeSpan), + (len, len, LazyBodySpan), + } +); diff --git a/crates/parser2/src/ast/types.rs b/crates/parser2/src/ast/types.rs index 0f1d949e0a..0c9ddb6147 100644 --- a/crates/parser2/src/ast/types.rs +++ b/crates/parser2/src/ast/types.rs @@ -79,6 +79,14 @@ ast_node! { IntoIterator, } impl TupleType { + pub fn l_paren(&self) -> Option { + support::token(self.syntax(), SK::LParen) + } + + pub fn r_paren(&self) -> Option { + support::token(self.syntax(), SK::RParen) + } + /// Returns the types in the tuple. pub fn elem_tys(&self) -> AstChildren { support::children(self.syntax()) @@ -92,6 +100,13 @@ ast_node! { SK::ArrayType, } impl ArrayType { + pub fn l_bracket(&self) -> Option { + support::token(self.syntax(), SK::LBracket) + } + + pub fn r_bracket(&self) -> Option { + support::token(self.syntax(), SK::LBracket) + } /// Returns the type of the array elements. pub fn elem_ty(&self) -> Option { support::child(self.syntax()) From 937003369007c7e6ce2c6086f072349ee010f489 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 5 Apr 2023 22:53:50 +0200 Subject: [PATCH 126/678] Add `ChainRoot` trait --- Cargo.lock | 39 ------ crates/hir/Cargo.toml | 1 - crates/hir/src/hir_def/item.rs | 14 +-- crates/hir/src/lib.rs | 4 +- crates/hir/src/lower/item.rs | 14 +-- crates/hir/src/span/attr.rs | 2 +- crates/hir/src/span/db.rs | 6 +- crates/hir/src/span/item.rs | 8 +- crates/hir/src/span/mod.rs | 201 +----------------------------- crates/hir/src/span/transition.rs | 170 +++++++++++++++++++++++++ 10 files changed, 197 insertions(+), 262 deletions(-) create mode 100644 crates/hir/src/span/transition.rs diff --git a/Cargo.lock b/Cargo.lock index d1c48c4775..2b4e3bf732 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -816,7 +816,6 @@ dependencies = [ "rustc-hash", "salsa-2022", "smallvec", - "tracing", ] [[package]] @@ -1452,12 +1451,6 @@ dependencies = [ "indexmap", ] -[[package]] -name = "pin-project-lite" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" - [[package]] name = "plotters" version = "0.3.4" @@ -2201,38 +2194,6 @@ dependencies = [ "toml_datetime", ] -[[package]] -name = "tracing" -version = "0.1.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" -dependencies = [ - "cfg-if 1.0.0", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tracing-core" -version = "0.1.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" -dependencies = [ - "once_cell", -] - [[package]] name = "triehash" version = "0.8.4" diff --git 
a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 665b63b95d..20ff3d0bc1 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -8,7 +8,6 @@ repository = "https://github.com/ethereum/fe" description = "Provides HIR definition and lowering for Fe lang" [dependencies] -tracing = "0.1" # We may need to fix this to a specific version, # but I want to keep up with the latest version until the new Fe implemeentation is merged into the master. salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 5af1081abc..3053a3c6b5 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -25,8 +25,8 @@ use super::{ pub enum ItemKind { TopMod(TopLevelMod), Mod(Mod), - Fn(Fn), - ExternFn(ExternFn), + Func(Func), + ExternFunc(ExternFunc), Struct(Struct), Contract(Contract), Enum(Enum), @@ -69,7 +69,7 @@ pub struct Mod { } #[salsa::tracked] -pub struct Fn { +pub struct Func { #[id] id: TrackedItemId, @@ -85,14 +85,14 @@ pub struct Fn { #[return_ref] pub(crate) origin: HirOrigin, } -impl Fn { +impl Func { pub fn lazy_span(self) -> LazyFnSpan { LazyFnSpan::new(self) } } #[salsa::tracked] -pub struct ExternFn { +pub struct ExternFunc { #[id] id: TrackedItemId, @@ -105,7 +105,7 @@ pub struct ExternFn { #[return_ref] pub(crate) origin: HirOrigin, } -impl ExternFn { +impl ExternFunc { pub fn lazy_span(self) -> LazyExternFnSpan { LazyExternFnSpan::new(self) } @@ -331,7 +331,7 @@ pub struct EnumVariant { #[salsa::interned] pub struct ImplItemListId { #[return_ref] - pub items: Vec, + pub items: Vec, } pub type TraitItemListId = ImplItemListId; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index bfa6ff8889..602beaf96c 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -11,8 +11,8 @@ pub struct Jar( // Tracked Hir items. 
hir_def::TopLevelMod, hir_def::Mod, - hir_def::Fn, - hir_def::ExternFn, + hir_def::Func, + hir_def::ExternFunc, hir_def::Struct, hir_def::Contract, hir_def::Enum, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 725d7eef70..bee357810e 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -47,7 +47,7 @@ impl Mod { } } -impl Fn { +impl Func { pub(super) fn lower_ast( ctxt: &mut FileLowerCtxt<'_>, parent_id: TrackedItemId, @@ -231,7 +231,7 @@ impl Impl { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Fn::lower_ast(ctxt, id.clone(), impl_item); + Func::lower_ast(ctxt, id.clone(), impl_item); } } @@ -267,7 +267,7 @@ impl Trait { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Fn::lower_ast(ctxt, id.clone(), impl_item); + Func::lower_ast(ctxt, id.clone(), impl_item); } } @@ -305,7 +305,7 @@ impl ImplTrait { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Fn::lower_ast(ctxt, id.clone(), impl_item); + Func::lower_ast(ctxt, id.clone(), impl_item); } } @@ -360,7 +360,7 @@ impl Use { } } -impl ExternFn { +impl ExternFunc { pub(super) fn lower_ast( ctxt: &mut FileLowerCtxt<'_>, parent: TrackedItemId, @@ -455,7 +455,7 @@ fn lower_module_items(ctxt: &mut FileLowerCtxt<'_>, id: TrackedItemId, items: as Mod::lower_ast(ctxt, id.clone(), mod_); } ast::ItemKind::Fn(fn_) => { - Fn::lower_ast(ctxt, id.clone(), fn_); + Func::lower_ast(ctxt, id.clone(), fn_); } ast::ItemKind::Struct(struct_) => { Struct::lower_ast(ctxt, id.clone(), struct_); @@ -487,7 +487,7 @@ fn lower_module_items(ctxt: &mut FileLowerCtxt<'_>, id: TrackedItemId, items: as ast::ItemKind::Extern(extern_) => { if let Some(extern_block) = extern_.extern_block() { for fn_ in extern_block { - ExternFn::lower_ast(ctxt, id.clone(), fn_); + ExternFunc::lower_ast(ctxt, id.clone(), fn_); } } } diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index 09ebb697b4..df783b5887 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -16,7 +16,7 @@ impl LazyAttrListSpan { .and_then(|f| f.normal_attrs().nth(idx)) .map(|n| n.syntax().clone().into()) }; - LazyNormalAttrSpan(self.0.push_state(std::sync::Arc::new(transition))) + LazyNormalAttrSpan(self.0.push_transition(std::sync::Arc::new(transition))) } } diff --git a/crates/hir/src/span/db.rs b/crates/hir/src/span/db.rs index fd1f1bc8d0..6bef65d20b 100644 --- a/crates/hir/src/span/db.rs +++ b/crates/hir/src/span/db.rs @@ -3,7 +3,7 @@ use parser::ast; use crate::{ hir_def::{ - Body, Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, + Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, TypeAlias, Use, }, HirDb, @@ -30,11 +30,11 @@ pub trait SpannedHirDb: HirDb + Upcast { item.origin(self.upcast()) } - fn fn_ast(&self, item: Fn) -> &HirOrigin { + fn func_ast(&self, item: Func) -> &HirOrigin { item.origin(self.upcast()) } - fn extern_fn_ast(&self, item: ExternFn) -> &HirOrigin { + fn extern_func_ast(&self, item: ExternFunc) -> &HirOrigin { item.origin(self.upcast()) } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 3805f1ebdd..488d7b0d20 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -1,8 +1,8 @@ use parser::ast; use crate::hir_def::{ - Body, Const, Contract, Enum, ExternFn, Fn, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, - TypeAlias, Use, + Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, 
Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, }; use super::{ @@ -33,7 +33,7 @@ define_lazy_span_item!( define_lazy_span_item!( LazyFnSpan, ast::Fn, - new(Fn), + new(Func), @token { (name, name), } @@ -50,7 +50,7 @@ define_lazy_span_item!( define_lazy_span_item!( LazyExternFnSpan, ast::Fn, - new(ExternFn), + new(ExternFunc), @token { (name, name), } diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index f9e18b864c..66a69183eb 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -1,15 +1,9 @@ -use std::sync::Arc; - use parser::{ ast::{self, prelude::*, AstPtr, SyntaxNodePtr}, - syntax_node::NodeOrToken, - SyntaxNode, TextRange, + TextRange, }; use common::{diagnostics::Span, InputFile}; -use smallvec::SmallVec; - -use crate::{hir_def::ItemKind, parse_file}; use self::db::SpannedHirDb; @@ -18,6 +12,7 @@ pub mod db; pub mod item; pub mod params; pub mod path; +pub mod transition; pub mod types; pub mod use_tree; @@ -137,195 +132,5 @@ pub trait LazySpan { fn span(self, db: &dyn SpannedHirDb) -> Span; } -type TransitionFn = Arc Option>; - -#[derive(Clone)] -pub(super) struct SpanTransitionChain { - root: ItemKind, - chain: SmallVec<[TransitionFn; 4]>, -} - -impl SpanTransitionChain { - fn new(item: ItemKind) -> Self { - Self { - root: item, - chain: SmallVec::new(), - } - } - - fn push_state(&self, transition: TransitionFn) -> Self { - let mut new_state = self.clone(); - new_state.chain.push(transition); - new_state - } -} - -impl LazySpan for SpanTransitionChain { - fn span(self, db: &dyn SpannedHirDb) -> Span { - let (file, ptr) = match self.root { - ItemKind::TopMod(top_level_mod) => { - let ast = db.toplevel_ast(top_level_mod); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Mod(mod_) => { - let ast = db.mod_ast(mod_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Fn(fn_) => { - let ast = db.fn_ast(fn_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::ExternFn(extern_fn) => { - let ast = db.extern_fn_ast(extern_fn); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Struct(struct_) => { - let ast = db.struct_ast(struct_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Contract(contract) => { - let ast = db.contract_ast(contract); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Enum(enum_) => { - let ast = db.enum_ast(enum_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::TypeAlias(alias) => { - let ast = db.type_alias_ast(alias); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Impl(impl_) => { - let ast = db.impl_ast(impl_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Trait(trait_) => { - let ast = db.trait_ast(trait_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::ImplTrait(impl_trait) => { - let ast = db.impl_trait_ast(impl_trait); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Const(const_) => { - let ast = db.const_ast(const_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Use(use_) => { - let ast = db.use_ast(use_); - (ast.file, ast.syntax_ptr().unwrap()) - } - - ItemKind::Body(body) => { - let ast = db.body_ast(body); - (ast.file, ast.syntax_ptr().unwrap()) - } - }; - - let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); - let mut node = ptr.to_node(&root_node); - - for transition in self.chain { - node = match transition(node.clone()) { - Some(NodeOrToken::Node(node)) => node, - Some(NodeOrToken::Token(token)) => { - return Span::new(file, token.text_range()); - } - None => { - return 
Span::new(file, node.text_range()); - } - }; - } - - Span::new(file, node.text_range()) - } -} - +use transition::define_lazy_span_item; define_lazy_span_item!(LazyTokenSpan); - -macro_rules! define_lazy_span_item { - ( - $name:ident - $(, - $sk_node: ty - $(, - $(new($hir_ty:ty),)? - $(@token {$(($name_token:ident, $getter_token:ident),)*})? - $(@node {$(($name_node:ident, $getter_node:ident, $result:tt),)*})? - $(@idx { $(($name_iter:ident, $result_iter:tt),)*})? - $(,)? - )? - )? - ) => { - #[derive(Clone)] - pub struct $name(pub(super) crate::span::SpanTransitionChain); - $( - $( - impl $name { - - $(pub fn new(hir: $hir_ty) -> Self { - Self(crate::span::SpanTransitionChain::new(hir.into())) - })? - - $($( - pub fn $name_token(&self) -> crate::span::LazyTokenSpan { - use parser::ast::prelude::*; - let transition = |node: parser::SyntaxNode| { - <$sk_node as AstNode>::cast(node) - .and_then(|n| n.$getter_token()) - .map(|n| n.into()) - }; - crate::span::LazyTokenSpan( - self.0.push_state(std::sync::Arc::new(transition)) - ) - } - )*)? - - $($( - pub fn $name_node(&self) -> $result{ - use parser::ast::prelude::*; - let transition = |node: parser::SyntaxNode| { - <$sk_node as AstNode>::cast(node) - .and_then(|f| f.$getter_node()) - .map(|n| n.syntax().clone().into()) - }; - $result(self.0.push_state(std::sync::Arc::new(transition))) - } - )*)? - - $($( - - pub fn $name_iter(&self, idx: usize) -> $result_iter { - use parser::ast::prelude::*; - let transition = move |node: parser::SyntaxNode| { - <$sk_node as AstNode>::cast(node) - .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into()) - }; - $result_iter(self.0.push_state(std::sync::Arc::new(transition))) - } - )*)? - })?)? - - - impl crate::span::LazySpan for $name { - fn span(self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { - self.0.span(db) - } - } - }; -} - -use define_lazy_span_item; diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs new file mode 100644 index 0000000000..db028b4bdb --- /dev/null +++ b/crates/hir/src/span/transition.rs @@ -0,0 +1,170 @@ +use std::sync::Arc; + +use common::{diagnostics::Span, InputFile}; +use parser::{syntax_node::NodeOrToken, SyntaxNode}; +use smallvec::SmallVec; + +use crate::{ + hir_def::{ + Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, + }, + parse_file, +}; + +use super::{db::SpannedHirDb, LazySpan}; + +type TransitionFn = Arc Option>; + +#[derive(Clone)] +pub(crate) struct SpanTransitionChain { + root: Arc, + chain: SmallVec<[TransitionFn; 4]>, +} + +impl SpanTransitionChain { + pub(super) fn new(root: T) -> Self { + let root = Arc::new(root); + Self { + root, + chain: SmallVec::new(), + } + } + + pub(super) fn push_transition(&self, transition: TransitionFn) -> Self { + let mut new_state = self.clone(); + new_state.chain.push(transition); + new_state + } +} + +impl LazySpan for SpanTransitionChain { + fn span(self, db: &dyn SpannedHirDb) -> Span { + let (file, mut node) = self.root.root(db); + + for transition in self.chain { + node = match transition(node.clone()) { + Some(NodeOrToken::Node(node)) => node, + Some(NodeOrToken::Token(token)) => { + return Span::new(file, token.text_range()); + } + None => { + return Span::new(file, node.text_range()); + } + }; + } + + Span::new(file, node.text_range()) + } +} + +pub(super) trait ChainRoot { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode); +} + +macro_rules! 
impl_chain_root { + ($(($ty:ty, $fn:ident),)*) => { + $( + impl ChainRoot for $ty { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { + let ast = db.$fn(*self); + let file = ast.file; + let ptr = ast.syntax_ptr().unwrap(); + let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let node = ptr.to_node(&root_node); + (file, node) + } + })* + }; +} +impl_chain_root! { + (TopLevelMod, toplevel_ast), + (Mod, mod_ast), + (Func, func_ast), + (ExternFunc, extern_func_ast), + (Struct, struct_ast), + (Contract, contract_ast), + (Enum, enum_ast), + (TypeAlias, type_alias_ast), + (Impl, impl_ast), + (Trait, trait_ast), + (ImplTrait, impl_trait_ast), + (Const, const_ast), + (Use, use_ast), + (Body, body_ast), +} + +macro_rules! define_lazy_span_item { + ( + $name:ident + $(, + $sk_node: ty + $(, + $(new($hir_ty:ty),)? + $(@token {$(($name_token:ident, $getter_token:ident),)*})? + $(@node {$(($name_node:ident, $getter_node:ident, $result:tt),)*})? + $(@idx { $(($name_iter:ident, $result_iter:tt),)*})? + $(,)? + )? + )? + ) => { + #[derive(Clone)] + pub struct $name(pub(crate) crate::span::transition::SpanTransitionChain); + $( + $( + impl $name { + + $(pub fn new(hir: $hir_ty) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(hir)) + })? + + $($( + pub fn $name_token(&self) -> crate::span::LazyTokenSpan { + use parser::ast::prelude::*; + let transition = |node: parser::SyntaxNode| { + <$sk_node as AstNode>::cast(node) + .and_then(|n| n.$getter_token()) + .map(|n| n.into()) + }; + crate::span::LazyTokenSpan( + self.0.push_transition(std::sync::Arc::new(transition)) + ) + } + )*)? + + $($( + pub fn $name_node(&self) -> $result{ + use parser::ast::prelude::*; + let transition = |node: parser::SyntaxNode| { + <$sk_node as AstNode>::cast(node) + .and_then(|f| f.$getter_node()) + .map(|n| n.syntax().clone().into()) + }; + $result(self.0.push_transition(std::sync::Arc::new(transition))) + } + )*)? + + $($( + + pub fn $name_iter(&self, idx: usize) -> $result_iter { + use parser::ast::prelude::*; + let transition = move |node: parser::SyntaxNode| { + <$sk_node as AstNode>::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }; + $result_iter(self.0.push_transition(std::sync::Arc::new(transition))) + } + )*)? + })?)? 
+ + + impl crate::span::LazySpan for $name { + fn span(self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { + self.0.span(db) + } + } + }; +} + +pub(super) use define_lazy_span_item; From 9c0b2d7db0ebddb6c88104261cfb8908498c265b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 5 Apr 2023 23:26:32 +0200 Subject: [PATCH 127/678] Add lazy span for pat --- crates/hir/src/hir_def/body.rs | 4 ++ crates/hir/src/span/db.rs | 4 ++ crates/hir/src/span/mod.rs | 24 +++++--- crates/hir/src/span/pat.rs | 95 +++++++++++++++++++++++++++++++ crates/hir/src/span/transition.rs | 6 +- 5 files changed, 121 insertions(+), 12 deletions(-) create mode 100644 crates/hir/src/span/pat.rs diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index ef0e633fb9..dfe2a31dc0 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -70,6 +70,10 @@ where self.node_to_source[node] = ast.clone(); self.source_to_node.insert(ast, node); } + + pub(crate) fn node_to_source(&self, node: Node) -> &LocalOrigin { + &self.node_to_source[node] + } } impl PartialEq for SourceNodeMap diff --git a/crates/hir/src/span/db.rs b/crates/hir/src/span/db.rs index 6bef65d20b..d51ef4f5fc 100644 --- a/crates/hir/src/span/db.rs +++ b/crates/hir/src/span/db.rs @@ -77,4 +77,8 @@ pub trait SpannedHirDb: HirDb + Upcast { fn body_ast(&self, item: Body) -> &HirOrigin { item.origin(self.upcast()) } + + fn body_source_map(&self, item: Body) -> &crate::hir_def::BodySourceMap { + item.source_map(self.upcast()) + } } diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 66a69183eb..c5fa2629fb 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -11,11 +11,13 @@ pub mod attr; pub mod db; pub mod item; pub mod params; +pub mod pat; pub mod path; -pub mod transition; pub mod types; pub mod use_tree; +mod transition; + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HirOrigin where @@ -30,17 +32,13 @@ where T: AstNode, { fn syntax_ptr(&self) -> Option { - match &self.kind { - LocalOrigin::Raw(ptr) => Some(ptr.syntax_node_ptr()), - LocalOrigin::Expanded(ptr) => Some(ptr.clone()), - _ => None, - } + self.kind.syntax_ptr() } } impl HirOrigin where - T: AstNode, + T: AstNode, { pub(crate) fn new(file: InputFile, origin: LocalOrigin) -> Self { HirOrigin { file, kind: origin } @@ -80,12 +78,20 @@ where impl LocalOrigin where - T: AstNode, + T: AstNode, { pub(crate) fn raw(ast: &T) -> Self { Self::Raw(AstPtr::new(ast)) } + fn syntax_ptr(&self) -> Option { + match self { + LocalOrigin::Raw(ptr) => Some(ptr.syntax_node_ptr()), + LocalOrigin::Expanded(ptr) => Some(ptr.clone()), + _ => None, + } + } + pub(crate) fn desugared(origin: impl Into) -> Self { Self::Desugared(origin.into()) } @@ -129,7 +135,7 @@ impl AugAssignDesugared { /// The trait provides a way to extract [`Span`] from types which don't have a /// span information directly. 
pub trait LazySpan { - fn span(self, db: &dyn SpannedHirDb) -> Span; + fn span(&self, db: &dyn SpannedHirDb) -> Span; } use transition::define_lazy_span_item; diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs new file mode 100644 index 0000000000..67e06f5857 --- /dev/null +++ b/crates/hir/src/span/pat.rs @@ -0,0 +1,95 @@ +use common::InputFile; +use parser::{ast, SyntaxNode}; + +use crate::{ + hir_def::{Body, PatId}, + parse_file, + span::path::LazyPathSpan, +}; + +use super::{ + db::SpannedHirDb, + define_lazy_span_item, + transition::{ChainRoot, SpanTransitionChain}, +}; + +define_lazy_span_item!(LazyPatSpan, ast::Pat,); +impl LazyPatSpan { + pub fn new(pat: PatId, body: Body) -> Self { + let root = PatRoot { pat, body }; + Self(SpanTransitionChain::new(root)) + } + + pub fn into_path_pat(self) -> LazyPathPatSpan { + LazyPathPatSpan(self.0) + } + + pub fn into_path_tuple_pat(self) -> LazyPathPatSpan { + LazyPathPatSpan(self.0) + } + + pub fn into_record_pat(self) -> LazyRecordPatSpan { + LazyRecordPatSpan(self.0) + } +} + +define_lazy_span_item!( + LazyPathPatSpan, + ast::PathPat, + @node { + (path, path, LazyPathSpan), + } +); + +define_lazy_span_item!( + LazyPathTuplePatSpan, + ast::PathTuplePat, + @node { + (path, path, LazyPathSpan), + } +); + +define_lazy_span_item!( + LazyRecordPatSpan, + ast::RecordPat, + @node { + (path, path, LazyPathSpan), + (field, fields, LazyRecordPatFieldListSpan), + } +); + +define_lazy_span_item!( + LazyRecordPatFieldListSpan, + ast::RecordPatFieldList, + @idx { + (field, LazyRecordPatSpan), + } +); + +define_lazy_span_item!( + LazyRecordPatFieldSpan, + ast::RecordPatField, + @token { + (name, name), + } +); + +#[derive(Clone, Copy)] +struct PatRoot { + pat: PatId, + body: Body, +} + +impl ChainRoot for PatRoot { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { + let body_ast = db.body_ast(self.body); + let file = body_ast.file; + let source_map = db.body_source_map(self.body); + let pat_source = source_map.pat_map.node_to_source(self.pat); + let ptr = pat_source.syntax_ptr().unwrap(); + + let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let node = ptr.to_node(&root_node); + (file, node) + } +} diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index db028b4bdb..3d66f1db3d 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -39,10 +39,10 @@ impl SpanTransitionChain { } impl LazySpan for SpanTransitionChain { - fn span(self, db: &dyn SpannedHirDb) -> Span { + fn span(&self, db: &dyn SpannedHirDb) -> Span { let (file, mut node) = self.root.root(db); - for transition in self.chain { + for transition in &self.chain { node = match transition(node.clone()) { Some(NodeOrToken::Node(node)) => node, Some(NodeOrToken::Token(token)) => { @@ -160,7 +160,7 @@ macro_rules! 
define_lazy_span_item { impl crate::span::LazySpan for $name { - fn span(self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { + fn span(&self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { self.0.span(db) } } From 20253e83a15164151844207c46c0a8ced7053855 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 6 Apr 2023 12:06:14 +0200 Subject: [PATCH 128/678] Add `EvaluatedSpan` to represent syntactically invalid node's spans --- crates/hir/src/span/mod.rs | 25 +++++++++++++++++++++---- crates/hir/src/span/pat.rs | 8 +++++--- crates/hir/src/span/transition.rs | 31 ++++++++++++++++++------------- 3 files changed, 44 insertions(+), 20 deletions(-) diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index c5fa2629fb..3f8eca2402 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -3,7 +3,7 @@ use parser::{ TextRange, }; -use common::{diagnostics::Span, InputFile}; +use common::InputFile; use self::db::SpannedHirDb; @@ -132,10 +132,27 @@ impl AugAssignDesugared { } } -/// The trait provides a way to extract [`Span`] from types which don't have a -/// span information directly. +/// The trait provides a way to extract [`EvaluatedSpan`] from types which don't +/// have a span information directly, but can be evaluated from the database +/// lazily. pub trait LazySpan { - fn span(&self, db: &dyn SpannedHirDb) -> Span; + fn eval(&self, db: &dyn SpannedHirDb) -> EvaluatedSpan; +} + +/// This struct represents a result of [`LazySpan::span`] method. +/// It contains the file and the text range. +/// +/// `range` is an optional field because some HIR nodes doesn't have a span when +/// they are syntactically invalid. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct EvaluatedSpan { + pub file: InputFile, + pub range: Option, +} +impl EvaluatedSpan { + pub fn new(file: InputFile, range: Option) -> Self { + Self { file, range } + } } use transition::define_lazy_span_item; diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index 67e06f5857..49e59065b6 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -81,15 +81,17 @@ struct PatRoot { } impl ChainRoot for PatRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { let body_ast = db.body_ast(self.body); let file = body_ast.file; let source_map = db.body_source_map(self.body); let pat_source = source_map.pat_map.node_to_source(self.pat); - let ptr = pat_source.syntax_ptr().unwrap(); + let Some(ptr) = pat_source.syntax_ptr() else { + return (file, None); + }; let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); let node = ptr.to_node(&root_node); - (file, node) + (file, node.into()) } } diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 3d66f1db3d..bd1805d9ba 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use common::{diagnostics::Span, InputFile}; +use common::InputFile; use parser::{syntax_node::NodeOrToken, SyntaxNode}; use smallvec::SmallVec; @@ -12,7 +12,7 @@ use crate::{ parse_file, }; -use super::{db::SpannedHirDb, LazySpan}; +use super::{db::SpannedHirDb, EvaluatedSpan, LazySpan}; type TransitionFn = Arc Option>; @@ -39,40 +39,45 @@ impl SpanTransitionChain { } impl LazySpan for SpanTransitionChain { - fn span(&self, db: &dyn SpannedHirDb) -> Span { - let (file, mut node) = self.root.root(db); + fn 
eval(&self, db: &dyn SpannedHirDb) -> EvaluatedSpan { + let (file, node) = self.root.root(db); + let Some(mut node) = node else { + return EvaluatedSpan::new(file, None); + }; for transition in &self.chain { node = match transition(node.clone()) { Some(NodeOrToken::Node(node)) => node, Some(NodeOrToken::Token(token)) => { - return Span::new(file, token.text_range()); + return EvaluatedSpan::new(file, token.text_range().into()); } None => { - return Span::new(file, node.text_range()); + return EvaluatedSpan::new(file, None); } }; } - Span::new(file, node.text_range()) + EvaluatedSpan::new(file, node.text_range().into()) } } pub(super) trait ChainRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode); + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option); } macro_rules! impl_chain_root { ($(($ty:ty, $fn:ident),)*) => { $( impl ChainRoot for $ty { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { let ast = db.$fn(*self); let file = ast.file; - let ptr = ast.syntax_ptr().unwrap(); + let Some(ptr) = ast.syntax_ptr() else { + return (file, None); + }; let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); let node = ptr.to_node(&root_node); - (file, node) + (file, node.into()) } })* }; @@ -160,8 +165,8 @@ macro_rules! define_lazy_span_item { impl crate::span::LazySpan for $name { - fn span(&self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { - self.0.span(db) + fn eval(&self, db: &dyn crate::span::SpannedHirDb) -> crate::span::EvaluatedSpan{ + self.0.eval(db) } } }; From 83ecd3e87290066b1aeb89629ce4c0cc094f2630 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 6 Apr 2023 18:02:55 +0200 Subject: [PATCH 129/678] Add lazy span for expr --- crates/hir/src/hir_def/expr.rs | 6 +- crates/hir/src/lower/expr.rs | 10 +- crates/hir/src/span/attr.rs | 12 +- crates/hir/src/span/expr.rs | 186 ++++++++++++++++++++++++++++++ crates/hir/src/span/item.rs | 52 ++++----- crates/hir/src/span/mod.rs | 5 +- crates/hir/src/span/params.rs | 26 ++--- crates/hir/src/span/pat.rs | 14 +-- crates/hir/src/span/path.rs | 6 +- crates/hir/src/span/transition.rs | 10 +- crates/hir/src/span/types.rs | 12 +- crates/hir/src/span/use_tree.rs | 12 +- crates/parser2/src/ast/expr.rs | 60 +++++++++- crates/parser2/src/lib.rs | 2 +- 14 files changed, 329 insertions(+), 84 deletions(-) create mode 100644 crates/hir/src/span/expr.rs diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index 8cb94de302..589b20edb3 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -1,6 +1,6 @@ use cranelift_entity::entity_impl; -use super::{Body, IdentId, IntegerId, LitKind, Partial, PatId, PathId, StmtId}; +use super::{Body, GenericArgListId, IdentId, IntegerId, LitKind, Partial, PatId, PathId, StmtId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Expr { @@ -13,10 +13,10 @@ pub enum Expr { Bin(ExprId, ExprId, Partial), Un(ExprId, Partial), /// The first `ExprId` is the callee, the second is the arguments. - Call(ExprId, Vec), + Call(ExprId, GenericArgListId, Vec), /// The first `ExprId` is the method receiver, the second is the method /// name, the third is the arguments. - MethodCall(ExprId, Partial, Vec), + MethodCall(ExprId, Partial, GenericArgListId, Vec), Path(Partial), /// The record construction expression. /// The fist `PathId` is the record type, the second is the record fields. 
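
The span machinery built up across these patches (`ChainRoot`, `SpanTransitionChain`, `EvaluatedSpan`) follows one pattern: keep a root plus a list of narrowing transitions, and only walk them when a span is actually requested. Below is a minimal, self-contained sketch of that pattern, not the crate's actual API: `Node`, `Transition`, and `LazySpanSketch` are illustrative stand-ins, and the failure case simply returns `None` where the real chain falls back to an `EvaluatedSpan` with no range.

use std::ops::Range;
use std::sync::Arc;

// Stand-in for a syntax node: a text range plus child nodes.
#[derive(Clone, Debug)]
struct Node {
    range: Range<usize>,
    children: Vec<Node>,
}

// Each transition narrows the current node to a more specific one, if present.
type Transition = Arc<dyn Fn(&Node) -> Option<Node>>;

#[derive(Clone)]
struct LazySpanSketch {
    root: Node,
    chain: Vec<Transition>,
}

impl LazySpanSketch {
    fn new(root: Node) -> Self {
        Self { root, chain: Vec::new() }
    }

    // Like `push_transition`: clone the chain so earlier spans stay reusable.
    fn push(&self, t: Transition) -> Self {
        let mut next = self.clone();
        next.chain.push(t);
        next
    }

    // Like `eval`: nothing is resolved until the span is actually needed.
    fn eval(&self) -> Option<Range<usize>> {
        let mut node = self.root.clone();
        for t in &self.chain {
            node = t(&node)?;
        }
        Some(node.range)
    }
}

fn main() {
    let root = Node {
        range: 0..10,
        children: vec![Node { range: 2..5, children: vec![] }],
    };
    // Narrow from the root to its first child, then resolve lazily.
    let lazy = LazySpanSketch::new(root)
        .push(Arc::new(|n: &Node| n.children.first().cloned()));
    assert_eq!(lazy.eval(), Some(2..5));
}

Cloning on every `push` is what lets an intermediate span stay usable after more specific ones are derived from it, which is why the real `push_transition` also clones the chain rather than mutating it in place.
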
diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index 9bf039e3c2..ac61bd8034 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -1,7 +1,7 @@ use parser::ast::{self, prelude::*}; use crate::{ - hir_def::{expr::*, Body, IdentId, IntegerId, LitKind, Pat, PathId, Stmt}, + hir_def::{expr::*, Body, GenericArgListId, IdentId, IntegerId, LitKind, Pat, PathId, Stmt}, span::LocalOrigin, }; @@ -43,6 +43,8 @@ impl Expr { ast::ExprKind::Call(call) => { let callee = Self::push_to_body_opt(ctxt, call.callee()); + let generic_args = + GenericArgListId::lower_ast_opt(ctxt.f_ctxt, call.generic_args()); let args = call .args() .map(|args| { @@ -51,13 +53,15 @@ impl Expr { .collect() }) .unwrap_or_default(); - Self::Call(callee, args) + Self::Call(callee, generic_args, args) } ast::ExprKind::MethodCall(method_call) => { let receiver = Self::push_to_body_opt(ctxt, method_call.receiver()); let method_name = IdentId::lower_token_partial(ctxt.f_ctxt, method_call.method_name()); + let generic_args = + GenericArgListId::lower_ast_opt(ctxt.f_ctxt, method_call.generic_args()); let args = method_call .args() .map(|args| { @@ -66,7 +70,7 @@ impl Expr { .collect() }) .unwrap_or_default(); - Self::MethodCall(receiver, method_name, args) + Self::MethodCall(receiver, method_name, generic_args, args) } ast::ExprKind::Path(path) => { diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index df783b5887..b0fbdfe24e 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -1,8 +1,8 @@ use parser::ast::{self, prelude::*}; -use super::define_lazy_span_item; +use super::define_lazy_span_node; -define_lazy_span_item!( +define_lazy_span_node!( LazyAttrListSpan, ast::AttrList, @idx { @@ -20,8 +20,8 @@ impl LazyAttrListSpan { } } -define_lazy_span_item!(LazyAttrSpan); -define_lazy_span_item!( +define_lazy_span_node!(LazyAttrSpan); +define_lazy_span_node!( LazyNormalAttrSpan, ast::NormalAttr, @token { @@ -32,7 +32,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyAttrArgListSpan, ast::AttrArgList, @idx { @@ -40,7 +40,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyAttrArgSpan, ast::AttrArg, @token { diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs new file mode 100644 index 0000000000..816c6a4b18 --- /dev/null +++ b/crates/hir/src/span/expr.rs @@ -0,0 +1,186 @@ +use common::InputFile; +use parser::{ast, SyntaxNode}; + +use crate::{ + hir_def::{Body, ExprId}, + parse_file, + span::{params::LazyGenericArgListSpan, path::LazyPathSpan, LazySpanAtom}, +}; + +use super::{ + db::SpannedHirDb, + define_lazy_span_node, + transition::{ChainRoot, SpanTransitionChain}, +}; + +define_lazy_span_node!(LazyExprSpan, ast::Expr,); +impl LazyExprSpan { + pub fn new(expr: ExprId, body: Body) -> Self { + let root = ExprRoot { expr, body }; + Self(SpanTransitionChain::new(root)) + } + + pub fn into_bin_expr(self) -> LazyBinExprSpan { + LazyBinExprSpan(self.0) + } + + pub fn into_un_expr(self) -> LazyUnExprSpan { + LazyUnExprSpan(self.0) + } + + pub fn into_call_expr(self) -> LazyCallExprSpan { + LazyCallExprSpan(self.0) + } + + pub fn into_method_call_expr(self) -> LazyMethodCallExprSpan { + LazyMethodCallExprSpan(self.0) + } + + pub fn into_path_expr(self) -> LazyPathExprSpan { + LazyPathExprSpan(self.0) + } + + pub fn into_record_init_expr(self) -> LazyRecordInitExprSpan { + LazyRecordInitExprSpan(self.0) + } + + pub fn into_field_expr(self) -> LazyFieldExprSpan { 
+ LazyFieldExprSpan(self.0) + } + + pub fn into_match_expr(self) -> LazyMatchExprSpan { + LazyMatchExprSpan(self.0) + } +} + +define_lazy_span_node!( + LazyBinExprSpan, + ast::BinExpr, + @node { + (op, op, LazySpanAtom), + } +); + +define_lazy_span_node!( + LazyUnExprSpan, + ast::UnExpr, + @node { + (op, op, LazySpanAtom), + } +); + +define_lazy_span_node!( + LazyCallExprSpan, + ast::CallExpr, + @node { + (generic_args, generic_args, LazyGenericArgListSpan), + (args, args, LazyCallArgListSpan), + } +); + +define_lazy_span_node!( + LazyMethodCallExprSpan, + ast::MethodCallExpr, + @token { + (method_name, method_name), + } + @node { + (generic_args, generic_args, LazyGenericArgListSpan), + (args, args, LazyCallArgListSpan), + } +); + +define_lazy_span_node! { + LazyPathExprSpan, + ast::PathExpr, + @node { + (path, path, LazyPathSpan), + } +} + +define_lazy_span_node!( + LazyRecordInitExprSpan, + ast::RecordInitExpr, + @node { + (path, path, LazyPathSpan), + (fields, fields, LazyRecordFieldListSpan), + } +); + +define_lazy_span_node!( + LazyFieldExprSpan, + ast::FieldExpr, + @token { + (accessor, name_or_index), + } +); + +define_lazy_span_node!( + LazyMatchExprSpan, + ast::MatchExpr, + @node { + (arms, arms, LazyMatchArmListSpan), + } +); + +define_lazy_span_node!( + LazyCallArgListSpan, + ast::CallArgList, + @idx { + (arg, LazyCallArgSpan), + } +); + +define_lazy_span_node!( + LazyCallArgSpan, + ast::CallArg, + @token { + (label, label), + } +); + +define_lazy_span_node!( + LazyRecordFieldListSpan, + ast::RecordFieldList, + @idx { + (field, LazyRecordFieldSpan), + } +); + +define_lazy_span_node!( + LazyRecordFieldSpan, + ast::RecordField, + @token { + (label, label), + } +); + +define_lazy_span_node!( + LazyMatchArmListSpan, + ast::MatchArmList, + @idx { + (arm, LazySpanAtom), + } +); + +#[derive(Clone, Copy)] +struct ExprRoot { + expr: ExprId, + body: Body, +} + +impl ChainRoot for ExprRoot { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { + let body_ast = db.body_ast(self.body); + let file = body_ast.file; + let source_map = db.body_source_map(self.body); + let pat_source = source_map.expr_map.node_to_source(self.expr); + let Some(ptr) = pat_source.syntax_ptr() else { + return (file, None); + }; + + let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let node = ptr.to_node(&root_node); + (file, node.into()) + } +} diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 488d7b0d20..4ee6227b77 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -7,15 +7,15 @@ use crate::hir_def::{ use super::{ attr::LazyAttrListSpan, - define_lazy_span_item, + define_lazy_span_node, params::{LazyFnParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, types::{LazyPathTypeSpan, LazyTypeSpan}, use_tree::LazyUseTreeSpan, }; -define_lazy_span_item!(LazyTopLevelModSpan, ast::Root, new(TopLevelMod),); +define_lazy_span_node!(LazyTopLevelModSpan, ast::Root, new(TopLevelMod),); -define_lazy_span_item!( +define_lazy_span_node!( LazyModSpan, ast::Mod, new(Mod), @@ -30,7 +30,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyFnSpan, ast::Fn, new(Func), @@ -47,7 +47,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyExternFnSpan, ast::Fn, new(ExternFunc), @@ -62,7 +62,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyStructSpan, ast::Struct, new(Struct), @@ -74,11 +74,11 @@ define_lazy_span_item!( (generic_params, 
generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), - (fields, fields, LazyRecordFieldListSpan), + (fields, fields, LazyRecordFieldDefListSpan), } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyContractSpan, ast::Contract, new(Contract), @@ -88,11 +88,11 @@ define_lazy_span_item!( @node { (attributes, attr_list, LazyAttrListSpan), (modifier, modifier, LazyItemModifierSpan), - (fields, fields, LazyRecordFieldListSpan), + (fields, fields, LazyRecordFieldDefListSpan), } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyEnumSpan, ast::Enum, new(Enum), @@ -108,7 +108,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyTypeAliasSpan, ast::TypeAlias, new(TypeAlias), @@ -124,7 +124,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyImplSpan, ast::Impl, new(Impl), @@ -135,7 +135,7 @@ define_lazy_span_item!( (target_ty, ty, LazyTypeSpan), } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyTraitSpan, ast::Trait, new(Trait), @@ -150,7 +150,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyImplTraitSpan, ast::ImplTrait, new(ImplTrait), @@ -163,7 +163,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyConstSpan, ast::Const, new(Const), @@ -176,7 +176,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyUseSpan, ast::Use, new(Use), @@ -186,18 +186,18 @@ define_lazy_span_item!( } ); -define_lazy_span_item!(LazyBodySpan, ast::Expr, new(Body),); +define_lazy_span_node!(LazyBodySpan, ast::Expr, new(Body),); -define_lazy_span_item!( - LazyRecordFieldListSpan, - ast::RecordFieldList, +define_lazy_span_node!( + LazyRecordFieldDefListSpan, + ast::RecordFieldDefList, @idx { - (field, LazyRecordFieldListSpan), + (field, LazyRecordFieldDefSpan), } ); -define_lazy_span_item!( - LazyRecordFieldSpan, +define_lazy_span_node!( + LazyRecordFieldDefSpan, ast::RecordFieldDef, @token { (pub_kw, pub_kw), @@ -208,7 +208,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyEnumVariantListSpan, ast::EnumVariantDefList, @idx { @@ -216,7 +216,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyEnumVariantSpan, ast::EnumVariantDef, @token { @@ -227,7 +227,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyItemModifierSpan, ast::ItemModifier, @token { diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 3f8eca2402..e9924710d9 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -9,6 +9,7 @@ use self::db::SpannedHirDb; pub mod attr; pub mod db; +pub mod expr; pub mod item; pub mod params; pub mod pat; @@ -155,5 +156,5 @@ impl EvaluatedSpan { } } -use transition::define_lazy_span_item; -define_lazy_span_item!(LazyTokenSpan); +use transition::define_lazy_span_node; +define_lazy_span_node!(LazySpanAtom); diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 13dd436043..81049ee213 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -2,9 +2,9 @@ use parser::ast; use crate::span::path::LazyPathSpan; -use super::{define_lazy_span_item, types::LazyTypeSpan}; +use super::{define_lazy_span_node, types::LazyTypeSpan}; -define_lazy_span_item!( +define_lazy_span_node!( LazyFnParamListSpan, ast::FnParamList, @idx { @@ -12,7 +12,7 @@ 
define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyGenericParamListSpan, ast::GenericParamList, @idx { @@ -20,7 +20,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyGenericArgListSpan, ast::GenericArgList, @idx { @@ -28,7 +28,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyWhereClauseSpan, ast::WhereClause, @idx { @@ -37,7 +37,7 @@ define_lazy_span_item!( ); -define_lazy_span_item!( +define_lazy_span_node!( LazyFnParamSpan, ast::FnParam, @token { @@ -50,12 +50,12 @@ define_lazy_span_item!( } ); -define_lazy_span_item!(LazyFnParamLabelSpan); -define_lazy_span_item!(LazyFnParamNameSpan); -define_lazy_span_item!(LazyGenericParamSpan); -define_lazy_span_item!(LazyGenericArgParamSpan); +define_lazy_span_node!(LazyFnParamLabelSpan); +define_lazy_span_node!(LazyFnParamNameSpan); +define_lazy_span_node!(LazyGenericParamSpan); +define_lazy_span_node!(LazyGenericArgParamSpan); -define_lazy_span_item!( +define_lazy_span_node!( LazyWherePredicateSpan, ast::WherePredicate, @node { @@ -64,7 +64,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item! { +define_lazy_span_node! { LazyTypeBoundListSpan, ast::TypeBoundList, @idx { @@ -72,7 +72,7 @@ define_lazy_span_item! { } } -define_lazy_span_item!( +define_lazy_span_node!( LazyTypeBoundSpan, ast::TypeBound, @node { diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index 49e59065b6..1ed9f24765 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -9,11 +9,11 @@ use crate::{ use super::{ db::SpannedHirDb, - define_lazy_span_item, + define_lazy_span_node, transition::{ChainRoot, SpanTransitionChain}, }; -define_lazy_span_item!(LazyPatSpan, ast::Pat,); +define_lazy_span_node!(LazyPatSpan, ast::Pat,); impl LazyPatSpan { pub fn new(pat: PatId, body: Body) -> Self { let root = PatRoot { pat, body }; @@ -33,7 +33,7 @@ impl LazyPatSpan { } } -define_lazy_span_item!( +define_lazy_span_node!( LazyPathPatSpan, ast::PathPat, @node { @@ -41,7 +41,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyPathTuplePatSpan, ast::PathTuplePat, @node { @@ -49,7 +49,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyRecordPatSpan, ast::RecordPat, @node { @@ -58,7 +58,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyRecordPatFieldListSpan, ast::RecordPatFieldList, @idx { @@ -66,7 +66,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyRecordPatFieldSpan, ast::RecordPatField, @token { diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs index 597e82cd9b..b3053222b5 100644 --- a/crates/hir/src/span/path.rs +++ b/crates/hir/src/span/path.rs @@ -1,8 +1,8 @@ use parser::ast; -use super::define_lazy_span_item; +use super::define_lazy_span_node; -define_lazy_span_item!( +define_lazy_span_node!( LazyPathSpan, ast::Path, @idx { @@ -10,4 +10,4 @@ define_lazy_span_item!( } ); -define_lazy_span_item!(LazyPathSegmentSpan); +define_lazy_span_node!(LazyPathSegmentSpan); diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index bd1805d9ba..2a30b83c73 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -99,7 +99,7 @@ impl_chain_root! { (Body, body_ast), } -macro_rules! define_lazy_span_item { +macro_rules! define_lazy_span_node { ( $name:ident $(, @@ -124,21 +124,21 @@ macro_rules! define_lazy_span_item { })? 
$($( - pub fn $name_token(&self) -> crate::span::LazyTokenSpan { + pub fn $name_token(&self) -> crate::span::LazySpanAtom { use parser::ast::prelude::*; let transition = |node: parser::SyntaxNode| { <$sk_node as AstNode>::cast(node) .and_then(|n| n.$getter_token()) .map(|n| n.into()) }; - crate::span::LazyTokenSpan( + crate::span::LazySpanAtom( self.0.push_transition(std::sync::Arc::new(transition)) ) } )*)? $($( - pub fn $name_node(&self) -> $result{ + pub fn $name_node(&self) -> $result { use parser::ast::prelude::*; let transition = |node: parser::SyntaxNode| { <$sk_node as AstNode>::cast(node) @@ -172,4 +172,4 @@ macro_rules! define_lazy_span_item { }; } -pub(super) use define_lazy_span_item; +pub(super) use define_lazy_span_node; diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs index 6c72a5efad..3697619e0b 100644 --- a/crates/hir/src/span/types.rs +++ b/crates/hir/src/span/types.rs @@ -2,9 +2,9 @@ use parser::ast; use crate::span::{item::LazyBodySpan, params::LazyGenericArgListSpan, path::LazyPathSpan}; -use super::define_lazy_span_item; +use super::define_lazy_span_node; -define_lazy_span_item!(LazyTypeSpan); +define_lazy_span_node!(LazyTypeSpan); impl LazyTypeSpan { /// Convert this [`LazyTypeSpan`] into a [`LazyPathTypeSpan`]. /// @@ -39,7 +39,7 @@ impl LazyTypeSpan { } } -define_lazy_span_item!( +define_lazy_span_node!( LazyPtrTypeSpan, ast::PtrType, @token { @@ -50,7 +50,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item! +define_lazy_span_node! ( LazyPathTypeSpan, ast::PathType, @@ -60,7 +60,7 @@ define_lazy_span_item! } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyTupleTypeSpan, ast::TupleType, @token { @@ -72,7 +72,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyArrayTypeSpan, ast::ArrayType, @token { diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index a33e36e060..ca4ff7c93b 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -1,8 +1,8 @@ use parser::ast; -use super::define_lazy_span_item; +use super::define_lazy_span_node; -define_lazy_span_item!( +define_lazy_span_node!( LazyUseTreeSpan, ast::UseTree, @node { @@ -12,7 +12,7 @@ define_lazy_span_item!( } ); -define_lazy_span_item!( +define_lazy_span_node!( LazyUsePathSpan, ast::UsePath, @idx { @@ -21,9 +21,9 @@ define_lazy_span_item!( ); -define_lazy_span_item!(LazyUsePathSegmentSpan); +define_lazy_span_node!(LazyUsePathSegmentSpan); -define_lazy_span_item!( +define_lazy_span_node!( LazySubUseTreeSpan, ast::UseTreeList, @idx { @@ -31,4 +31,4 @@ define_lazy_span_item!( } ); -define_lazy_span_item!(LazyUseTreeAliasSpan); +define_lazy_span_node!(LazyUseTreeAliasSpan); diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index cacdc4d6c2..aeac65be3f 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -201,6 +201,11 @@ impl FieldExpr { pub fn field_index(&self) -> Option { support::token(self.syntax(), SK::Int).map(|it| LitInt { token: it }) } + + pub fn name_or_index(&self) -> Option { + self.field_name() + .or_else(|| self.field_index().map(|i| i.token().clone())) + } } ast_node! 
{ @@ -395,9 +400,15 @@ pub enum BinOp { } impl BinOp { - pub(super) fn from_node_or_token( - node_or_token: rowan::NodeOrToken, - ) -> Option { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + BinOp::Arith(op) => op.syntax(), + BinOp::Comp(op) => op.syntax(), + BinOp::Logical(op) => op.syntax(), + } + } + + pub(super) fn from_node_or_token(node_or_token: crate::NodeOrToken) -> Option { match node_or_token { rowan::NodeOrToken::Token(token) => Self::from_token(token), rowan::NodeOrToken::Node(node) => Self::from_node(node), @@ -429,6 +440,15 @@ pub enum UnOp { BitNot(SyntaxToken), } impl UnOp { + pub fn syntax(&self) -> SyntaxToken { + match self { + UnOp::Plus(token) => token.clone(), + UnOp::Minus(token) => token.clone(), + UnOp::Not(token) => token.clone(), + UnOp::BitNot(token) => token.clone(), + } + } + fn from_token(token: SyntaxToken) -> Option { match token.kind() { SK::Plus => Some(Self::Plus(token)), @@ -466,6 +486,22 @@ pub enum ArithBinOp { BitXor(SyntaxToken), } impl ArithBinOp { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + ArithBinOp::Add(token) => token.clone().into(), + ArithBinOp::Sub(token) => token.clone().into(), + ArithBinOp::Mul(token) => token.clone().into(), + ArithBinOp::Div(token) => token.clone().into(), + ArithBinOp::Mod(token) => token.clone().into(), + ArithBinOp::Pow(token) => token.clone().into(), + ArithBinOp::LShift(node) => node.clone().into(), + ArithBinOp::RShift(node) => node.clone().into(), + ArithBinOp::BitAnd(token) => token.clone().into(), + ArithBinOp::BitOr(token) => token.clone().into(), + ArithBinOp::BitXor(token) => token.clone().into(), + } + } + pub(super) fn from_node_or_token( node_or_token: rowan::NodeOrToken, ) -> Option { @@ -517,6 +553,17 @@ pub enum CompBinOp { GtEq(SyntaxNode), } impl CompBinOp { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + CompBinOp::Eq(token) => token.clone().into(), + CompBinOp::NotEq(token) => token.clone().into(), + CompBinOp::Lt(token) => token.clone().into(), + CompBinOp::LtEq(node) => node.clone().into(), + CompBinOp::Gt(token) => token.clone().into(), + CompBinOp::GtEq(node) => node.clone().into(), + } + } + pub(super) fn from_token(token: SyntaxToken) -> Option { match token.kind() { SK::Eq2 => Some(Self::Eq(token)), @@ -544,6 +591,13 @@ pub enum LogicalBinOp { Or(SyntaxToken), } impl LogicalBinOp { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + LogicalBinOp::And(token) => token.clone().into(), + LogicalBinOp::Or(token) => token.clone().into(), + } + } + pub(super) fn from_token(token: SyntaxToken) -> Option { match token.kind() { SK::Amp2 => Some(Self::And(token)), diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs index 2931307a41..a142970bcf 100644 --- a/crates/parser2/src/lib.rs +++ b/crates/parser2/src/lib.rs @@ -5,7 +5,7 @@ pub mod syntax_kind; pub mod syntax_node; pub use syntax_kind::SyntaxKind; -pub use syntax_node::{FeLang, GreenNode, SyntaxNode, SyntaxToken, TextRange}; +pub use syntax_node::{FeLang, GreenNode, NodeOrToken, SyntaxNode, SyntaxToken, TextRange}; use parser::RootScope; From a80133452a431f0b8e9a1d81b883504c80a5ab7e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 7 Apr 2023 13:39:18 +0200 Subject: [PATCH 130/678] Add lazy span for stmt --- crates/hir/src/span/mod.rs | 1 + crates/hir/src/span/stmt.rs | 56 +++++++++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) create mode 100644 crates/hir/src/span/stmt.rs diff --git a/crates/hir/src/span/mod.rs 
b/crates/hir/src/span/mod.rs index e9924710d9..6074e2650d 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -14,6 +14,7 @@ pub mod item; pub mod params; pub mod pat; pub mod path; +pub mod stmt; pub mod types; pub mod use_tree; diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs new file mode 100644 index 0000000000..709fed710a --- /dev/null +++ b/crates/hir/src/span/stmt.rs @@ -0,0 +1,56 @@ +use common::InputFile; +use parser::{ast, SyntaxNode}; + +use crate::{ + hir_def::{Body, StmtId}, + parse_file, + span::types::LazyTypeSpan, +}; + +use super::{ + db::SpannedHirDb, + define_lazy_span_node, + transition::{ChainRoot, SpanTransitionChain}, +}; + +define_lazy_span_node!(LazyStmtSpan, ast::Stmt,); +impl LazyStmtSpan { + pub fn new(stmt: StmtId, body: Body) -> Self { + let root = StmtRoot { stmt, body }; + Self(SpanTransitionChain::new(root)) + } + + pub fn into_let_stmt(self) -> LazyLetStmtSpan { + LazyLetStmtSpan(self.0) + } +} + +define_lazy_span_node!( + LazyLetStmtSpan, + ast::LetStmt, + @node { + (ty, type_annotation, LazyTypeSpan), + } +); + +#[derive(Clone, Copy)] +struct StmtRoot { + stmt: StmtId, + body: Body, +} + +impl ChainRoot for StmtRoot { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { + let body_ast = db.body_ast(self.body); + let file = body_ast.file; + let source_map = db.body_source_map(self.body); + let pat_source = source_map.stmt_map.node_to_source(self.stmt); + let Some(ptr) = pat_source.syntax_ptr() else { + return (file, None); + }; + + let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let node = ptr.to_node(&root_node); + (file, node.into()) + } +} From e70b2b80648b79b94c35fad278e323a71790aa7d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 7 Apr 2023 16:56:43 +0200 Subject: [PATCH 131/678] Add `TestDb` --- crates/common2/src/input.rs | 42 ++++++++++++++++++++++++++--- crates/hir/src/lib.rs | 54 +++++++++++++++++++++++++++++++++++++ 2 files changed, 93 insertions(+), 3 deletions(-) diff --git a/crates/common2/src/input.rs b/crates/common2/src/input.rs index fea7c03e63..2839b58f28 100644 --- a/crates/common2/src/input.rs +++ b/crates/common2/src/input.rs @@ -7,9 +7,10 @@ use crate::InputDb; /// An ingot is a collection of files which are compiled together. /// Ingot can depend on other ingots. -#[salsa::input] +#[salsa::input(constructor = __new_impl)] pub struct InputIngot { /// An absolute path to the ingot root directory. + /// The all files in the ingot should be located under this directory. #[return_ref] pub path: Utf8PathBuf, @@ -20,8 +21,6 @@ pub struct InputIngot { #[return_ref] pub version: Version, - pub root_file: InputFile, - /// A list of ingots which the current ingot depends on. #[return_ref] pub dependency: BTreeSet, @@ -29,6 +28,43 @@ pub struct InputIngot { /// A list of files which the current ingot contains. #[return_ref] pub files: BTreeSet, + + #[set(__set_root_file_impl)] + #[get(__get_root_file_impl)] + root_file: Option, +} +impl InputIngot { + pub fn new( + db: &mut dyn InputDb, + path: &str, + kind: IngotKind, + version: Version, + dependency: BTreeSet, + ) -> InputIngot { + let path = Utf8PathBuf::from(path); + let root_file = None; + Self::__new_impl( + db, + path, + kind, + version, + dependency, + BTreeSet::default(), + root_file, + ) + } + + /// Set the root file of the ingot. + /// The root file must be set before the ingot is used. 
+ pub fn set_root_file(self, db: &mut dyn InputDb, file: InputFile) { + self.__set_root_file_impl(db).to(Some(file)); + } + + /// Returns the root file of the ingot. + /// Panics if the root file is not set. + pub fn root_file(&self, db: &dyn InputDb) -> InputFile { + self.__get_root_file_impl(db).unwrap() + } } #[salsa::input] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 602beaf96c..e74dacfa37 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -38,6 +38,7 @@ pub struct Jar( hir_def::ImplItemListId, hir_def::TypeId, hir_def::UseTreeId, + /// Tracked functions hir_def::ingot_module_tree, hir_def::module_item_tree, parse_file, @@ -53,3 +54,56 @@ pub(crate) fn parse_file(db: &dyn HirDb, file: InputFile) -> GreenNode { pub trait HirDb: salsa::DbWithJar + InputDb + Upcast {} impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} + +#[cfg(test)] +mod test_db { + use std::collections::BTreeSet; + + use common::{ + input::{IngotKind, Version}, + InputFile, InputIngot, Upcast, + }; + + use crate::{ + hir_def::{module_item_tree, ItemTree}, + span::db::SpannedHirDb, + }; + + #[derive(Default)] + #[salsa::db(common::Jar, crate::Jar)] + pub(crate) struct TestDb { + storage: salsa::Storage, + } + impl SpannedHirDb for TestDb {} + impl salsa::Database for TestDb { + fn salsa_event(&self, _: salsa::Event) {} + } + impl Upcast for TestDb { + fn upcast(&self) -> &(dyn common::InputDb + 'static) { + self + } + } + impl Upcast for TestDb { + fn upcast(&self) -> &(dyn crate::HirDb + 'static) { + self + } + } + + impl TestDb { + pub fn parse_source(&mut self, text: &str) -> &ItemTree { + let file = self.standalone_file(text); + module_item_tree(self, file) + } + + fn standalone_file(&mut self, text: &str) -> InputFile { + let path = "hir_test"; + let kind = IngotKind::StandAlone; + let version = Version::new(0, 0, 1); + let ingot = InputIngot::new(self, path, kind, version, BTreeSet::default()); + let file = InputFile::new(self, ingot, "test_file.fe".into(), text.to_string()); + ingot.set_root_file(self, file); + ingot.set_files(self).to([file].into()); + file + } + } +} From 1610f69896286792cc8c215c888920c3f8ef1c5f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 7 Apr 2023 16:56:59 +0200 Subject: [PATCH 132/678] Add test for `ItemTree` --- crates/hir/src/hir_def/item_tree.rs | 77 +++++++++++++++++++++++++++-- crates/hir/src/lib.rs | 12 +++-- crates/hir/src/lower/item.rs | 10 ++-- crates/hir/src/lower/mod.rs | 12 +++-- 4 files changed, 96 insertions(+), 15 deletions(-) diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs index 0731784bb3..49ad7a4f8c 100644 --- a/crates/hir/src/hir_def/item_tree.rs +++ b/crates/hir/src/hir_def/item_tree.rs @@ -17,12 +17,39 @@ use super::ItemKind; /// The root node of the tree is the top level module, which corresponds to the /// `module_tree::TopLevelModule`. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct ModuleItemTree { - pub(crate) file: InputFile, - pub(crate) top_mod: TopLevelMod, +pub struct ItemTree { + pub file: InputFile, + pub top_mod: TopLevelMod, pub(crate) item_tree: BTreeMap, } +impl ItemTree { + /// Returns the depth-first iterator of the item tree. + pub fn dfs(&self) -> impl Iterator + '_ { + let mut stack = vec![self.top_mod.into()]; + std::iter::from_fn(move || { + let item = stack.pop()?; + stack.extend(self.item_tree[&item].children.iter().rev()); + Some(item) + }) + } + + /// Returns the parent of the item. 
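The `dfs` iterator above drives a pre-order walk with an explicit stack and `std::iter::from_fn`, pushing children in reverse so they are popped in source order. A standalone sketch of the same technique on a plain adjacency map (nothing below is from the Fe codebase):

    use std::collections::BTreeMap;

    fn dfs(tree: &BTreeMap<u32, Vec<u32>>, root: u32) -> impl Iterator<Item = u32> + '_ {
        let mut stack = vec![root];
        std::iter::from_fn(move || {
            let item = stack.pop()?;
            if let Some(children) = tree.get(&item) {
                // Reversed so the first child is visited first.
                stack.extend(children.iter().rev().copied());
            }
            Some(item)
        })
    }

    fn main() {
        let tree = BTreeMap::from([(0, vec![1, 2]), (1, vec![3])]);
        assert_eq!(dfs(&tree, 0).collect::<Vec<_>>(), [0, 1, 3, 2]);
    }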
+ pub fn parent(&self, item: ItemKind) -> Option { + self.item_tree[&item].parent + } + + /// Returns the children of the item. + pub fn children(&self, item: ItemKind) -> impl Iterator + '_ { + self.item_tree[&item].children.iter().copied() + } + + /// Returns the number of items in the tree. + pub fn len(&self) -> usize { + self.item_tree.len() + } +} + #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct ItemTreeNode { pub(crate) parent: Option, @@ -30,7 +57,7 @@ pub(crate) struct ItemTreeNode { } #[salsa::tracked(return_ref)] -pub fn module_item_tree(db: &dyn HirDb, file: InputFile) -> ModuleItemTree { +pub fn module_item_tree(db: &dyn HirDb, file: InputFile) -> ItemTree { let node = SyntaxNode::new_root(crate::parse_file(db, file)); let module_tree = module_tree::ingot_module_tree(db, file.ingot(db.upcast())); @@ -39,3 +66,45 @@ pub fn module_item_tree(db: &dyn HirDb, file: InputFile) -> ModuleItemTree { let top_mod_name = module_tree.module_name(file); lower::lower_file(db, file, top_mod_name, ast_root) } + +#[cfg(test)] +mod tests { + + use crate::{hir_def::ItemKind, test_db::TestDb}; + + #[test] + fn item_tree() { + let mut db = TestDb::default(); + + let text = r#" + mod foo { + fn bar() {} + extern { + fn baz() + } + } + + enum MyEnum {} + + mod baz { + struct MyS {} + } + "#; + + let (_, item_tree) = db.parse_source(text); + let top_mod = item_tree.top_mod; + assert_eq!(item_tree.len(), 8); + + let inner_items: Vec<_> = item_tree.children(top_mod.into()).collect(); + assert!(matches!(inner_items[0], ItemKind::Mod(_))); + assert!(matches!(inner_items[1], ItemKind::Mod(_))); + assert!(matches!(inner_items[2], ItemKind::Enum(_))); + + let foo_children: Vec<_> = item_tree.children(inner_items[0]).collect(); + assert!(matches!(foo_children[0], ItemKind::Func(_))); + assert!(matches!(foo_children[1], ItemKind::ExternFunc(_))); + + let baz_children: Vec<_> = item_tree.children(inner_items[1]).collect(); + assert!(matches!(baz_children[0], ItemKind::Struct(_))); + } +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index e74dacfa37..e998b6094a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -66,7 +66,7 @@ mod test_db { use crate::{ hir_def::{module_item_tree, ItemTree}, - span::db::SpannedHirDb, + span::{db::SpannedHirDb, LazySpan}, }; #[derive(Default)] @@ -90,9 +90,15 @@ mod test_db { } impl TestDb { - pub fn parse_source(&mut self, text: &str) -> &ItemTree { + pub fn parse_source(&mut self, text: &str) -> (InputFile, &ItemTree) { let file = self.standalone_file(text); - module_item_tree(self, file) + (file, module_item_tree(self, file)) + } + + pub fn text_at(&self, file: InputFile, span: impl LazySpan) -> &str { + let range = span.resolve(self).range.unwrap(); + let text = file.text(self.upcast()); + std::str::from_utf8(&text.as_bytes()[range.start().into()..range.end().into()]).unwrap() } fn standalone_file(&mut self, text: &str) -> InputFile { diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index bee357810e..80e553c803 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -356,7 +356,8 @@ impl Use { let id = TrackedItemId::Use(tree).join(parent_id); let origin = HirOrigin::raw(ctxt.file, &ast); - Self::new(ctxt.db, id, tree, origin) + let use_ = Self::new(ctxt.db, id, tree, origin); + ctxt.leave_scope(use_) } } @@ -366,6 +367,8 @@ impl ExternFunc { parent: TrackedItemId, ast: ast::Fn, ) -> Self { + ctxt.enter_scope(); + let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = 
TrackedItemId::Extern.join(parent); @@ -378,9 +381,10 @@ impl ExternFunc { let modifier = ItemModifier::lower_ast(ast.modifier()); let origin = HirOrigin::raw(ctxt.file, &ast); - Self::new( + let extern_func = Self::new( ctxt.db, id, name, attributes, params, ret_ty, modifier, origin, - ) + ); + ctxt.leave_scope(extern_func) } } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 8022c92b44..0685f5c5ee 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -7,7 +7,7 @@ use parser::{ast, SyntaxToken}; use crate::{ hir_def::{ - IdentId, IntegerId, ItemKind, ItemTreeNode, LitKind, ModuleItemTree, Partial, StringId, + IdentId, IntegerId, ItemKind, ItemTree, ItemTreeNode, LitKind, Partial, StringId, TopLevelMod, }, HirDb, @@ -29,7 +29,7 @@ pub(super) fn lower_file( file: InputFile, top_mod_name: IdentId, root_node: ast::Root, -) -> ModuleItemTree { +) -> ItemTree { let mut ctxt = FileLowerCtxt::new(db, file); let top_mod = TopLevelMod::lower_ast(&mut ctxt, top_mod_name, root_node); ctxt.build(top_mod) @@ -52,8 +52,8 @@ impl<'db> FileLowerCtxt<'db> { } } - pub(super) fn build(self, top_mod: TopLevelMod) -> ModuleItemTree { - ModuleItemTree { + pub(super) fn build(self, top_mod: TopLevelMod) -> ItemTree { + ItemTree { file: self.file, top_mod, item_tree: self.item_tree, @@ -86,7 +86,9 @@ impl<'db> FileLowerCtxt<'db> { }, ); - self.scope_stack.last_mut().unwrap().insert(item.into()); + if !matches!(item_kind, ItemKind::TopMod(_)) { + self.scope_stack.last_mut().unwrap().insert(item.into()); + } item } } From 28625361f43fd96e0c68d5e992bb68a9abaa4b4b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 7 Apr 2023 19:08:43 +0200 Subject: [PATCH 133/678] Add test for lazy span --- crates/hir/src/diagnostics.rs | 22 ++-- crates/hir/src/hir_def/item.rs | 22 +++- crates/hir/src/hir_def/item_tree.rs | 13 +- crates/hir/src/lib.rs | 33 ++++- crates/hir/src/span/expr.rs | 10 +- crates/hir/src/span/item.rs | 186 +++++++++++++++++++++++++++- crates/hir/src/span/mod.rs | 26 +--- crates/hir/src/span/params.rs | 47 +++++-- crates/hir/src/span/pat.rs | 10 +- crates/hir/src/span/stmt.rs | 10 +- crates/hir/src/span/transition.rs | 31 ++--- crates/hir/src/span/use_tree.rs | 10 +- crates/parser2/src/ast/param.rs | 18 +++ crates/parser2/src/ast/use_tree.rs | 4 + 14 files changed, 348 insertions(+), 94 deletions(-) diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 96a7e539ee..2a8ce95b78 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -8,26 +8,20 @@ use common::diagnostics::{CompleteDiagnostic, GlobalErrorCode}; use crate::span::db::SpannedHirDb; /// All diagnostics accumulated in salsa-db should implement -/// [`DiagnosticVoucher`] which defines the conversion. +/// [`DiagnosticVoucher`] which defines the conversion into +/// [`CompleteDiagnostic`]. /// /// All types that implements `DiagnosticVoucher` must NOT have a span /// information which invalidates cache in salsa-db. Instead of it, the all /// information is given by [`SpannedHirDB`] to allow evaluating span lazily. /// -/// The utility structs for conversion from HIR-spanless types to nodes are -/// defined in [`crate::span`] module. +/// The reason why we use `DiagnosticVoucher` is that we want to evaluate span +/// lazily to avoid invalidating cache in salsa-db. +/// +/// To obtain a span from HIR nodes in a lazy manner, it's recommended to use +/// `[LazySpan]`(crate::span::LazySpan) and types that implements `LazySpan`. 
pub trait DiagnosticVoucher: Send { fn pass(&self) -> GlobalErrorCode; /// Consumes voucher and makes a [`CompleteDiagnostic`]. - fn consume(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; -} - -impl DiagnosticVoucher for CompleteDiagnostic { - fn pass(&self) -> GlobalErrorCode { - self.error_code.clone() - } - - fn consume(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { - self - } + fn to_complete(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 3053a3c6b5..8ab994db87 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -10,8 +10,8 @@ use crate::{ span::{ item::{ LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyExternFnSpan, LazyFnSpan, - LazyImplSpan, LazyImplTraitSpan, LazyStructSpan, LazyTopLevelModSpan, LazyTraitSpan, - LazyTypeAliasSpan, LazyUseSpan, + LazyImplSpan, LazyImplTraitSpan, LazyModSpan, LazyStructSpan, LazyTopLevelModSpan, + LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, }, HirOrigin, }, @@ -21,7 +21,18 @@ use super::{ AttrListId, Body, FnParamListId, GenericParamListId, IdentId, Partial, TypeId, WhereClauseId, }; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, PartialOrd, Ord)] +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + derive_more::From, + derive_more::TryInto, +)] pub enum ItemKind { TopMod(TopLevelMod), Mod(Mod), @@ -67,6 +78,11 @@ pub struct Mod { #[return_ref] pub(crate) origin: HirOrigin, } +impl Mod { + pub fn lazy_span(self) -> LazyModSpan { + LazyModSpan::new(self) + } +} #[salsa::tracked] pub struct Func { diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs index 49ad7a4f8c..83701290eb 100644 --- a/crates/hir/src/hir_def/item_tree.rs +++ b/crates/hir/src/hir_def/item_tree.rs @@ -40,13 +40,8 @@ impl ItemTree { } /// Returns the children of the item. - pub fn children(&self, item: ItemKind) -> impl Iterator + '_ { - self.item_tree[&item].children.iter().copied() - } - - /// Returns the number of items in the tree. 
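The new `derive_more::TryInto` derive on `ItemKind` is what lets test helpers convert an `ItemKind` back into a concrete item type (see `parse_source_to_first_item` a little further down). A standalone illustration of the derive, assuming the derive_more version used by this workspace has the `From`/`TryInto` derives enabled:

    use derive_more::{From, TryInto};

    #[derive(Debug, From, TryInto)]
    enum ItemLike {
        Func(u32),
        Struct(char),
    }

    fn main() {
        // `TryInto` generates `TryFrom<ItemLike>` for each variant payload.
        let func: u32 = ItemLike::Func(7).try_into().unwrap();
        assert_eq!(func, 7);
        // Converting to the wrong payload type fails instead of panicking.
        let not_a_struct: Result<char, _> = ItemLike::Func(7).try_into();
        assert!(not_a_struct.is_err());
    }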
- pub fn len(&self) -> usize { - self.item_tree.len() + pub fn children(&self, item: impl Into) -> impl Iterator + '_ { + self.item_tree[&item.into()].children.iter().copied() } } @@ -93,9 +88,9 @@ mod tests { let (_, item_tree) = db.parse_source(text); let top_mod = item_tree.top_mod; - assert_eq!(item_tree.len(), 8); + assert_eq!(item_tree.dfs().count(), 8); - let inner_items: Vec<_> = item_tree.children(top_mod.into()).collect(); + let inner_items: Vec<_> = item_tree.children(top_mod).collect(); assert!(matches!(inner_items[0], ItemKind::Mod(_))); assert!(matches!(inner_items[1], ItemKind::Mod(_))); assert!(matches!(inner_items[2], ItemKind::Enum(_))); diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index e998b6094a..eadbf18cae 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -65,7 +65,7 @@ mod test_db { }; use crate::{ - hir_def::{module_item_tree, ItemTree}, + hir_def::{module_item_tree, ItemKind, ItemTree}, span::{db::SpannedHirDb, LazySpan}, }; @@ -74,10 +74,21 @@ mod test_db { pub(crate) struct TestDb { storage: salsa::Storage, } + impl SpannedHirDb for TestDb {} + impl salsa::Database for TestDb { fn salsa_event(&self, _: salsa::Event) {} } + + impl salsa::ParallelDatabase for TestDb { + fn snapshot(&self) -> salsa::Snapshot { + salsa::Snapshot::new(TestDb { + storage: salsa::Storage::default(), + }) + } + } + impl Upcast for TestDb { fn upcast(&self) -> &(dyn common::InputDb + 'static) { self @@ -95,10 +106,24 @@ mod test_db { (file, module_item_tree(self, file)) } - pub fn text_at(&self, file: InputFile, span: impl LazySpan) -> &str { - let range = span.resolve(self).range.unwrap(); + /// Parses the given source text and returns the first inner item in the + /// file. + pub fn parse_source_to_first_item(&mut self, text: &str) -> (InputFile, T) + where + ItemKind: TryInto, + { + let (file, tree) = self.parse_source(text); + let top_mod = tree.top_mod; + ( + file, + tree.children(top_mod).next().unwrap().try_into().unwrap(), + ) + } + + pub fn text_at(&self, file: InputFile, span: &impl LazySpan) -> &str { + let range = span.resolve(self).range; let text = file.text(self.upcast()); - std::str::from_utf8(&text.as_bytes()[range.start().into()..range.end().into()]).unwrap() + &text[range.start().into()..range.end().into()] } fn standalone_file(&mut self, text: &str) -> InputFile { diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index 816c6a4b18..32d2057b67 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -170,17 +170,17 @@ struct ExprRoot { } impl ChainRoot for ExprRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { let body_ast = db.body_ast(self.body); let file = body_ast.file; let source_map = db.body_source_map(self.body); let pat_source = source_map.expr_map.node_to_source(self.expr); - let Some(ptr) = pat_source.syntax_ptr() else { - return (file, None); - }; + let ptr = pat_source + .syntax_ptr() + .unwrap_or_else(|| body_ast.syntax_ptr().unwrap()); let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); let node = ptr.to_node(&root_node); - (file, node.into()) + (file, node) } } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 4ee6227b77..98e2d1c093 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -200,7 +200,7 @@ define_lazy_span_node!( LazyRecordFieldDefSpan, ast::RecordFieldDef, @token { - (pub_kw, pub_kw), + (pub_span, pub_kw), (name, 
name), } @node { @@ -235,3 +235,187 @@ define_lazy_span_node!( (unsafe_kw, unsafe_kw), } ); + +#[cfg(test)] +mod tests { + use crate::{ + hir_def::{Enum, Func, Mod, Struct, TypeAlias, Use}, + test_db::TestDb, + }; + + #[test] + fn top_mod_span() { + let mut db = TestDb::default(); + + let text = r#" + mod foo { + fn bar() {} + } + + mod baz { + fn qux() {} + } + "#; + + let (file, item_tree) = db.parse_source(text); + let top_mod = item_tree.top_mod; + assert_eq!(text, db.text_at(file, &top_mod.lazy_span())); + } + + #[test] + fn mod_span() { + let mut db = TestDb::default(); + + let text = r#" + + mod foo { + fn bar() {} + } + "#; + + let (file, mod_) = db.parse_source_to_first_item::(text); + let mod_span = mod_.lazy_span(); + assert_eq!( + r#"mod foo { + fn bar() {} + }"#, + db.text_at(file, &mod_span) + ); + assert_eq!("foo", db.text_at(file, &mod_span.name())); + } + + #[test] + fn fn_span() { + let mut db = TestDb::default(); + + let text = r#" + fn my_func(x: u32, label y: foo::Bar<2>) -> FooResult + where U: Add + "#; + + let (file, fn_) = db.parse_source_to_first_item::(text); + let fn_span = fn_.lazy_span(); + assert_eq!("my_func", db.text_at(file, &fn_span.name())); + + let generic_params = fn_span.generic_params(); + let type_generic_param_1 = generic_params.param(0).into_type_param(); + let type_generic_param_2 = generic_params.param(1).into_type_param(); + let const_generic_param = generic_params.param(2).into_const_param(); + + assert_eq!("T", db.text_at(file, &type_generic_param_1.name())); + assert_eq!( + "Debug", + db.text_at(file, &type_generic_param_1.bounds().bound(0)) + ); + assert_eq!("U", db.text_at(file, &type_generic_param_2.name())); + assert_eq!( + "const", + db.text_at(file, &const_generic_param.const_token()) + ); + assert_eq!("LEN", db.text_at(file, &const_generic_param.name())); + assert_eq!("usize", db.text_at(file, &const_generic_param.ty())); + + let params = fn_span.params(); + let param_1 = params.param(0); + let param_2 = params.param(1); + + assert_eq!("x", db.text_at(file, ¶m_1.name())); + assert_eq!("u32", db.text_at(file, ¶m_1.ty())); + assert_eq!("label", db.text_at(file, ¶m_2.label())); + assert_eq!("foo::Bar<2>", db.text_at(file, ¶m_2.ty())); + + assert_eq!("FooResult", db.text_at(file, &fn_span.ret_ty())); + + let where_clause = fn_span.where_clause(); + let where_predicate = where_clause.predicate(0); + assert_eq!("where", db.text_at(file, &where_clause.where_token())); + assert_eq!("U", db.text_at(file, &where_predicate.ty())); + assert_eq!(": Add", db.text_at(file, &where_predicate.bounds())); + } + + #[test] + fn struct_span() { + let mut db = TestDb::default(); + + let text = r#" + struct Foo { + x: u32 + pub y: foo::Bar<2> + }"#; + + let (file, struct_) = db.parse_source_to_first_item::(text); + let struct_span = struct_.lazy_span(); + assert_eq!("Foo", db.text_at(file, &struct_span.name())); + + let fields = struct_span.fields(); + let field_1 = fields.field(0); + let field_2 = fields.field(1); + + assert_eq!("x", db.text_at(file, &field_1.name())); + assert_eq!("u32", db.text_at(file, &field_1.ty())); + + assert_eq!("pub", db.text_at(file, &field_2.pub_span())); + assert_eq!("y", db.text_at(file, &field_2.name())); + assert_eq!("foo::Bar<2>", db.text_at(file, &field_2.ty())); + } + + #[test] + fn enum_span() { + let mut db = TestDb::default(); + + let text = r#" + enum Foo { + Bar + Baz(u32, i32) + }"#; + + let (file, enum_) = db.parse_source_to_first_item::(text); + let enum_span = enum_.lazy_span(); + assert_eq!("Foo", 
db.text_at(file, &enum_span.name())); + + let variants = enum_span.variants(); + let variant_1 = variants.variant(0); + let variant_2 = variants.variant(1); + + assert_eq!("Bar", db.text_at(file, &variant_1.name())); + assert_eq!("Baz", db.text_at(file, &variant_2.name())); + assert_eq!("(u32, i32)", db.text_at(file, &variant_2.ty())); + } + + #[test] + fn type_alias_span() { + let mut db = TestDb::default(); + + let text = r#" + pub type Foo = u32 + "#; + + let (file, type_alias) = db.parse_source_to_first_item::(text); + let type_alias_span = type_alias.lazy_span(); + assert_eq!("Foo", db.text_at(file, &type_alias_span.alias())); + assert_eq!("u32", db.text_at(file, &type_alias_span.ty())); + assert_eq!("pub", db.text_at(file, &type_alias_span.modifier())); + } + + #[test] + fn use_span() { + let mut db = TestDb::default(); + + let text = r#" + use foo::bar::{baz::*, qux as Alias} + "#; + + let (file, use_) = db.parse_source_to_first_item::(text); + let use_tree = use_.lazy_span().use_tree(); + + assert_eq!("foo::bar", db.text_at(file, &use_tree.path())); + let use_tree_list = use_tree.subtree(); + let use_tree_1 = use_tree_list.tree(0); + let use_tree_2 = use_tree_list.tree(1); + + assert_eq!("baz::*", db.text_at(file, &use_tree_1.path())); + assert_eq!("qux", db.text_at(file, &use_tree_2.path())); + assert_eq!("as Alias", db.text_at(file, &use_tree_2.alias())); + assert_eq!("Alias", db.text_at(file, &use_tree_2.alias().alias_name())); + } +} diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 6074e2650d..cd670b3dee 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -3,7 +3,7 @@ use parser::{ TextRange, }; -use common::InputFile; +use common::{diagnostics::Span, InputFile}; use self::db::SpannedHirDb; @@ -134,27 +134,11 @@ impl AugAssignDesugared { } } -/// The trait provides a way to extract [`EvaluatedSpan`] from types which don't -/// have a span information directly, but can be evaluated from the database -/// lazily. +/// The trait provides a way to extract [`Span`](common::diagnostics::Span) from +/// types which don't have a span information directly, but can be resolved into +/// a span lazily. pub trait LazySpan { - fn eval(&self, db: &dyn SpannedHirDb) -> EvaluatedSpan; -} - -/// This struct represents a result of [`LazySpan::span`] method. -/// It contains the file and the text range. -/// -/// `range` is an optional field because some HIR nodes doesn't have a span when -/// they are syntactically invalid. 
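With `resolve` now returning a full `Span`, call sites look roughly like the `text_at` helper used by these tests. A hedged sketch that only compiles inside this crate; `db` is assumed to be any `SpannedHirDb` implementation, `func` a lowered `Func`, and `source` the text of its file:

    fn func_name_text<'a>(db: &dyn SpannedHirDb, func: Func, source: &'a str) -> &'a str {
        // Building the lazy span touches no source text and stores no offsets.
        let name_span = func.lazy_span().name();
        // Offsets are computed only here, against the current revision.
        let range = name_span.resolve(db).range;
        &source[range.start().into()..range.end().into()]
    }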
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct EvaluatedSpan { - pub file: InputFile, - pub range: Option, -} -impl EvaluatedSpan { - pub fn new(file: InputFile, range: Option) -> Self { - Self { file, range } - } + fn resolve(&self, db: &dyn SpannedHirDb) -> Span; } use transition::define_lazy_span_node; diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 81049ee213..560328435c 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -1,6 +1,6 @@ use parser::ast; -use crate::span::path::LazyPathSpan; +use crate::span::{path::LazyPathSpan, LazySpanAtom}; use super::{define_lazy_span_node, types::LazyTypeSpan}; @@ -31,10 +31,12 @@ define_lazy_span_node!( define_lazy_span_node!( LazyWhereClauseSpan, ast::WhereClause, + @token { + (where_token, where_kw), + } @idx { (predicate, LazyWherePredicateSpan), } - ); define_lazy_span_node!( @@ -44,15 +46,46 @@ define_lazy_span_node!( (mut_kw, mut_token), } @node { - (label, label, LazyFnParamLabelSpan), - (name, name, LazyFnParamNameSpan), + (label, label, LazySpanAtom), + (name, name, LazySpanAtom), + (ty, ty, LazyTypeSpan), + } +); + +define_lazy_span_node!(LazyGenericParamSpan, ast::GenericParam); +impl LazyGenericParamSpan { + pub fn into_type_param(self) -> LazyTypeGenericParamSpan { + LazyTypeGenericParamSpan(self.0) + } + + pub fn into_const_param(self) -> LazyConstGenericParamSpan { + LazyConstGenericParamSpan(self.0) + } +} + +define_lazy_span_node!( + LazyTypeGenericParamSpan, + ast::TypeGenericParam, + @token { + (name, name), + } + @node { + (bounds, bounds, LazyTypeBoundListSpan), + } +); + +define_lazy_span_node!( + LazyConstGenericParamSpan, + ast::ConstGenericParam, + @token { + (const_token, const_kw), + (name, name), + } + @node { (ty, ty, LazyTypeSpan), } ); -define_lazy_span_node!(LazyFnParamLabelSpan); -define_lazy_span_node!(LazyFnParamNameSpan); -define_lazy_span_node!(LazyGenericParamSpan); define_lazy_span_node!(LazyGenericArgParamSpan); define_lazy_span_node!( diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index 1ed9f24765..67d07d17bf 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -81,17 +81,17 @@ struct PatRoot { } impl ChainRoot for PatRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { let body_ast = db.body_ast(self.body); let file = body_ast.file; let source_map = db.body_source_map(self.body); let pat_source = source_map.pat_map.node_to_source(self.pat); - let Some(ptr) = pat_source.syntax_ptr() else { - return (file, None); - }; + let ptr = pat_source + .syntax_ptr() + .unwrap_or_else(|| body_ast.syntax_ptr().unwrap()); let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); let node = ptr.to_node(&root_node); - (file, node.into()) + (file, node) } } diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index 709fed710a..bbb717568a 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -40,17 +40,17 @@ struct StmtRoot { } impl ChainRoot for StmtRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { let body_ast = db.body_ast(self.body); let file = body_ast.file; let source_map = db.body_source_map(self.body); let pat_source = source_map.stmt_map.node_to_source(self.stmt); - let Some(ptr) = pat_source.syntax_ptr() else { - return (file, None); - }; + let ptr = pat_source + 
.syntax_ptr() + .unwrap_or_else(|| body_ast.syntax_ptr().unwrap()); let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); let node = ptr.to_node(&root_node); - (file, node.into()) + (file, node) } } diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 2a30b83c73..e4153abd41 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use common::InputFile; +use common::{diagnostics::Span, InputFile}; use parser::{syntax_node::NodeOrToken, SyntaxNode}; use smallvec::SmallVec; @@ -12,7 +12,7 @@ use crate::{ parse_file, }; -use super::{db::SpannedHirDb, EvaluatedSpan, LazySpan}; +use super::{db::SpannedHirDb, LazySpan}; type TransitionFn = Arc Option>; @@ -39,45 +39,40 @@ impl SpanTransitionChain { } impl LazySpan for SpanTransitionChain { - fn eval(&self, db: &dyn SpannedHirDb) -> EvaluatedSpan { - let (file, node) = self.root.root(db); - let Some(mut node) = node else { - return EvaluatedSpan::new(file, None); - }; + fn resolve(&self, db: &dyn SpannedHirDb) -> Span { + let (file, mut node) = self.root.root(db); for transition in &self.chain { node = match transition(node.clone()) { Some(NodeOrToken::Node(node)) => node, Some(NodeOrToken::Token(token)) => { - return EvaluatedSpan::new(file, token.text_range().into()); + return Span::new(file, token.text_range()); } None => { - return EvaluatedSpan::new(file, None); + break; } }; } - EvaluatedSpan::new(file, node.text_range().into()) + Span::new(file, node.text_range()) } } pub(super) trait ChainRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option); + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode); } macro_rules! impl_chain_root { ($(($ty:ty, $fn:ident),)*) => { $( impl ChainRoot for $ty { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, Option) { + fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { let ast = db.$fn(*self); let file = ast.file; - let Some(ptr) = ast.syntax_ptr() else { - return (file, None); - }; + let ptr = ast.syntax_ptr().unwrap(); let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); let node = ptr.to_node(&root_node); - (file, node.into()) + (file, node) } })* }; @@ -165,8 +160,8 @@ macro_rules! define_lazy_span_node { impl crate::span::LazySpan for $name { - fn eval(&self, db: &dyn crate::span::SpannedHirDb) -> crate::span::EvaluatedSpan{ - self.0.eval(db) + fn resolve(&self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { + self.0.resolve(db) } } }; diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index ca4ff7c93b..2d9c3c3d46 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -27,8 +27,14 @@ define_lazy_span_node!( LazySubUseTreeSpan, ast::UseTreeList, @idx { - (segment, LazyUseTreeSpan), + (tree, LazyUseTreeSpan), } ); -define_lazy_span_node!(LazyUseTreeAliasSpan); +define_lazy_span_node!( + LazyUseTreeAliasSpan, + ast::UseTreeAlias, + @token { + (alias_name, ident), + } +); diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 251d7eed88..8b9c6a08c5 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -101,6 +101,15 @@ pub enum GenericParamKind { Const(ConstGenericParam), } +impl GenericParamKind { + pub fn syntax(&self) -> &rowan::SyntaxNode { + match self { + GenericParamKind::Type(param) => param.syntax(), + GenericParamKind::Const(param) => param.syntax(), + } + } +} + ast_node! 
{ /// `(label1: arg1, arg2, ..)` pub struct CallArgList, @@ -156,6 +165,10 @@ impl ConstGenericParam { support::token(self.syntax(), SK::Ident) } + pub fn const_kw(&self) -> Option { + support::token(self.syntax(), SK::ConstKw) + } + /// Returns the type of the const generic parameter. pub fn ty(&self) -> Option { support::child(self.syntax()) @@ -220,6 +233,11 @@ ast_node! { SK::WhereClause, IntoIterator, } +impl WhereClause { + pub fn where_kw(&self) -> Option { + support::token(self.syntax(), SK::WhereKw) + } +} ast_node! { /// `T: Trait` diff --git a/crates/parser2/src/ast/use_tree.rs b/crates/parser2/src/ast/use_tree.rs index 56f3301dad..bfb40c2cb0 100644 --- a/crates/parser2/src/ast/use_tree.rs +++ b/crates/parser2/src/ast/use_tree.rs @@ -90,6 +90,10 @@ impl UseTreeAlias { pub fn underscore(&self) -> Option { support::token(self.syntax(), SK::Underscore) } + + pub fn alias_syntax(&self) -> Option { + self.ident().or_else(|| self.underscore()) + } } /// A path segment in a use tree. From df3003aa8ef331cb78deee7e1448661688c9a74b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 7 Apr 2023 23:31:12 +0200 Subject: [PATCH 134/678] Add an accumulator for `ParseDiagnostic` --- crates/common2/src/diagnostics.rs | 24 +++++++++++++++++ crates/hir/src/diagnostics.rs | 6 ++--- crates/hir/src/hir_def/item.rs | 3 ++- crates/hir/src/lib.rs | 25 +++++++++++------- crates/hir/src/lower/mod.rs | 2 ++ crates/hir/src/lower/parse.rs | 43 +++++++++++++++++++++++++++++++ 6 files changed, 89 insertions(+), 14 deletions(-) create mode 100644 crates/hir/src/lower/parse.rs diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 3720466a6b..bd9235b84d 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -11,12 +11,36 @@ pub struct CompleteDiagnostic { pub error_code: GlobalErrorCode, } +impl CompleteDiagnostic { + pub fn new( + severity: Severity, + message: String, + span: Span, + sub_diagnostics: Vec, + error_code: GlobalErrorCode, + ) -> Self { + Self { + severity, + message, + span, + sub_diagnostics, + error_code, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct GlobalErrorCode { pub pass: AnalysisPass, pub local_code: u16, } +impl GlobalErrorCode { + pub fn new(pass: AnalysisPass, local_code: u16) -> Self { + Self { pass, local_code } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct SubDiagnostic { pub severity: Severity, diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 2a8ce95b78..129be8da75 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -11,7 +11,7 @@ use crate::span::db::SpannedHirDb; /// [`DiagnosticVoucher`] which defines the conversion into /// [`CompleteDiagnostic`]. /// -/// All types that implements `DiagnosticVoucher` must NOT have a span +/// All types that implement `DiagnosticVoucher` must NOT have a span /// information which invalidates cache in salsa-db. Instead of it, the all /// information is given by [`SpannedHirDB`] to allow evaluating span lazily. /// @@ -19,9 +19,9 @@ use crate::span::db::SpannedHirDb; /// lazily to avoid invalidating cache in salsa-db. /// /// To obtain a span from HIR nodes in a lazy manner, it's recommended to use -/// `[LazySpan]`(crate::span::LazySpan) and types that implements `LazySpan`. +/// `[LazySpan]`(crate::span::LazySpan) and types that implement `LazySpan`. 
pub trait DiagnosticVoucher: Send { - fn pass(&self) -> GlobalErrorCode; + fn error_code(&self) -> GlobalErrorCode; /// Consumes voucher and makes a [`CompleteDiagnostic`]. fn to_complete(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 8ab994db87..8cf77c1441 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -47,7 +47,8 @@ pub enum ItemKind { ImplTrait(ImplTrait), Const(Const), Use(Use), - /// Body is not an `Item`, but this makes it easier to analyze items. + /// Body is not an `Item`, but this makes it easier for analyzers to handle + /// it. Body(Body), } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index eadbf18cae..a7d1e6f6da 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,5 +1,7 @@ -use common::{InputDb, InputFile, Upcast}; -use parser::GreenNode; +use common::{InputDb, Upcast}; +pub use lower::parse::ParseDiagnostic; + +use lower::parse::{parse_file, ParseDiagnosticAccumulator}; pub mod diagnostics; pub mod hir_def; @@ -38,21 +40,24 @@ pub struct Jar( hir_def::ImplItemListId, hir_def::TypeId, hir_def::UseTreeId, + /// Accumulated diagnostics. + ParseDiagnosticAccumulator, /// Tracked functions hir_def::ingot_module_tree, hir_def::module_item_tree, parse_file, ); -#[salsa::tracked] -pub(crate) fn parse_file(db: &dyn HirDb, file: InputFile) -> GreenNode { - let text = file.text(db.upcast()); - // TODO: Register errors when we define the diagnostics API. - let (node, _errs) = parser::parse_source_file(text); - node +pub trait HirDb: salsa::DbWithJar + InputDb + Upcast { + /// Returns the diagnostics produced by parsing the given file. + fn diagnostics_for_parse(&self, file: common::InputFile) -> Vec + where + Self: Sized, + { + parse_file(self, file); + parse_file::accumulated::(self, file) + } } - -pub trait HirDb: salsa::DbWithJar + InputDb + Upcast {} impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} #[cfg(test)] diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 0685f5c5ee..e9f9209003 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -13,6 +13,8 @@ use crate::{ HirDb, }; +pub(crate) mod parse; + mod attr; mod body; mod expr; diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs new file mode 100644 index 0000000000..777e167942 --- /dev/null +++ b/crates/hir/src/lower/parse.rs @@ -0,0 +1,43 @@ +use common::{ + diagnostics::{AnalysisPass, CompleteDiagnostic, GlobalErrorCode, Severity, Span}, + InputFile, +}; +use parser::GreenNode; + +use crate::{diagnostics::DiagnosticVoucher, span::db::SpannedHirDb, HirDb}; + +#[salsa::tracked] +pub(crate) fn parse_file(db: &dyn HirDb, file: InputFile) -> GreenNode { + let text = file.text(db.upcast()); + let (node, parse_errors) = parser::parse_source_file(text); + + for error in parse_errors { + ParseDiagnosticAccumulator::push(db, ParseDiagnostic { file, error }); + } + node +} + +#[doc(hidden)] +#[salsa::accumulator] +pub struct ParseDiagnosticAccumulator(ParseDiagnostic); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ParseDiagnostic { + file: InputFile, + error: parser::ParseError, +} + +// `ParseError` has span information, but this is not a problem because the +// parsing procedure itself depends on the file content, and thus span +// information. 
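The `impl` that follows covers the parser's own errors, which may carry spans for the reason given above. For contrast, a diagnostic produced by a later analysis would hold HIR ids instead of spans and resolve them only in `to_complete`. Everything in this sketch (the struct, its name, the message, and the reuse of `AnalysisPass::Parse` as a stand-in pass) is illustrative and not part of the patch:

    // Illustrative only: a span-free diagnostic for some later analysis pass.
    struct UnusedFunc {
        func: Func, // HIR item id; stable across edits that don't touch it
    }

    impl DiagnosticVoucher for UnusedFunc {
        fn error_code(&self) -> GlobalErrorCode {
            GlobalErrorCode::new(AnalysisPass::Parse, 1) // stand-in pass/code
        }

        fn to_complete(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic {
            // The span is computed only when the diagnostic is rendered.
            let span = self.func.lazy_span().name().resolve(db);
            CompleteDiagnostic::new(
                Severity::Error,
                "unused function".to_string(),
                span,
                vec![],
                self.error_code(),
            )
        }
    }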
+impl DiagnosticVoucher for ParseDiagnostic { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(AnalysisPass::Parse, 0) + } + + fn to_complete(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { + let error_code = self.error_code(); + let span = Span::new(self.file, self.error.range); + CompleteDiagnostic::new(Severity::Error, self.error.msg, span, vec![], error_code) + } +} From 9ac03bc34582b13a55b81a69684cb29e85c97f1f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 8 Apr 2023 16:40:50 +0200 Subject: [PATCH 135/678] Make `HirDb` completely span-independent from the perspective of external crates --- crates/hir/src/diagnostics.rs | 2 +- crates/hir/src/hir_def/body.rs | 14 +-- crates/hir/src/hir_def/item.rs | 41 +++++-- crates/hir/src/hir_def/item_tree.rs | 23 +--- crates/hir/src/hir_def/module_tree.rs | 119 ++++++++++--------- crates/hir/src/lib.rs | 76 +++++++----- crates/hir/src/lower/body.rs | 17 +-- crates/hir/src/lower/expr.rs | 6 +- crates/hir/src/lower/item.rs | 163 ++++++++++++++------------ crates/hir/src/lower/mod.rs | 93 ++++++++++++--- crates/hir/src/lower/parse.rs | 5 +- crates/hir/src/lower/pat.rs | 4 +- crates/hir/src/lower/stmt.rs | 28 ++--- crates/hir/src/span/db.rs | 84 ------------- crates/hir/src/span/expr.rs | 17 ++- crates/hir/src/span/item.rs | 4 +- crates/hir/src/span/mod.rs | 118 ++++++++++++------- crates/hir/src/span/pat.rs | 13 +- crates/hir/src/span/stmt.rs | 17 ++- crates/hir/src/span/transition.rs | 34 ++++-- 20 files changed, 467 insertions(+), 411 deletions(-) delete mode 100644 crates/hir/src/span/db.rs diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 129be8da75..837daded19 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -5,7 +5,7 @@ use common::diagnostics::{CompleteDiagnostic, GlobalErrorCode}; -use crate::span::db::SpannedHirDb; +use crate::SpannedHirDb; /// All diagnostics accumulated in salsa-db should implement /// [`DiagnosticVoucher`] which defines the conversion into diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index dfe2a31dc0..be3ff48e00 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -9,9 +9,9 @@ use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap}; use parser::ast::{self, prelude::*}; use rustc_hash::FxHashMap; -use crate::span::{HirOrigin, LocalOrigin}; +use crate::span::HirOrigin; -use super::{Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TrackedItemId}; +use super::{Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TopLevelMod, TrackedItemId}; #[salsa::tracked] pub struct Body { @@ -24,10 +24,10 @@ pub struct Body { pub exprs: NodeStore>, #[return_ref] pub pats: NodeStore>, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) source_map: BodySourceMap, - #[return_ref] pub(crate) origin: HirOrigin, } @@ -57,8 +57,8 @@ where Ast: SourceAst, Node: EntityRef, { - pub node_to_source: SecondaryMap>, - pub source_to_node: FxHashMap, Node>, + pub node_to_source: SecondaryMap>, + pub source_to_node: FxHashMap, Node>, } impl SourceNodeMap @@ -66,12 +66,12 @@ where Ast: SourceAst, Node: EntityRef, { - pub(crate) fn insert(&mut self, node: Node, ast: LocalOrigin) { + pub(crate) fn insert(&mut self, node: Node, ast: HirOrigin) { self.node_to_source[node] = ast.clone(); self.source_to_node.insert(ast, node); } - pub(crate) fn node_to_source(&self, node: Node) -> &LocalOrigin { + pub(crate) fn node_to_source(&self, node: Node) -> &HirOrigin { &self.node_to_source[node] } } diff 
--git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 8cf77c1441..036538efe2 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -3,22 +3,26 @@ // that may take many arguments depending on the number of fields in the struct. #![allow(clippy::too_many_arguments)] +use common::{InputFile, InputIngot}; use parser::ast; use crate::{ hir_def::TraitRef, + lower, span::{ item::{ - LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyExternFnSpan, LazyFnSpan, + LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyExternFuncSpan, LazyFuncSpan, LazyImplSpan, LazyImplTraitSpan, LazyModSpan, LazyStructSpan, LazyTopLevelModSpan, LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, }, HirOrigin, }, + HirDb, }; use super::{ - AttrListId, Body, FnParamListId, GenericParamListId, IdentId, Partial, TypeId, WhereClauseId, + ingot_module_tree_impl, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, + IngotModuleTree, ItemTree, Partial, TypeId, WhereClauseId, }; #[derive( @@ -58,13 +62,21 @@ pub struct TopLevelMod { // of `module_item_tree`. pub name: IdentId, - #[return_ref] - pub(crate) origin: HirOrigin, + pub(crate) ingot: InputIngot, + pub(crate) file: InputFile, } impl TopLevelMod { pub fn lazy_span(self) -> LazyTopLevelModSpan { LazyTopLevelModSpan::new(self) } + + pub fn module_item_tree(self, db: &dyn HirDb) -> &ItemTree { + lower::module_item_tree_impl(db, self) + } + + pub fn ingot_module_tree(self, db: &dyn HirDb) -> &IngotModuleTree { + ingot_module_tree_impl(db, self.ingot(db)) + } } #[salsa::tracked] @@ -76,6 +88,8 @@ pub struct Mod { pub attributes: AttrListId, pub is_pub: bool, + pub top_mod: TopLevelMod, + #[return_ref] pub(crate) origin: HirOrigin, } @@ -98,13 +112,14 @@ pub struct Func { pub ret_ty: Option, pub modifier: ItemModifier, pub body: Option, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, } impl Func { - pub fn lazy_span(self) -> LazyFnSpan { - LazyFnSpan::new(self) + pub fn lazy_span(self) -> LazyFuncSpan { + LazyFuncSpan::new(self) } } @@ -118,13 +133,14 @@ pub struct ExternFunc { pub params: Partial, pub ret_ty: Option, pub modifier: ItemModifier, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, } impl ExternFunc { - pub fn lazy_span(self) -> LazyExternFnSpan { - LazyExternFnSpan::new(self) + pub fn lazy_span(self) -> LazyExternFuncSpan { + LazyExternFuncSpan::new(self) } } @@ -139,6 +155,7 @@ pub struct Struct { pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub fields: RecordFieldListId, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -158,6 +175,7 @@ pub struct Contract { pub attributes: AttrListId, pub is_pub: bool, pub fields: RecordFieldListId, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -179,6 +197,7 @@ pub struct Enum { pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub variants: EnumVariantListId, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -200,6 +219,7 @@ pub struct TypeAlias { pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub ty: Partial, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -219,6 +239,7 @@ pub struct Impl { pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -240,6 +261,7 @@ pub struct Trait { pub is_pub: bool, pub generic_params: 
GenericParamListId, pub where_clause: WhereClauseId, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -260,6 +282,7 @@ pub struct ImplTrait { pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -277,6 +300,7 @@ pub struct Const { pub name: Partial, pub body: Partial, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, @@ -293,6 +317,7 @@ pub struct Use { id: TrackedItemId, pub tree: Partial, + pub top_mod: TopLevelMod, #[return_ref] pub(crate) origin: HirOrigin, diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs index 83701290eb..ca0c675d01 100644 --- a/crates/hir/src/hir_def/item_tree.rs +++ b/crates/hir/src/hir_def/item_tree.rs @@ -1,15 +1,6 @@ use std::collections::{BTreeMap, BTreeSet}; -use common::InputFile; -use parser::{ - ast::{self, prelude::*}, - SyntaxNode, -}; - -use crate::{ - hir_def::{module_tree, TopLevelMod}, - lower, HirDb, -}; +use crate::hir_def::TopLevelMod; use super::ItemKind; @@ -18,7 +9,6 @@ use super::ItemKind; /// `module_tree::TopLevelModule`. #[derive(Debug, Clone, PartialEq, Eq)] pub struct ItemTree { - pub file: InputFile, pub top_mod: TopLevelMod, pub(crate) item_tree: BTreeMap, } @@ -51,17 +41,6 @@ pub(crate) struct ItemTreeNode { pub(crate) children: BTreeSet, } -#[salsa::tracked(return_ref)] -pub fn module_item_tree(db: &dyn HirDb, file: InputFile) -> ItemTree { - let node = SyntaxNode::new_root(crate::parse_file(db, file)); - let module_tree = module_tree::ingot_module_tree(db, file.ingot(db.upcast())); - - // This cast never fails even if the file content is empty. - let ast_root = ast::Root::cast(node).unwrap(); - let top_mod_name = module_tree.module_name(file); - lower::lower_file(db, file, top_mod_name, ast_root) -} - #[cfg(test)] mod tests { diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 364fca0dfa..187e9fc4e6 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -4,9 +4,9 @@ use camino::Utf8Path; use common::{InputFile, InputIngot}; use cranelift_entity::{entity_impl, PrimaryMap}; -use crate::HirDb; +use crate::{lower::map_file_to_mod_impl, HirDb}; -use super::IdentId; +use super::{IdentId, TopLevelMod}; /// This tree represents the structure of an ingot. /// Internal modules are not included in this tree, instead, they are included @@ -56,22 +56,32 @@ use super::IdentId; /// In this case, the tree is actually a forest. But we don't need to care about it. #[derive(Debug, Clone, PartialEq, Eq)] pub struct IngotModuleTree { - pub(crate) root: ToplevelModuleId, - pub(crate) module_tree: PrimaryMap, - pub(crate) file_map: BTreeMap, + pub(crate) root: ModuleTreeNodeId, + pub(crate) module_tree: PrimaryMap, + pub(crate) mod_map: BTreeMap, pub(crate) ingot: InputIngot, } impl IngotModuleTree { - #[inline] - pub fn module_name(&self, file: InputFile) -> IdentId { - self.module_data(file).name + /// Returns the tree node data of the given id. + pub fn tree_node_data(&self, id: ModuleTreeNodeId) -> &ModuleTreeNode { + &self.module_tree[id] } - fn module_data(&self, file: InputFile) -> &ToplevelModule { - let id = self.file_map[&file]; - &self.module_tree[id] + /// Returns the tree node id of the given top level module. 
+ pub fn tree_node(&self, top_mod: TopLevelMod) -> ModuleTreeNodeId { + self.mod_map[&top_mod] + } + + /// Returns the root of the tree, which corresponds to the ingot root file. + pub fn root(&self) -> ModuleTreeNodeId { + self.root + } + + /// Returns an iterator of all top level modules in this ingot. + pub fn all_modules(&self) -> impl Iterator + '_ { + self.mod_map.keys().copied() } } @@ -79,46 +89,47 @@ impl IngotModuleTree { /// top level modules. This function only depends on an ingot structure and /// external ingot dependency, and not depends on file contents. #[salsa::tracked(return_ref)] -pub fn ingot_module_tree(db: &dyn HirDb, ingot: InputIngot) -> IngotModuleTree { +pub fn ingot_module_tree_impl(db: &dyn HirDb, ingot: InputIngot) -> IngotModuleTree { IngotModuleTreeBuilder::new(db, ingot).build() } /// A top level module that is one-to-one mapped to a file. #[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct ToplevelModule { - /// A name of the module. - pub(crate) name: IdentId, - /// A file that this module is defined by. - pub(crate) file: InputFile, - /// A parent of top level module. - /// This is `None` if 1. the module is a root module or 2. the module is a - /// "floating" module. - pub(crate) parent: Option, +pub struct ModuleTreeNode { + pub top_mod: TopLevelMod, + /// A parent of the top level module. + /// This is `None` if + /// 1. the module is a root module or + /// 2. the module is a "floating" module. + pub parent: Option, /// A list of child top level module. - pub(crate) children: BTreeMap>, + pub children: BTreeMap>, } -impl ToplevelModule { - fn new(name: IdentId, file: InputFile) -> Self { +impl ModuleTreeNode { + fn new(top_mod: TopLevelMod) -> Self { Self { - name, - file, + top_mod, parent: None, children: BTreeMap::new(), } } + fn name(&self, db: &dyn HirDb) -> IdentId { + self.top_mod.name(db) + } } +/// An opaque identifier for a module tree node. 
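Taken together, these accessors support simple walks over an ingot. A hedged sketch using only crate-internal types; `db` is assumed to be any `HirDb` implementation and `IdentId::data` the accessor already used elsewhere in the crate:

    fn print_all_modules(db: &dyn HirDb, top_mod: TopLevelMod) {
        let tree = top_mod.ingot_module_tree(db);
        // Start from the ingot root and follow child edges, which are keyed
        // by module name.
        let mut stack = vec![tree.root()];
        while let Some(id) = stack.pop() {
            let node = tree.tree_node_data(id);
            println!("{}", node.top_mod.name(db).data(db));
            for children in node.children.values() {
                stack.extend(children.iter().copied());
            }
        }
    }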
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub(crate) struct ToplevelModuleId(u32); -entity_impl!(ToplevelModuleId); +pub struct ModuleTreeNodeId(u32); +entity_impl!(ModuleTreeNodeId); struct IngotModuleTreeBuilder<'db> { db: &'db dyn HirDb, ingot: InputIngot, - module_tree: PrimaryMap, - file_map: BTreeMap, - path_map: BTreeMap<&'db Utf8Path, ToplevelModuleId>, + module_tree: PrimaryMap, + mod_map: BTreeMap, + path_map: BTreeMap<&'db Utf8Path, ModuleTreeNodeId>, } impl<'db> IngotModuleTreeBuilder<'db> { @@ -127,7 +138,7 @@ impl<'db> IngotModuleTreeBuilder<'db> { db, ingot, module_tree: PrimaryMap::default(), - file_map: BTreeMap::default(), + mod_map: BTreeMap::default(), path_map: BTreeMap::default(), } } @@ -136,66 +147,62 @@ impl<'db> IngotModuleTreeBuilder<'db> { self.set_modules(); self.build_tree(); - let root_file = self.ingot.root_file(self.db.upcast()); - let root = self.file_map[&root_file]; + let top_mod = map_file_to_mod_impl(self.db, self.ingot.root_file(self.db.upcast())); + let root = self.mod_map[&top_mod]; IngotModuleTree { root, module_tree: self.module_tree, - file_map: self.file_map, + mod_map: self.mod_map, ingot: self.ingot, } } fn set_modules(&mut self) { for &file in self.ingot.files(self.db.upcast()) { - let name = self.module_name(file); + let top_mod = map_file_to_mod_impl(self.db, file); - let module_id = self.module_tree.push(ToplevelModule::new(name, file)); + let module_id = self.module_tree.push(ModuleTreeNode::new(top_mod)); self.path_map.insert(file.path(self.db.upcast()), module_id); - self.file_map.insert(file, module_id); + self.mod_map.insert(top_mod, module_id); } } - fn module_name(&self, file: InputFile) -> IdentId { - let path = file.path(self.db.upcast()); - let name = path.file_stem().unwrap(); - IdentId::new(self.db, name.to_string()) - } - fn build_tree(&mut self) { let root = self.ingot.root_file(self.db.upcast()); - for &file in self.ingot.files(self.db.upcast()) { + for &child in self.ingot.files(self.db.upcast()) { // Ignore the root file because it has no parent. - if file == root { + if child == root { continue; } - let file_path = file.path(self.db.upcast()); let root_path = root.path(self.db.upcast()); + let root_mod = map_file_to_mod_impl(self.db, root); + let child_path = child.path(self.db.upcast()); + let child_mod = map_file_to_mod_impl(self.db, child); // If the file is in the same directory as the root file, the file is a direct // child of the root. 
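The check right below is plain `Utf8Path` comparison; a self-contained illustration with made-up paths:

    use camino::Utf8Path;

    fn main() {
        let root = Utf8Path::new("/ingot/src/lib.fe");
        let sibling = Utf8Path::new("/ingot/src/foo.fe");
        let nested = Utf8Path::new("/ingot/src/foo/bar.fe");

        // Same directory as the root file => direct child of the root module.
        assert_eq!(sibling.parent(), root.parent());
        // Everything else must still live under the root file's directory.
        assert!(nested.parent().unwrap().starts_with(root.parent().unwrap()));
    }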
- if file_path.parent() == root_path.parent() { - let root_mod = self.file_map[&root]; - let cur_mod = self.file_map[&file]; + if child_path.parent() == root_path.parent() { + let root_mod = self.mod_map[&root_mod]; + let cur_mod = self.mod_map[&child_mod]; self.add_branch(root_mod, cur_mod); continue; } - assert!(file_path + assert!(child_path .parent() .unwrap() .starts_with(root_path.parent().unwrap())); - if let Some(parent_mod) = self.parent_module(file) { - let cur_mod = self.file_map[&file]; + if let Some(parent_mod) = self.parent_module(child) { + let cur_mod = self.mod_map[&child_mod]; self.add_branch(parent_mod, cur_mod); } } } - fn parent_module(&self, file: InputFile) -> Option { + fn parent_module(&self, file: InputFile) -> Option { let file_path = file.path(self.db.upcast()); let file_dir = file_path.parent()?; let parent_dir = file_dir.parent()?; @@ -205,8 +212,8 @@ impl<'db> IngotModuleTreeBuilder<'db> { self.path_map.get(parent_mod_path.as_path()).copied() } - fn add_branch(&mut self, parent: ToplevelModuleId, child: ToplevelModuleId) { - let child_name = self.module_tree[child].name; + fn add_branch(&mut self, parent: ModuleTreeNodeId, child: ModuleTreeNodeId) { + let child_name = self.module_tree[child].name(self.db); self.module_tree[parent] .children .entry(child_name) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index a7d1e6f6da..e8c6a464e2 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,7 +1,11 @@ use common::{InputDb, Upcast}; +use hir_def::ingot_module_tree_impl; pub use lower::parse::ParseDiagnostic; -use lower::parse::{parse_file, ParseDiagnosticAccumulator}; +use lower::{ + map_file_to_mod_impl, module_item_tree_impl, + parse::{parse_file_impl, ParseDiagnosticAccumulator}, +}; pub mod diagnostics; pub mod hir_def; @@ -42,24 +46,38 @@ pub struct Jar( hir_def::UseTreeId, /// Accumulated diagnostics. ParseDiagnosticAccumulator, - /// Tracked functions - hir_def::ingot_module_tree, - hir_def::module_item_tree, - parse_file, + /// Private tracked functions. These are not part of the public API, and + /// thus, can't be accessed from outside of the crate without implementing + /// [`LowerHirDb`] marker trait. + ingot_module_tree_impl, + module_item_tree_impl, + map_file_to_mod_impl, + parse_file_impl, ); -pub trait HirDb: salsa::DbWithJar + InputDb + Upcast { - /// Returns the diagnostics produced by parsing the given file. - fn diagnostics_for_parse(&self, file: common::InputFile) -> Vec - where - Self: Sized, - { - parse_file(self, file); - parse_file::accumulated::(self, file) - } -} +pub trait HirDb: salsa::DbWithJar + InputDb + Upcast {} impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} +/// `LowerHirDb` is a marker trait for lowering AST to HIR items. +/// All code that requires [`LowerHirDb`] is considered have a possibility to +/// invalidate the cache in salsa when a revision is updated. Therefore, +/// implementations relying on `LowerHirDb` are prohibited in all +/// Analysis phases. +pub trait LowerHirDb: HirDb + Upcast {} + +/// `SpannedHirDb` is a marker trait for extracting span-dependent information +/// from HIR Items. +/// All code that requires [`SpannedHirDb`] is considered have a possibility to +/// invalidate the cache in salsa when a revision is updated. Therefore, +/// implementations relying on `SpannedHirDb` are prohibited in all +/// Analysis phases. 
+/// +/// This marker is mainly used to inject [HirOrigin](crate::span::HirOrigin) to +/// generate [CompleteDiagnostic](common::diagnostics::CompleteDiagnostic) from +/// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). +/// See also `[LazySpan]`[`crate::span::LazySpan`] for more details. +pub trait SpannedHirDb: HirDb + Upcast {} + #[cfg(test)] mod test_db { use std::collections::BTreeSet; @@ -70,8 +88,10 @@ mod test_db { }; use crate::{ - hir_def::{module_item_tree, ItemKind, ItemTree}, - span::{db::SpannedHirDb, LazySpan}, + hir_def::{ItemKind, ItemTree, TopLevelMod}, + lower::{map_file_to_mod, module_item_tree}, + span::LazySpan, + LowerHirDb, SpannedHirDb, }; #[derive(Default)] @@ -79,13 +99,13 @@ mod test_db { pub(crate) struct TestDb { storage: salsa::Storage, } - impl SpannedHirDb for TestDb {} - + impl LowerHirDb for TestDb {} impl salsa::Database for TestDb { fn salsa_event(&self, _: salsa::Event) {} } - + /// Implements `ParallelDatabase` to check the all tracked + /// structs/functions are `Send`. impl salsa::ParallelDatabase for TestDb { fn snapshot(&self) -> salsa::Snapshot { salsa::Snapshot::new(TestDb { @@ -93,7 +113,6 @@ mod test_db { }) } } - impl Upcast for TestDb { fn upcast(&self) -> &(dyn common::InputDb + 'static) { self @@ -106,27 +125,28 @@ mod test_db { } impl TestDb { - pub fn parse_source(&mut self, text: &str) -> (InputFile, &ItemTree) { + pub fn parse_source(&mut self, text: &str) -> (TopLevelMod, &ItemTree) { let file = self.standalone_file(text); - (file, module_item_tree(self, file)) + let top_mod = map_file_to_mod(self, file); + (top_mod, module_item_tree(self, top_mod)) } /// Parses the given source text and returns the first inner item in the /// file. - pub fn parse_source_to_first_item(&mut self, text: &str) -> (InputFile, T) + pub fn parse_source_to_first_item(&mut self, text: &str) -> (TopLevelMod, T) where ItemKind: TryInto, { - let (file, tree) = self.parse_source(text); - let top_mod = tree.top_mod; + let (top_mod, tree) = self.parse_source(text); ( - file, + top_mod, tree.children(top_mod).next().unwrap().try_into().unwrap(), ) } - pub fn text_at(&self, file: InputFile, span: &impl LazySpan) -> &str { + pub fn text_at(&self, top_mod: TopLevelMod, span: &impl LazySpan) -> &str { let range = span.resolve(self).range; + let file = top_mod.file(self.upcast()); let text = file.text(self.upcast()); &text[range.start().into()..range.end().into()] } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index e9bd26e5f0..2fd069f113 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -5,7 +5,7 @@ use crate::{ Body, BodySourceMap, Expr, ExprId, NodeStore, Partial, Pat, PatId, Stmt, StmtId, TrackedBodyId, TrackedItemId, }, - span::{HirOrigin, LocalOrigin}, + span::HirOrigin, }; use super::FileLowerCtxt; @@ -52,14 +52,14 @@ pub(super) struct BodyCtxt<'ctxt, 'db> { } impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { - pub(super) fn push_expr(&mut self, expr: Expr, origin: LocalOrigin) -> ExprId { + pub(super) fn push_expr(&mut self, expr: Expr, origin: HirOrigin) -> ExprId { let expr_id = self.exprs.push(Partial::Present(expr)); self.source_map.expr_map.insert(expr_id, origin); expr_id } - pub(super) fn push_invalid_expr(&mut self, origin: LocalOrigin) -> ExprId { + pub(super) fn push_invalid_expr(&mut self, origin: HirOrigin) -> ExprId { let expr_id = self.exprs.push(Partial::Absent); self.source_map.expr_map.insert(expr_id, origin); @@ -68,18 +68,18 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { pub(super) fn 
push_missing_expr(&mut self) -> ExprId { let expr_id = self.exprs.push(Partial::Absent); - self.source_map.expr_map.insert(expr_id, LocalOrigin::None); + self.source_map.expr_map.insert(expr_id, HirOrigin::None); expr_id } - pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: LocalOrigin) -> StmtId { + pub(super) fn push_stmt(&mut self, stmt: Stmt, origin: HirOrigin) -> StmtId { let stmt_id = self.stmts.push(Partial::Present(stmt)); self.source_map.stmt_map.insert(stmt_id, origin); stmt_id } - pub(super) fn push_pat(&mut self, pat: Pat, origin: LocalOrigin) -> PatId { + pub(super) fn push_pat(&mut self, pat: Pat, origin: HirOrigin) -> PatId { let pat_id = self.pats.push(Partial::Present(pat)); self.source_map.pat_map.insert(pat_id, origin); pat_id @@ -87,7 +87,7 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { pub(super) fn push_missing_pat(&mut self) -> PatId { let pat_id = self.pats.push(Partial::Absent); - self.source_map.pat_map.insert(pat_id, LocalOrigin::None); + self.source_map.pat_map.insert(pat_id, HirOrigin::None); pat_id } @@ -104,13 +104,14 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { } fn build(self, ast: &ast::Expr) -> Body { - let origin = HirOrigin::raw(self.f_ctxt.file, ast); + let origin = HirOrigin::raw(ast); let body = Body::new( self.f_ctxt.db, self.bid, self.stmts, self.exprs, self.pats, + self.f_ctxt.top_mod, self.source_map, origin, ); diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index ac61bd8034..61add0b2cc 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -2,7 +2,7 @@ use parser::ast::{self, prelude::*}; use crate::{ hir_def::{expr::*, Body, GenericArgListId, IdentId, IntegerId, LitKind, Pat, PathId, Stmt}, - span::LocalOrigin, + span::HirOrigin, }; use super::body::BodyCtxt; @@ -15,7 +15,7 @@ impl Expr { let lit = LitKind::lower_ast(ctxt.f_ctxt, lit); Self::Lit(lit) } else { - return ctxt.push_invalid_expr(LocalOrigin::raw(&ast)); + return ctxt.push_invalid_expr(HirOrigin::raw(&ast)); } } @@ -166,7 +166,7 @@ impl Expr { } }; - ctxt.push_expr(expr, LocalOrigin::raw(&ast)) + ctxt.push_expr(expr, HirOrigin::raw(&ast)) } pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_, '_>, ast: Option) -> ExprId { diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 80e553c803..ea0cecb45b 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -10,18 +10,54 @@ use crate::{ use super::FileLowerCtxt; -impl TopLevelMod { - pub(crate) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, name: IdentId, ast: ast::Root) -> Self { - ctxt.enter_scope(); - - let id = TrackedItemId::TopLevelMod(name); - if let Some(items) = ast.items() { - lower_module_items(ctxt, id, items); +pub(crate) fn lower_module_items( + ctxt: &mut FileLowerCtxt<'_>, + id: TrackedItemId, + items: ast::ItemList, +) { + for item in items { + match item.kind() { + ast::ItemKind::Mod(mod_) => { + Mod::lower_ast(ctxt, id.clone(), mod_); + } + ast::ItemKind::Fn(fn_) => { + Func::lower_ast(ctxt, id.clone(), fn_); + } + ast::ItemKind::Struct(struct_) => { + Struct::lower_ast(ctxt, id.clone(), struct_); + } + ast::ItemKind::Contract(contract) => { + Contract::lower_ast(ctxt, id.clone(), contract); + } + ast::ItemKind::Enum(enum_) => { + Enum::lower_ast(ctxt, id.clone(), enum_); + } + ast::ItemKind::TypeAlias(alias) => { + TypeAlias::lower_ast(ctxt, id.clone(), alias); + } + ast::ItemKind::Impl(impl_) => { + Impl::lower_ast(ctxt, id.clone(), impl_); + } + ast::ItemKind::Trait(trait_) => { + Trait::lower_ast(ctxt, id.clone(), 
trait_); + } + ast::ItemKind::ImplTrait(impl_trait) => { + ImplTrait::lower_ast(ctxt, id.clone(), impl_trait); + } + ast::ItemKind::Const(const_) => { + Const::lower_ast(ctxt, id.clone(), const_); + } + ast::ItemKind::Use(use_) => { + Use::lower_ast(ctxt, id.clone(), use_); + } + ast::ItemKind::Extern(extern_) => { + if let Some(extern_block) = extern_.extern_block() { + for fn_ in extern_block { + ExternFunc::lower_ast(ctxt, id.clone(), fn_); + } + } + } } - - let origin = HirOrigin::raw(ctxt.file, &ast); - let top_mod = Self::new(ctxt.db, name, origin); - ctxt.leave_scope(top_mod) } } @@ -41,8 +77,8 @@ impl Mod { lower_module_items(ctxt, id.clone(), items); } - let origin = HirOrigin::raw(ctxt.file, &ast); - let mod_ = Self::new(ctxt.db, id, name, attributes, is_pub, origin); + let origin = HirOrigin::raw(&ast); + let mod_ = Self::new(ctxt.db, id, name, attributes, is_pub, ctxt.top_mod, origin); ctxt.leave_scope(mod_) } } @@ -74,7 +110,7 @@ impl Func { ast::Expr::cast(body.syntax().clone()).unwrap(), ) }); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); let fn_ = Self::new( ctxt.db, @@ -87,6 +123,7 @@ impl Func { ret_ty, modifier, body, + ctxt.top_mod, origin, ); ctxt.leave_scope(fn_) @@ -109,7 +146,7 @@ impl Struct { let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); let struct_ = Self::new( ctxt.db, @@ -120,6 +157,7 @@ impl Struct { generic_params, where_clause, fields, + ctxt.top_mod, origin, ); ctxt.leave_scope(struct_) @@ -140,9 +178,18 @@ impl Contract { let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); - let contract = Self::new(ctxt.db, id, name, attributes, is_pub, fields, origin); + let contract = Self::new( + ctxt.db, + id, + name, + attributes, + is_pub, + fields, + ctxt.top_mod, + origin, + ); ctxt.leave_scope(contract) } } @@ -163,7 +210,7 @@ impl Enum { let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let variants = EnumVariantListId::lower_ast_opt(ctxt, ast.variants()); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); let enum_ = Self::new( ctxt.db, @@ -174,6 +221,7 @@ impl Enum { generic_params, where_clause, variants, + ctxt.top_mod, origin, ); ctxt.leave_scope(enum_) @@ -196,7 +244,7 @@ impl TypeAlias { let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); let alias = Self::new( ctxt.db, @@ -207,6 +255,7 @@ impl TypeAlias { generic_params, where_clause, ty, + ctxt.top_mod, origin, ); ctxt.leave_scope(alias) @@ -227,7 +276,7 @@ impl Impl { let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); - let 
origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); if let Some(item_list) = ast.item_list() { for impl_item in item_list { @@ -242,6 +291,7 @@ impl Impl { attributes, generic_params, where_clause, + ctxt.top_mod, origin, ); ctxt.leave_scope(impl_) @@ -263,7 +313,7 @@ impl Trait { let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); if let Some(item_list) = ast.item_list() { for impl_item in item_list { @@ -279,6 +329,7 @@ impl Trait { is_pub, generic_params, where_clause, + ctxt.top_mod, origin, ); @@ -301,7 +352,7 @@ impl ImplTrait { let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); if let Some(item_list) = ast.item_list() { for impl_item in item_list { @@ -317,6 +368,7 @@ impl ImplTrait { attributes, generic_params, where_clause, + ctxt.top_mod, origin, ); ctxt.leave_scope(impl_trait) @@ -337,9 +389,9 @@ impl Const { .value() .map(|ast| Body::lower_ast(ctxt, id.clone(), ast)) .into(); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); - let const_ = Self::new(ctxt.db, id, name, body, origin); + let const_ = Self::new(ctxt.db, id, name, body, ctxt.top_mod, origin); ctxt.leave_scope(const_) } } @@ -355,8 +407,8 @@ impl Use { let tree = UseTreeId::lower_ast_partial(ctxt, ast.use_tree()); let id = TrackedItemId::Use(tree).join(parent_id); - let origin = HirOrigin::raw(ctxt.file, &ast); - let use_ = Self::new(ctxt.db, id, tree, origin); + let origin = HirOrigin::raw(&ast); + let use_ = Self::new(ctxt.db, id, tree, ctxt.top_mod, origin); ctxt.leave_scope(use_) } } @@ -379,10 +431,18 @@ impl ExternFunc { .into(); let ret_ty = ast.ret_ty().map(|ty| TypeId::lower_ast(ctxt, ty)); let modifier = ItemModifier::lower_ast(ast.modifier()); - let origin = HirOrigin::raw(ctxt.file, &ast); + let origin = HirOrigin::raw(&ast); let extern_func = Self::new( - ctxt.db, id, name, attributes, params, ret_ty, modifier, origin, + ctxt.db, + id, + name, + attributes, + params, + ret_ty, + modifier, + ctxt.top_mod, + origin, ); ctxt.leave_scope(extern_func) } @@ -451,50 +511,3 @@ impl EnumVariant { Self { name, ty } } } - -fn lower_module_items(ctxt: &mut FileLowerCtxt<'_>, id: TrackedItemId, items: ast::ItemList) { - for item in items { - match item.kind() { - ast::ItemKind::Mod(mod_) => { - Mod::lower_ast(ctxt, id.clone(), mod_); - } - ast::ItemKind::Fn(fn_) => { - Func::lower_ast(ctxt, id.clone(), fn_); - } - ast::ItemKind::Struct(struct_) => { - Struct::lower_ast(ctxt, id.clone(), struct_); - } - ast::ItemKind::Contract(contract) => { - Contract::lower_ast(ctxt, id.clone(), contract); - } - ast::ItemKind::Enum(enum_) => { - Enum::lower_ast(ctxt, id.clone(), enum_); - } - ast::ItemKind::TypeAlias(alias) => { - TypeAlias::lower_ast(ctxt, id.clone(), alias); - } - ast::ItemKind::Impl(impl_) => { - Impl::lower_ast(ctxt, id.clone(), impl_); - } - ast::ItemKind::Trait(trait_) => { - Trait::lower_ast(ctxt, id.clone(), trait_); - } - ast::ItemKind::ImplTrait(impl_trait) => { - ImplTrait::lower_ast(ctxt, id.clone(), impl_trait); - } - 
ast::ItemKind::Const(const_) => { - Const::lower_ast(ctxt, id.clone(), const_); - } - ast::ItemKind::Use(use_) => { - Use::lower_ast(ctxt, id.clone(), use_); - } - ast::ItemKind::Extern(extern_) => { - if let Some(extern_block) = extern_.extern_block() { - for fn_ in extern_block { - ExternFunc::lower_ast(ctxt, id.clone(), fn_); - } - } - } - } - } -} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index e9f9209003..b8846d52c7 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -3,14 +3,22 @@ use std::collections::{BTreeMap, BTreeSet}; use common::InputFile; use num_bigint::BigUint; use num_traits::Num; -use parser::{ast, SyntaxToken}; +use parser::{ + ast::{self, prelude::*}, + GreenNode, SyntaxNode, SyntaxToken, +}; use crate::{ hir_def::{ IdentId, IntegerId, ItemKind, ItemTree, ItemTreeNode, LitKind, Partial, StringId, - TopLevelMod, + TopLevelMod, TrackedItemId, }, - HirDb, + HirDb, LowerHirDb, ParseDiagnostic, +}; + +use self::{ + item::lower_module_items, + parse::{parse_file_impl, ParseDiagnosticAccumulator}, }; pub(crate) mod parse; @@ -26,38 +34,87 @@ mod stmt; mod types; mod use_tree; -pub(super) fn lower_file( - db: &dyn HirDb, - file: InputFile, - top_mod_name: IdentId, - root_node: ast::Root, -) -> ItemTree { - let mut ctxt = FileLowerCtxt::new(db, file); - let top_mod = TopLevelMod::lower_ast(&mut ctxt, top_mod_name, root_node); - ctxt.build(top_mod) +/// Maps the given file to a top-level module. +/// This function just maps the file to a top-level module, and doesn't perform +/// any parsing or lowering. +/// To perform the actual lowering, use `module_item_tree` function. +pub fn map_file_to_mod(db: &dyn LowerHirDb, file: InputFile) -> TopLevelMod { + map_file_to_mod_impl(db.upcast(), file) +} + +/// Returns the item tree of the given top-level module. +pub fn module_item_tree(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ItemTree { + module_item_tree_impl(db.upcast(), top_mod) +} + +/// Returns the root node of the given top-level module. +/// This function also returns the diagnostics produced by parsing the file. +pub fn parse_file_with_diag( + db: &dyn LowerHirDb, + top_mod: TopLevelMod, +) -> (GreenNode, Vec) { + ( + parse_file_impl(db.upcast(), top_mod), + parse_file_impl::accumulated::(db.upcast(), top_mod), + ) +} + +/// Returns the root node of the given top-level module. +/// If diagnostics are needed, use [`parse_file_with_diag`] instead. +pub fn parse_file(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> GreenNode { + parse_file_impl(db.upcast(), top_mod) +} + +#[salsa::tracked] +pub(crate) fn map_file_to_mod_impl(db: &dyn HirDb, file: InputFile) -> TopLevelMod { + let path = file.path(db.upcast()); + let name = path.file_stem().unwrap(); + let mod_name = IdentId::new(db, name.to_string()); + let ingot = file.ingot(db.upcast()); + TopLevelMod::new(db, mod_name, ingot, file) +} + +#[salsa::tracked(return_ref)] +pub(crate) fn module_item_tree_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> ItemTree { + let ast = top_mod_ast(db, top_mod); + let mut ctxt = FileLowerCtxt::new(db, top_mod); + + ctxt.enter_scope(); + let id = TrackedItemId::TopLevelMod(top_mod.name(db)); + if let Some(items) = ast.items() { + lower_module_items(&mut ctxt, id, items); + } + ctxt.leave_scope(top_mod); + + ctxt.build() +} + +pub(crate) fn top_mod_ast(db: &dyn HirDb, top_mod: TopLevelMod) -> ast::Root { + let node = SyntaxNode::new_root(parse_file_impl(db, top_mod)); + // This cast never fails even if the file content is empty. 
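The public lowering surface now boils down to a handful of functions gated behind `LowerHirDb`: map a file to its top-level module, then ask for whatever derived data is needed. A usage sketch, assuming a `LowerHirDb` implementation is at hand and that these functions stay reachable under `hir::lower` (the re-export path is not shown in this hunk); `lower_one_file` is a hypothetical helper:

    use common::InputFile;
    use hir::{
        hir_def::TopLevelMod,
        lower::{map_file_to_mod, module_item_tree, parse_file_with_diag},
        LowerHirDb,
    };

    fn lower_one_file(db: &dyn LowerHirDb, file: InputFile) {
        // Cheap: only maps the file to a `TopLevelMod`; no parsing or lowering yet.
        let top_mod: TopLevelMod = map_file_to_mod(db, file);

        // Parsing happens on demand inside the tracked function; diagnostics are
        // accumulated per top-level module.
        let (_green_node, diags) = parse_file_with_diag(db, top_mod);
        if !diags.is_empty() {
            // Surface parse errors before going further.
        }

        // Lower to the HIR item tree and walk the module's direct children.
        let item_tree = module_item_tree(db, top_mod);
        for _child in item_tree.children(top_mod) {
            // Inspect each top-level item here.
        }
    }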
+ ast::Root::cast(node).unwrap() } pub struct FileLowerCtxt<'db> { db: &'db dyn HirDb, - file: InputFile, scope_stack: Vec>, item_tree: BTreeMap, + top_mod: TopLevelMod, } impl<'db> FileLowerCtxt<'db> { - pub(super) fn new(db: &'db dyn HirDb, file: InputFile) -> Self { + pub(super) fn new(db: &'db dyn HirDb, top_mod: TopLevelMod) -> Self { Self { db, - file, scope_stack: vec![], item_tree: BTreeMap::new(), + top_mod, } } - pub(super) fn build(self, top_mod: TopLevelMod) -> ItemTree { + pub(super) fn build(self) -> ItemTree { ItemTree { - file: self.file, - top_mod, + top_mod: self.top_mod, item_tree: self.item_tree, } } diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index 777e167942..29f60cb7f5 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -4,10 +4,11 @@ use common::{ }; use parser::GreenNode; -use crate::{diagnostics::DiagnosticVoucher, span::db::SpannedHirDb, HirDb}; +use crate::{diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, HirDb, SpannedHirDb}; #[salsa::tracked] -pub(crate) fn parse_file(db: &dyn HirDb, file: InputFile) -> GreenNode { +pub(crate) fn parse_file_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> GreenNode { + let file = top_mod.file(db); let text = file.text(db.upcast()); let (node, parse_errors) = parser::parse_source_file(text); diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs index 5ee28a6b33..6a17669d1c 100644 --- a/crates/hir/src/lower/pat.rs +++ b/crates/hir/src/lower/pat.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::{ hir_def::{pat::*, IdentId, LitKind, PathId}, - span::LocalOrigin, + span::HirOrigin, }; use super::body::BodyCtxt; @@ -63,7 +63,7 @@ impl Pat { } }; - ctxt.push_pat(pat, LocalOrigin::raw(&ast)) + ctxt.push_pat(pat, HirOrigin::raw(&ast)) } pub(super) fn lower_ast_opt(ctxt: &mut BodyCtxt<'_, '_>, ast: Option) -> PatId { diff --git a/crates/hir/src/lower/stmt.rs b/crates/hir/src/lower/stmt.rs index 26321f4e86..6c90971754 100644 --- a/crates/hir/src/lower/stmt.rs +++ b/crates/hir/src/lower/stmt.rs @@ -2,7 +2,7 @@ use parser::ast::{self, prelude::*}; use crate::{ hir_def::{stmt::*, ArithBinOp, Expr, Pat, PathId, TypeId}, - span::{AugAssignDesugared, LocalOrigin}, + span::{AugAssignDesugared, HirOrigin}, }; use super::body::BodyCtxt; @@ -16,7 +16,7 @@ impl Stmt { .type_annotation() .map(|ty| TypeId::lower_ast(ctxt.f_ctxt, ty)); let init = let_.initializer().map(|init| Expr::lower_ast(ctxt, init)); - (Stmt::Let(pat, ty, init), LocalOrigin::raw(&ast)) + (Stmt::Let(pat, ty, init), HirOrigin::raw(&ast)) } ast::StmtKind::Assign(assign) => { let lhs = assign @@ -28,7 +28,7 @@ impl Stmt { .expr() .map(|expr| Expr::lower_ast(ctxt, expr)) .unwrap_or_else(|| ctxt.push_missing_expr()); - (Stmt::Assign(lhs, rhs), LocalOrigin::raw(&ast)) + (Stmt::Assign(lhs, rhs), HirOrigin::raw(&ast)) } ast::StmtKind::AugAssign(aug_assign) => desugar_aug_assign(ctxt, &aug_assign), @@ -42,7 +42,7 @@ impl Stmt { .and_then(|body| ast::Expr::cast(body.syntax().clone())), ); - (Stmt::For(bind, iter, body), LocalOrigin::raw(&ast)) + (Stmt::For(bind, iter, body), HirOrigin::raw(&ast)) } ast::StmtKind::While(while_) => { @@ -54,23 +54,23 @@ impl Stmt { .and_then(|body| ast::Expr::cast(body.syntax().clone())), ); - (Stmt::While(cond, body), LocalOrigin::raw(&ast)) + (Stmt::While(cond, body), HirOrigin::raw(&ast)) } - ast::StmtKind::Continue(_) => (Stmt::Continue, LocalOrigin::raw(&ast)), + ast::StmtKind::Continue(_) => (Stmt::Continue, HirOrigin::raw(&ast)), - ast::StmtKind::Break(_) => (Stmt::Break, 
LocalOrigin::raw(&ast)), + ast::StmtKind::Break(_) => (Stmt::Break, HirOrigin::raw(&ast)), ast::StmtKind::Return(ret) => { let expr = ret .has_value() .then(|| Expr::push_to_body_opt(ctxt, ret.expr())); - (Stmt::Return(expr), LocalOrigin::raw(&ast)) + (Stmt::Return(expr), HirOrigin::raw(&ast)) } ast::StmtKind::Expr(expr) => { let expr = Expr::push_to_body_opt(ctxt, expr.expr()); - (Stmt::Expr(expr), LocalOrigin::raw(&ast)) + (Stmt::Expr(expr), HirOrigin::raw(&ast)) } }; @@ -81,7 +81,7 @@ impl Stmt { fn desugar_aug_assign( ctxt: &mut BodyCtxt<'_, '_>, ast: &ast::AugAssignStmt, -) -> (Stmt, LocalOrigin) { +) -> (Stmt, HirOrigin) { let lhs_ident = ast.ident(); let path = lhs_ident .clone() @@ -91,7 +91,7 @@ fn desugar_aug_assign( let lhs_pat = if let Some(path) = path { ctxt.push_pat( Pat::Path(Some(path).into()), - LocalOrigin::desugared(lhs_origin.clone()), + HirOrigin::desugared(lhs_origin.clone()), ) } else { ctxt.push_missing_pat() @@ -100,7 +100,7 @@ fn desugar_aug_assign( let binop_lhs = if let Some(path) = path { ctxt.push_expr( Expr::Path(Some(path).into()), - LocalOrigin::desugared(lhs_origin), + HirOrigin::desugared(lhs_origin), ) } else { ctxt.push_missing_expr() @@ -114,11 +114,11 @@ fn desugar_aug_assign( let binop = ast.op().map(|op| ArithBinOp::lower_ast(op).into()).into(); let expr = ctxt.push_expr( Expr::Bin(binop_lhs, binop_rhs, binop), - LocalOrigin::desugared(AugAssignDesugared::stmt(ast)), + HirOrigin::desugared(AugAssignDesugared::stmt(ast)), ); ( Stmt::Assign(lhs_pat, expr), - LocalOrigin::desugared(AugAssignDesugared::stmt(ast)), + HirOrigin::desugared(AugAssignDesugared::stmt(ast)), ) } diff --git a/crates/hir/src/span/db.rs b/crates/hir/src/span/db.rs deleted file mode 100644 index d51ef4f5fc..0000000000 --- a/crates/hir/src/span/db.rs +++ /dev/null @@ -1,84 +0,0 @@ -use common::Upcast; -use parser::ast; - -use crate::{ - hir_def::{ - Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, - Trait, TypeAlias, Use, - }, - HirDb, -}; - -use super::HirOrigin; - -/// `SpannedHirDb` is a feature gate for extracting span-dependent information -/// from HIR Items. All code that requires [`SpannedHirDb`] is considered to -/// invalidate the cache in salsa when a revision is updated. -/// Therefore, implementations relying on `SpannedHirDb` are prohibited in all -/// Analysis phases. -/// -/// SpanDb is mainly used to inject information about -/// [HirOrigin] to generate -/// [CompleteDiagnostic](common::diagnostics::CompleteDiagnostic) from -/// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). 
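With `span/db.rs` removed, span extraction goes through the free functions kept in `span/mod.rs` plus `LazySpan::resolve` on a `SpannedHirDb`. A sketch modeled on `TestDb::text_at` from this patch; `name_text` is a hypothetical helper and the crate-internal `TestDb` stands in for any spanned database:

    use common::Upcast;
    use crate::{hir_def::Func, span::LazySpan, test_db::TestDb};

    fn name_text(db: &TestDb, func: Func) -> String {
        // `resolve` is the only step that needs the `SpannedHirDb` capability.
        let range = func.lazy_span().name().resolve(db).range;
        // The file is reached through the owning top-level module, not stored in the origin.
        let file = func.top_mod(db.upcast()).file(db.upcast());
        let text = file.text(db.upcast());
        text[range.start().into()..range.end().into()].to_string()
    }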
-pub trait SpannedHirDb: HirDb + Upcast { - fn toplevel_ast(&self, item: TopLevelMod) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn mod_ast(&self, item: Mod) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn func_ast(&self, item: Func) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn extern_func_ast(&self, item: ExternFunc) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn struct_ast(&self, item: Struct) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn contract_ast(&self, item: Contract) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn enum_ast(&self, item: Enum) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn type_alias_ast(&self, item: TypeAlias) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn impl_ast(&self, item: Impl) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn trait_ast(&self, item: Trait) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn impl_trait_ast(&self, item: ImplTrait) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn const_ast(&self, item: Const) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn use_ast(&self, item: Use) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn body_ast(&self, item: Body) -> &HirOrigin { - item.origin(self.upcast()) - } - - fn body_source_map(&self, item: Body) -> &crate::hir_def::BodySourceMap { - item.source_map(self.upcast()) - } -} diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index 32d2057b67..32ca52910c 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -3,13 +3,12 @@ use parser::{ast, SyntaxNode}; use crate::{ hir_def::{Body, ExprId}, - parse_file, span::{params::LazyGenericArgListSpan, path::LazyPathSpan, LazySpanAtom}, + SpannedHirDb, }; use super::{ - db::SpannedHirDb, - define_lazy_span_node, + body_ast, body_source_map, define_lazy_span_node, transition::{ChainRoot, SpanTransitionChain}, }; @@ -171,15 +170,13 @@ struct ExprRoot { impl ChainRoot for ExprRoot { fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { - let body_ast = db.body_ast(self.body); - let file = body_ast.file; - let source_map = db.body_source_map(self.body); - let pat_source = source_map.expr_map.node_to_source(self.expr); - let ptr = pat_source + let source_map = body_source_map(db, self.body); + let expr_source = source_map.expr_map.node_to_source(self.expr); + let ptr = expr_source .syntax_ptr() - .unwrap_or_else(|| body_ast.syntax_ptr().unwrap()); + .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let (file, root_node) = self.body.top_mod(db.upcast()).root(db); let node = ptr.to_node(&root_node); (file, node) } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 98e2d1c093..5b846eb1c7 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -31,7 +31,7 @@ define_lazy_span_node!( ); define_lazy_span_node!( - LazyFnSpan, + LazyFuncSpan, ast::Fn, new(Func), @token { @@ -48,7 +48,7 @@ define_lazy_span_node!( ); define_lazy_span_node!( - LazyExternFnSpan, + LazyExternFuncSpan, ast::Fn, new(ExternFunc), @token { diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index cd670b3dee..43fe74112d 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -3,12 +3,18 @@ use parser::{ TextRange, }; -use common::{diagnostics::Span, InputFile}; - -use self::db::SpannedHirDb; +use common::diagnostics::Span; + +use crate::{ + hir_def::{ + Body, Const, 
Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, + }, + lower::top_mod_ast, + SpannedHirDb, +}; pub mod attr; -pub mod db; pub mod expr; pub mod item; pub mod params; @@ -20,44 +26,80 @@ pub mod use_tree; mod transition; -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct HirOrigin -where - T: AstNode, -{ - pub file: InputFile, - pub kind: LocalOrigin, +/// The trait provides a way to extract [`Span`](common::diagnostics::Span) from +/// types which don't have a span information directly, but can be resolved into +/// a span lazily. +pub trait LazySpan { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span; } -impl HirOrigin -where - T: AstNode, -{ - fn syntax_ptr(&self) -> Option { - self.kind.syntax_ptr() - } +pub fn toplevel_ast(db: &dyn SpannedHirDb, item: TopLevelMod) -> HirOrigin { + HirOrigin::raw(&top_mod_ast(db.upcast(), item)) } -impl HirOrigin -where - T: AstNode, -{ - pub(crate) fn new(file: InputFile, origin: LocalOrigin) -> Self { - HirOrigin { file, kind: origin } - } +pub fn mod_ast(db: &dyn SpannedHirDb, item: Mod) -> &HirOrigin { + item.origin(db.upcast()) +} - pub(crate) fn raw(file: InputFile, ast: &T) -> Self { - Self::new(file, LocalOrigin::raw(ast)) - } +pub fn func_ast(db: &dyn SpannedHirDb, item: Func) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn extern_func_ast(db: &dyn SpannedHirDb, item: ExternFunc) -> &HirOrigin { + item.origin(db.upcast()) } -/// This enum represents the origin of the HIR node is a file. +pub fn struct_ast(db: &dyn SpannedHirDb, item: Struct) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn contract_ast(db: &dyn SpannedHirDb, item: Contract) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn enum_ast(db: &dyn SpannedHirDb, item: Enum) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn type_alias_ast(db: &dyn SpannedHirDb, item: TypeAlias) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn impl_ast(db: &dyn SpannedHirDb, item: Impl) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn trait_ast(db: &dyn SpannedHirDb, item: Trait) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn impl_trait_ast(db: &dyn SpannedHirDb, item: ImplTrait) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn const_ast(db: &dyn SpannedHirDb, item: Const) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn use_ast(db: &dyn SpannedHirDb, item: Use) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn body_ast(db: &dyn SpannedHirDb, item: Body) -> &HirOrigin { + item.origin(db.upcast()) +} + +pub fn body_source_map(db: &dyn SpannedHirDb, item: Body) -> &crate::hir_def::BodySourceMap { + item.source_map(db.upcast()) +} + +/// This enum represents the origin of the HIR node in a file. /// The origin has three possible kinds. /// 1. `Raw` is used for nodes that are created by the parser and not /// 2. `Expanded` is used for nodes that are created by the compiler and not /// 3. 
`Desugared` is used for nodes that are created by the compiler and not #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum LocalOrigin +pub enum HirOrigin where T: AstNode, { @@ -78,7 +120,7 @@ where None, } -impl LocalOrigin +impl HirOrigin where T: AstNode, { @@ -88,8 +130,8 @@ where fn syntax_ptr(&self) -> Option { match self { - LocalOrigin::Raw(ptr) => Some(ptr.syntax_node_ptr()), - LocalOrigin::Expanded(ptr) => Some(ptr.clone()), + HirOrigin::Raw(ptr) => Some(ptr.syntax_node_ptr()), + HirOrigin::Expanded(ptr) => Some(ptr.clone()), _ => None, } } @@ -99,7 +141,7 @@ where } } -impl Default for LocalOrigin +impl Default for HirOrigin where T: AstNode, { @@ -134,12 +176,6 @@ impl AugAssignDesugared { } } -/// The trait provides a way to extract [`Span`](common::diagnostics::Span) from -/// types which don't have a span information directly, but can be resolved into -/// a span lazily. -pub trait LazySpan { - fn resolve(&self, db: &dyn SpannedHirDb) -> Span; -} - use transition::define_lazy_span_node; + define_lazy_span_node!(LazySpanAtom); diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index 67d07d17bf..48e54bcbf5 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -3,13 +3,12 @@ use parser::{ast, SyntaxNode}; use crate::{ hir_def::{Body, PatId}, - parse_file, span::path::LazyPathSpan, + SpannedHirDb, }; use super::{ - db::SpannedHirDb, - define_lazy_span_node, + body_ast, body_source_map, define_lazy_span_node, transition::{ChainRoot, SpanTransitionChain}, }; @@ -82,15 +81,13 @@ struct PatRoot { impl ChainRoot for PatRoot { fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { - let body_ast = db.body_ast(self.body); - let file = body_ast.file; - let source_map = db.body_source_map(self.body); + let source_map = body_source_map(db, self.body); let pat_source = source_map.pat_map.node_to_source(self.pat); let ptr = pat_source .syntax_ptr() - .unwrap_or_else(|| body_ast.syntax_ptr().unwrap()); + .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let (file, root_node) = self.body.top_mod(db.upcast()).root(db); let node = ptr.to_node(&root_node); (file, node) } diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index bbb717568a..b435fb9809 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -3,13 +3,12 @@ use parser::{ast, SyntaxNode}; use crate::{ hir_def::{Body, StmtId}, - parse_file, span::types::LazyTypeSpan, + SpannedHirDb, }; use super::{ - db::SpannedHirDb, - define_lazy_span_node, + body_ast, body_source_map, define_lazy_span_node, transition::{ChainRoot, SpanTransitionChain}, }; @@ -41,15 +40,13 @@ struct StmtRoot { impl ChainRoot for StmtRoot { fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { - let body_ast = db.body_ast(self.body); - let file = body_ast.file; - let source_map = db.body_source_map(self.body); - let pat_source = source_map.stmt_map.node_to_source(self.stmt); - let ptr = pat_source + let source_map = body_source_map(db, self.body); + let stmt_source = source_map.stmt_map.node_to_source(self.stmt); + let ptr = stmt_source .syntax_ptr() - .unwrap_or_else(|| body_ast.syntax_ptr().unwrap()); + .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); + let (file, root_node) = self.body.top_mod(db.upcast()).root(db); let node = ptr.to_node(&root_node); (file, node) } diff --git 
a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index e4153abd41..0a1a644218 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use common::{diagnostics::Span, InputFile}; -use parser::{syntax_node::NodeOrToken, SyntaxNode}; +use parser::{ast::prelude::*, syntax_node::NodeOrToken, SyntaxNode}; use smallvec::SmallVec; use crate::{ @@ -9,10 +9,13 @@ use crate::{ Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, TypeAlias, Use, }, - parse_file, + lower::top_mod_ast, }; -use super::{db::SpannedHirDb, LazySpan}; +use super::{ + body_ast, const_ast, contract_ast, enum_ast, extern_func_ast, func_ast, impl_ast, + impl_trait_ast, mod_ast, struct_ast, trait_ast, type_alias_ast, use_ast, LazySpan, +}; type TransitionFn = Arc Option>; @@ -39,7 +42,7 @@ impl SpanTransitionChain { } impl LazySpan for SpanTransitionChain { - fn resolve(&self, db: &dyn SpannedHirDb) -> Span { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span { let (file, mut node) = self.root.root(db); for transition in &self.chain { @@ -59,26 +62,33 @@ impl LazySpan for SpanTransitionChain { } pub(super) trait ChainRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode); + fn root(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode); +} + +impl ChainRoot for TopLevelMod { + fn root(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { + let file = self.file(db.upcast()); + let ast = top_mod_ast(db.upcast(), *self); + (file, ast.syntax().clone()) + } } macro_rules! impl_chain_root { ($(($ty:ty, $fn:ident),)*) => { $( impl ChainRoot for $ty { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { - let ast = db.$fn(*self); - let file = ast.file; + fn root(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { + let ast = $fn(db, *self); + let (file, root) = self.top_mod(db.upcast()).root(db); let ptr = ast.syntax_ptr().unwrap(); - let root_node = SyntaxNode::new_root(parse_file(db.upcast(), file)); - let node = ptr.to_node(&root_node); + let node = ptr.to_node(&root); (file, node) } })* }; } + impl_chain_root! { - (TopLevelMod, toplevel_ast), (Mod, mod_ast), (Func, func_ast), (ExternFunc, extern_func_ast), @@ -160,7 +170,7 @@ macro_rules! define_lazy_span_node { impl crate::span::LazySpan for $name { - fn resolve(&self, db: &dyn crate::span::SpannedHirDb) -> common::diagnostics::Span { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> common::diagnostics::Span { self.0.resolve(db) } } From 4eb7f03612dbbda8838f0f3927c3d463ee0be577 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 9 Apr 2023 12:02:21 +0200 Subject: [PATCH 136/678] Add tests for `ModuleTree` --- crates/common2/src/input.rs | 17 ++++- crates/hir/src/hir_def/item.rs | 10 +-- crates/hir/src/hir_def/item_tree.rs | 2 +- crates/hir/src/hir_def/module_tree.rs | 106 +++++++++++++++++++++----- crates/hir/src/lib.rs | 29 +++---- crates/hir/src/lower/mod.rs | 17 +++-- crates/hir/src/span/item.rs | 97 ++++++++++++----------- 7 files changed, 188 insertions(+), 90 deletions(-) diff --git a/crates/common2/src/input.rs b/crates/common2/src/input.rs index 2839b58f28..65258da7e3 100644 --- a/crates/common2/src/input.rs +++ b/crates/common2/src/input.rs @@ -27,6 +27,7 @@ pub struct InputIngot { /// A list of files which the current ingot contains. 
#[return_ref] + #[set(__set_files_impl)] pub files: BTreeSet, #[set(__set_root_file_impl)] @@ -35,7 +36,7 @@ pub struct InputIngot { } impl InputIngot { pub fn new( - db: &mut dyn InputDb, + db: &dyn InputDb, path: &str, kind: IngotKind, version: Version, @@ -60,6 +61,12 @@ impl InputIngot { self.__set_root_file_impl(db).to(Some(file)); } + /// Set the list of files which the ingot contains. + /// All files must bee set before the ingot is used. + pub fn set_files(self, db: &mut dyn InputDb, files: BTreeSet) { + self.__set_files_impl(db).to(files); + } + /// Returns the root file of the ingot. /// Panics if the root file is not set. pub fn root_file(&self, db: &dyn InputDb) -> InputFile { @@ -109,6 +116,14 @@ pub struct IngotDependency { /// An ingot which the current ingot depends on. pub ingot: InputIngot, } +impl IngotDependency { + pub fn new(name: &str, ingot: InputIngot) -> Self { + Self { + name: SmolStr::new(name), + ingot, + } + } +} impl PartialOrd for IngotDependency { fn partial_cmp(&self, other: &Self) -> Option { diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 036538efe2..b68750854c 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -21,8 +21,8 @@ use crate::{ }; use super::{ - ingot_module_tree_impl, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, - IngotModuleTree, ItemTree, Partial, TypeId, WhereClauseId, + module_tree_impl, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, ItemTree, + ModuleTree, Partial, TypeId, WhereClauseId, }; #[derive( @@ -71,11 +71,11 @@ impl TopLevelMod { } pub fn module_item_tree(self, db: &dyn HirDb) -> &ItemTree { - lower::module_item_tree_impl(db, self) + lower::item_tree_impl(db, self) } - pub fn ingot_module_tree(self, db: &dyn HirDb) -> &IngotModuleTree { - ingot_module_tree_impl(db, self.ingot(db)) + pub fn ingot_module_tree(self, db: &dyn HirDb) -> &ModuleTree { + module_tree_impl(db, self.ingot(db)) } } diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs index ca0c675d01..2b72cf5193 100644 --- a/crates/hir/src/hir_def/item_tree.rs +++ b/crates/hir/src/hir_def/item_tree.rs @@ -65,7 +65,7 @@ mod tests { } "#; - let (_, item_tree) = db.parse_source(text); + let item_tree = db.parse_source(text); let top_mod = item_tree.top_mod; assert_eq!(item_tree.dfs().count(), 8); diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 187e9fc4e6..821aba482d 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -55,7 +55,7 @@ use super::{IdentId, TopLevelMod}; /// As a result, `baz` is represented as a "floating" node. /// In this case, the tree is actually a forest. But we don't need to care about it. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct IngotModuleTree { +pub struct ModuleTree { pub(crate) root: ModuleTreeNodeId, pub(crate) module_tree: PrimaryMap, pub(crate) mod_map: BTreeMap, @@ -63,9 +63,9 @@ pub struct IngotModuleTree { pub(crate) ingot: InputIngot, } -impl IngotModuleTree { +impl ModuleTree { /// Returns the tree node data of the given id. - pub fn tree_node_data(&self, id: ModuleTreeNodeId) -> &ModuleTreeNode { + pub fn node_data(&self, id: ModuleTreeNodeId) -> &ModuleTreeNode { &self.module_tree[id] } @@ -74,11 +74,20 @@ impl IngotModuleTree { self.mod_map[&top_mod] } + /// Returns the tree node data of the given top level module. 
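Alongside the module tree itself, this commit adds the `set_files` setter and an `IngotDependency::new` constructor, which is enough to assemble a multi-file ingot by hand. A sketch mirroring the `module_tree` test further below; `IngotKind::External`, the paths, and the helper name are assumptions made for illustration:

    use common::input::{IngotDependency, IngotKind, InputFile, InputIngot, Version};
    use crate::test_db::TestDb;

    #[test]
    fn setup_ingot_with_dependency() {
        let mut db = TestDb::default();

        // An ingot that the local one depends on (`External` kind is assumed).
        let dep = InputIngot::new(
            &db,
            "/foo/library",
            IngotKind::External,
            Version::new(0, 1, 0),
            [].into(),
        );

        // The local ingot declares its dependencies up front.
        let app = InputIngot::new(
            &db,
            "/foo/app",
            IngotKind::Local,
            Version::new(0, 0, 1),
            [IngotDependency::new("library", dep)].into(),
        );

        // Files are registered afterwards; all of them must be set before use.
        let root = InputFile::new(&db, app, "src/lib.fe".into(), "".into());
        let util = InputFile::new(&db, app, "src/util.fe".into(), "".into());
        app.set_root_file(&mut db, root);
        app.set_files(&mut db, [root, util].into());
    }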
+ pub fn tree_node_data(&self, top_mod: TopLevelMod) -> &ModuleTreeNode { + &self.module_tree[self.tree_node(top_mod)] + } + /// Returns the root of the tree, which corresponds to the ingot root file. pub fn root(&self) -> ModuleTreeNodeId { self.root } + pub fn root_data(&self) -> &ModuleTreeNode { + self.node_data(self.root) + } + /// Returns an iterator of all top level modules in this ingot. pub fn all_modules(&self) -> impl Iterator + '_ { self.mod_map.keys().copied() @@ -89,8 +98,8 @@ impl IngotModuleTree { /// top level modules. This function only depends on an ingot structure and /// external ingot dependency, and not depends on file contents. #[salsa::tracked(return_ref)] -pub fn ingot_module_tree_impl(db: &dyn HirDb, ingot: InputIngot) -> IngotModuleTree { - IngotModuleTreeBuilder::new(db, ingot).build() +pub fn module_tree_impl(db: &dyn HirDb, ingot: InputIngot) -> ModuleTree { + ModuleTreeBuilder::new(db, ingot).build() } /// A top level module that is one-to-one mapped to a file. @@ -103,7 +112,7 @@ pub struct ModuleTreeNode { /// 2. the module is a "floating" module. pub parent: Option, /// A list of child top level module. - pub children: BTreeMap>, + pub children: Vec, } impl ModuleTreeNode { @@ -111,10 +120,10 @@ impl ModuleTreeNode { Self { top_mod, parent: None, - children: BTreeMap::new(), + children: Vec::new(), } } - fn name(&self, db: &dyn HirDb) -> IdentId { + pub fn name(&self, db: &dyn HirDb) -> IdentId { self.top_mod.name(db) } } @@ -124,7 +133,7 @@ impl ModuleTreeNode { pub struct ModuleTreeNodeId(u32); entity_impl!(ModuleTreeNodeId); -struct IngotModuleTreeBuilder<'db> { +struct ModuleTreeBuilder<'db> { db: &'db dyn HirDb, ingot: InputIngot, module_tree: PrimaryMap, @@ -132,7 +141,7 @@ struct IngotModuleTreeBuilder<'db> { path_map: BTreeMap<&'db Utf8Path, ModuleTreeNodeId>, } -impl<'db> IngotModuleTreeBuilder<'db> { +impl<'db> ModuleTreeBuilder<'db> { fn new(db: &'db dyn HirDb, ingot: InputIngot) -> Self { Self { db, @@ -143,13 +152,13 @@ impl<'db> IngotModuleTreeBuilder<'db> { } } - fn build(mut self) -> IngotModuleTree { + fn build(mut self) -> ModuleTree { self.set_modules(); self.build_tree(); - let top_mod = map_file_to_mod_impl(self.db, self.ingot.root_file(self.db.upcast())); - let root = self.mod_map[&top_mod]; - IngotModuleTree { + let root_mod = map_file_to_mod_impl(self.db, self.ingot.root_file(self.db.upcast())); + let root = self.mod_map[&root_mod]; + ModuleTree { root, module_tree: self.module_tree, mod_map: self.mod_map, @@ -213,13 +222,70 @@ impl<'db> IngotModuleTreeBuilder<'db> { } fn add_branch(&mut self, parent: ModuleTreeNodeId, child: ModuleTreeNodeId) { - let child_name = self.module_tree[child].name(self.db); - self.module_tree[parent] - .children - .entry(child_name) - .or_default() - .push(child); + self.module_tree[parent].children.push(child); self.module_tree[child].parent = Some(parent); } } + +#[cfg(test)] +mod tests { + use common::input::{IngotKind, Version}; + + use super::*; + use crate::{lower, test_db::TestDb}; + + #[test] + fn module_tree() { + let mut db = TestDb::default(); + + let local_ingot = InputIngot::new( + &db, + "/foo/fargo", + IngotKind::Local, + Version::new(0, 0, 1), + [].into(), + ); + let local_root = InputFile::new(&db, local_ingot, "src/lib.fe".into(), "".into()); + let mod1 = InputFile::new(&db, local_ingot, "src/mod1.fe".into(), "".into()); + let mod2 = InputFile::new(&db, local_ingot, "src/mod2.fe".into(), "".into()); + let foo = InputFile::new(&db, local_ingot, "src/mod1/foo.fe".into(), "".into()); + 
let bar = InputFile::new(&db, local_ingot, "src/mod2/bar.fe".into(), "".into()); + let baz = InputFile::new(&db, local_ingot, "src/mod2/baz.fe".into(), "".into()); + let floating = InputFile::new(&db, local_ingot, "src/mod3/floating.fe".into(), "".into()); + local_ingot.set_root_file(&mut db, local_root); + local_ingot.set_files( + &mut db, + [local_root, mod1, mod2, foo, bar, baz, floating].into(), + ); + + let local_root_mod = lower::map_file_to_mod(&db, local_root); + let mod1_mod = lower::map_file_to_mod(&db, mod1); + let mod2_mod = lower::map_file_to_mod(&db, mod2); + let foo_mod = lower::map_file_to_mod(&db, foo); + let bar_mod = lower::map_file_to_mod(&db, bar); + let baz_mod = lower::map_file_to_mod(&db, baz); + + let local_tree = lower::module_tree(&db, local_ingot); + let root_node = local_tree.root_data(); + assert_eq!(root_node.top_mod, local_root_mod); + assert_eq!(root_node.children.len(), 2); + + for &child in &root_node.children { + if child == local_tree.tree_node(mod1_mod) { + let child = local_tree.node_data(child); + assert_eq!(child.parent, Some(local_tree.root())); + assert_eq!(child.children.len(), 1); + assert_eq!(child.children[0], local_tree.tree_node(foo_mod)); + } else if child == local_tree.tree_node(mod2_mod) { + let child = local_tree.node_data(child); + assert_eq!(child.parent, Some(local_tree.root())); + assert_eq!(child.children.len(), 2); + assert_eq!(child.children[0], local_tree.tree_node(bar_mod)); + assert_eq!(child.children[1], local_tree.tree_node(baz_mod)); + } else { + panic!("unexpected child") + } + } + } +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index e8c6a464e2..f1ff6858a5 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,9 +1,9 @@ use common::{InputDb, Upcast}; -use hir_def::ingot_module_tree_impl; +use hir_def::module_tree_impl; pub use lower::parse::ParseDiagnostic; use lower::{ - map_file_to_mod_impl, module_item_tree_impl, + item_tree_impl, map_file_to_mod_impl, parse::{parse_file_impl, ParseDiagnosticAccumulator}, }; @@ -49,8 +49,8 @@ pub struct Jar( /// Private tracked functions. These are not part of the public API, and /// thus, can't be accessed from outside of the crate without implementing /// [`LowerHirDb`] marker trait. - ingot_module_tree_impl, - module_item_tree_impl, + module_tree_impl, + item_tree_impl, map_file_to_mod_impl, parse_file_impl, ); @@ -89,7 +89,7 @@ mod test_db { use crate::{ hir_def::{ItemKind, ItemTree, TopLevelMod}, - lower::{map_file_to_mod, module_item_tree}, + lower::{item_tree, map_file_to_mod}, span::LazySpan, LowerHirDb, SpannedHirDb, }; @@ -125,23 +125,24 @@ mod test_db { } impl TestDb { - pub fn parse_source(&mut self, text: &str) -> (TopLevelMod, &ItemTree) { + pub fn parse_source(&mut self, text: &str) -> &ItemTree { let file = self.standalone_file(text); let top_mod = map_file_to_mod(self, file); - (top_mod, module_item_tree(self, top_mod)) + item_tree(self, top_mod) } /// Parses the given source text and returns the first inner item in the /// file. 
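The test above pins down the parent/child layout; walking the same structure generically only needs the `ModuleTree` accessors. A small sketch, assuming `IdentId::data(db)` returns the interned string (as `IdentId::is_self` suggests) and that the types are reachable via `crate::hir_def`:

    use crate::{
        hir_def::{ModuleTree, ModuleTreeNodeId},
        HirDb,
    };

    // Depth-first dump of a module tree, e.g. the one returned by
    // `lower::module_tree(&db, local_ingot)` in the test above.
    fn dump_module_tree(db: &dyn HirDb, tree: &ModuleTree) {
        fn go(db: &dyn HirDb, tree: &ModuleTree, node: ModuleTreeNodeId, depth: usize) {
            let data = tree.node_data(node);
            println!("{}{}", "  ".repeat(depth), data.name(db).data(db));
            for &child in &data.children {
                go(db, tree, child, depth + 1);
            }
        }
        go(db, tree, tree.root(), 0);
    }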
- pub fn parse_source_to_first_item(&mut self, text: &str) -> (TopLevelMod, T) + pub fn parse_source_to_first_item(&mut self, text: &str) -> T where ItemKind: TryInto, { - let (top_mod, tree) = self.parse_source(text); - ( - top_mod, - tree.children(top_mod).next().unwrap().try_into().unwrap(), - ) + let tree = self.parse_source(text); + tree.children(tree.top_mod) + .next() + .unwrap() + .try_into() + .unwrap() } pub fn text_at(&self, top_mod: TopLevelMod, span: &impl LazySpan) -> &str { @@ -158,7 +159,7 @@ mod test_db { let ingot = InputIngot::new(self, path, kind, version, BTreeSet::default()); let file = InputFile::new(self, ingot, "test_file.fe".into(), text.to_string()); ingot.set_root_file(self, file); - ingot.set_files(self).to([file].into()); + ingot.set_files(self, [file].into()); file } } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index b8846d52c7..58b030434a 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1,6 +1,6 @@ use std::collections::{BTreeMap, BTreeSet}; -use common::InputFile; +use common::{InputFile, InputIngot}; use num_bigint::BigUint; use num_traits::Num; use parser::{ @@ -10,8 +10,8 @@ use parser::{ use crate::{ hir_def::{ - IdentId, IntegerId, ItemKind, ItemTree, ItemTreeNode, LitKind, Partial, StringId, - TopLevelMod, TrackedItemId, + module_tree_impl, IdentId, IntegerId, ItemKind, ItemTree, ItemTreeNode, LitKind, + ModuleTree, Partial, StringId, TopLevelMod, TrackedItemId, }, HirDb, LowerHirDb, ParseDiagnostic, }; @@ -43,8 +43,8 @@ pub fn map_file_to_mod(db: &dyn LowerHirDb, file: InputFile) -> TopLevelMod { } /// Returns the item tree of the given top-level module. -pub fn module_item_tree(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ItemTree { - module_item_tree_impl(db.upcast(), top_mod) +pub fn item_tree(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ItemTree { + item_tree_impl(db.upcast(), top_mod) } /// Returns the root node of the given top-level module. @@ -65,6 +65,11 @@ pub fn parse_file(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> GreenNode { parse_file_impl(db.upcast(), top_mod) } +/// Returns the ingot module tree of the given ingot. 
+pub fn module_tree(db: &dyn LowerHirDb, ingot: InputIngot) -> &ModuleTree { + module_tree_impl(db.upcast(), ingot) +} + #[salsa::tracked] pub(crate) fn map_file_to_mod_impl(db: &dyn HirDb, file: InputFile) -> TopLevelMod { let path = file.path(db.upcast()); @@ -75,7 +80,7 @@ pub(crate) fn map_file_to_mod_impl(db: &dyn HirDb, file: InputFile) -> TopLevelM } #[salsa::tracked(return_ref)] -pub(crate) fn module_item_tree_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> ItemTree { +pub(crate) fn item_tree_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> ItemTree { let ast = top_mod_ast(db, top_mod); let mut ctxt = FileLowerCtxt::new(db, top_mod); diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 5b846eb1c7..60e82fc1b7 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -238,6 +238,8 @@ define_lazy_span_node!( #[cfg(test)] mod tests { + use common::Upcast; + use crate::{ hir_def::{Enum, Func, Mod, Struct, TypeAlias, Use}, test_db::TestDb, @@ -257,9 +259,9 @@ mod tests { } "#; - let (file, item_tree) = db.parse_source(text); + let item_tree = db.parse_source(text); let top_mod = item_tree.top_mod; - assert_eq!(text, db.text_at(file, &top_mod.lazy_span())); + assert_eq!(text, db.text_at(top_mod, &top_mod.lazy_span())); } #[test] @@ -273,15 +275,16 @@ mod tests { } "#; - let (file, mod_) = db.parse_source_to_first_item::(text); + let mod_ = db.parse_source_to_first_item::(text); + let top_mod = mod_.top_mod(db.upcast()); let mod_span = mod_.lazy_span(); assert_eq!( r#"mod foo { fn bar() {} }"#, - db.text_at(file, &mod_span) + db.text_at(top_mod, &mod_span) ); - assert_eq!("foo", db.text_at(file, &mod_span.name())); + assert_eq!("foo", db.text_at(top_mod, &mod_span.name())); } #[test] @@ -293,44 +296,45 @@ mod tests { where U: Add "#; - let (file, fn_) = db.parse_source_to_first_item::(text); + let fn_ = db.parse_source_to_first_item::(text); + let top_mod = fn_.top_mod(db.upcast()); let fn_span = fn_.lazy_span(); - assert_eq!("my_func", db.text_at(file, &fn_span.name())); + assert_eq!("my_func", db.text_at(top_mod, &fn_span.name())); let generic_params = fn_span.generic_params(); let type_generic_param_1 = generic_params.param(0).into_type_param(); let type_generic_param_2 = generic_params.param(1).into_type_param(); let const_generic_param = generic_params.param(2).into_const_param(); - assert_eq!("T", db.text_at(file, &type_generic_param_1.name())); + assert_eq!("T", db.text_at(top_mod, &type_generic_param_1.name())); assert_eq!( "Debug", - db.text_at(file, &type_generic_param_1.bounds().bound(0)) + db.text_at(top_mod, &type_generic_param_1.bounds().bound(0)) ); - assert_eq!("U", db.text_at(file, &type_generic_param_2.name())); + assert_eq!("U", db.text_at(top_mod, &type_generic_param_2.name())); assert_eq!( "const", - db.text_at(file, &const_generic_param.const_token()) + db.text_at(top_mod, &const_generic_param.const_token()) ); - assert_eq!("LEN", db.text_at(file, &const_generic_param.name())); - assert_eq!("usize", db.text_at(file, &const_generic_param.ty())); + assert_eq!("LEN", db.text_at(top_mod, &const_generic_param.name())); + assert_eq!("usize", db.text_at(top_mod, &const_generic_param.ty())); let params = fn_span.params(); let param_1 = params.param(0); let param_2 = params.param(1); - assert_eq!("x", db.text_at(file, ¶m_1.name())); - assert_eq!("u32", db.text_at(file, ¶m_1.ty())); - assert_eq!("label", db.text_at(file, ¶m_2.label())); - assert_eq!("foo::Bar<2>", db.text_at(file, ¶m_2.ty())); + assert_eq!("x", db.text_at(top_mod, 
¶m_1.name())); + assert_eq!("u32", db.text_at(top_mod, ¶m_1.ty())); + assert_eq!("label", db.text_at(top_mod, ¶m_2.label())); + assert_eq!("foo::Bar<2>", db.text_at(top_mod, ¶m_2.ty())); - assert_eq!("FooResult", db.text_at(file, &fn_span.ret_ty())); + assert_eq!("FooResult", db.text_at(top_mod, &fn_span.ret_ty())); let where_clause = fn_span.where_clause(); let where_predicate = where_clause.predicate(0); - assert_eq!("where", db.text_at(file, &where_clause.where_token())); - assert_eq!("U", db.text_at(file, &where_predicate.ty())); - assert_eq!(": Add", db.text_at(file, &where_predicate.bounds())); + assert_eq!("where", db.text_at(top_mod, &where_clause.where_token())); + assert_eq!("U", db.text_at(top_mod, &where_predicate.ty())); + assert_eq!(": Add", db.text_at(top_mod, &where_predicate.bounds())); } #[test] @@ -343,20 +347,21 @@ mod tests { pub y: foo::Bar<2> }"#; - let (file, struct_) = db.parse_source_to_first_item::(text); + let struct_ = db.parse_source_to_first_item::(text); + let top_mod = struct_.top_mod(db.upcast()); let struct_span = struct_.lazy_span(); - assert_eq!("Foo", db.text_at(file, &struct_span.name())); + assert_eq!("Foo", db.text_at(top_mod, &struct_span.name())); let fields = struct_span.fields(); let field_1 = fields.field(0); let field_2 = fields.field(1); - assert_eq!("x", db.text_at(file, &field_1.name())); - assert_eq!("u32", db.text_at(file, &field_1.ty())); + assert_eq!("x", db.text_at(top_mod, &field_1.name())); + assert_eq!("u32", db.text_at(top_mod, &field_1.ty())); - assert_eq!("pub", db.text_at(file, &field_2.pub_span())); - assert_eq!("y", db.text_at(file, &field_2.name())); - assert_eq!("foo::Bar<2>", db.text_at(file, &field_2.ty())); + assert_eq!("pub", db.text_at(top_mod, &field_2.pub_span())); + assert_eq!("y", db.text_at(top_mod, &field_2.name())); + assert_eq!("foo::Bar<2>", db.text_at(top_mod, &field_2.ty())); } #[test] @@ -369,17 +374,18 @@ mod tests { Baz(u32, i32) }"#; - let (file, enum_) = db.parse_source_to_first_item::(text); + let enum_ = db.parse_source_to_first_item::(text); + let top_mod = enum_.top_mod(db.upcast()); let enum_span = enum_.lazy_span(); - assert_eq!("Foo", db.text_at(file, &enum_span.name())); + assert_eq!("Foo", db.text_at(top_mod, &enum_span.name())); let variants = enum_span.variants(); let variant_1 = variants.variant(0); let variant_2 = variants.variant(1); - assert_eq!("Bar", db.text_at(file, &variant_1.name())); - assert_eq!("Baz", db.text_at(file, &variant_2.name())); - assert_eq!("(u32, i32)", db.text_at(file, &variant_2.ty())); + assert_eq!("Bar", db.text_at(top_mod, &variant_1.name())); + assert_eq!("Baz", db.text_at(top_mod, &variant_2.name())); + assert_eq!("(u32, i32)", db.text_at(top_mod, &variant_2.ty())); } #[test] @@ -390,11 +396,12 @@ mod tests { pub type Foo = u32 "#; - let (file, type_alias) = db.parse_source_to_first_item::(text); + let type_alias = db.parse_source_to_first_item::(text); + let top_mod = type_alias.top_mod(db.upcast()); let type_alias_span = type_alias.lazy_span(); - assert_eq!("Foo", db.text_at(file, &type_alias_span.alias())); - assert_eq!("u32", db.text_at(file, &type_alias_span.ty())); - assert_eq!("pub", db.text_at(file, &type_alias_span.modifier())); + assert_eq!("Foo", db.text_at(top_mod, &type_alias_span.alias())); + assert_eq!("u32", db.text_at(top_mod, &type_alias_span.ty())); + assert_eq!("pub", db.text_at(top_mod, &type_alias_span.modifier())); } #[test] @@ -405,17 +412,21 @@ mod tests { use foo::bar::{baz::*, qux as Alias} "#; - let (file, use_) = 
db.parse_source_to_first_item::(text); + let use_ = db.parse_source_to_first_item::(text); + let top_mod = use_.top_mod(db.upcast()); let use_tree = use_.lazy_span().use_tree(); - assert_eq!("foo::bar", db.text_at(file, &use_tree.path())); + assert_eq!("foo::bar", db.text_at(top_mod, &use_tree.path())); let use_tree_list = use_tree.subtree(); let use_tree_1 = use_tree_list.tree(0); let use_tree_2 = use_tree_list.tree(1); - assert_eq!("baz::*", db.text_at(file, &use_tree_1.path())); - assert_eq!("qux", db.text_at(file, &use_tree_2.path())); - assert_eq!("as Alias", db.text_at(file, &use_tree_2.alias())); - assert_eq!("Alias", db.text_at(file, &use_tree_2.alias().alias_name())); + assert_eq!("baz::*", db.text_at(top_mod, &use_tree_1.path())); + assert_eq!("qux", db.text_at(top_mod, &use_tree_2.path())); + assert_eq!("as Alias", db.text_at(top_mod, &use_tree_2.alias())); + assert_eq!( + "Alias", + db.text_at(top_mod, &use_tree_2.alias().alias_name()) + ); } } From 5986fb7609712334d62c70ee71740ad4cbe011a5 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 19 Apr 2023 14:30:05 +0200 Subject: [PATCH 137/678] Make types implementing `LazySpan` comparabale --- crates/hir/src/span/attr.rs | 17 +++- crates/hir/src/span/expr.rs | 12 +-- crates/hir/src/span/pat.rs | 12 +-- crates/hir/src/span/stmt.rs | 12 +-- crates/hir/src/span/transition.rs | 138 +++++++++++++++++++++++------- 5 files changed, 139 insertions(+), 52 deletions(-) diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index b0fbdfe24e..dc0ec2e4a6 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -11,12 +11,25 @@ define_lazy_span_node!( ); impl LazyAttrListSpan { pub fn normal_attr(&self, idx: usize) -> LazyNormalAttrSpan { - let transition = move |node: parser::SyntaxNode| { + fn f( + node: parser::SyntaxNode, + arg: crate::span::transition::LazyArg, + ) -> Option { + let idx = match arg { + crate::span::transition::LazyArg::Idx(idx) => idx, + _ => unreachable!(), + }; ast::AttrList::cast(node) .and_then(|f| f.normal_attrs().nth(idx)) .map(|n| n.syntax().clone().into()) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::Idx(idx), }; - LazyNormalAttrSpan(self.0.push_transition(std::sync::Arc::new(transition))) + + LazyNormalAttrSpan(self.0.push_transition(lazy_transition)) } } diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index 32ca52910c..e4e3f98ba8 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -9,7 +9,7 @@ use crate::{ use super::{ body_ast, body_source_map, define_lazy_span_node, - transition::{ChainRoot, SpanTransitionChain}, + transition::{ChainInitiator, SpanTransitionChain}, }; define_lazy_span_node!(LazyExprSpan, ast::Expr,); @@ -162,21 +162,21 @@ define_lazy_span_node!( } ); -#[derive(Clone, Copy)] -struct ExprRoot { +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct ExprRoot { expr: ExprId, body: Body, } -impl ChainRoot for ExprRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { +impl ChainInitiator for ExprRoot { + fn init(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { let source_map = body_source_map(db, self.body); let expr_source = source_map.expr_map.node_to_source(self.expr); let ptr = expr_source .syntax_ptr() .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - let (file, root_node) = self.body.top_mod(db.upcast()).root(db); + let (file, root_node) = 
self.body.top_mod(db.upcast()).init(db); let node = ptr.to_node(&root_node); (file, node) } diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index 48e54bcbf5..b613032486 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -9,7 +9,7 @@ use crate::{ use super::{ body_ast, body_source_map, define_lazy_span_node, - transition::{ChainRoot, SpanTransitionChain}, + transition::{ChainInitiator, SpanTransitionChain}, }; define_lazy_span_node!(LazyPatSpan, ast::Pat,); @@ -73,21 +73,21 @@ define_lazy_span_node!( } ); -#[derive(Clone, Copy)] -struct PatRoot { +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct PatRoot { pat: PatId, body: Body, } -impl ChainRoot for PatRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { +impl ChainInitiator for PatRoot { + fn init(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { let source_map = body_source_map(db, self.body); let pat_source = source_map.pat_map.node_to_source(self.pat); let ptr = pat_source .syntax_ptr() .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - let (file, root_node) = self.body.top_mod(db.upcast()).root(db); + let (file, root_node) = self.body.top_mod(db.upcast()).init(db); let node = ptr.to_node(&root_node); (file, node) } diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index b435fb9809..23956cf108 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -9,7 +9,7 @@ use crate::{ use super::{ body_ast, body_source_map, define_lazy_span_node, - transition::{ChainRoot, SpanTransitionChain}, + transition::{ChainInitiator, SpanTransitionChain}, }; define_lazy_span_node!(LazyStmtSpan, ast::Stmt,); @@ -32,21 +32,21 @@ define_lazy_span_node!( } ); -#[derive(Clone, Copy)] -struct StmtRoot { +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct StmtRoot { stmt: StmtId, body: Body, } -impl ChainRoot for StmtRoot { - fn root(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { +impl ChainInitiator for StmtRoot { + fn init(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { let source_map = body_source_map(db, self.body); let stmt_source = source_map.stmt_map.node_to_source(self.stmt); let ptr = stmt_source .syntax_ptr() .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - let (file, root_node) = self.body.top_mod(db.upcast()).root(db); + let (file, root_node) = self.body.top_mod(db.upcast()).init(db); let node = ptr.to_node(&root_node); (file, node) } diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 0a1a644218..6f6c6b7496 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -1,8 +1,5 @@ -use std::sync::Arc; - use common::{diagnostics::Span, InputFile}; use parser::{ast::prelude::*, syntax_node::NodeOrToken, SyntaxNode}; -use smallvec::SmallVec; use crate::{ hir_def::{ @@ -13,28 +10,83 @@ use crate::{ }; use super::{ - body_ast, const_ast, contract_ast, enum_ast, extern_func_ast, func_ast, impl_ast, - impl_trait_ast, mod_ast, struct_ast, trait_ast, type_alias_ast, use_ast, LazySpan, + body_ast, const_ast, contract_ast, enum_ast, expr::ExprRoot, extern_func_ast, func_ast, + impl_ast, impl_trait_ast, mod_ast, pat::PatRoot, stmt::StmtRoot, struct_ast, trait_ast, + type_alias_ast, use_ast, LazySpan, }; -type TransitionFn = Arc Option>; +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct LazyTransitionFn { + pub(super) f: fn(SyntaxNode, LazyArg) -> Option, + 
pub(super) arg: LazyArg, +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) enum LazyArg { + Idx(usize), + None, +} -#[derive(Clone)] +#[derive(Clone, PartialEq, Eq, Hash, Debug)] pub(crate) struct SpanTransitionChain { - root: Arc, - chain: SmallVec<[TransitionFn; 4]>, + root: ChainRoot, + chain: Vec, +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, derive_more::From)] +pub(crate) enum ChainRoot { + TopMod(TopLevelMod), + Mod(Mod), + Func(Func), + ExternFunc(ExternFunc), + Struct(Struct), + Contract(Contract), + Enum(Enum), + TypeAlias(TypeAlias), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), + Const(Const), + Use(Use), + Body(Body), + Stmt(StmtRoot), + Expr(ExprRoot), + Pat(PatRoot), +} + +impl ChainInitiator for ChainRoot { + fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { + match self { + Self::TopMod(top_mod) => top_mod.init(db), + Self::Mod(mod_) => mod_.init(db), + Self::Func(func) => func.init(db), + Self::ExternFunc(extern_func) => extern_func.init(db), + Self::Struct(struct_) => struct_.init(db), + Self::Contract(contract) => contract.init(db), + Self::Enum(enum_) => enum_.init(db), + Self::TypeAlias(type_alias) => type_alias.init(db), + Self::Impl(impl_) => impl_.init(db), + Self::Trait(trait_) => trait_.init(db), + Self::ImplTrait(impl_trait) => impl_trait.init(db), + Self::Const(const_) => const_.init(db), + Self::Use(use_) => use_.init(db), + Self::Body(body) => body.init(db), + Self::Stmt(stmt) => stmt.init(db), + Self::Expr(expr) => expr.init(db), + Self::Pat(pat) => pat.init(db), + } + } } impl SpanTransitionChain { - pub(super) fn new(root: T) -> Self { - let root = Arc::new(root); + pub(super) fn new(root: impl Into) -> Self { Self { - root, - chain: SmallVec::new(), + root: root.into(), + chain: Vec::new(), } } - pub(super) fn push_transition(&self, transition: TransitionFn) -> Self { + pub(super) fn push_transition(&self, transition: LazyTransitionFn) -> Self { let mut new_state = self.clone(); new_state.chain.push(transition); new_state @@ -43,10 +95,10 @@ impl SpanTransitionChain { impl LazySpan for SpanTransitionChain { fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span { - let (file, mut node) = self.root.root(db); + let (file, mut node) = self.root.init(db); - for transition in &self.chain { - node = match transition(node.clone()) { + for LazyTransitionFn { f, arg } in &self.chain { + node = match f(node.clone(), *arg) { Some(NodeOrToken::Node(node)) => node, Some(NodeOrToken::Token(token)) => { return Span::new(file, token.text_range()); @@ -61,12 +113,12 @@ impl LazySpan for SpanTransitionChain { } } -pub(super) trait ChainRoot { - fn root(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode); +pub trait ChainInitiator { + fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode); } -impl ChainRoot for TopLevelMod { - fn root(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { +impl ChainInitiator for TopLevelMod { + fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { let file = self.file(db.upcast()); let ast = top_mod_ast(db.upcast(), *self); (file, ast.syntax().clone()) @@ -76,10 +128,10 @@ impl ChainRoot for TopLevelMod { macro_rules! 
impl_chain_root { ($(($ty:ty, $fn:ident),)*) => { $( - impl ChainRoot for $ty { - fn root(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { + impl ChainInitiator for $ty { + fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { let ast = $fn(db, *self); - let (file, root) = self.top_mod(db.upcast()).root(db); + let (file, root) = self.top_mod(db.upcast()).init(db); let ptr = ast.syntax_ptr().unwrap(); let node = ptr.to_node(&root); (file, node) @@ -118,7 +170,7 @@ macro_rules! define_lazy_span_node { )? )? ) => { - #[derive(Clone)] + #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct $name(pub(crate) crate::span::transition::SpanTransitionChain); $( $( @@ -131,13 +183,18 @@ macro_rules! define_lazy_span_node { $($( pub fn $name_token(&self) -> crate::span::LazySpanAtom { use parser::ast::prelude::*; - let transition = |node: parser::SyntaxNode| { + fn f(node: parser::SyntaxNode, _: crate::span::transition::LazyArg) -> Option { <$sk_node as AstNode>::cast(node) .and_then(|n| n.$getter_token()) .map(|n| n.into()) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::None, }; crate::span::LazySpanAtom( - self.0.push_transition(std::sync::Arc::new(transition)) + self.0.push_transition(lazy_transition) ) } )*)? @@ -145,12 +202,18 @@ macro_rules! define_lazy_span_node { $($( pub fn $name_node(&self) -> $result { use parser::ast::prelude::*; - let transition = |node: parser::SyntaxNode| { + + fn f(node: parser::SyntaxNode, _: crate::span::transition::LazyArg) -> Option { <$sk_node as AstNode>::cast(node) - .and_then(|f| f.$getter_node()) + .and_then(|n| n.$getter_node()) .map(|n| n.syntax().clone().into()) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::None, }; - $result(self.0.push_transition(std::sync::Arc::new(transition))) + $result(self.0.push_transition(lazy_transition)) } )*)? @@ -158,12 +221,23 @@ macro_rules! define_lazy_span_node { pub fn $name_iter(&self, idx: usize) -> $result_iter { use parser::ast::prelude::*; - let transition = move |node: parser::SyntaxNode| { + fn f(node: parser::SyntaxNode, arg: crate::span::transition::LazyArg) -> Option { + let idx = match arg { + crate::span::transition::LazyArg::Idx(idx) => idx, + _ => unreachable!(), + }; + <$sk_node as AstNode>::cast(node) .and_then(|f| f.into_iter().nth(idx)) .map(|n| n.syntax().clone().into()) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::Idx(idx), }; - $result_iter(self.0.push_transition(std::sync::Arc::new(transition))) + + $result_iter(self.0.push_transition(lazy_transition)) } )*)? })?)? 
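The patch above replaces the boxed `Arc<dyn Fn(SyntaxNode) -> Option<NodeOrToken>>` transitions with a plain `fn` pointer paired with a `LazyArg` value, which is what lets `SpanTransitionChain` and the lazy span nodes derive `Clone`, `PartialEq`, `Eq`, and `Hash`. A minimal standalone sketch of that pattern follows; the names `Step`, `Arg`, and `Chain` are hypothetical and not taken from these crates.

// Sketch, assuming only std: function pointers are `Copy + Eq + Hash`,
// unlike boxed closures, so the whole chain can derive the traits that
// hash-based caching (e.g. salsa interning) needs.
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Arg {
    Idx(usize),
    None,
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Step {
    f: fn(String, Arg) -> String,
    arg: Arg,
}

#[derive(Clone, PartialEq, Eq, Hash, Debug, Default)]
struct Chain {
    steps: Vec<Step>,
}

impl Chain {
    // Analogous to `push_transition`: clone the chain and append one step.
    fn push(&self, step: Step) -> Self {
        let mut next = self.clone();
        next.steps.push(step);
        next
    }

    // Analogous to `resolve`: thread a root value through every step.
    fn resolve(&self, root: String) -> String {
        self.steps
            .iter()
            .fold(root, |node, step| (step.f)(node, step.arg))
    }
}

// The value that a closure would have captured travels in `Arg` instead.
fn nth_char(node: String, arg: Arg) -> String {
    match arg {
        Arg::Idx(i) => node.chars().nth(i).map(String::from).unwrap_or_default(),
        Arg::None => node,
    }
}

fn main() {
    let chain = Chain::default().push(Step { f: nth_char, arg: Arg::Idx(1) });
    assert_eq!(chain.resolve("fe".to_string()), "e");

    // Every piece is `Eq + Hash`, so chains can live in hash-based
    // structures; an `Arc<dyn Fn>` field would rule these derives out.
    let mut seen = HashSet::new();
    seen.insert(chain.clone());
    assert!(seen.contains(&chain));
}

Rust function pointers of this arity implement `Copy`, `Eq`, and `Hash` out of the box, which is why moving the captured index into an explicit argument enum is enough to make the chain comparable without any manual trait implementations.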
From 123e40409d709946dbcfc5a679c989f3e244e8b2 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 19 Apr 2023 16:03:23 +0200 Subject: [PATCH 138/678] Allow more precise span origin tracing --- crates/common2/src/diagnostics.rs | 26 ++++- crates/hir/src/hir_def/expr.rs | 8 ++ crates/hir/src/hir_def/pat.rs | 10 +- crates/hir/src/hir_def/stmt.rs | 10 +- crates/hir/src/lib.rs | 8 +- crates/hir/src/lower/parse.rs | 4 +- crates/hir/src/span/attr.rs | 15 +-- crates/hir/src/span/expr.rs | 20 ++-- crates/hir/src/span/item.rs | 12 +-- crates/hir/src/span/mod.rs | 8 -- crates/hir/src/span/pat.rs | 20 ++-- crates/hir/src/span/stmt.rs | 54 +++++++--- crates/hir/src/span/transition.rs | 170 +++++++++++++++++++++++------- 13 files changed, 257 insertions(+), 108 deletions(-) diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index bd9235b84d..3fed6dd8e0 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -52,11 +52,33 @@ pub struct SubDiagnostic { pub struct Span { pub file: InputFile, pub range: TextRange, + pub kind: SpanKind, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum SpanKind { + /// A node corresponding is originally written in the source code. + Original, + + /// A node corresponding to the span is generated by macro expansion. + Expanded, + + /// No span information was found. + /// This happens if analysis code tries to get a span for a node that is + /// generated in lowering phase. + /// + /// If span has this kind, it means there is a bug in the analysis code. + /// The reason not to panic is that LSP should continue working even if + /// there are bugs in the span generation(This also makes easier to identify + /// the cause of the bug) + /// + /// Range is always the first character of the file in this case. + NotFound, } impl Span { - pub fn new(file: InputFile, range: TextRange) -> Self { - Self { file, range } + pub fn new(file: InputFile, range: TextRange, kind: SpanKind) -> Self { + Self { file, range, kind } } } diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index 589b20edb3..0abc647779 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -1,5 +1,7 @@ use cranelift_entity::entity_impl; +use crate::span::expr::LazyExprSpan; + use super::{Body, GenericArgListId, IdentId, IntegerId, LitKind, Partial, PatId, PathId, StmtId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -44,6 +46,12 @@ pub enum Expr { pub struct ExprId(u32); entity_impl!(ExprId); +impl ExprId { + pub fn lazy_span(self, body: Body) -> LazyExprSpan { + LazyExprSpan::new(self, body) + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum FieldIndex { /// The field is indexed by its name. 
diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs index 18c5669a51..3b55fbd3bf 100644 --- a/crates/hir/src/hir_def/pat.rs +++ b/crates/hir/src/hir_def/pat.rs @@ -1,6 +1,8 @@ use cranelift_entity::entity_impl; -use super::{IdentId, LitKind, Partial, PathId}; +use crate::span::pat::LazyPatSpan; + +use super::{Body, IdentId, LitKind, Partial, PathId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Pat { @@ -18,6 +20,12 @@ pub enum Pat { pub struct PatId(u32); entity_impl!(PatId); +impl PatId { + pub fn lazy_span(self, body: Body) -> LazyPatSpan { + LazyPatSpan::new(self, body) + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RecordPatField { pub label: Partial, diff --git a/crates/hir/src/hir_def/stmt.rs b/crates/hir/src/hir_def/stmt.rs index e96cd18f92..707d2bdf3e 100644 --- a/crates/hir/src/hir_def/stmt.rs +++ b/crates/hir/src/hir_def/stmt.rs @@ -1,6 +1,8 @@ use cranelift_entity::entity_impl; -use super::{ExprId, PatId, TypeId}; +use crate::span::stmt::LazyStmtSpan; + +use super::{Body, ExprId, PatId, TypeId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Stmt { @@ -31,3 +33,9 @@ pub enum Stmt { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct StmtId(u32); entity_impl!(StmtId); + +impl StmtId { + pub fn lazy_span(self, body: Body) -> LazyStmtSpan { + LazyStmtSpan::new(self, body) + } +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index f1ff6858a5..9e87255b19 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -133,16 +133,12 @@ mod test_db { /// Parses the given source text and returns the first inner item in the /// file. - pub fn parse_source_to_first_item(&mut self, text: &str) -> T + pub fn expect_item(&mut self, text: &str) -> T where ItemKind: TryInto, { let tree = self.parse_source(text); - tree.children(tree.top_mod) - .next() - .unwrap() - .try_into() - .unwrap() + tree.dfs().find_map(|it| it.try_into().ok()).unwrap() } pub fn text_at(&self, top_mod: TopLevelMod, span: &impl LazySpan) -> &str { diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index 29f60cb7f5..93b43bb68a 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -1,5 +1,5 @@ use common::{ - diagnostics::{AnalysisPass, CompleteDiagnostic, GlobalErrorCode, Severity, Span}, + diagnostics::{AnalysisPass, CompleteDiagnostic, GlobalErrorCode, Severity, Span, SpanKind}, InputFile, }; use parser::GreenNode; @@ -38,7 +38,7 @@ impl DiagnosticVoucher for ParseDiagnostic { fn to_complete(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { let error_code = self.error_code(); - let span = Span::new(self.file, self.error.range); + let span = Span::new(self.file, self.error.range, SpanKind::Original); CompleteDiagnostic::new(Severity::Error, self.error.msg, span, vec![], error_code) } } diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index dc0ec2e4a6..6e78feeb4b 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -1,5 +1,7 @@ use parser::ast::{self, prelude::*}; +use crate::span::transition::ResolvedOrigin; + use super::define_lazy_span_node; define_lazy_span_node!( @@ -11,17 +13,16 @@ define_lazy_span_node!( ); impl LazyAttrListSpan { pub fn normal_attr(&self, idx: usize) -> LazyNormalAttrSpan { - fn f( - node: parser::SyntaxNode, - arg: crate::span::transition::LazyArg, - ) -> Option { + fn f(origin: ResolvedOrigin, arg: crate::span::transition::LazyArg) -> ResolvedOrigin { let idx = match arg { 
crate::span::transition::LazyArg::Idx(idx) => idx, _ => unreachable!(), }; - ast::AttrList::cast(node) - .and_then(|f| f.normal_attrs().nth(idx)) - .map(|n| n.syntax().clone().into()) + origin.map(|node| { + ast::AttrList::cast(node) + .and_then(|f| f.normal_attrs().nth(idx)) + .map(|n| n.syntax().clone().into()) + }) } let lazy_transition = crate::span::transition::LazyTransitionFn { diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index e4e3f98ba8..979e1c8ed2 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -1,5 +1,4 @@ -use common::InputFile; -use parser::{ast, SyntaxNode}; +use parser::ast; use crate::{ hir_def::{Body, ExprId}, @@ -8,8 +7,8 @@ use crate::{ }; use super::{ - body_ast, body_source_map, define_lazy_span_node, - transition::{ChainInitiator, SpanTransitionChain}, + body_source_map, define_lazy_span_node, + transition::{ChainInitiator, ResolvedOrigin, SpanTransitionChain}, }; define_lazy_span_node!(LazyExprSpan, ast::Expr,); @@ -169,15 +168,10 @@ pub(crate) struct ExprRoot { } impl ChainInitiator for ExprRoot { - fn init(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { + fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { let source_map = body_source_map(db, self.body); - let expr_source = source_map.expr_map.node_to_source(self.expr); - let ptr = expr_source - .syntax_ptr() - .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - - let (file, root_node) = self.body.top_mod(db.upcast()).init(db); - let node = ptr.to_node(&root_node); - (file, node) + let origin = source_map.expr_map.node_to_source(self.expr); + let top_mod = self.body.top_mod(db.upcast()); + ResolvedOrigin::resolve(db, top_mod, origin) } } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 60e82fc1b7..3e9acf9dd4 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -275,7 +275,7 @@ mod tests { } "#; - let mod_ = db.parse_source_to_first_item::(text); + let mod_ = db.expect_item::(text); let top_mod = mod_.top_mod(db.upcast()); let mod_span = mod_.lazy_span(); assert_eq!( @@ -296,7 +296,7 @@ mod tests { where U: Add "#; - let fn_ = db.parse_source_to_first_item::(text); + let fn_ = db.expect_item::(text); let top_mod = fn_.top_mod(db.upcast()); let fn_span = fn_.lazy_span(); assert_eq!("my_func", db.text_at(top_mod, &fn_span.name())); @@ -347,7 +347,7 @@ mod tests { pub y: foo::Bar<2> }"#; - let struct_ = db.parse_source_to_first_item::(text); + let struct_ = db.expect_item::(text); let top_mod = struct_.top_mod(db.upcast()); let struct_span = struct_.lazy_span(); assert_eq!("Foo", db.text_at(top_mod, &struct_span.name())); @@ -374,7 +374,7 @@ mod tests { Baz(u32, i32) }"#; - let enum_ = db.parse_source_to_first_item::(text); + let enum_ = db.expect_item::(text); let top_mod = enum_.top_mod(db.upcast()); let enum_span = enum_.lazy_span(); assert_eq!("Foo", db.text_at(top_mod, &enum_span.name())); @@ -396,7 +396,7 @@ mod tests { pub type Foo = u32 "#; - let type_alias = db.parse_source_to_first_item::(text); + let type_alias = db.expect_item::(text); let top_mod = type_alias.top_mod(db.upcast()); let type_alias_span = type_alias.lazy_span(); assert_eq!("Foo", db.text_at(top_mod, &type_alias_span.alias())); @@ -412,7 +412,7 @@ mod tests { use foo::bar::{baz::*, qux as Alias} "#; - let use_ = db.parse_source_to_first_item::(text); + let use_ = db.expect_item::(text); let top_mod = use_.top_mod(db.upcast()); let use_tree = use_.lazy_span().use_tree(); diff --git 
a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 43fe74112d..a7d2c30aff 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -128,14 +128,6 @@ where Self::Raw(AstPtr::new(ast)) } - fn syntax_ptr(&self) -> Option { - match self { - HirOrigin::Raw(ptr) => Some(ptr.syntax_node_ptr()), - HirOrigin::Expanded(ptr) => Some(ptr.clone()), - _ => None, - } - } - pub(crate) fn desugared(origin: impl Into) -> Self { Self::Desugared(origin.into()) } diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index b613032486..61b559bd21 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -1,5 +1,4 @@ -use common::InputFile; -use parser::{ast, SyntaxNode}; +use parser::ast; use crate::{ hir_def::{Body, PatId}, @@ -8,8 +7,8 @@ use crate::{ }; use super::{ - body_ast, body_source_map, define_lazy_span_node, - transition::{ChainInitiator, SpanTransitionChain}, + body_source_map, define_lazy_span_node, + transition::{ChainInitiator, ResolvedOrigin, SpanTransitionChain}, }; define_lazy_span_node!(LazyPatSpan, ast::Pat,); @@ -80,15 +79,10 @@ pub(crate) struct PatRoot { } impl ChainInitiator for PatRoot { - fn init(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { + fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { let source_map = body_source_map(db, self.body); - let pat_source = source_map.pat_map.node_to_source(self.pat); - let ptr = pat_source - .syntax_ptr() - .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - - let (file, root_node) = self.body.top_mod(db.upcast()).init(db); - let node = ptr.to_node(&root_node); - (file, node) + let origin = source_map.pat_map.node_to_source(self.pat); + let top_mod = self.body.top_mod(db.upcast()); + ResolvedOrigin::resolve(db, top_mod, origin) } } diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index 23956cf108..9f7570f8ba 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -1,5 +1,4 @@ -use common::InputFile; -use parser::{ast, SyntaxNode}; +use parser::ast; use crate::{ hir_def::{Body, StmtId}, @@ -8,8 +7,8 @@ use crate::{ }; use super::{ - body_ast, body_source_map, define_lazy_span_node, - transition::{ChainInitiator, SpanTransitionChain}, + body_source_map, define_lazy_span_node, + transition::{ChainInitiator, ResolvedOrigin, SpanTransitionChain}, }; define_lazy_span_node!(LazyStmtSpan, ast::Stmt,); @@ -39,15 +38,44 @@ pub(crate) struct StmtRoot { } impl ChainInitiator for StmtRoot { - fn init(&self, db: &dyn SpannedHirDb) -> (InputFile, SyntaxNode) { + fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { let source_map = body_source_map(db, self.body); - let stmt_source = source_map.stmt_map.node_to_source(self.stmt); - let ptr = stmt_source - .syntax_ptr() - .unwrap_or_else(|| body_ast(db, self.body).syntax_ptr().unwrap()); - - let (file, root_node) = self.body.top_mod(db.upcast()).init(db); - let node = ptr.to_node(&root_node); - (file, node) + let origin = source_map.stmt_map.node_to_source(self.stmt); + let top_mod = self.body.top_mod(db.upcast()); + ResolvedOrigin::resolve(db, top_mod, origin) + } +} + +#[cfg(test)] +mod tests { + use crate::{hir_def::Body, test_db::TestDb}; + use common::Upcast; + + #[test] + fn aug_assign() { + let mut db = TestDb::default(); + + let text = r#" { + fn foo() { + let mut x = 0 + x += 1 + } + }"#; + + let body: Body = db.expect_item::(text); + let top_mod = body.top_mod(db.upcast()); + for (i, stmt) in body.stmts(db.upcast()).keys().enumerate() { + match i { + 0 => { 
+ let span = stmt.lazy_span(body); + assert_eq!("let mut x = 0", db.text_at(top_mod, &span)); + } + 1 => { + let span = stmt.lazy_span(body); + assert_eq!("x += 1", db.text_at(top_mod, &span)); + } + _ => unreachable!(), + } + } } } diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 6f6c6b7496..05731c2e75 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -1,23 +1,29 @@ -use common::{diagnostics::Span, InputFile}; -use parser::{ast::prelude::*, syntax_node::NodeOrToken, SyntaxNode}; +use common::{ + diagnostics::{Span, SpanKind}, + InputFile, +}; +use parser::{ + ast::prelude::*, syntax_node::NodeOrToken, FeLang, SyntaxNode, SyntaxToken, TextRange, +}; use crate::{ hir_def::{ Body, Const, Contract, Enum, ExternFunc, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, TypeAlias, Use, }, - lower::top_mod_ast, + lower::{map_file_to_mod_impl, top_mod_ast}, + SpannedHirDb, }; use super::{ body_ast, const_ast, contract_ast, enum_ast, expr::ExprRoot, extern_func_ast, func_ast, impl_ast, impl_trait_ast, mod_ast, pat::PatRoot, stmt::StmtRoot, struct_ast, trait_ast, - type_alias_ast, use_ast, LazySpan, + type_alias_ast, use_ast, AugAssignDesugared, DesugaredOrigin, HirOrigin, LazySpan, }; #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub(crate) struct LazyTransitionFn { - pub(super) f: fn(SyntaxNode, LazyArg) -> Option, + pub(super) f: fn(ResolvedOrigin, LazyArg) -> ResolvedOrigin, pub(super) arg: LazyArg, } @@ -54,8 +60,66 @@ pub(crate) enum ChainRoot { Pat(PatRoot), } +pub(crate) struct ResolvedOrigin { + pub(crate) file: InputFile, + pub(crate) kind: ResolvedOriginKind, +} +impl ResolvedOrigin { + pub(crate) fn new(file: InputFile, kind: ResolvedOriginKind) -> Self { + Self { file, kind } + } + + pub(crate) fn resolve( + db: &dyn SpannedHirDb, + top_mod: TopLevelMod, + origin: &HirOrigin, + ) -> ResolvedOrigin + where + T: AstNode, + { + let root = top_mod_ast(db.upcast(), top_mod).syntax().clone(); + let kind = match origin { + HirOrigin::Raw(ptr) => ResolvedOriginKind::Node(ptr.syntax_node_ptr().to_node(&root)), + HirOrigin::Expanded(ptr) => ResolvedOriginKind::Expanded(ptr.to_node(&root)), + HirOrigin::Desugared(desugared) => ResolvedOriginKind::Desugared(desugared.clone()), + HirOrigin::None => ResolvedOriginKind::None, + }; + + ResolvedOrigin::new(top_mod.file(db.upcast()), kind) + } +} + +pub(crate) enum ResolvedOriginKind { + Node(SyntaxNode), + Token(SyntaxToken), + Expanded(SyntaxNode), + Desugared(DesugaredOrigin), + None, +} + +impl ResolvedOrigin { + pub(crate) fn map(self, f: F) -> Self + where + F: FnOnce(SyntaxNode) -> Option, + { + let kind = match self.kind { + ResolvedOriginKind::Node(node) => match f(node) { + Some(NodeOrToken::Node(node)) => ResolvedOriginKind::Node(node), + Some(NodeOrToken::Token(token)) => ResolvedOriginKind::Token(token), + None => ResolvedOriginKind::None, + }, + kind => kind, + }; + + ResolvedOrigin { + file: self.file, + kind, + } + } +} + impl ChainInitiator for ChainRoot { - fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { match self { Self::TopMod(top_mod) => top_mod.init(db), Self::Mod(mod_) => mod_.init(db), @@ -95,33 +159,43 @@ impl SpanTransitionChain { impl LazySpan for SpanTransitionChain { fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span { - let (file, mut node) = self.root.init(db); + let mut resolved = self.root.init(db); for LazyTransitionFn { f, arg } in 
&self.chain { - node = match f(node.clone(), *arg) { - Some(NodeOrToken::Node(node)) => node, - Some(NodeOrToken::Token(token)) => { - return Span::new(file, token.text_range()); - } - None => { - break; - } - }; + resolved = f(resolved, *arg); } - Span::new(file, node.text_range()) + match resolved.kind { + ResolvedOriginKind::Node(node) => { + Span::new(resolved.file, node.text_range(), SpanKind::Original) + } + ResolvedOriginKind::Token(token) => { + Span::new(resolved.file, token.text_range(), SpanKind::Original) + } + ResolvedOriginKind::Expanded(node) => { + Span::new(resolved.file, node.text_range(), SpanKind::Expanded) + } + ResolvedOriginKind::Desugared(desugared) => desugared.resolve(db, resolved.file), + ResolvedOriginKind::None => Span::new( + resolved.file, + TextRange::new(0.into(), 0.into()), + SpanKind::NotFound, + ), + } } } -pub trait ChainInitiator { - fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode); +/// A trait for types that can be used as the root of a `SpanTransitionChain`. +pub(crate) trait ChainInitiator { + /// Returns the `ResolvedOrigin` for the root of the chain. + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin; } impl ChainInitiator for TopLevelMod { - fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { let file = self.file(db.upcast()); let ast = top_mod_ast(db.upcast(), *self); - (file, ast.syntax().clone()) + ResolvedOrigin::new(file, ResolvedOriginKind::Node(ast.syntax().clone())) } } @@ -129,12 +203,10 @@ macro_rules! impl_chain_root { ($(($ty:ty, $fn:ident),)*) => { $( impl ChainInitiator for $ty { - fn init(&self, db: &dyn crate::SpannedHirDb) -> (InputFile, SyntaxNode) { - let ast = $fn(db, *self); - let (file, root) = self.top_mod(db.upcast()).init(db); - let ptr = ast.syntax_ptr().unwrap(); - let node = ptr.to_node(&root); - (file, node) + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { + let top_mod = self.top_mod(db.upcast()); + let origin = $fn(db, *self); + ResolvedOrigin::resolve(db, top_mod, origin) } })* }; @@ -183,10 +255,10 @@ macro_rules! define_lazy_span_node { $($( pub fn $name_token(&self) -> crate::span::LazySpanAtom { use parser::ast::prelude::*; - fn f(node: parser::SyntaxNode, _: crate::span::transition::LazyArg) -> Option { - <$sk_node as AstNode>::cast(node) + fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + origin.map(|node| <$sk_node as AstNode>::cast(node) .and_then(|n| n.$getter_token()) - .map(|n| n.into()) + .map(|n| n.into())) } let lazy_transition = crate::span::transition::LazyTransitionFn { @@ -203,10 +275,10 @@ macro_rules! define_lazy_span_node { pub fn $name_node(&self) -> $result { use parser::ast::prelude::*; - fn f(node: parser::SyntaxNode, _: crate::span::transition::LazyArg) -> Option { - <$sk_node as AstNode>::cast(node) + fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + origin.map(|node| <$sk_node as AstNode>::cast(node) .and_then(|n| n.$getter_node()) - .map(|n| n.syntax().clone().into()) + .map(|n| n.syntax().clone().into())) } let lazy_transition = crate::span::transition::LazyTransitionFn { @@ -221,15 +293,15 @@ macro_rules! 
define_lazy_span_node { pub fn $name_iter(&self, idx: usize) -> $result_iter { use parser::ast::prelude::*; - fn f(node: parser::SyntaxNode, arg: crate::span::transition::LazyArg) -> Option { + fn f(origin: crate::span::transition::ResolvedOrigin, arg: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { let idx = match arg { crate::span::transition::LazyArg::Idx(idx) => idx, _ => unreachable!(), }; - <$sk_node as AstNode>::cast(node) + origin.map(|node| <$sk_node as AstNode>::cast(node) .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into()) + .map(|n| n.syntax().clone().into())) } let lazy_transition = crate::span::transition::LazyTransitionFn { @@ -251,4 +323,30 @@ macro_rules! define_lazy_span_node { }; } +impl DesugaredOrigin { + fn resolve(self, db: &dyn SpannedHirDb, file: InputFile) -> Span { + let range = match self { + Self::AugAssign(AugAssignDesugared::Stmt(ptr)) => { + let top_mod = map_file_to_mod_impl(db.upcast(), file); + let top_mod_ast = top_mod_ast(db.upcast(), top_mod); + ptr.syntax_node_ptr() + .to_node(top_mod_ast.syntax()) + .text_range() + } + + Self::AugAssign(AugAssignDesugared::Lhs(range)) => range, + + Self::AugAssign(AugAssignDesugared::Rhs(ptr)) => { + let top_mod = map_file_to_mod_impl(db.upcast(), file); + let top_mod_ast = top_mod_ast(db.upcast(), top_mod); + ptr.syntax_node_ptr() + .to_node(top_mod_ast.syntax()) + .text_range() + } + }; + + Span::new(file, range, SpanKind::Original) + } +} + pub(super) use define_lazy_span_node; From 95b2c99ccfbefce6f3c8b713a934ae20fbb09e81 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 19 Apr 2023 17:11:43 +0200 Subject: [PATCH 139/678] Add `DynLazySpan` --- crates/hir/src/hir_def/mod.rs | 9 ++++++++ crates/hir/src/span/expr.rs | 35 +++++++++++++++++++++++++++++++ crates/hir/src/span/mod.rs | 14 +++++++++++++ crates/hir/src/span/transition.rs | 13 ++++++++++++ 4 files changed, 71 insertions(+) diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 43bbe3d95a..e2f3e0f4f6 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -76,6 +76,15 @@ pub enum Partial { Absent, } +impl Partial { + pub fn unwrap(&self) -> &T { + match self { + Self::Present(value) => value, + Self::Absent => panic!("unwrap called on absent value"), + } + } +} + impl Default for Partial { fn default() -> Self { Self::Absent diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index 979e1c8ed2..ae559cd313 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -175,3 +175,38 @@ impl ChainInitiator for ExprRoot { ResolvedOrigin::resolve(db, top_mod, origin) } } + +#[cfg(test)] +mod tests { + use crate::{ + hir_def::{Body, Expr, Stmt}, + test_db::TestDb, + }; + use common::Upcast; + + #[test] + fn aug_assign() { + let mut db = TestDb::default(); + + let text = r#" { + fn foo(mut x: i32) { + x += 1 + } + }"#; + + let body: Body = db.expect_item::(text); + let bin_expr = match body.stmts(db.upcast()).values().next().unwrap().unwrap() { + Stmt::Assign(_, rhs) => *rhs, + _ => unreachable!(), + }; + let (lhs, rhs) = match body.exprs(db.upcast())[bin_expr].unwrap() { + Expr::Bin(lhs, rhs, _) => (lhs, rhs), + _ => unreachable!(), + }; + + let top_mod = body.top_mod(db.upcast()); + assert_eq!("x += 1", db.text_at(top_mod, &bin_expr.lazy_span(body))); + assert_eq!("x", db.text_at(top_mod, &lhs.lazy_span(body))); + assert_eq!("1", db.text_at(top_mod, &rhs.lazy_span(body))); + } +} diff --git 
a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index a7d2c30aff..ee2eb55052 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -26,6 +26,18 @@ pub mod use_tree; mod transition; +/// This struct represents a dynamic lazy span, which can be converted from all +/// types that implement [`LazySpan`] in this module. We want to avoid `dyn +/// LazySpan` usage because it doesn't implement `Clone` and `Eq` which leads to +/// a lot of difficulties in salsa integration +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct DynLazySpan(SpanTransitionChain); +impl LazySpan for DynLazySpan { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span { + self.0.resolve(db) + } +} + /// The trait provides a way to extract [`Span`](common::diagnostics::Span) from /// types which don't have a span information directly, but can be resolved into /// a span lazily. @@ -170,4 +182,6 @@ impl AugAssignDesugared { use transition::define_lazy_span_node; +use self::transition::SpanTransitionChain; + define_lazy_span_node!(LazySpanAtom); diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 05731c2e75..2763449f3f 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -21,6 +21,13 @@ use super::{ type_alias_ast, use_ast, AugAssignDesugared, DesugaredOrigin, HirOrigin, LazySpan, }; +/// This type represents function from the hir origin to another hir origin to +/// identify the span of HIR node. `LazyTransitionFn` is regarded as a closure +/// that takes a `HirOrigin` and [`LazyArg`], `LazyArg` is considered as +/// captured variables. +/// The reason why we use `LazyTransitionFn` instead of `dyn +/// Fn` is that we want to make all types that use `LazyTransitionFn` to be +/// `Clone` and `Eq`. #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub(crate) struct LazyTransitionFn { pub(super) f: fn(ResolvedOrigin, LazyArg) -> ResolvedOrigin, @@ -320,6 +327,12 @@ macro_rules! 
define_lazy_span_node { self.0.resolve(db) } } + + impl From<$name> for crate::span::DynLazySpan { + fn from(val: $name) -> Self { + Self(val.0) + } + } }; } From 269754e03e86b40f795d688f4f0b0827750e3433 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 21 Apr 2023 11:38:04 +0200 Subject: [PATCH 140/678] Bumpup versions of fe-v2 crates --- Cargo.lock | 4 ++-- crates/hir/Cargo.toml | 2 +- crates/parser2/Cargo.toml | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a7f9e7552c..ff870057ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -904,7 +904,7 @@ dependencies = [ [[package]] name = "fe-hir" -version = "0.20.0-alpha" +version = "0.22.0" dependencies = [ "camino", "cranelift-entity", @@ -968,7 +968,7 @@ dependencies = [ [[package]] name = "fe-parser2" -version = "0.20.0-alpha" +version = "0.22.0" dependencies = [ "derive_more", "dir-test", diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 20ff3d0bc1..cfb4fdac8c 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-hir" -version = "0.20.0-alpha" +version = "0.22.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index b308b10cbe..45a51c124c 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-parser2" -version = "0.20.0-alpha" +version = "0.22.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" @@ -22,4 +22,4 @@ dir-test = "0.1" wasm-bindgen-test = "0.3" [target.'cfg(target_arch = "wasm32")'.dependencies] -wasm-bindgen = "0.2" \ No newline at end of file +wasm-bindgen = "0.2" From baada61555d016276197dde8d63a087a5ba156fb Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 21 Apr 2022 16:45:10 +0200 Subject: [PATCH 141/678] Implement `ScopeGraphBuilder` --- Cargo.lock | 1 + crates/hir/src/hir_def/item.rs | 10 +- crates/hir/src/hir_def/item_tree.rs | 84 -------- crates/hir/src/hir_def/mod.rs | 51 +++-- crates/hir/src/hir_def/params.rs | 2 +- crates/hir/src/hir_def/path.rs | 15 +- crates/hir/src/hir_def/scope_graph.rs | 174 +++++++++++---- crates/hir/src/hir_def/use_tree.rs | 10 +- crates/hir/src/lib.rs | 15 +- crates/hir/src/lower/attr.rs | 6 +- crates/hir/src/lower/body.rs | 6 +- crates/hir/src/lower/item.rs | 76 ++++--- crates/hir/src/lower/mod.rs | 75 +++---- crates/hir/src/lower/params.rs | 14 +- crates/hir/src/lower/path.rs | 22 +- crates/hir/src/lower/scope_builder.rs | 297 ++++++++++++++++++++++++++ crates/hir/src/lower/types.rs | 2 +- crates/hir/src/lower/use_tree.rs | 8 +- 18 files changed, 589 insertions(+), 279 deletions(-) delete mode 100644 crates/hir/src/hir_def/item_tree.rs create mode 100644 crates/hir/src/lower/scope_builder.rs diff --git a/Cargo.lock b/Cargo.lock index 9fe5b28782..15126a93fb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -909,6 +909,7 @@ dependencies = [ "camino", "cranelift-entity", "derive_more", + "either", "fe-common2", "fe-parser2", "num-bigint", diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 651de6d09e..cc0c0de676 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -21,8 +21,8 @@ use crate::{ }; use super::{ - module_tree_impl, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, ItemTree, - ModuleTree, Partial, TypeId, WhereClauseId, + module_tree_impl, scope_graph::ScopeGraph, AttrListId, Body, FnParamListId, GenericParamListId, + 
IdentId, ModuleTree, Partial, TypeId, WhereClauseId, }; #[derive( @@ -58,7 +58,7 @@ pub enum ItemKind { #[salsa::tracked] pub struct TopLevelMod { // No #[id] here, because `TopLevelMod` is always unique to a `InputFile` that is an argument - // of `module_item_tree`. + // of `module_scope_graph`. pub name: IdentId, pub(crate) ingot: InputIngot, @@ -69,8 +69,8 @@ impl TopLevelMod { LazyTopLevelModSpan::new(self) } - pub fn module_item_tree(self, db: &dyn HirDb) -> &ItemTree { - lower::item_tree_impl(db, self) + pub fn module_scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { + lower::scope_graph_impl(db, self) } pub fn ingot_module_tree(self, db: &dyn HirDb) -> &ModuleTree { diff --git a/crates/hir/src/hir_def/item_tree.rs b/crates/hir/src/hir_def/item_tree.rs deleted file mode 100644 index 2751284c2e..0000000000 --- a/crates/hir/src/hir_def/item_tree.rs +++ /dev/null @@ -1,84 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use crate::hir_def::TopLevelMod; - -use super::ItemKind; - -/// This tree represents the item hierarchy inside a file. -/// The root node of the tree is the top level module, which corresponds to the -/// `module_tree::TopLevelModule`. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ItemTree { - pub top_mod: TopLevelMod, - pub(crate) item_tree: BTreeMap, -} - -impl ItemTree { - /// Returns the depth-first iterator of the item tree. - pub fn dfs(&self) -> impl Iterator + '_ { - let mut stack = vec![self.top_mod.into()]; - std::iter::from_fn(move || { - let item = stack.pop()?; - stack.extend(self.item_tree[&item].children.iter().rev()); - Some(item) - }) - } - - /// Returns the parent of the item. - pub fn parent(&self, item: ItemKind) -> Option { - self.item_tree[&item].parent - } - - /// Returns the children of the item. 
- pub fn children(&self, item: impl Into) -> impl Iterator + '_ { - self.item_tree[&item.into()].children.iter().copied() - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct ItemTreeNode { - pub(crate) parent: Option, - pub(crate) children: BTreeSet, -} - -#[cfg(test)] -mod tests { - - use crate::{hir_def::ItemKind, test_db::TestDb}; - - #[test] - fn item_tree() { - let mut db = TestDb::default(); - - let text = r#" - mod foo { - fn bar() {} - extern { - fn baz() - } - } - - enum MyEnum {} - - mod baz { - struct MyS {} - } - "#; - - let item_tree = db.parse_source(text); - let top_mod = item_tree.top_mod; - assert_eq!(item_tree.dfs().count(), 8); - - let inner_items: Vec<_> = item_tree.children(top_mod).collect(); - assert!(matches!(inner_items[0], ItemKind::Mod(_))); - assert!(matches!(inner_items[1], ItemKind::Mod(_))); - assert!(matches!(inner_items[2], ItemKind::Enum(_))); - - let foo_children: Vec<_> = item_tree.children(inner_items[0]).collect(); - assert!(matches!(foo_children[0], ItemKind::Func(_))); - assert!(matches!(foo_children[1], ItemKind::Func(_))); - - let baz_children: Vec<_> = item_tree.children(inner_items[1]).collect(); - assert!(matches!(baz_children[0], ItemKind::Struct(_))); - } -} diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 295250b6f8..8b9c10681b 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -10,7 +10,6 @@ pub mod stmt; pub mod types; pub mod use_tree; -pub(crate) mod item_tree; pub(crate) mod module_tree; pub use attr::*; @@ -25,7 +24,6 @@ pub use stmt::*; pub use types::*; pub use use_tree::*; -pub use item_tree::*; pub use module_tree::*; use crate::HirDb; @@ -34,11 +32,38 @@ use crate::HirDb; pub struct IdentId { data: String, } +// TODO: Keyword should be prefilled in the database. +// ref: https://github.com/salsa-rs/salsa/pull/440 impl IdentId { - pub fn is_self(&self, db: &dyn HirDb) -> bool { - // TODO: Keyword should be prefilled in the database. 
- // ref: https://github.com/salsa-rs/salsa/pull/440 - self.data(db) == "self" + pub fn is_super(self, db: &dyn HirDb) -> bool { + self == Self::super_kw(db) + } + + pub fn is_ingot(self, db: &dyn HirDb) -> bool { + self == Self::ingot_kw(db) + } + + pub fn is_self(self, db: &dyn HirDb) -> bool { + self == Self::self_kw(db) + } + + pub fn is_self_ty(self, db: &dyn HirDb) -> bool { + self == Self::self_ty_kw(db) + } + pub fn super_kw(db: &dyn HirDb) -> Self { + IdentId::new(db, "super".to_string()) + } + + pub fn ingot_kw(db: &dyn HirDb) -> Self { + IdentId::new(db, "ingot".to_string()) + } + + pub fn self_kw(db: &dyn HirDb) -> Self { + IdentId::new(db, "self".to_string()) + } + + pub fn self_ty_kw(db: &dyn HirDb) -> Self { + IdentId::new(db, "Self".to_string()) } } @@ -88,7 +113,10 @@ impl Partial { } pub fn to_opt(self) -> Option { - self.into() + match self { + Self::Present(value) => Some(value), + Self::Absent => None, + } } } @@ -108,11 +136,8 @@ impl From> for Partial { } } -impl Into> for Partial { - fn into(self) -> Option { - match self { - Self::Present(value) => Some(value), - Self::Absent => None, - } +impl From> for Option { + fn from(value: Partial) -> Option { + value.to_opt() } } diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index b8a6736efa..173e3f8414 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -1,4 +1,4 @@ -use crate::{hir_def::TypeId, HirDb}; +use crate::hir_def::TypeId; use super::{Body, IdentId, Partial, PathId}; diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index 718835ac0e..bdf7232e44 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -4,18 +4,5 @@ use super::IdentId; #[salsa::interned] pub struct PathId { - segments: Vec>, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum PathSegment { - /// `ingot`. - Ingot, - /// `super`. - Super, - /// `Self` segment. - SelfTy, - /// `self` segment. 
- Self_, - Ident(IdentId), + segments: Vec>, } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 6cebf18142..7562f1855f 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -1,12 +1,73 @@ use cranelift_entity::{entity_impl, PrimaryMap}; -use either::Either; +use rustc_hash::{FxHashMap, FxHashSet}; -use super::{IdentId, ItemKind, PathId, TopLevelMod, TraitRef, TypeId, UseAlias}; +use super::{IdentId, ItemKind, TopLevelMod, Use}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct ScopeGraph { pub top_mod: TopLevelMod, pub scopes: PrimaryMap, + pub item_map: FxHashMap, + pub unresolved_imports: FxHashMap>, +} + +impl ScopeGraph { + pub fn items_dfs(&self) -> impl Iterator + '_ { + ScopeGraphItemIterDfs { + graph: self, + visited: Default::default(), + stack: vec![self.top_mod.into()], + } + } + + pub fn edges(&self, scope: LocalScopeId) -> &[ScopeEdge] { + &self.scopes[scope].edges + } + + pub fn scope_data(&self, scope: LocalScopeId) -> &LocalScope { + &self.scopes[scope] + } + + pub fn scope_item(&self, scope: LocalScopeId) -> Option { + if let ScopeKind::Item(item) = self.scope_data(scope).kind { + Some(item) + } else { + None + } + } + + pub fn item_scope(&self, item: ItemKind) -> LocalScopeId { + self.item_map[&item] + } +} + +struct ScopeGraphItemIterDfs<'a> { + graph: &'a ScopeGraph, + visited: FxHashSet, + stack: Vec, +} + +impl<'a> std::iter::Iterator for ScopeGraphItemIterDfs<'a> { + type Item = ItemKind; + + fn next(&mut self) -> Option { + let item = self.stack.pop()?; + self.visited.insert(item); + let scope_id = self.graph.item_scope(item); + + for edge in self.graph.edges(scope_id) { + let ScopeId { top_mod, local_id } = edge.dest; + if top_mod != self.graph.top_mod { + continue; + } + if let Some(item) = self.graph.scope_item(local_id) { + if !self.visited.contains(&item) { + self.stack.push(item); + } + } + } + Some(item) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -35,16 +96,26 @@ pub enum ScopeKind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ScopeEdge { - pub dest: Either, + pub dest: ScopeId, pub kind: EdgeKind, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ScopeId { + pub top_mod: TopLevelMod, + pub local_id: LocalScopeId, +} + +impl ScopeId { + pub fn new(top_mod: TopLevelMod, local_id: LocalScopeId) -> Self { + Self { top_mod, local_id } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] pub enum EdgeKind { Lex(LexEdge), Mod(ModEdge), - GlobUse(GlobUseEdge), - Use(UseEdge), Type(TypeEdge), Trait(TraitEdge), GenericParam(GenericParamEdge), @@ -67,14 +138,6 @@ impl EdgeKind { EdgeKind::Mod(ident.into()) } - pub fn glob_use(path: PathId, alias: UseAlias) -> Self { - EdgeKind::GlobUse(GlobUseEdge { path, alias }) - } - - pub fn use_(path: PathId, alias: UseAlias) -> Self { - EdgeKind::Use(UseEdge { path, alias }) - } - pub fn type_(ident: IdentId) -> Self { EdgeKind::Type(ident.into()) } @@ -107,8 +170,8 @@ impl EdgeKind { EdgeKind::Ingot(IngotEdge()) } - pub fn self_ty(ty: Either) -> Self { - EdgeKind::SelfTy(ty.into()) + pub fn self_ty() -> Self { + EdgeKind::SelfTy(SelfTyEdge()) } pub fn self_() -> Self { @@ -124,39 +187,25 @@ impl EdgeKind { pub struct LexEdge(); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct ModEdge(IdentId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct GlobUseEdge { - /// `UsePathSegment` are lowered to a normal `Path`. 
- path: PathId, - alias: UseAlias, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct UseEdge { - /// `UsePathSegment` are lowered to a normal `Path`. - path: PathId, - alias: UseAlias, -} +pub struct ModEdge(pub IdentId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct TypeEdge(IdentId); +pub struct TypeEdge(pub IdentId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct TraitEdge(IdentId); +pub struct TraitEdge(pub IdentId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct ValueEdge(IdentId); +pub struct ValueEdge(pub IdentId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct GenericParamEdge(IdentId); +pub struct GenericParamEdge(pub IdentId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct FieldEdge(IdentId); +pub struct FieldEdge(pub IdentId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct VariantEdge(IdentId); +pub struct VariantEdge(pub IdentId); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct SuperEdge(); @@ -165,9 +214,7 @@ pub struct SuperEdge(); pub struct IngotEdge(); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub struct SelfTyEdge { - ty: Either, -} +pub struct SelfTyEdge(); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] pub struct SelfEdge(); @@ -178,3 +225,52 @@ pub struct AnonEdge(); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct LocalScopeId(u32); entity_impl!(LocalScopeId); + +impl LocalScopeId { + pub(crate) fn root() -> Self { + LocalScopeId(0) + } +} + +#[cfg(test)] +mod tests { + + use crate::{hir_def::ItemKind, test_db::TestDb}; + + #[test] + fn item_tree() { + let mut db = TestDb::default(); + + let text = r#" + mod foo { + fn bar() {} + extern { + fn baz() + } + } + + enum MyEnum {} + + mod baz { + struct MyS {} + } + "#; + + let scope_graph = db.parse_source(text); + assert_eq!(scope_graph.items_dfs().count(), 8); + + for (i, item) in scope_graph.items_dfs().enumerate() { + match i { + 0 => assert!(matches!(item, ItemKind::TopMod(_))), + 1 => assert!(matches!(item, ItemKind::Mod(_))), + 2 => assert!(matches!(item, ItemKind::Struct(_))), + 3 => assert!(matches!(item, ItemKind::Enum(_))), + 4 => assert!(matches!(item, ItemKind::Mod(_))), + 5 => assert!(matches!(item, ItemKind::Func(_))), + 6 => assert!(matches!(item, ItemKind::Func(_))), + 7 => assert!(matches!(item, ItemKind::Body(_))), + _ => unreachable!(), + } + } + } +} diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index b025d47d88..643c8047a9 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -8,26 +8,24 @@ pub struct UseTreeId { /// `Foo::Foo2` in `Foo::Foo2::{Bar::*, Baz::{x, y}}` /// /// NOTE: If the tree root is started with `{}`, then the `path` is `None`. + #[return_ref] pub path: Vec>, + /// The subtree of the use tree. /// /// `Bar::*` and `Baz::{x, y}` in `Foo::Foo2::{Bar::*, Baz::{x, y}}`. + #[return_ref] pub subtree: Vec, //// The alias of this use tree. /// `Bar` in `Foo as Bar;` + #[return_ref] pub alias: Option>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum UsePathSegment { Ident(IdentId), - /// `ingot`. - Ingot, - /// `super`. - Super, - /// `self`. - Self_, /// `*`. 
Glob, } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 1d9b5c653c..7ba14624b0 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -3,8 +3,9 @@ use hir_def::module_tree_impl; pub use lower::parse::ParseDiagnostic; use lower::{ - item_tree_impl, map_file_to_mod_impl, + map_file_to_mod_impl, parse::{parse_file_impl, ParseDiagnosticAccumulator}, + scope_graph_impl, }; pub mod diagnostics; @@ -49,7 +50,7 @@ pub struct Jar( /// thus, can't be accessed from outside of the crate without implementing /// [`LowerHirDb`] marker trait. module_tree_impl, - item_tree_impl, + scope_graph_impl, map_file_to_mod_impl, parse_file_impl, ); @@ -87,8 +88,8 @@ mod test_db { }; use crate::{ - hir_def::{ItemKind, ItemTree, TopLevelMod}, - lower::{item_tree, map_file_to_mod}, + hir_def::{scope_graph::ScopeGraph, ItemKind, TopLevelMod}, + lower::{map_file_to_mod, scope_graph}, span::LazySpan, LowerHirDb, SpannedHirDb, }; @@ -124,10 +125,10 @@ mod test_db { } impl TestDb { - pub fn parse_source(&mut self, text: &str) -> &ItemTree { + pub fn parse_source(&mut self, text: &str) -> &ScopeGraph { let file = self.standalone_file(text); let top_mod = map_file_to_mod(self, file); - item_tree(self, top_mod) + scope_graph(self, top_mod) } /// Parses the given source text and returns the first inner item in the @@ -137,7 +138,7 @@ mod test_db { ItemKind: TryInto, { let tree = self.parse_source(text); - tree.dfs().find_map(|it| it.try_into().ok()).unwrap() + tree.items_dfs().find_map(|it| it.try_into().ok()).unwrap() } pub fn text_at(&self, top_mod: TopLevelMod, span: &impl LazySpan) -> &str { diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs index 87b0fe8652..2e4999fd35 100644 --- a/crates/hir/src/lower/attr.rs +++ b/crates/hir/src/lower/attr.rs @@ -9,12 +9,12 @@ impl AttrListId { .into_iter() .map(|attr| Attr::lower_ast(ctxt, attr)) .collect(); - Self::new(ctxt.db, attrs) + Self::new(ctxt.db(), attrs) } pub(super) fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { ast.map(|ast| Self::lower_ast(ctxt, ast)) - .unwrap_or_else(|| Self::new(ctxt.db, vec![])) + .unwrap_or_else(|| Self::new(ctxt.db(), vec![])) } } @@ -50,7 +50,7 @@ impl DocCommentAttr { .map(|doc| doc.text()[3..].to_string()) .unwrap_or_default(); Self { - text: StringId::new(ctxt.db, text), + text: StringId::new(ctxt.db(), text), } } } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 2fd069f113..78f42d607e 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -92,7 +92,7 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { } fn new(f_ctxt: &'ctxt mut FileLowerCtxt<'db>, bid: TrackedBodyId) -> Self { - f_ctxt.enter_scope(); + f_ctxt.enter_scope(false); Self { f_ctxt, bid, @@ -106,12 +106,12 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { fn build(self, ast: &ast::Expr) -> Body { let origin = HirOrigin::raw(ast); let body = Body::new( - self.f_ctxt.db, + self.f_ctxt.db(), self.bid, self.stmts, self.exprs, self.pats, - self.f_ctxt.top_mod, + self.f_ctxt.top_mod(), self.source_map, origin, ); diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index ddc80e5f55..4caedd010b 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -67,7 +67,7 @@ impl Mod { parent_id: TrackedItemId, ast: ast::Mod, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(true); let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Mod(name).join(parent_id); @@ -78,7 +78,15 @@ impl Mod { } let origin = 
HirOrigin::raw(&ast); - let mod_ = Self::new(ctxt.db, id, name, attributes, is_pub, ctxt.top_mod, origin); + let mod_ = Self::new( + ctxt.db(), + id, + name, + attributes, + is_pub, + ctxt.top_mod(), + origin, + ); ctxt.leave_scope(mod_) } } @@ -90,7 +98,7 @@ impl Func { ast: ast::Fn, is_extern: bool, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Fn(name).join(parent_id); @@ -114,7 +122,7 @@ impl Func { let origin = HirOrigin::raw(&ast); let fn_ = Self::new( - ctxt.db, + ctxt.db(), id, name, attributes, @@ -125,7 +133,7 @@ impl Func { modifier, body, is_extern, - ctxt.top_mod, + ctxt.top_mod(), origin, ); ctxt.leave_scope(fn_) @@ -138,7 +146,7 @@ impl Struct { parent_id: TrackedItemId, ast: ast::Struct, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Struct(name).join(parent_id); @@ -151,7 +159,7 @@ impl Struct { let origin = HirOrigin::raw(&ast); let struct_ = Self::new( - ctxt.db, + ctxt.db(), id, name, attributes, @@ -159,7 +167,7 @@ impl Struct { generic_params, where_clause, fields, - ctxt.top_mod, + ctxt.top_mod(), origin, ); ctxt.leave_scope(struct_) @@ -172,7 +180,7 @@ impl Contract { parent_id: TrackedItemId, ast: ast::Contract, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Contract(name).join(parent_id); @@ -183,13 +191,13 @@ impl Contract { let origin = HirOrigin::raw(&ast); let contract = Self::new( - ctxt.db, + ctxt.db(), id, name, attributes, is_pub, fields, - ctxt.top_mod, + ctxt.top_mod(), origin, ); ctxt.leave_scope(contract) @@ -202,7 +210,7 @@ impl Enum { parent_id: TrackedItemId, ast: ast::Enum, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Enum(name).join(parent_id); @@ -215,7 +223,7 @@ impl Enum { let origin = HirOrigin::raw(&ast); let enum_ = Self::new( - ctxt.db, + ctxt.db(), id, name, attributes, @@ -223,7 +231,7 @@ impl Enum { generic_params, where_clause, variants, - ctxt.top_mod, + ctxt.top_mod(), origin, ); ctxt.leave_scope(enum_) @@ -236,7 +244,7 @@ impl TypeAlias { parent_id: TrackedItemId, ast: ast::TypeAlias, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.alias()); let id = TrackedItemId::TypeAlias(name).join(parent_id); @@ -249,7 +257,7 @@ impl TypeAlias { let origin = HirOrigin::raw(&ast); let alias = Self::new( - ctxt.db, + ctxt.db(), id, name, attributes, @@ -257,7 +265,7 @@ impl TypeAlias { generic_params, where_clause, ty, - ctxt.top_mod, + ctxt.top_mod(), origin, ); ctxt.leave_scope(alias) @@ -270,7 +278,7 @@ impl Impl { parent_id: TrackedItemId, ast: ast::Impl, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let id = TrackedItemId::Impl(ty).join(parent_id); @@ -287,13 +295,13 @@ impl Impl { } let impl_ = Self::new( - ctxt.db, + ctxt.db(), id, ty, attributes, generic_params, where_clause, - ctxt.top_mod, + ctxt.top_mod(), origin, ); ctxt.leave_scope(impl_) @@ -306,7 +314,7 @@ impl Trait { parent_id: TrackedItemId, ast: ast::Trait, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Trait(name).join(parent_id); @@ -324,14 +332,14 @@ impl Trait { } let 
trait_ = Self::new( - ctxt.db, + ctxt.db(), id, name, attributes, is_pub, generic_params, where_clause, - ctxt.top_mod, + ctxt.top_mod(), origin, ); @@ -345,7 +353,7 @@ impl ImplTrait { parent_id: TrackedItemId, ast: ast::ImplTrait, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let trait_ref = TraitRef::lower_ast_partial(ctxt, ast.trait_ref()); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); @@ -363,14 +371,14 @@ impl ImplTrait { } let impl_trait = Self::new( - ctxt.db, + ctxt.db(), id, trait_ref, ty, attributes, generic_params, where_clause, - ctxt.top_mod, + ctxt.top_mod(), origin, ); ctxt.leave_scope(impl_trait) @@ -383,7 +391,7 @@ impl Const { parent_id: TrackedItemId, ast: ast::Const, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Const(name).join(parent_id); @@ -393,7 +401,7 @@ impl Const { .into(); let origin = HirOrigin::raw(&ast); - let const_ = Self::new(ctxt.db, id, name, body, ctxt.top_mod, origin); + let const_ = Self::new(ctxt.db(), id, name, body, ctxt.top_mod(), origin); ctxt.leave_scope(const_) } } @@ -404,13 +412,13 @@ impl Use { parent_id: TrackedItemId, ast: ast::Use, ) -> Self { - ctxt.enter_scope(); + ctxt.enter_scope(false); let tree = UseTreeId::lower_ast_partial(ctxt, ast.use_tree()); let id = TrackedItemId::Use(tree).join(parent_id); let origin = HirOrigin::raw(&ast); - let use_ = Self::new(ctxt.db, id, tree, ctxt.top_mod, origin); + let use_ = Self::new(ctxt.db(), id, tree, ctxt.top_mod(), origin); ctxt.leave_scope(use_) } } @@ -436,12 +444,12 @@ impl RecordFieldListId { .into_iter() .map(|field| RecordField::lower_ast(ctxt, field)) .collect(); - Self::new(ctxt.db, fields) + Self::new(ctxt.db(), fields) } fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { ast.map(|ast| Self::lower_ast(ctxt, ast)) - .unwrap_or(Self::new(ctxt.db, Vec::new())) + .unwrap_or(Self::new(ctxt.db(), Vec::new())) } } @@ -461,12 +469,12 @@ impl EnumVariantListId { .into_iter() .map(|variant| EnumVariant::lower_ast(ctxt, variant)) .collect(); - Self::new(ctxt.db, variants) + Self::new(ctxt.db(), variants) } fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { ast.map(|ast| Self::lower_ast(ctxt, ast)) - .unwrap_or(Self::new(ctxt.db, Vec::new())) + .unwrap_or(Self::new(ctxt.db(), Vec::new())) } } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index ac37b2bb5b..ef289d7f13 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -1,5 +1,3 @@ -use std::collections::{BTreeMap, BTreeSet}; - use common::{InputFile, InputIngot}; use num_bigint::BigUint; use num_traits::Num; @@ -10,7 +8,7 @@ use parser::{ use crate::{ hir_def::{ - module_tree_impl, IdentId, IntegerId, ItemKind, ItemTree, ItemTreeNode, LitKind, + module_tree_impl, scope_graph::ScopeGraph, IdentId, IntegerId, ItemKind, LitKind, ModuleTree, Partial, StringId, TopLevelMod, TrackedItemId, }, HirDb, LowerHirDb, ParseDiagnostic, @@ -19,6 +17,7 @@ use crate::{ use self::{ item::lower_module_items, parse::{parse_file_impl, ParseDiagnosticAccumulator}, + scope_builder::ScopeGraphBuilder, }; pub(crate) mod parse; @@ -44,8 +43,8 @@ pub fn map_file_to_mod(db: &dyn LowerHirDb, file: InputFile) -> TopLevelMod { } /// Returns the item tree of the given top-level module. 
-pub fn item_tree(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ItemTree { - item_tree_impl(db.upcast(), top_mod) +pub fn scope_graph(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ScopeGraph { + scope_graph_impl(db.upcast(), top_mod) } /// Returns the root node of the given top-level module. @@ -81,11 +80,10 @@ pub(crate) fn map_file_to_mod_impl(db: &dyn HirDb, file: InputFile) -> TopLevelM } #[salsa::tracked(return_ref)] -pub(crate) fn item_tree_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> ItemTree { +pub(crate) fn scope_graph_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> ScopeGraph { let ast = top_mod_ast(db, top_mod); - let mut ctxt = FileLowerCtxt::new(db, top_mod); + let mut ctxt = FileLowerCtxt::enter_top_mod(db, top_mod); - ctxt.enter_scope(); let id = TrackedItemId::TopLevelMod(top_mod.name(db)); if let Some(items) = ast.items() { lower_module_items(&mut ctxt, id, items); @@ -102,32 +100,31 @@ pub(crate) fn top_mod_ast(db: &dyn HirDb, top_mod: TopLevelMod) -> ast::Root { } pub struct FileLowerCtxt<'db> { - db: &'db dyn HirDb, - scope_stack: Vec>, - item_tree: BTreeMap, - top_mod: TopLevelMod, + builder: ScopeGraphBuilder<'db>, } impl<'db> FileLowerCtxt<'db> { - pub(super) fn new(db: &'db dyn HirDb, top_mod: TopLevelMod) -> Self { + pub(super) fn enter_top_mod(db: &'db dyn HirDb, top_mod: TopLevelMod) -> Self { Self { - db, - scope_stack: vec![], - item_tree: BTreeMap::new(), - top_mod, + builder: ScopeGraphBuilder::enter_top_mod(db, top_mod), } } - pub(super) fn build(self) -> ItemTree { - ItemTree { - top_mod: self.top_mod, - item_tree: self.item_tree, - } + pub(super) fn build(self) -> ScopeGraph { + self.builder.build() + } + + pub(super) fn db(&self) -> &'db dyn HirDb { + self.builder.db + } + + pub(super) fn top_mod(&self) -> TopLevelMod { + self.builder.top_mod } /// Creates a new scope for an item. - fn enter_scope(&mut self) { - self.scope_stack.push(BTreeSet::default()); + fn enter_scope(&mut self, is_mod: bool) { + self.builder.enter_scope(is_mod); } /// Leaves the current scope, `item` should be the generated item which owns @@ -136,31 +133,14 @@ impl<'db> FileLowerCtxt<'db> { where I: Into + Copy, { - let item_kind = item.into(); - let item_scope = self.scope_stack.pop().unwrap(); - - for item in &item_scope { - self.item_tree.get_mut(item).unwrap().parent = Some(item_kind); - } - - self.item_tree.insert( - item_kind, - ItemTreeNode { - parent: None, - children: item_scope, - }, - ); - - if !matches!(item_kind, ItemKind::TopMod(_)) { - self.scope_stack.last_mut().unwrap().insert(item.into()); - } + self.builder.leave_scope(item.into()); item } } impl IdentId { fn lower_token(ctxt: &mut FileLowerCtxt<'_>, token: SyntaxToken) -> Self { - Self::new(ctxt.db, token.text().to_string()) + Self::new(ctxt.db(), token.text().to_string()) } fn lower_token_partial( @@ -177,7 +157,10 @@ impl LitKind { ast::LitKind::Int(int) => Self::Int(IntegerId::lower_ast(ctxt, int)), ast::LitKind::String(string) => { let text = string.token().text(); - Self::String(StringId::new(ctxt.db, text[1..text.len() - 1].to_string())) + Self::String(StringId::new( + ctxt.db(), + text[1..text.len() - 1].to_string(), + )) } ast::LitKind::Bool(bool) => match bool.token().text() { "true" => Self::Bool(true), @@ -193,7 +176,7 @@ impl IntegerId { let text = ast.token().text(); // Parser ensures that the text is valid pair with a radix and a number. 
if text.len() < 2 { - return Self::new(ctxt.db, BigUint::from_str_radix(text, 10).unwrap()); + return Self::new(ctxt.db(), BigUint::from_str_radix(text, 10).unwrap()); } let int = match &text[0..2] { @@ -203,6 +186,6 @@ impl IntegerId { _ => BigUint::from_str_radix(text, 10).unwrap(), }; - Self::new(ctxt.db, int) + Self::new(ctxt.db(), int) } } diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 3064f99c0b..4e63cf2bf0 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -10,7 +10,7 @@ impl GenericArgListId { .into_iter() .map(|arg| GenericArg::lower_ast(ctxt, arg)) .collect(); - Self::new(ctxt.db, args) + Self::new(ctxt.db(), args) } pub(super) fn lower_ast_opt( @@ -18,7 +18,7 @@ impl GenericArgListId { ast: Option, ) -> Self { ast.map(|ast| Self::lower_ast(ctxt, ast)) - .unwrap_or_else(|| Self::new(ctxt.db, Vec::new())) + .unwrap_or_else(|| Self::new(ctxt.db(), Vec::new())) } } @@ -28,7 +28,7 @@ impl GenericParamListId { .into_iter() .map(|param| GenericParam::lower_ast(ctxt, param)) .collect(); - Self::new(ctxt.db, params) + Self::new(ctxt.db(), params) } pub(super) fn lower_ast_opt( @@ -36,7 +36,7 @@ impl GenericParamListId { ast: Option, ) -> Self { ast.map(|ast| Self::lower_ast(ctxt, ast)) - .unwrap_or_else(|| Self::new(ctxt.db, Vec::new())) + .unwrap_or_else(|| Self::new(ctxt.db(), Vec::new())) } } @@ -46,7 +46,7 @@ impl FnParamListId { .into_iter() .map(|param| FnParam::lower_ast(ctxt, param)) .collect(); - Self::new(ctxt.db, params) + Self::new(ctxt.db(), params) } } @@ -56,7 +56,7 @@ impl WhereClauseId { .into_iter() .map(|pred| WherePredicate::lower_ast(ctxt, pred)) .collect(); - Self::new(ctxt.db, predicates) + Self::new(ctxt.db(), predicates) } pub(super) fn lower_ast_opt( @@ -64,7 +64,7 @@ impl WhereClauseId { ast: Option, ) -> Self { ast.map(|ast| Self::lower_ast(ctxt, ast)) - .unwrap_or_else(|| Self::new(ctxt.db, Vec::new())) + .unwrap_or_else(|| Self::new(ctxt.db(), Vec::new())) } } diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index 6d20ab9b76..f32b990752 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -1,6 +1,6 @@ use parser::{ast, SyntaxToken}; -use crate::hir_def::{IdentId, Partial, PathId, PathSegment}; +use crate::hir_def::{IdentId, Partial, PathId}; use super::FileLowerCtxt; @@ -9,20 +9,18 @@ impl PathId { let mut segments = Vec::new(); for seg in ast.into_iter() { let segment = match seg.kind() { - Some(ast::PathSegmentKind::Ingot(_)) => Some(PathSegment::Ingot), - Some(ast::PathSegmentKind::Super(_)) => Some(PathSegment::Super), - Some(ast::PathSegmentKind::SelfTy(_)) => Some(PathSegment::SelfTy), - Some(ast::PathSegmentKind::Self_(_)) => Some(PathSegment::Self_), - Some(ast::PathSegmentKind::Ident(ident)) => { - Some(PathSegment::Ident(IdentId::lower_token(ctxt, ident))) - } + Some(ast::PathSegmentKind::Ingot(_)) => Some(IdentId::ingot_kw(ctxt.db())), + Some(ast::PathSegmentKind::Super(_)) => Some(IdentId::super_kw(ctxt.db())), + Some(ast::PathSegmentKind::SelfTy(_)) => Some(IdentId::self_ty_kw(ctxt.db())), + Some(ast::PathSegmentKind::Self_(_)) => Some(IdentId::self_kw(ctxt.db())), + Some(ast::PathSegmentKind::Ident(ident)) => Some(IdentId::lower_token(ctxt, ident)), None => None, } .into(); segments.push(segment); } - Self::new(ctxt.db, segments) + Self::new(ctxt.db(), segments) } pub(super) fn lower_ast_partial( @@ -33,8 +31,8 @@ impl PathId { } pub(super) fn from_ident(ctxt: &mut FileLowerCtxt<'_>, ast: SyntaxToken) -> Self { - let ident_id 
= IdentId::new(ctxt.db, ast.text().to_string()); - let seg = vec![Partial::Present(PathSegment::Ident(ident_id))]; - Self::new(ctxt.db, seg) + let ident_id = IdentId::new(ctxt.db(), ast.text().to_string()); + let seg = vec![Partial::Present(ident_id)]; + Self::new(ctxt.db(), seg) } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs new file mode 100644 index 0000000000..44aafead0e --- /dev/null +++ b/crates/hir/src/lower/scope_builder.rs @@ -0,0 +1,297 @@ +use crate::{ + hir_def::{ + scope_graph::{ + EdgeKind, LocalScope, LocalScopeId, ScopeEdge, ScopeGraph, ScopeId, ScopeKind, + }, + EnumVariantListId, FnParamListId, FnParamName, GenericParamListId, ItemKind, + RecordFieldListId, TopLevelMod, + }, + HirDb, +}; + +pub struct ScopeGraphBuilder<'db> { + pub(super) db: &'db dyn HirDb, + pub(super) top_mod: TopLevelMod, + graph: ScopeGraph, + scope_stack: Vec, + module_stack: Vec, +} + +impl<'db> ScopeGraphBuilder<'db> { + pub(crate) fn enter_top_mod(db: &'db dyn HirDb, top_mod: TopLevelMod) -> Self { + let mut builder = Self { + db, + top_mod, + graph: ScopeGraph { + top_mod, + scopes: Default::default(), + item_map: Default::default(), + unresolved_imports: Default::default(), + }, + scope_stack: Default::default(), + module_stack: Default::default(), + }; + + builder.enter_scope(true); + builder + } + + pub fn build(self) -> ScopeGraph { + debug_assert!(matches!( + self.graph.scope_item(LocalScopeId::root()), + Some(ItemKind::TopMod(_)) + )); + + self.graph + } + + pub fn enter_scope(&mut self, is_mod: bool) { + // Create dummy scope, the scope kind is initialized in `leave_scope`. + let id = self.graph.scopes.push(self.dummy_scope()); + self.scope_stack.push(id); + if is_mod { + self.module_stack.push(id); + } + } + + pub fn leave_scope(&mut self, item: ItemKind) { + use ItemKind::*; + + let item_scope = self.scope_stack.pop().unwrap(); + self.graph.scopes[item_scope].kind = ScopeKind::Item(item); + self.graph.item_map.insert(item, item_scope); + + if let ItemKind::TopMod(top_mod) = item { + debug_assert!(self.scope_stack.is_empty()); + self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); + + self.add_global_edge(item_scope, top_mod.ingot_root(self.db), EdgeKind::ingot()); + for child in top_mod.children(self.db) { + let child_name = child.name(self.db); + let edge = EdgeKind::mod_(child_name); + self.add_global_edge(item_scope, child, edge) + } + + if let Some(parent) = top_mod.parent(self.db) { + let parent_edge = EdgeKind::super_(); + self.add_global_edge(item_scope, parent, parent_edge); + } + self.module_stack.pop().unwrap(); + + return; + } + + let parent_scope = *self.scope_stack.last().unwrap(); + let parent_to_child_edge = match item { + Mod(inner) => { + self.add_local_edge( + item_scope, + *self.module_stack.last().unwrap(), + EdgeKind::super_(), + ); + self.add_global_edge( + item_scope, + self.top_mod.ingot_root(self.db), + EdgeKind::ingot(), + ); + self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); + + self.module_stack.pop().unwrap(); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::mod_) + .unwrap_or_else(EdgeKind::anon) + } + + Func(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + if let Some(params) = inner.params(self.db).to_opt() { + self.add_func_param_scope(item_scope, params); + } + inner + .name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon) + } + + 
Struct(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_field_scope(item_scope, inner.fields(self.db)); + self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) + } + + Contract(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_field_scope(item_scope, inner.fields(self.db)); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) + } + + Enum(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_variant_scope(item_scope, inner.variants(self.db)); + self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) + } + + TypeAlias(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) + } + + Impl(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + EdgeKind::anon() + } + + Trait(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::trait_) + .unwrap_or_else(EdgeKind::anon) + } + + ImplTrait(inner) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + EdgeKind::anon() + } + + Const(c) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + c.name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon) + } + + Use(use_) => { + self.graph + .unresolved_imports + .entry(parent_scope) + .or_default() + .push(use_); + + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + EdgeKind::anon() + } + + Body(_) => { + self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + EdgeKind::anon() + } + + _ => unreachable!(), + }; + + self.add_local_edge(parent_scope, item_scope, parent_to_child_edge); + } + + fn add_field_scope(&mut self, current_scope: LocalScopeId, fields: RecordFieldListId) { + for (i, field) in fields.data(self.db).iter().enumerate() { + let scope = LocalScope::new(ScopeKind::Field(i)); + let field_scope = self.graph.scopes.push(scope); + self.add_local_edge(field_scope, current_scope, EdgeKind::lex()); + let kind = field + .name + .to_opt() + .map(EdgeKind::field) + .unwrap_or_else(EdgeKind::anon); + self.add_local_edge(current_scope, field_scope, kind) + } + } + + fn add_variant_scope(&mut self, current_scope: LocalScopeId, variants: EnumVariantListId) { + for (i, field) in variants.data(self.db).iter().enumerate() { + let scope = LocalScope::new(ScopeKind::Variant(i)); + let variant_scope = self.graph.scopes.push(scope); + self.add_local_edge(variant_scope, current_scope, EdgeKind::lex()); + let kind = field + .name + .to_opt() + .map(EdgeKind::variant) + .unwrap_or_else(EdgeKind::anon); + self.add_local_edge(current_scope, 
variant_scope, kind) + } + } + + fn add_func_param_scope(&mut self, current_scope: LocalScopeId, params: FnParamListId) { + for (i, param) in params.data(self.db).iter().enumerate() { + let scope = LocalScope::new(ScopeKind::FnParam(i)); + let generic_param_scope = self.graph.scopes.push(scope); + self.add_local_edge(generic_param_scope, current_scope, EdgeKind::lex()); + let kind = param + .name + .to_opt() + .map(|name| match name { + FnParamName::Self_ => EdgeKind::self_(), + FnParamName::Ident(ident) => EdgeKind::value(ident), + FnParamName::Underscore => EdgeKind::anon(), + }) + .unwrap_or_else(EdgeKind::anon); + self.add_local_edge(current_scope, generic_param_scope, kind) + } + } + + fn add_generic_param_scope(&mut self, current_scope: LocalScopeId, params: GenericParamListId) { + for (i, param) in params.data(self.db).iter().enumerate() { + let scope = LocalScope::new(ScopeKind::GenericParam(i)); + let generic_param_scope = self.graph.scopes.push(scope); + self.add_local_edge(generic_param_scope, current_scope, EdgeKind::lex()); + let kind = param + .name() + .to_opt() + .map(EdgeKind::generic_param) + .unwrap_or_else(EdgeKind::anon); + self.add_local_edge(current_scope, generic_param_scope, kind) + } + } + + fn dummy_scope(&self) -> LocalScope { + LocalScope { + kind: ScopeKind::Item(self.top_mod.into()), + edges: Vec::new(), + } + } + + fn add_local_edge(&mut self, source: LocalScopeId, dest: LocalScopeId, kind: EdgeKind) { + self.graph.scopes[source].edges.push(ScopeEdge { + dest: ScopeId::new(self.top_mod, dest), + kind, + }); + } + + fn add_global_edge(&mut self, source: LocalScopeId, dest: TopLevelMod, kind: EdgeKind) { + self.graph.scopes[source].edges.push(ScopeEdge { + dest: ScopeId::new(dest, LocalScopeId::root()), + kind, + }); + } +} diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index a85af01dcc..3d85c25733 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -38,7 +38,7 @@ impl TypeId { } }; - TypeId::new(ctxt.db, kind) + TypeId::new(ctxt.db(), kind) } pub(super) fn lower_ast_partial( diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index 98f4b7a1f1..7d0ac3d66f 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -25,7 +25,7 @@ impl UseTreeId { .alias() .map(|ast| UseAlias::lower_ast_partial(ctxt, ast)); - Self::new(ctxt.db, path, subtree, alias) + Self::new(ctxt.db(), path, subtree, alias) } pub(super) fn lower_ast_partial( @@ -43,12 +43,12 @@ impl UsePathSegment { ) -> Partial { ast.kind() .map(|kind| match kind { - ast::UsePathSegmentKind::Ingot(_) => Self::Ingot, - ast::UsePathSegmentKind::Super(_) => Self::Super, + ast::UsePathSegmentKind::Ingot(_) => Self::Ident(IdentId::ingot_kw(ctxt.db())), + ast::UsePathSegmentKind::Super(_) => Self::Ident(IdentId::super_kw(ctxt.db())), ast::UsePathSegmentKind::Ident(ident) => { Self::Ident(IdentId::lower_token(ctxt, ident)) } - ast::UsePathSegmentKind::Self_(_) => Self::Self_, + ast::UsePathSegmentKind::Self_(_) => Self::Ident(IdentId::self_kw(ctxt.db())), ast::UsePathSegmentKind::Glob(_) => Self::Glob, }) .into() From bbb8d5b56edc4094ba0d46c0310c7ded0947548d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 26 Apr 2023 15:06:20 +0200 Subject: [PATCH 142/678] Improve error messages in use tree parsing --- crates/parser2/src/parser/use_tree.rs | 26 ++++---- .../test_files/error_recovery/items/use_.fe | 3 + .../test_files/error_recovery/items/use_.snap | 66 +++++++++++++++++++ 3 files changed, 
80 insertions(+), 15 deletions(-) create mode 100644 crates/parser2/test_files/error_recovery/items/use_.fe create mode 100644 crates/parser2/test_files/error_recovery/items/use_.snap diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs index 82c4e1839f..3bb8cc82c5 100644 --- a/crates/parser2/src/parser/use_tree.rs +++ b/crates/parser2/src/parser/use_tree.rs @@ -19,11 +19,11 @@ impl super::Parse for UseTreeScope { let use_path_scope = UsePathScope::default(); parser.parse(use_path_scope.clone(), None); - let has_wildcard = use_path_scope.has_wildcard.get(); + let is_glob = use_path_scope.is_glob.get(); if parser.current_kind() == Some(SyntaxKind::AsKw) { - if has_wildcard { - parser.error_and_recover("cant use `as` with wildcard", None); + if is_glob { + parser.error_and_recover("can't use `as` with `*`", None); } if parser.current_kind() == Some(SyntaxKind::AsKw) { parser.parse(UseTreeRenameScope::default(), None); @@ -35,15 +35,11 @@ impl super::Parse for UseTreeScope { return; } match parser.current_kind() { - Some(SyntaxKind::LBrace) if !has_wildcard => { - if has_wildcard { - parser.error_and_recover("can't use `*` with `{}`", None); - } else { - parser.parse(UseTreeListScope::default(), None); - } + Some(SyntaxKind::LBrace) if !is_glob => { + parser.parse(UseTreeListScope::default(), None); } _ => { - parser.error_and_recover("expected identifier, `*` or `self`", None); + parser.error_and_recover("can't use `*` with `{}`", None); } }; } @@ -78,7 +74,7 @@ impl super::Parse for UseTreeListScope { } define_scope! { - UsePathScope{ has_wildcard: Rc>}, + UsePathScope{ is_glob: Rc>}, UsePath, Inheritance(Colon2) } @@ -93,13 +89,13 @@ impl super::Parse for UsePathScope { && parser.parse(UsePathSegmentScope::default(), None).0 }); if is_path_segment { + if self.is_glob.get() { + parser.error_and_recover("can't specify path after `*`", None); + } parser.bump_expected(SyntaxKind::Colon2); - self.has_wildcard + self.is_glob .set(parser.current_kind() == Some(SyntaxKind::Star)); parser.parse(UsePathSegmentScope::default(), None); - if self.has_wildcard.get() { - break; - } } else { break; } diff --git a/crates/parser2/test_files/error_recovery/items/use_.fe b/crates/parser2/test_files/error_recovery/items/use_.fe new file mode 100644 index 0000000000..eb4678b476 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/use_.fe @@ -0,0 +1,3 @@ +use foo::bar::*::A +use foo::bar::*::{A, B} +use foo::bar::* as B \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/use_.snap b/crates/parser2/test_files/error_recovery/items/use_.snap new file mode 100644 index 0000000000..5c3a51ab9b --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/use_.snap @@ -0,0 +1,66 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/use_.fe +--- +Root@0..63 + ItemList@0..63 + Use@0..18 + UseKw@0..3 "use" + WhiteSpace@3..4 " " + UseTree@4..18 + UsePath@4..18 + UsePathSegment@4..7 + Ident@4..7 "foo" + Colon2@7..9 "::" + UsePathSegment@9..12 + Ident@9..12 "bar" + Colon2@12..14 "::" + UsePathSegment@14..15 + Star@14..15 "*" + Error@15..15 + Colon2@15..17 "::" + UsePathSegment@17..18 + Ident@17..18 "A" + Newline@18..19 "\n" + Use@19..42 + UseKw@19..22 "use" + WhiteSpace@22..23 " " + UseTree@23..42 + UsePath@23..34 + UsePathSegment@23..26 + Ident@23..26 "foo" + Colon2@26..28 "::" + UsePathSegment@28..31 + Ident@28..31 "bar" + Colon2@31..33 "::" + 
UsePathSegment@33..34 + Star@33..34 "*" + Colon2@34..36 "::" + Error@36..42 + LBrace@36..37 "{" + Ident@37..38 "A" + Comma@38..39 "," + WhiteSpace@39..40 " " + Ident@40..41 "B" + RBrace@41..42 "}" + Newline@42..43 "\n" + Use@43..63 + UseKw@43..46 "use" + WhiteSpace@46..47 " " + UseTree@47..63 + UsePath@47..58 + UsePathSegment@47..50 + Ident@47..50 "foo" + Colon2@50..52 "::" + UsePathSegment@52..55 + Ident@52..55 "bar" + Colon2@55..57 "::" + UsePathSegment@57..58 + Star@57..58 "*" + WhiteSpace@58..59 " " + Error@59..63 + AsKw@59..61 "as" + WhiteSpace@61..62 " " + Ident@62..63 "B" + From 8b20864c6815aac4cc064c1d3394cc7e4cc935f3 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 26 Apr 2023 21:21:59 +0200 Subject: [PATCH 143/678] Implement `NameResolver` --- Cargo.lock | 5 +- crates/hir-analysis/Cargo.toml | 4 + crates/hir-analysis/src/lib.rs | 10 +- .../hir-analysis/src/name_resolution/mod.rs | 396 ++++++++++++++++++ crates/hir/src/hir_def/item.rs | 46 +- crates/hir/src/hir_def/scope_graph.rs | 34 +- crates/hir/src/lower/item.rs | 46 +- crates/hir/src/lower/scope_builder.rs | 258 ++++++++---- crates/parser2/src/ast/item.rs | 2 + 9 files changed, 682 insertions(+), 119 deletions(-) create mode 100644 crates/hir-analysis/src/name_resolution/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 15126a93fb..668ab9b79c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -909,7 +909,6 @@ dependencies = [ "camino", "cranelift-entity", "derive_more", - "either", "fe-common2", "fe-parser2", "num-bigint", @@ -923,9 +922,13 @@ dependencies = [ name = "fe-hir-analysis" version = "0.20.0-alpha" dependencies = [ + "cranelift-entity", + "either", "fe-common", "fe-hir", + "rustc-hash", "salsa-2022", + "smallvec", ] [[package]] diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index 300396de7d..0c15aa07d5 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -9,6 +9,10 @@ description = "Provides HIR definition and lowering for Fe lang" [dependencies] salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } +smallvec = "1.10" +cranelift-entity = "0.91" +rustc-hash = "1.1.0" +either = "1.8" hir = { path = "../hir", package = "fe-hir" } common = { path = "../common", package = "fe-common" } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 00a4d737fd..feee0eb92d 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -1,8 +1,16 @@ use common::db::Upcast; -use hir::HirDb; +use hir::{span::DynLazySpan, HirDb}; #[salsa::jar(db = HirAnalysisDb)] pub struct Jar(); pub trait HirAnalysisDb: salsa::DbWithJar + Upcast {} impl HirAnalysisDb for DB where DB: ?Sized + salsa::DbWithJar + Upcast {} + +pub mod name_resolution; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Spanned { + pub data: T, + pub span: DynLazySpan, +} diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs new file mode 100644 index 0000000000..0ba7188df4 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -0,0 +1,396 @@ +use either::Either; +use hir::{ + hir_def::{ + scope_graph::{ + AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, + ScopeEdge, ScopeId, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, + VariantEdge, + }, + IdentId, Visibility, + }, + span::DynLazySpan, +}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::{HirAnalysisDb, Spanned}; + +pub mod import_resolver; +pub mod 
vis_checker; + +pub struct NameResolver<'db, 'a> { + db: &'db dyn HirAnalysisDb, + importer: &'a dyn Importer, + cache_store: ResolvedQueryCacheStore, +} + +impl<'db, 'a> NameResolver<'db, 'a> { + pub fn resolve_query(&mut self, scope: ScopeId, query: NameQuery) -> Vec { + // If the query is already resolved, return the cached result. + if let Some(answer) = self.cache_store.get(scope, query) { + return answer.clone(); + }; + + // The shadowing rule is `$ = NamedImports > GlobImports > Lex`, where `$` means + // current scope. This ordering means that greater scope shadows lower + // scopes having the same name in the same domain and + + // 1. Look for the name in the current scope and named imports. + let mut results = Vec::new(); + let mut found_scopes = FxHashSet::default(); + let mut parent = None; + for edge in self.edges(scope) { + match edge.kind.propagate(self.db, query) { + PropagatedQuery::Terminated => { + if found_scopes.insert(edge.dest) { + results.push(QueryAnswer::new(edge.dest, edge.vis, None)); + } + } + + PropagatedQuery::Continuation => { + debug_assert!(parent.is_none()); + parent = Some(edge.dest); + } + + PropagatedQuery::UnPropagated => {} + } + } + + for named_import in self.importer.named_imports(scope) { + let edge = &named_import.data; + match edge.kind.propagate(self.db, query) { + PropagatedQuery::Terminated => { + if found_scopes.insert(edge.dest) { + results.push(QueryAnswer::new( + edge.dest, + edge.vis, + Some(named_import.span.clone()), + )); + } + } + PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} + } + } + + // If the name is found in the current scope or named imports, we don't need to + // look for it further. + if !results.is_empty() { + self.cache_store.cache_answer(scope, query, results.clone()); + return results; + } + + // 2. Look for the name in the glob imports. + for glob_import in self.importer.glob_imports(scope) { + let edge = &glob_import.data; + match edge.kind.propagate(self.db, query) { + PropagatedQuery::Terminated => { + if found_scopes.insert(edge.dest) { + results.push(QueryAnswer::new_glob( + edge.dest, + edge.vis, + Some(glob_import.span.clone()), + )); + } + } + PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} + } + } + + // If the name is found in the glob imports, we don't need to look for it + // further. + if !results.is_empty() { + self.cache_store.cache_answer(scope, query, results.clone()); + return results; + } + + // 3. Look for the name in the lexical scope. 
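+        // The lexical parent was recorded above when a `Lex` edge answered with
+        // `Continuation`; the lookup is delegated to that parent and cached as a
+        // delegation so later queries from this scope reuse the parent's entry.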
+ if let Some(parent) = parent { + self.cache_store.cache_delegated(scope, query, parent); + self.resolve_query(parent, query) + } else { + self.cache_store.cache_answer(scope, query, vec![]); + vec![] + } + } + + fn edges(&self, scope: ScopeId) -> &'db [ScopeEdge] { + let graph = scope.top_mod.module_scope_graph(self.db.upcast()); + graph.edges(scope.local_id) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct NameQuery { + name: IdentId, + domain: NameDomain, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct QueryAnswer { + pub scope: ScopeId, + pub vis: Visibility, + pub import_span: Option, + pub via_glob_import: bool, +} + +impl QueryAnswer { + pub fn new(scope: ScopeId, vis: Visibility, import_span: Option) -> Self { + Self { + scope, + vis, + import_span, + via_glob_import: false, + } + } + + pub fn new_glob(scope: ScopeId, vis: Visibility, import_span: Option) -> Self { + Self { + scope, + vis, + import_span, + via_glob_import: true, + } + } +} + +impl QueryAnswer { + pub fn is_valid(&self) -> bool { + self.scope.is_valid() + } +} + +#[derive(Default)] +struct ResolvedQueryCacheStore { + cache: FxHashMap<(ScopeId, NameQuery), Either, ScopeId>>, + no_cache: bool, +} + +impl ResolvedQueryCacheStore { + fn cache_answer(&mut self, scope: ScopeId, query: NameQuery, answer: Vec) { + if self.no_cache { + return; + } + self.cache.insert((scope, query), Either::Left(answer)); + } + + fn cache_delegated(&mut self, scope: ScopeId, query: NameQuery, parent: ScopeId) { + if self.no_cache { + return; + } + self.cache.insert((scope, query), Either::Right(parent)); + } + + fn get(&self, scope: ScopeId, query: NameQuery) -> Option> { + match self.cache.get(&(scope, query)) { + Some(Either::Left(answers)) => Some(answers.clone()), + Some(Either::Right(delegated)) => Some(self.get(*delegated, query)?), + _ => None, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum NameDomain { + Item, + Value, + Type, + Field, + Variant, +} + +pub trait Importer { + fn glob_imports(&self, scope: ScopeId) -> &[Spanned]; + fn named_imports(&self, scope: ScopeId) -> &[Spanned]; +} + +trait QueryPropagator { + // TODO: `db` is not necessary if we implement prefilled keywords. 
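+    // `Terminated` means the edge itself answers the query, `Continuation`
+    // means the query should be retried in the edge's destination scope (the
+    // lexical parent), and `UnPropagated` means the edge is irrelevant to it.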
+ fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery; +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +enum PropagatedQuery { + Terminated, + Continuation, + UnPropagated, +} + +impl QueryPropagator for LexEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { + PropagatedQuery::Continuation + } +} + +impl QueryPropagator for ModEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + match query.domain { + NameDomain::Item if self.0 == query.name => PropagatedQuery::Terminated, + _ => PropagatedQuery::UnPropagated, + } + } +} + +impl QueryPropagator for TypeEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Item | NameDomain::Type) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for TraitEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Item | NameDomain::Type) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for ValueEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Item | NameDomain::Value) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for GenericParamEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Item | NameDomain::Type) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for FieldEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Field) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for VariantEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Variant) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for SuperEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Item) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_super(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for IngotEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Item) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_ingot(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for SelfTyEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if 
!matches!(query.domain, NameDomain::Item | NameDomain::Type) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_self_ty(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for SelfEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameDomain::Item | NameDomain::Value) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_self(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for AnonEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } +} + +impl QueryPropagator for EdgeKind { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + match self { + EdgeKind::Lex(edge) => edge.propagate(db, query), + EdgeKind::Mod(edge) => edge.propagate(db, query), + EdgeKind::Type(edge) => edge.propagate(db, query), + EdgeKind::Trait(edge) => edge.propagate(db, query), + EdgeKind::GenericParam(edge) => edge.propagate(db, query), + EdgeKind::Value(edge) => edge.propagate(db, query), + EdgeKind::Field(edge) => edge.propagate(db, query), + EdgeKind::Variant(edge) => edge.propagate(db, query), + EdgeKind::Super(edge) => edge.propagate(db, query), + EdgeKind::Ingot(edge) => edge.propagate(db, query), + EdgeKind::Self_(edge) => edge.propagate(db, query), + EdgeKind::SelfTy(edge) => edge.propagate(db, query), + EdgeKind::Anon(edge) => edge.propagate(db, query), + } + } +} diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index cc0c0de676..6592921f9d 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -90,6 +90,20 @@ impl TopLevelMod { let module_tree = self.ingot_module_tree(db); module_tree.children(self) } + + pub fn invalid() -> Self { + Self(salsa::Id::from_u32(u32::MAX - 1)) + } + + pub fn is_valid(self) -> bool { + self != Self::invalid() + } + + pub fn vis(self, db: &dyn HirDb) -> Visibility { + // We don't have a way to specify visibility of a top level module. + // Please change here if we introduce it. 
+ Visibility::Public + } } #[salsa::tracked] @@ -99,7 +113,7 @@ pub struct Mod { pub name: Partial, pub attributes: AttrListId, - pub is_pub: bool, + pub vis: Visibility, pub top_mod: TopLevelMod, @@ -135,6 +149,10 @@ impl Func { pub fn lazy_span(self) -> LazyFuncSpan { LazyFuncSpan::new(self) } + + pub fn vis(self, db: &dyn HirDb) -> Visibility { + self.modifier(db).to_visibility() + } } #[salsa::tracked] @@ -144,7 +162,7 @@ pub struct Struct { pub name: Partial, pub attributes: AttrListId, - pub is_pub: bool, + pub vis: Visibility, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub fields: RecordFieldListId, @@ -166,7 +184,7 @@ pub struct Contract { pub name: Partial, pub attributes: AttrListId, - pub is_pub: bool, + pub vis: Visibility, pub fields: RecordFieldListId, pub top_mod: TopLevelMod, @@ -186,7 +204,7 @@ pub struct Enum { pub name: Partial, pub attributes: AttrListId, - pub is_pub: bool, + pub vis: Visibility, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub variants: EnumVariantListId, @@ -208,7 +226,7 @@ pub struct TypeAlias { pub name: Partial, pub attributes: AttrListId, - pub is_pub: bool, + pub vis: Visibility, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub ty: Partial, @@ -251,7 +269,7 @@ pub struct Trait { pub name: Partial, pub attributes: AttrListId, - pub is_pub: bool, + pub vis: Visibility, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, pub top_mod: TopLevelMod, @@ -293,6 +311,7 @@ pub struct Const { pub name: Partial, pub body: Partial, + pub vis: Visibility, pub top_mod: TopLevelMod, #[return_ref] @@ -310,6 +329,7 @@ pub struct Use { id: TrackedItemId, pub tree: Partial, + pub vis: Visibility, pub top_mod: TopLevelMod, #[return_ref] @@ -330,10 +350,10 @@ pub enum ItemModifier { } impl ItemModifier { - pub fn is_pub(self) -> bool { + pub fn to_visibility(self) -> Visibility { match self { - ItemModifier::Pub | ItemModifier::PubAndUnsafe => true, - ItemModifier::Unsafe | ItemModifier::None => false, + ItemModifier::Pub | ItemModifier::PubAndUnsafe => Visibility::Public, + ItemModifier::Unsafe | ItemModifier::None => Visibility::Private, } } } @@ -348,7 +368,7 @@ pub struct RecordFieldListId { pub struct RecordField { pub name: Partial, pub ty: Partial, - pub is_pub: bool, + pub vis: Visibility, } #[salsa::interned] @@ -373,6 +393,12 @@ pub type TraitItemListId = ImplItemListId; pub type ImplTraitItemListId = ImplItemListId; pub type ExternItemListId = ImplItemListId; +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Visibility { + Public, + Private, +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TrackedItemId { TopLevelMod(IdentId), diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 7562f1855f..10a008b987 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -1,7 +1,9 @@ use cranelift_entity::{entity_impl, PrimaryMap}; use rustc_hash::{FxHashMap, FxHashSet}; -use super::{IdentId, ItemKind, TopLevelMod, Use}; +use crate::{span::DynLazySpan, HirDb}; + +use super::{IdentId, ItemKind, TopLevelMod, Use, Visibility}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct ScopeGraph { @@ -74,13 +76,15 @@ impl<'a> std::iter::Iterator for ScopeGraphItemIterDfs<'a> { pub struct LocalScope { pub kind: ScopeKind, pub edges: Vec, + pub parent_module: Option, } impl LocalScope { - pub fn new(kind: ScopeKind) -> Self { + pub fn new(kind: ScopeKind, parent_module: 
Option) -> Self { Self { kind, edges: vec![], + parent_module, } } } @@ -98,6 +102,7 @@ pub enum ScopeKind { pub struct ScopeEdge { pub dest: ScopeId, pub kind: EdgeKind, + pub vis: Visibility, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -106,6 +111,23 @@ pub struct ScopeId { pub local_id: LocalScopeId, } +impl ScopeId { + pub fn span(self, _db: &dyn HirDb) -> DynLazySpan { + todo!() + } + + pub fn invalid() -> Self { + Self { + top_mod: TopLevelMod::invalid(), + local_id: LocalScopeId::invalid(), + } + } + + pub fn is_valid(self) -> bool { + self != Self::invalid() + } +} + impl ScopeId { pub fn new(top_mod: TopLevelMod, local_id: LocalScopeId) -> Self { Self { top_mod, local_id } @@ -230,6 +252,14 @@ impl LocalScopeId { pub(crate) fn root() -> Self { LocalScopeId(0) } + + pub fn invalid() -> Self { + LocalScopeId(u32::MAX) + } + + pub fn is_valid(self) -> bool { + self != Self::invalid() + } } #[cfg(test)] diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 4caedd010b..dce3ad41ee 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -72,21 +72,13 @@ impl Mod { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Mod(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); - let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); if let Some(items) = ast.items() { lower_module_items(ctxt, id.clone(), items); } let origin = HirOrigin::raw(&ast); - let mod_ = Self::new( - ctxt.db(), - id, - name, - attributes, - is_pub, - ctxt.top_mod(), - origin, - ); + let mod_ = Self::new(ctxt.db(), id, name, attributes, vis, ctxt.top_mod(), origin); ctxt.leave_scope(mod_) } } @@ -152,7 +144,7 @@ impl Struct { let id = TrackedItemId::Struct(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); - let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); @@ -163,7 +155,7 @@ impl Struct { id, name, attributes, - is_pub, + vis, generic_params, where_clause, fields, @@ -186,7 +178,7 @@ impl Contract { let id = TrackedItemId::Contract(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); - let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); let origin = HirOrigin::raw(&ast); @@ -195,7 +187,7 @@ impl Contract { id, name, attributes, - is_pub, + vis, fields, ctxt.top_mod(), origin, @@ -216,7 +208,7 @@ impl Enum { let id = TrackedItemId::Enum(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); - let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let variants = EnumVariantListId::lower_ast_opt(ctxt, ast.variants()); @@ -227,7 +219,7 @@ impl Enum { id, name, attributes, - is_pub, + vis, generic_params, where_clause, variants, @@ 
-250,7 +242,7 @@ impl TypeAlias { let id = TrackedItemId::TypeAlias(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); - let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); @@ -261,7 +253,7 @@ impl TypeAlias { id, name, attributes, - is_pub, + vis, generic_params, where_clause, ty, @@ -320,7 +312,7 @@ impl Trait { let id = TrackedItemId::Trait(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); - let is_pub = ItemModifier::lower_ast(ast.modifier()).is_pub(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let origin = HirOrigin::raw(&ast); @@ -336,7 +328,7 @@ impl Trait { id, name, attributes, - is_pub, + vis, generic_params, where_clause, ctxt.top_mod(), @@ -399,9 +391,10 @@ impl Const { .value() .map(|ast| Body::lower_ast(ctxt, id.clone(), ast)) .into(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let origin = HirOrigin::raw(&ast); - let const_ = Self::new(ctxt.db(), id, name, body, ctxt.top_mod(), origin); + let const_ = Self::new(ctxt.db(), id, name, body, vis, ctxt.top_mod(), origin); ctxt.leave_scope(const_) } } @@ -418,7 +411,8 @@ impl Use { let id = TrackedItemId::Use(tree).join(parent_id); let origin = HirOrigin::raw(&ast); - let use_ = Self::new(ctxt.db(), id, tree, ctxt.top_mod(), origin); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + let use_ = Self::new(ctxt.db(), id, tree, vis, ctxt.top_mod(), origin); ctxt.leave_scope(use_) } } @@ -457,9 +451,13 @@ impl RecordField { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::RecordFieldDef) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); - let is_pub = ast.pub_kw().is_some(); + let vis = if ast.pub_kw().is_some() { + Visibility::Public + } else { + Visibility::Private + }; - Self { name, ty, is_pub } + Self { name, ty, vis } } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 44aafead0e..159e96680b 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -4,7 +4,7 @@ use crate::{ EdgeKind, LocalScope, LocalScopeId, ScopeEdge, ScopeGraph, ScopeId, ScopeKind, }, EnumVariantListId, FnParamListId, FnParamName, GenericParamListId, ItemKind, - RecordFieldListId, TopLevelMod, + RecordFieldListId, TopLevelMod, Visibility, }, HirDb, }; @@ -63,18 +63,28 @@ impl<'db> ScopeGraphBuilder<'db> { if let ItemKind::TopMod(top_mod) = item { debug_assert!(self.scope_stack.is_empty()); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); - - self.add_global_edge(item_scope, top_mod.ingot_root(self.db), EdgeKind::ingot()); + self.add_local_edge( + item_scope, + item_scope, + EdgeKind::self_(), + Visibility::Private, + ); + + self.add_global_edge( + item_scope, + top_mod.ingot_root(self.db), + EdgeKind::ingot(), + Visibility::Private, + ); for child in top_mod.children(self.db) { let child_name = child.name(self.db); let edge = EdgeKind::mod_(child_name); - self.add_global_edge(item_scope, 
child, edge) + self.add_global_edge(item_scope, child, edge, child.vis(self.db)) } if let Some(parent) = top_mod.parent(self.db) { let parent_edge = EdgeKind::super_(); - self.add_global_edge(item_scope, parent, parent_edge); + self.add_global_edge(item_scope, parent, parent_edge, Visibility::Private); } self.module_stack.pop().unwrap(); @@ -82,114 +92,161 @@ impl<'db> ScopeGraphBuilder<'db> { } let parent_scope = *self.scope_stack.last().unwrap(); - let parent_to_child_edge = match item { + let (parent_to_child_edge, vis) = match item { Mod(inner) => { self.add_local_edge( item_scope, *self.module_stack.last().unwrap(), EdgeKind::super_(), + inner.vis(self.db), ); self.add_global_edge( item_scope, self.top_mod.ingot_root(self.db), EdgeKind::ingot(), + Visibility::Private, + ); + self.add_local_edge( + item_scope, + item_scope, + EdgeKind::self_(), + Visibility::Private, ); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); self.module_stack.pop().unwrap(); - inner - .name(self.db) - .to_opt() - .map(EdgeKind::mod_) - .unwrap_or_else(EdgeKind::anon) + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::mod_) + .unwrap_or_else(EdgeKind::anon), + inner.vis(self.db), + ) } Func(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); if let Some(params) = inner.params(self.db).to_opt() { self.add_func_param_scope(item_scope, params); } - inner - .name(self.db) - .to_opt() - .map(EdgeKind::value) - .unwrap_or_else(EdgeKind::anon) + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon), + inner.vis(self.db), + ) } Struct(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_field_scope(item_scope, inner.fields(self.db)); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon) + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon), + Visibility::Private, + ) } Contract(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_field_scope(item_scope, inner.fields(self.db)); - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon) + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon), + inner.vis(self.db), + ) } Enum(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_variant_scope(item_scope, inner.variants(self.db)); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon) + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon), + inner.vis(self.db), + ) } TypeAlias(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon) + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon), + inner.vis(self.db), + ) 
} Impl(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); - EdgeKind::anon() + self.add_local_edge( + item_scope, + item_scope, + EdgeKind::self_ty(), + Visibility::Private, + ); + (EdgeKind::anon(), Visibility::Private) } Trait(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); - inner - .name(self.db) - .to_opt() - .map(EdgeKind::trait_) - .unwrap_or_else(EdgeKind::anon) + self.add_local_edge( + item_scope, + item_scope, + EdgeKind::self_ty(), + Visibility::Private, + ); + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::trait_) + .unwrap_or_else(EdgeKind::anon), + inner.vis(self.db), + ) } ImplTrait(inner) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); + self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); - EdgeKind::anon() + self.add_local_edge( + item_scope, + item_scope, + EdgeKind::self_ty(), + Visibility::Private, + ); + (EdgeKind::anon(), Visibility::Private) } - Const(c) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); - c.name(self.db) - .to_opt() - .map(EdgeKind::value) - .unwrap_or_else(EdgeKind::anon) + Const(inner) => { + self.add_lex_edge(item_scope, parent_scope); + ( + inner + .name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon), + inner.vis(self.db), + ) } Use(use_) => { @@ -199,54 +256,54 @@ impl<'db> ScopeGraphBuilder<'db> { .or_default() .push(use_); - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); - EdgeKind::anon() + self.add_lex_edge(item_scope, parent_scope); + (EdgeKind::anon(), use_.vis(self.db)) } Body(_) => { - self.add_local_edge(item_scope, parent_scope, EdgeKind::lex()); - EdgeKind::anon() + self.add_lex_edge(item_scope, parent_scope); + (EdgeKind::anon(), Visibility::Private) } _ => unreachable!(), }; - self.add_local_edge(parent_scope, item_scope, parent_to_child_edge); + self.add_local_edge(parent_scope, item_scope, parent_to_child_edge, vis); } fn add_field_scope(&mut self, current_scope: LocalScopeId, fields: RecordFieldListId) { for (i, field) in fields.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::Field(i)); + let scope = LocalScope::new(ScopeKind::Field(i), self.parent_module_id()); let field_scope = self.graph.scopes.push(scope); - self.add_local_edge(field_scope, current_scope, EdgeKind::lex()); + self.add_lex_edge(field_scope, current_scope); let kind = field .name .to_opt() .map(EdgeKind::field) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, field_scope, kind) + self.add_local_edge(current_scope, field_scope, kind, field.vis) } } fn add_variant_scope(&mut self, current_scope: LocalScopeId, variants: EnumVariantListId) { for (i, field) in variants.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::Variant(i)); + let scope = LocalScope::new(ScopeKind::Variant(i), self.parent_module_id()); let variant_scope = self.graph.scopes.push(scope); - self.add_local_edge(variant_scope, current_scope, EdgeKind::lex()); + 
self.add_lex_edge(variant_scope, current_scope); let kind = field .name .to_opt() .map(EdgeKind::variant) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, variant_scope, kind) + self.add_local_edge(current_scope, variant_scope, kind, Visibility::Public) } } fn add_func_param_scope(&mut self, current_scope: LocalScopeId, params: FnParamListId) { for (i, param) in params.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::FnParam(i)); + let scope = LocalScope::new(ScopeKind::FnParam(i), self.parent_module_id()); let generic_param_scope = self.graph.scopes.push(scope); - self.add_local_edge(generic_param_scope, current_scope, EdgeKind::lex()); + self.add_lex_edge(generic_param_scope, current_scope); let kind = param .name .to_opt() @@ -256,21 +313,31 @@ impl<'db> ScopeGraphBuilder<'db> { FnParamName::Underscore => EdgeKind::anon(), }) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, generic_param_scope, kind) + self.add_local_edge( + current_scope, + generic_param_scope, + kind, + Visibility::Private, + ) } } fn add_generic_param_scope(&mut self, current_scope: LocalScopeId, params: GenericParamListId) { for (i, param) in params.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::GenericParam(i)); + let scope = LocalScope::new(ScopeKind::GenericParam(i), self.parent_module_id()); let generic_param_scope = self.graph.scopes.push(scope); - self.add_local_edge(generic_param_scope, current_scope, EdgeKind::lex()); + self.add_lex_edge(generic_param_scope, current_scope); let kind = param .name() .to_opt() .map(EdgeKind::generic_param) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, generic_param_scope, kind) + self.add_local_edge( + current_scope, + generic_param_scope, + kind, + Visibility::Private, + ) } } @@ -278,20 +345,49 @@ impl<'db> ScopeGraphBuilder<'db> { LocalScope { kind: ScopeKind::Item(self.top_mod.into()), edges: Vec::new(), + parent_module: self.parent_module_id(), } } - fn add_local_edge(&mut self, source: LocalScopeId, dest: LocalScopeId, kind: EdgeKind) { + fn parent_module_id(&self) -> Option { + if let Some(id) = self.module_stack.last() { + Some(ScopeId::new(self.top_mod, *id)) + } else if let Some(top_mod) = self.top_mod.parent(self.db) { + Some(ScopeId::new(top_mod, LocalScopeId::root())) + } else { + None + } + } + + fn add_local_edge( + &mut self, + source: LocalScopeId, + dest: LocalScopeId, + kind: EdgeKind, + vis: Visibility, + ) { self.graph.scopes[source].edges.push(ScopeEdge { dest: ScopeId::new(self.top_mod, dest), kind, + vis, }); } - fn add_global_edge(&mut self, source: LocalScopeId, dest: TopLevelMod, kind: EdgeKind) { + fn add_lex_edge(&mut self, source: LocalScopeId, dest: LocalScopeId) { + self.add_local_edge(source, dest, EdgeKind::lex(), Visibility::Private); + } + + fn add_global_edge( + &mut self, + source: LocalScopeId, + dest: TopLevelMod, + kind: EdgeKind, + vis: Visibility, + ) { self.graph.scopes[source].edges.push(ScopeEdge { dest: ScopeId::new(dest, LocalScopeId::root()), kind, + vis, }); } } diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index c7f2f6c969..628ea370f4 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -270,6 +270,7 @@ ast_node! { SK::Const, } impl super::AttrListOwner for Const {} +impl ItemModifierOwner for Const {} impl Const { /// Returns the name of the const. /// `FOO` in `const FOO: u32 = 42;` @@ -296,6 +297,7 @@ ast_node! 
{ SK::Use, } impl super::AttrListOwner for Use {} +impl ItemModifierOwner for Use {} impl Use { /// Returns the use tree. /// `foo::{bar, Baz::*}` in `use foo::{bar, Baz::*}` From 5321c8f1c2077f567823134e2d913a9031416855 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 27 Apr 2023 00:21:19 +0200 Subject: [PATCH 144/678] Add methods to obtain external ingot dependencies and its scope --- crates/common2/src/input.rs | 6 +- .../hir-analysis/src/name_resolution/mod.rs | 61 +++++++++++++------ crates/hir/src/hir_def/item.rs | 23 +++++-- crates/hir/src/hir_def/scope_graph.rs | 20 ++++-- crates/hir/src/lib.rs | 25 +++++++- 5 files changed, 101 insertions(+), 34 deletions(-) diff --git a/crates/common2/src/input.rs b/crates/common2/src/input.rs index 65258da7e3..4eb82cba62 100644 --- a/crates/common2/src/input.rs +++ b/crates/common2/src/input.rs @@ -23,7 +23,7 @@ pub struct InputIngot { /// A list of ingots which the current ingot depends on. #[return_ref] - pub dependency: BTreeSet, + pub external_ingots: BTreeSet, /// A list of files which the current ingot contains. #[return_ref] @@ -40,7 +40,7 @@ impl InputIngot { path: &str, kind: IngotKind, version: Version, - dependency: BTreeSet, + external_ingots: BTreeSet, ) -> InputIngot { let path = Utf8PathBuf::from(path); let root_file = None; @@ -49,7 +49,7 @@ impl InputIngot { path, kind, version, - dependency, + external_ingots, BTreeSet::default(), root_file, ) diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 0ba7188df4..fb16a3fd25 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -15,7 +15,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::{HirAnalysisDb, Spanned}; pub mod import_resolver; -pub mod vis_checker; +pub mod visibility_checker; pub struct NameResolver<'db, 'a> { db: &'db dyn HirAnalysisDb, @@ -24,15 +24,17 @@ pub struct NameResolver<'db, 'a> { } impl<'db, 'a> NameResolver<'db, 'a> { - pub fn resolve_query(&mut self, scope: ScopeId, query: NameQuery) -> Vec { + pub fn resolve_query(&mut self, scope: ScopeId, query: NameQuery) -> Vec { // If the query is already resolved, return the cached result. if let Some(answer) = self.cache_store.get(scope, query) { return answer.clone(); }; - // The shadowing rule is `$ = NamedImports > GlobImports > Lex`, where `$` means - // current scope. This ordering means that greater scope shadows lower - // scopes having the same name in the same domain and + // The shadowing rule is + // `$ = NamedImports > GlobImports > Lex > external ingot`, + // where `$` means current scope. This ordering means that + // greater scope shadows lower scopes having the same name in the same + // domain and // 1. Look for the name in the current scope and named imports. 
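        // For example, a name that is both declared in this scope and reachable
        // through a glob import resolves to the local declaration: named
        // bindings shadow glob imports, which in turn shadow the lexical parent
        // and the external ingots.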
let mut results = Vec::new(); @@ -42,7 +44,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { match edge.kind.propagate(self.db, query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { - results.push(QueryAnswer::new(edge.dest, edge.vis, None)); + results.push(ResolvedName::new(edge.dest, edge.vis, None)); } } @@ -60,7 +62,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { match edge.kind.propagate(self.db, query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { - results.push(QueryAnswer::new( + results.push(ResolvedName::new( edge.dest, edge.vis, Some(named_import.span.clone()), @@ -74,7 +76,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { // If the name is found in the current scope or named imports, we don't need to // look for it further. if !results.is_empty() { - self.cache_store.cache_answer(scope, query, results.clone()); + self.cache_store + .cache_resolved(scope, query, results.clone()); return results; } @@ -84,7 +87,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { match edge.kind.propagate(self.db, query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { - results.push(QueryAnswer::new_glob( + results.push(ResolvedName::new_glob( edge.dest, edge.vis, Some(glob_import.span.clone()), @@ -98,16 +101,34 @@ impl<'db, 'a> NameResolver<'db, 'a> { // If the name is found in the glob imports, we don't need to look for it // further. if !results.is_empty() { - self.cache_store.cache_answer(scope, query, results.clone()); + self.cache_store + .cache_resolved(scope, query, results.clone()); return results; } - // 3. Look for the name in the lexical scope. + // 3. Look for the name in the lexical scope if it exists, else look for the + // name in the external ingot. if let Some(parent) = parent { self.cache_store.cache_delegated(scope, query, parent); self.resolve_query(parent, query) + } else if query.domain == NameDomain::Item { + for (name, root_mod) in scope.top_mod.external_ingots(self.db.upcast()) { + if *name == query.name { + results.push(ResolvedName::new( + ScopeId::root(*root_mod), + Visibility::Public, + None, + )); + } + } + + // Ensure that all names of external ingots don't conflict with each other. 
+ debug_assert!(results.len() < 2); + self.cache_store + .cache_resolved(scope, query, results.clone()); + results } else { - self.cache_store.cache_answer(scope, query, vec![]); + self.cache_store.cache_resolved(scope, query, vec![]); vec![] } } @@ -125,14 +146,14 @@ pub struct NameQuery { } #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct QueryAnswer { +pub struct ResolvedName { pub scope: ScopeId, pub vis: Visibility, pub import_span: Option, pub via_glob_import: bool, } -impl QueryAnswer { +impl ResolvedName { pub fn new(scope: ScopeId, vis: Visibility, import_span: Option) -> Self { Self { scope, @@ -152,7 +173,7 @@ impl QueryAnswer { } } -impl QueryAnswer { +impl ResolvedName { pub fn is_valid(&self) -> bool { self.scope.is_valid() } @@ -160,16 +181,16 @@ impl QueryAnswer { #[derive(Default)] struct ResolvedQueryCacheStore { - cache: FxHashMap<(ScopeId, NameQuery), Either, ScopeId>>, + cache: FxHashMap<(ScopeId, NameQuery), Either, ScopeId>>, no_cache: bool, } impl ResolvedQueryCacheStore { - fn cache_answer(&mut self, scope: ScopeId, query: NameQuery, answer: Vec) { + fn cache_resolved(&mut self, scope: ScopeId, query: NameQuery, resolved: Vec) { if self.no_cache { return; } - self.cache.insert((scope, query), Either::Left(answer)); + self.cache.insert((scope, query), Either::Left(resolved)); } fn cache_delegated(&mut self, scope: ScopeId, query: NameQuery, parent: ScopeId) { @@ -179,9 +200,9 @@ impl ResolvedQueryCacheStore { self.cache.insert((scope, query), Either::Right(parent)); } - fn get(&self, scope: ScopeId, query: NameQuery) -> Option> { + fn get(&self, scope: ScopeId, query: NameQuery) -> Option> { match self.cache.get(&(scope, query)) { - Some(Either::Left(answers)) => Some(answers.clone()), + Some(Either::Left(resolved)) => Some(resolved.clone()), Some(Either::Right(delegated)) => Some(self.get(*delegated, query)?), _ => None, } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 6592921f9d..0e9fbc928f 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -7,6 +7,7 @@ use common::{InputFile, InputIngot}; use parser::ast; use crate::{ + external_ingots_impl, hir_def::TraitRef, lower, span::{ @@ -73,21 +74,21 @@ impl TopLevelMod { lower::scope_graph_impl(db, self) } - pub fn ingot_module_tree(self, db: &dyn HirDb) -> &ModuleTree { + pub fn module_tree(self, db: &dyn HirDb) -> &ModuleTree { module_tree_impl(db, self.ingot(db)) } pub fn ingot_root(self, db: &dyn HirDb) -> TopLevelMod { - self.ingot_module_tree(db).root_data().top_mod + self.module_tree(db).root_data().top_mod } pub fn parent(self, db: &dyn HirDb) -> Option { - let module_tree = self.ingot_module_tree(db); + let module_tree = self.module_tree(db); module_tree.parent(self) } pub fn children(self, db: &dyn HirDb) -> impl Iterator + '_ { - let module_tree = self.ingot_module_tree(db); + let module_tree = self.module_tree(db); module_tree.children(self) } @@ -99,11 +100,17 @@ impl TopLevelMod { self != Self::invalid() } - pub fn vis(self, db: &dyn HirDb) -> Visibility { + pub fn vis(self, _db: &dyn HirDb) -> Visibility { // We don't have a way to specify visibility of a top level module. // Please change here if we introduce it. Visibility::Public } + + /// Returns the root modules and names of external ingots that this module + /// depends on. 
+ pub fn external_ingots(self, db: &dyn HirDb) -> &[(IdentId, TopLevelMod)] { + external_ingots_impl(db, self.ingot(db)).as_slice() + } } #[salsa::tracked] @@ -399,6 +406,12 @@ pub enum Visibility { Private, } +impl Visibility { + pub fn is_pub(self) -> bool { + self == Self::Public + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TrackedItemId { TopLevelMod(IdentId), diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 10a008b987..6659811cae 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -112,6 +112,14 @@ pub struct ScopeId { } impl ScopeId { + pub fn new(top_mod: TopLevelMod, local_id: LocalScopeId) -> Self { + Self { top_mod, local_id } + } + + pub fn root(top_mod: TopLevelMod) -> Self { + Self::new(top_mod, LocalScopeId::root()) + } + pub fn span(self, _db: &dyn HirDb) -> DynLazySpan { todo!() } @@ -126,11 +134,15 @@ impl ScopeId { pub fn is_valid(self) -> bool { self != Self::invalid() } -} -impl ScopeId { - pub fn new(top_mod: TopLevelMod, local_id: LocalScopeId) -> Self { - Self { top_mod, local_id } + pub fn scope_data(self, db: &dyn HirDb) -> &LocalScope { + self.top_mod + .module_scope_graph(db) + .scope_data(self.local_id) + } + + pub fn parent_module(self, db: &dyn HirDb) -> Option { + self.scope_data(db).parent_module } } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 7ba14624b0..fc15887c55 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,5 +1,5 @@ -use common::{InputDb, Upcast}; -use hir_def::module_tree_impl; +use common::{InputDb, InputIngot, Upcast}; +use hir_def::{module_tree_impl, IdentId, TopLevelMod}; pub use lower::parse::ParseDiagnostic; use lower::{ @@ -53,8 +53,29 @@ pub struct Jar( scope_graph_impl, map_file_to_mod_impl, parse_file_impl, + external_ingots_impl, ); +/// Returns the root modules and names of external ingots that the given `ingot` +/// depends on. +/// From the outside of the crate, this functionality can be accessed via +/// [`TopLevelMod::external_ingots`](crate::TopLevelMod::external_ingots). +// The reason why this function is not a public API is that we want to prohibit users of `HirDb` to +// access `InputIngot` directly. 
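// A minimal usage sketch of the public wrapper (assuming `db: &dyn HirDb` and
// `top_mod: TopLevelMod` are already in scope):
//
//     for (name, ingot_root) in top_mod.external_ingots(db) {
//         // `*name` is the dependency's name, `*ingot_root` its root module.
//     }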
+#[salsa::tracked(return_ref)] +pub(crate) fn external_ingots_impl( + db: &dyn HirDb, + ingot: InputIngot, +) -> Vec<(IdentId, TopLevelMod)> { + let mut res = Vec::new(); + for dep in ingot.external_ingots(db.upcast()) { + let name = IdentId::new(db, dep.name.to_string()); + let root = module_tree_impl(db, dep.ingot).root_data().top_mod; + res.push((name, root)) + } + res +} + pub trait HirDb: salsa::DbWithJar + InputDb + Upcast {} impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} From 2a4dd7ba062ce259ae3eb55c47c2c522cb8502da Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 27 Apr 2023 00:21:32 +0200 Subject: [PATCH 145/678] Implement visibility check --- Cargo.lock | 1 - crates/hir-analysis/Cargo.toml | 3 +- .../src/name_resolution/import_resolver.rs | 39 ++ .../hir-analysis/src/name_resolution/mod.rs | 416 +----------------- .../src/name_resolution/name_resolver.rs | 415 +++++++++++++++++ .../src/name_resolution/visibility_checker.rs | 47 ++ crates/hir/src/hir_def/item.rs | 19 + crates/hir/src/hir_def/scope_graph.rs | 22 +- crates/hir/src/lower/scope_builder.rs | 241 ++++------ 9 files changed, 628 insertions(+), 575 deletions(-) create mode 100644 crates/hir-analysis/src/name_resolution/import_resolver.rs create mode 100644 crates/hir-analysis/src/name_resolution/name_resolver.rs create mode 100644 crates/hir-analysis/src/name_resolution/visibility_checker.rs diff --git a/Cargo.lock b/Cargo.lock index 668ab9b79c..d52f40ca28 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -922,7 +922,6 @@ dependencies = [ name = "fe-hir-analysis" version = "0.20.0-alpha" dependencies = [ - "cranelift-entity", "either", "fe-common", "fe-hir", diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index 0c15aa07d5..3d4f3d2ca4 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -5,12 +5,11 @@ authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" repository = "https://github.com/ethereum/fe" -description = "Provides HIR definition and lowering for Fe lang" +description = "Provides HIR semantic analysis for Fe lang" [dependencies] salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } smallvec = "1.10" -cranelift-entity = "0.91" rustc-hash = "1.1.0" either = "1.8" diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs new file mode 100644 index 0000000000..9102d176a6 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -0,0 +1,39 @@ +#![allow(dead_code)] +use hir::hir_def::scope_graph::{ScopeEdge, ScopeId}; +use rustc_hash::FxHashMap; + +use crate::Spanned; + +pub struct ResolvedImports { + pub resolved: FxHashMap, +} + +pub struct ImportResolver { + resolved: FxHashMap>>, + glob_resolved: FxHashMap>>, + states: FxHashMap, +} + +pub trait Importer { + fn glob_imports(&self, scope: ScopeId) -> &[Spanned]; + fn named_imports(&self, scope: ScopeId) -> &[Spanned]; +} + +/// This is the state of import resolution for a given scope. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +enum ScopeState { + // The scope is open, meaning that the scope needs further processing. + Open, + // The scope is closed, meaning that the scope is fully resolved. 
+ Close, +} + +impl Importer for ImportResolver { + fn glob_imports(&self, scope: ScopeId) -> &[Spanned] { + &self.glob_resolved[&scope] + } + + fn named_imports(&self, scope: ScopeId) -> &[Spanned] { + &self.resolved[&scope] + } +} diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index fb16a3fd25..ede8568475 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -1,417 +1,3 @@ -use either::Either; -use hir::{ - hir_def::{ - scope_graph::{ - AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, - ScopeEdge, ScopeId, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, - VariantEdge, - }, - IdentId, Visibility, - }, - span::DynLazySpan, -}; -use rustc_hash::{FxHashMap, FxHashSet}; - -use crate::{HirAnalysisDb, Spanned}; - pub mod import_resolver; +pub mod name_resolver; pub mod visibility_checker; - -pub struct NameResolver<'db, 'a> { - db: &'db dyn HirAnalysisDb, - importer: &'a dyn Importer, - cache_store: ResolvedQueryCacheStore, -} - -impl<'db, 'a> NameResolver<'db, 'a> { - pub fn resolve_query(&mut self, scope: ScopeId, query: NameQuery) -> Vec { - // If the query is already resolved, return the cached result. - if let Some(answer) = self.cache_store.get(scope, query) { - return answer.clone(); - }; - - // The shadowing rule is - // `$ = NamedImports > GlobImports > Lex > external ingot`, - // where `$` means current scope. This ordering means that - // greater scope shadows lower scopes having the same name in the same - // domain and - - // 1. Look for the name in the current scope and named imports. - let mut results = Vec::new(); - let mut found_scopes = FxHashSet::default(); - let mut parent = None; - for edge in self.edges(scope) { - match edge.kind.propagate(self.db, query) { - PropagatedQuery::Terminated => { - if found_scopes.insert(edge.dest) { - results.push(ResolvedName::new(edge.dest, edge.vis, None)); - } - } - - PropagatedQuery::Continuation => { - debug_assert!(parent.is_none()); - parent = Some(edge.dest); - } - - PropagatedQuery::UnPropagated => {} - } - } - - for named_import in self.importer.named_imports(scope) { - let edge = &named_import.data; - match edge.kind.propagate(self.db, query) { - PropagatedQuery::Terminated => { - if found_scopes.insert(edge.dest) { - results.push(ResolvedName::new( - edge.dest, - edge.vis, - Some(named_import.span.clone()), - )); - } - } - PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} - } - } - - // If the name is found in the current scope or named imports, we don't need to - // look for it further. - if !results.is_empty() { - self.cache_store - .cache_resolved(scope, query, results.clone()); - return results; - } - - // 2. Look for the name in the glob imports. - for glob_import in self.importer.glob_imports(scope) { - let edge = &glob_import.data; - match edge.kind.propagate(self.db, query) { - PropagatedQuery::Terminated => { - if found_scopes.insert(edge.dest) { - results.push(ResolvedName::new_glob( - edge.dest, - edge.vis, - Some(glob_import.span.clone()), - )); - } - } - PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} - } - } - - // If the name is found in the glob imports, we don't need to look for it - // further. - if !results.is_empty() { - self.cache_store - .cache_resolved(scope, query, results.clone()); - return results; - } - - // 3. 
Look for the name in the lexical scope if it exists, else look for the - // name in the external ingot. - if let Some(parent) = parent { - self.cache_store.cache_delegated(scope, query, parent); - self.resolve_query(parent, query) - } else if query.domain == NameDomain::Item { - for (name, root_mod) in scope.top_mod.external_ingots(self.db.upcast()) { - if *name == query.name { - results.push(ResolvedName::new( - ScopeId::root(*root_mod), - Visibility::Public, - None, - )); - } - } - - // Ensure that all names of external ingots don't conflict with each other. - debug_assert!(results.len() < 2); - self.cache_store - .cache_resolved(scope, query, results.clone()); - results - } else { - self.cache_store.cache_resolved(scope, query, vec![]); - vec![] - } - } - - fn edges(&self, scope: ScopeId) -> &'db [ScopeEdge] { - let graph = scope.top_mod.module_scope_graph(self.db.upcast()); - graph.edges(scope.local_id) - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct NameQuery { - name: IdentId, - domain: NameDomain, -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ResolvedName { - pub scope: ScopeId, - pub vis: Visibility, - pub import_span: Option, - pub via_glob_import: bool, -} - -impl ResolvedName { - pub fn new(scope: ScopeId, vis: Visibility, import_span: Option) -> Self { - Self { - scope, - vis, - import_span, - via_glob_import: false, - } - } - - pub fn new_glob(scope: ScopeId, vis: Visibility, import_span: Option) -> Self { - Self { - scope, - vis, - import_span, - via_glob_import: true, - } - } -} - -impl ResolvedName { - pub fn is_valid(&self) -> bool { - self.scope.is_valid() - } -} - -#[derive(Default)] -struct ResolvedQueryCacheStore { - cache: FxHashMap<(ScopeId, NameQuery), Either, ScopeId>>, - no_cache: bool, -} - -impl ResolvedQueryCacheStore { - fn cache_resolved(&mut self, scope: ScopeId, query: NameQuery, resolved: Vec) { - if self.no_cache { - return; - } - self.cache.insert((scope, query), Either::Left(resolved)); - } - - fn cache_delegated(&mut self, scope: ScopeId, query: NameQuery, parent: ScopeId) { - if self.no_cache { - return; - } - self.cache.insert((scope, query), Either::Right(parent)); - } - - fn get(&self, scope: ScopeId, query: NameQuery) -> Option> { - match self.cache.get(&(scope, query)) { - Some(Either::Left(resolved)) => Some(resolved.clone()), - Some(Either::Right(delegated)) => Some(self.get(*delegated, query)?), - _ => None, - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum NameDomain { - Item, - Value, - Type, - Field, - Variant, -} - -pub trait Importer { - fn glob_imports(&self, scope: ScopeId) -> &[Spanned]; - fn named_imports(&self, scope: ScopeId) -> &[Spanned]; -} - -trait QueryPropagator { - // TODO: `db` is not necessary if we implement prefilled keywords. 
- fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery; -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -enum PropagatedQuery { - Terminated, - Continuation, - UnPropagated, -} - -impl QueryPropagator for LexEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { - PropagatedQuery::Continuation - } -} - -impl QueryPropagator for ModEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - match query.domain { - NameDomain::Item if self.0 == query.name => PropagatedQuery::Terminated, - _ => PropagatedQuery::UnPropagated, - } - } -} - -impl QueryPropagator for TypeEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Item | NameDomain::Type) { - return PropagatedQuery::UnPropagated; - } - - if self.0 == query.name { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for TraitEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Item | NameDomain::Type) { - return PropagatedQuery::UnPropagated; - } - - if self.0 == query.name { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for ValueEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Item | NameDomain::Value) { - return PropagatedQuery::UnPropagated; - } - - if self.0 == query.name { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for GenericParamEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Item | NameDomain::Type) { - return PropagatedQuery::UnPropagated; - } - - if self.0 == query.name { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for FieldEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Field) { - return PropagatedQuery::UnPropagated; - } - - if self.0 == query.name { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for VariantEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Variant) { - return PropagatedQuery::UnPropagated; - } - - if self.0 == query.name { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for SuperEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Item) { - return PropagatedQuery::UnPropagated; - } - - if query.name.is_super(db.upcast()) { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for IngotEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Item) { - return PropagatedQuery::UnPropagated; - } - - if query.name.is_ingot(db.upcast()) { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for SelfTyEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if 
!matches!(query.domain, NameDomain::Item | NameDomain::Type) { - return PropagatedQuery::UnPropagated; - } - - if query.name.is_self_ty(db.upcast()) { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for SelfEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameDomain::Item | NameDomain::Value) { - return PropagatedQuery::UnPropagated; - } - - if query.name.is_self(db.upcast()) { - PropagatedQuery::Terminated - } else { - PropagatedQuery::UnPropagated - } - } -} - -impl QueryPropagator for AnonEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { - PropagatedQuery::UnPropagated - } -} - -impl QueryPropagator for EdgeKind { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { - match self { - EdgeKind::Lex(edge) => edge.propagate(db, query), - EdgeKind::Mod(edge) => edge.propagate(db, query), - EdgeKind::Type(edge) => edge.propagate(db, query), - EdgeKind::Trait(edge) => edge.propagate(db, query), - EdgeKind::GenericParam(edge) => edge.propagate(db, query), - EdgeKind::Value(edge) => edge.propagate(db, query), - EdgeKind::Field(edge) => edge.propagate(db, query), - EdgeKind::Variant(edge) => edge.propagate(db, query), - EdgeKind::Super(edge) => edge.propagate(db, query), - EdgeKind::Ingot(edge) => edge.propagate(db, query), - EdgeKind::Self_(edge) => edge.propagate(db, query), - EdgeKind::SelfTy(edge) => edge.propagate(db, query), - EdgeKind::Anon(edge) => edge.propagate(db, query), - } - } -} diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs new file mode 100644 index 0000000000..5db3463572 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -0,0 +1,415 @@ +use either::Either; +use hir::{ + hir_def::{ + scope_graph::{ + AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, + ScopeEdge, ScopeId, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, + VariantEdge, + }, + IdentId, PathId, + }, + span::DynLazySpan, +}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::HirAnalysisDb; + +use super::import_resolver::Importer; + +pub struct NameResolver<'db, 'a> { + db: &'db dyn HirAnalysisDb, + importer: &'a dyn Importer, + cache_store: ResolvedQueryCacheStore, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ResolvedPath { + FullResolved(ScopeId), + PartialResolved { + resolved: ScopeId, + unresolved_from: usize, + }, + Failed { + failed_at: usize, + cause: NameResolutionFailure, + }, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum NameResolutionFailure { + Conflict, + Missing, +} + +impl<'db, 'a> NameResolver<'db, 'a> { + pub fn resolve_path( + &mut self, + _path: PathId, + _scope: ScopeId, + _context: NameContext, + ) -> ResolvedPath { + todo!() + } + + pub fn resolve_query(&mut self, scope: ScopeId, query: NameQuery) -> Vec { + // If the query is already resolved, return the cached result. + if let Some(resolved) = self.cache_store.get(scope, query) { + return resolved; + }; + + // The shadowing rule is + // `$ = NamedImports > GlobImports > Lex > external ingot > builtin types`, + // where `$` means current scope. This ordering means that + // greater scope shadows lower scopes having the same name in the same + // domain and + + // 1. Look for the name in the current scope and named imports. 
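// As a concrete illustration of the ordering above (a rough, Rust-flavored
// sketch only, not taken from the test suite):
//
//     struct Thing {}             // enclosing scope, reached via Lex
//     mod inner {
//         use super::other::*;    // glob import that also exposes a `Thing`
//         use super::a::Thing;    // named import
//         // A reference to `Thing` here resolves to `a::Thing`: the named
//         // import shadows the glob import, which shadows `super::Thing`.
//     }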
+ let mut results = Vec::new(); + let mut found_scopes = FxHashSet::default(); + let mut parent = None; + for edge in self.edges(scope) { + match edge.kind.propagate(self.db, query) { + PropagatedQuery::Terminated => { + if found_scopes.insert(edge.dest) { + results.push(ResolvedName::new(edge.dest, None)); + } + } + + PropagatedQuery::Continuation => { + debug_assert!(parent.is_none()); + parent = Some(edge.dest); + } + + PropagatedQuery::UnPropagated => {} + } + } + + for named_import in self.importer.named_imports(scope) { + let edge = &named_import.data; + match edge.kind.propagate(self.db, query) { + PropagatedQuery::Terminated => { + if found_scopes.insert(edge.dest) { + results.push(ResolvedName::new( + edge.dest, + Some(named_import.span.clone()), + )); + } + } + PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} + } + } + if !results.is_empty() { + self.cache_store + .cache_resolved(scope, query, results.clone()); + return results; + } + + // 2. Look for the name in the glob imports. + for glob_import in self.importer.glob_imports(scope) { + let edge = &glob_import.data; + match edge.kind.propagate(self.db, query) { + PropagatedQuery::Terminated => { + if found_scopes.insert(edge.dest) { + results.push(ResolvedName::new(edge.dest, Some(glob_import.span.clone()))); + } + } + PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} + } + } + if !results.is_empty() { + self.cache_store + .cache_resolved(scope, query, results.clone()); + return results; + } + + // 3. Look for the name in the lexical scope if it exists. + if let Some(parent) = parent { + self.cache_store.cache_delegated(scope, query, parent); + return self.resolve_query(parent, query); + } + + // 4. Look for the name in the external ingots. + if query.domain == NameContext::Item { + for (name, root_mod) in scope.top_mod.external_ingots(self.db.upcast()) { + if *name == query.name { + results.push(ResolvedName::new(ScopeId::root(*root_mod), None)); + } + } + // Ensure that all names of external ingots don't conflict with each other. + debug_assert!(results.len() < 2); + } + if !results.is_empty() { + self.cache_store + .cache_resolved(scope, query, results.clone()); + return results; + } + + // 5. Look for the name in the builtin types. + // TODO: Think about how to handle builtin types. 
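// One possible shape for this step, sketched only: it relies on the
// `BuiltinName`/`ResolvedName::Builtin` machinery added in a later commit, and
// `BuiltinName::from_ident` is a hypothetical helper, not an existing API:
//
//     if let Some(builtin) = BuiltinName::from_ident(query.name) {
//         results.push(ResolvedName::Builtin(builtin));
//     }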
+ self.cache_store + .cache_resolved(scope, query, results.clone()); + + results + } + + fn edges(&self, scope: ScopeId) -> &'db [ScopeEdge] { + let graph = scope.top_mod.module_scope_graph(self.db.upcast()); + graph.edges(scope.local_id) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct NameQuery { + name: IdentId, + domain: NameContext, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct ResolvedName { + pub scope: ScopeId, + pub import_span: Option, +} + +impl ResolvedName { + pub fn new(scope: ScopeId, import_span: Option) -> Self { + Self { scope, import_span } + } +} + +impl ResolvedName { + pub fn is_valid(&self) -> bool { + self.scope.is_valid() + } +} + +#[derive(Default)] +struct ResolvedQueryCacheStore { + cache: FxHashMap<(ScopeId, NameQuery), Either, ScopeId>>, + no_cache: bool, +} + +impl ResolvedQueryCacheStore { + fn cache_resolved(&mut self, scope: ScopeId, query: NameQuery, resolved: Vec) { + if self.no_cache { + return; + } + self.cache.insert((scope, query), Either::Left(resolved)); + } + + fn cache_delegated(&mut self, scope: ScopeId, query: NameQuery, parent: ScopeId) { + if self.no_cache { + return; + } + self.cache.insert((scope, query), Either::Right(parent)); + } + + fn get(&self, scope: ScopeId, query: NameQuery) -> Option> { + match self.cache.get(&(scope, query)) { + Some(Either::Left(resolved)) => Some(resolved.clone()), + Some(Either::Right(delegated)) => Some(self.get(*delegated, query)?), + _ => None, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum NameContext { + Item, + Value, + Type, + Field, + Variant, +} + +trait QueryPropagator { + // TODO: `db` is not necessary if we implement prefilled keywords. + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery; +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +enum PropagatedQuery { + Terminated, + Continuation, + UnPropagated, +} + +impl QueryPropagator for LexEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { + PropagatedQuery::Continuation + } +} + +impl QueryPropagator for ModEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + match query.domain { + NameContext::Item if self.0 == query.name => PropagatedQuery::Terminated, + _ => PropagatedQuery::UnPropagated, + } + } +} + +impl QueryPropagator for TypeEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Item | NameContext::Type) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for TraitEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Item | NameContext::Type) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for ValueEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Item | NameContext::Value) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for GenericParamEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> 
PropagatedQuery { + if !matches!(query.domain, NameContext::Item | NameContext::Type) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for FieldEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Field) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for VariantEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Variant) { + return PropagatedQuery::UnPropagated; + } + + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for SuperEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Item) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_super(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for IngotEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Item) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_ingot(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for SelfTyEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Item | NameContext::Type) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_self_ty(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for SelfEdge { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + if !matches!(query.domain, NameContext::Item | NameContext::Value) { + return PropagatedQuery::UnPropagated; + } + + if query.name.is_self(db.upcast()) { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated + } + } +} + +impl QueryPropagator for AnonEdge { + fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } +} + +impl QueryPropagator for EdgeKind { + fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + match self { + EdgeKind::Lex(edge) => edge.propagate(db, query), + EdgeKind::Mod(edge) => edge.propagate(db, query), + EdgeKind::Type(edge) => edge.propagate(db, query), + EdgeKind::Trait(edge) => edge.propagate(db, query), + EdgeKind::GenericParam(edge) => edge.propagate(db, query), + EdgeKind::Value(edge) => edge.propagate(db, query), + EdgeKind::Field(edge) => edge.propagate(db, query), + EdgeKind::Variant(edge) => edge.propagate(db, query), + EdgeKind::Super(edge) => edge.propagate(db, query), + EdgeKind::Ingot(edge) => edge.propagate(db, query), + EdgeKind::Self_(edge) => edge.propagate(db, query), + EdgeKind::SelfTy(edge) => edge.propagate(db, query), + EdgeKind::Anon(edge) => edge.propagate(db, query), + } + } +} diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs new file mode 100644 index 0000000000..a5949ac9dd 
--- /dev/null +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -0,0 +1,47 @@ +use hir::hir_def::scope_graph::{ScopeId, ScopeKind}; + +use crate::HirAnalysisDb; + +use super::name_resolver::ResolvedName; + +/// Return `true` if the given `resolved` is visible from the `ref_scope`. +/// The resolved name is visible from `ref_scope` if +/// 1. It is declared as public, or +/// 2. The `ref_scope` is a child or the same scope of the scope where the +/// resolved name is defined. +pub fn check_visibility( + db: &dyn HirAnalysisDb, + ref_scope: ScopeId, + resolved: &ResolvedName, +) -> bool { + // If resolved is public, then it is visible. + if resolved.scope.data(db.upcast()).vis.is_pub() { + return true; + } + + let Some(def_scope) = (match resolved.scope.kind(db.upcast()) { + // We treat fields as if they are defined in the parent of the parent scope so + // that field can be accessible from the scope where the parent is defined. + ScopeKind::Field(_) => { + resolved.scope.parent(db.upcast()).and_then(|scope| scope.parent(db.upcast())) + }, + _ => { + resolved.scope.parent(db.upcast()) + } + }) else { + return false; + }; + + // If ref scope is a child scope or the same scope of the def scope, then it is + // visible. + let mut parent = Some(ref_scope); + while let Some(scope) = parent { + if scope == def_scope { + return true; + } else { + parent = scope.parent(db.upcast()); + } + } + + false +} diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 0e9fbc928f..ab82d1bf1c 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -56,6 +56,25 @@ pub enum ItemKind { Body(Body), } +impl ItemKind { + pub fn vis(self, db: &dyn HirDb) -> Visibility { + use ItemKind::*; + match self { + TopMod(top_mod) => top_mod.vis(db), + Mod(mod_) => mod_.vis(db), + Func(func) => func.vis(db), + Struct(struct_) => struct_.vis(db), + Contract(contract) => contract.vis(db), + Enum(enum_) => enum_.vis(db), + TypeAlias(type_) => type_.vis(db), + Trait(trait_) => trait_.vis(db), + Const(const_) => const_.vis(db), + Use(use_) => use_.vis(db), + Impl(_) | ImplTrait(_) | Body(_) => Visibility::Private, + } + } +} + #[salsa::tracked] pub struct TopLevelMod { // No #[id] here, because `TopLevelMod` is always unique to a `InputFile` that is an argument diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 6659811cae..62dd979567 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -11,6 +11,7 @@ pub struct ScopeGraph { pub scopes: PrimaryMap, pub item_map: FxHashMap, pub unresolved_imports: FxHashMap>, + pub unresolved_exports: FxHashMap>, } impl ScopeGraph { @@ -77,14 +78,16 @@ pub struct LocalScope { pub kind: ScopeKind, pub edges: Vec, pub parent_module: Option, + pub vis: Visibility, } impl LocalScope { - pub fn new(kind: ScopeKind, parent_module: Option) -> Self { + pub fn new(kind: ScopeKind, parent_module: Option, vis: Visibility) -> Self { Self { kind, edges: vec![], parent_module, + vis, } } } @@ -102,7 +105,6 @@ pub enum ScopeKind { pub struct ScopeEdge { pub dest: ScopeId, pub kind: EdgeKind, - pub vis: Visibility, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -135,14 +137,26 @@ impl ScopeId { self != Self::invalid() } - pub fn scope_data(self, db: &dyn HirDb) -> &LocalScope { + pub fn data(self, db: &dyn HirDb) -> &LocalScope { self.top_mod .module_scope_graph(db) .scope_data(self.local_id) } + pub fn kind(self, db: &dyn HirDb) -> ScopeKind { 
+ self.data(db).kind + } + + pub fn parent(self, db: &dyn HirDb) -> Option { + self.data(db) + .edges + .iter() + .find(|e| matches!(e.kind, EdgeKind::Lex(_) | EdgeKind::Super(_))) + .map(|e| e.dest) + } + pub fn parent_module(self, db: &dyn HirDb) -> Option { - self.scope_data(db).parent_module + self.data(db).parent_module } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 159e96680b..8f2e1f05b9 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -27,6 +27,7 @@ impl<'db> ScopeGraphBuilder<'db> { scopes: Default::default(), item_map: Default::default(), unresolved_imports: Default::default(), + unresolved_exports: Default::default(), }, scope_stack: Default::default(), module_stack: Default::default(), @@ -63,28 +64,18 @@ impl<'db> ScopeGraphBuilder<'db> { if let ItemKind::TopMod(top_mod) = item { debug_assert!(self.scope_stack.is_empty()); - self.add_local_edge( - item_scope, - item_scope, - EdgeKind::self_(), - Visibility::Private, - ); + self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); - self.add_global_edge( - item_scope, - top_mod.ingot_root(self.db), - EdgeKind::ingot(), - Visibility::Private, - ); + self.add_global_edge(item_scope, top_mod.ingot_root(self.db), EdgeKind::ingot()); for child in top_mod.children(self.db) { let child_name = child.name(self.db); let edge = EdgeKind::mod_(child_name); - self.add_global_edge(item_scope, child, edge, child.vis(self.db)) + self.add_global_edge(item_scope, child, edge) } if let Some(parent) = top_mod.parent(self.db) { let parent_edge = EdgeKind::super_(); - self.add_global_edge(item_scope, parent, parent_edge, Visibility::Private); + self.add_global_edge(item_scope, parent, parent_edge); } self.module_stack.pop().unwrap(); @@ -92,36 +83,26 @@ impl<'db> ScopeGraphBuilder<'db> { } let parent_scope = *self.scope_stack.last().unwrap(); - let (parent_to_child_edge, vis) = match item { + let parent_to_child_edge = match item { Mod(inner) => { self.add_local_edge( item_scope, *self.module_stack.last().unwrap(), EdgeKind::super_(), - inner.vis(self.db), ); self.add_global_edge( item_scope, self.top_mod.ingot_root(self.db), EdgeKind::ingot(), - Visibility::Private, - ); - self.add_local_edge( - item_scope, - item_scope, - EdgeKind::self_(), - Visibility::Private, ); + self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); self.module_stack.pop().unwrap(); - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::mod_) - .unwrap_or_else(EdgeKind::anon), - inner.vis(self.db), - ) + inner + .name(self.db) + .to_opt() + .map(EdgeKind::mod_) + .unwrap_or_else(EdgeKind::anon) } Func(inner) => { @@ -130,150 +111,115 @@ impl<'db> ScopeGraphBuilder<'db> { if let Some(params) = inner.params(self.db).to_opt() { self.add_func_param_scope(item_scope, params); } - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::value) - .unwrap_or_else(EdgeKind::anon), - inner.vis(self.db), - ) + inner + .name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon) } Struct(inner) => { self.add_lex_edge(item_scope, parent_scope); self.add_field_scope(item_scope, inner.fields(self.db)); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon), - Visibility::Private, - ) + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) } Contract(inner) => { self.add_lex_edge(item_scope, 
parent_scope); self.add_field_scope(item_scope, inner.fields(self.db)); - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon), - inner.vis(self.db), - ) + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) } Enum(inner) => { self.add_lex_edge(item_scope, parent_scope); self.add_variant_scope(item_scope, inner.variants(self.db)); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon), - inner.vis(self.db), - ) + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) } TypeAlias(inner) => { self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::type_) - .unwrap_or_else(EdgeKind::anon), - inner.vis(self.db), - ) + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) } Impl(inner) => { self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge( - item_scope, - item_scope, - EdgeKind::self_ty(), - Visibility::Private, - ); - (EdgeKind::anon(), Visibility::Private) + self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + EdgeKind::anon() } Trait(inner) => { self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge( - item_scope, - item_scope, - EdgeKind::self_ty(), - Visibility::Private, - ); - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::trait_) - .unwrap_or_else(EdgeKind::anon), - inner.vis(self.db), - ) + self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::trait_) + .unwrap_or_else(EdgeKind::anon) } ImplTrait(inner) => { self.add_lex_edge(item_scope, parent_scope); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge( - item_scope, - item_scope, - EdgeKind::self_ty(), - Visibility::Private, - ); - (EdgeKind::anon(), Visibility::Private) + self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + EdgeKind::anon() } Const(inner) => { self.add_lex_edge(item_scope, parent_scope); - ( - inner - .name(self.db) - .to_opt() - .map(EdgeKind::value) - .unwrap_or_else(EdgeKind::anon), - inner.vis(self.db), - ) + inner + .name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon) } Use(use_) => { - self.graph - .unresolved_imports - .entry(parent_scope) - .or_default() - .push(use_); + let import_map = if use_.vis(self.db).is_pub() { + &mut self.graph.unresolved_exports + } else { + &mut self.graph.unresolved_imports + }; + import_map.entry(parent_scope).or_default().push(use_); self.add_lex_edge(item_scope, parent_scope); - (EdgeKind::anon(), use_.vis(self.db)) + EdgeKind::anon() } Body(_) => { self.add_lex_edge(item_scope, parent_scope); - (EdgeKind::anon(), Visibility::Private) + EdgeKind::anon() } _ => unreachable!(), }; - self.add_local_edge(parent_scope, item_scope, parent_to_child_edge, vis); + self.add_local_edge(parent_scope, item_scope, parent_to_child_edge); } fn add_field_scope(&mut self, current_scope: LocalScopeId, fields: RecordFieldListId) { for (i, field) in fields.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::Field(i), 
self.parent_module_id()); + let scope = LocalScope::new(ScopeKind::Field(i), self.parent_module_id(), field.vis); let field_scope = self.graph.scopes.push(scope); self.add_lex_edge(field_scope, current_scope); let kind = field @@ -281,13 +227,17 @@ impl<'db> ScopeGraphBuilder<'db> { .to_opt() .map(EdgeKind::field) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, field_scope, kind, field.vis) + self.add_local_edge(current_scope, field_scope, kind) } } fn add_variant_scope(&mut self, current_scope: LocalScopeId, variants: EnumVariantListId) { for (i, field) in variants.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::Variant(i), self.parent_module_id()); + let scope = LocalScope::new( + ScopeKind::Variant(i), + self.parent_module_id(), + Visibility::Public, + ); let variant_scope = self.graph.scopes.push(scope); self.add_lex_edge(variant_scope, current_scope); let kind = field @@ -295,13 +245,17 @@ impl<'db> ScopeGraphBuilder<'db> { .to_opt() .map(EdgeKind::variant) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, variant_scope, kind, Visibility::Public) + self.add_local_edge(current_scope, variant_scope, kind) } } fn add_func_param_scope(&mut self, current_scope: LocalScopeId, params: FnParamListId) { for (i, param) in params.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::FnParam(i), self.parent_module_id()); + let scope = LocalScope::new( + ScopeKind::FnParam(i), + self.parent_module_id(), + Visibility::Private, + ); let generic_param_scope = self.graph.scopes.push(scope); self.add_lex_edge(generic_param_scope, current_scope); let kind = param @@ -313,18 +267,17 @@ impl<'db> ScopeGraphBuilder<'db> { FnParamName::Underscore => EdgeKind::anon(), }) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge( - current_scope, - generic_param_scope, - kind, - Visibility::Private, - ) + self.add_local_edge(current_scope, generic_param_scope, kind) } } fn add_generic_param_scope(&mut self, current_scope: LocalScopeId, params: GenericParamListId) { for (i, param) in params.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::GenericParam(i), self.parent_module_id()); + let scope = LocalScope::new( + ScopeKind::GenericParam(i), + self.parent_module_id(), + Visibility::Private, + ); let generic_param_scope = self.graph.scopes.push(scope); self.add_lex_edge(generic_param_scope, current_scope); let kind = param @@ -332,12 +285,7 @@ impl<'db> ScopeGraphBuilder<'db> { .to_opt() .map(EdgeKind::generic_param) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge( - current_scope, - generic_param_scope, - kind, - Visibility::Private, - ) + self.add_local_edge(current_scope, generic_param_scope, kind) } } @@ -346,48 +294,35 @@ impl<'db> ScopeGraphBuilder<'db> { kind: ScopeKind::Item(self.top_mod.into()), edges: Vec::new(), parent_module: self.parent_module_id(), + vis: Visibility::Public, } } fn parent_module_id(&self) -> Option { if let Some(id) = self.module_stack.last() { Some(ScopeId::new(self.top_mod, *id)) - } else if let Some(top_mod) = self.top_mod.parent(self.db) { - Some(ScopeId::new(top_mod, LocalScopeId::root())) } else { - None + self.top_mod + .parent(self.db) + .map(|top_mod| ScopeId::new(top_mod, LocalScopeId::root())) } } - fn add_local_edge( - &mut self, - source: LocalScopeId, - dest: LocalScopeId, - kind: EdgeKind, - vis: Visibility, - ) { + fn add_local_edge(&mut self, source: LocalScopeId, dest: LocalScopeId, kind: EdgeKind) { 
self.graph.scopes[source].edges.push(ScopeEdge { dest: ScopeId::new(self.top_mod, dest), kind, - vis, }); } fn add_lex_edge(&mut self, source: LocalScopeId, dest: LocalScopeId) { - self.add_local_edge(source, dest, EdgeKind::lex(), Visibility::Private); + self.add_local_edge(source, dest, EdgeKind::lex()); } - fn add_global_edge( - &mut self, - source: LocalScopeId, - dest: TopLevelMod, - kind: EdgeKind, - vis: Visibility, - ) { + fn add_global_edge(&mut self, source: LocalScopeId, dest: TopLevelMod, kind: EdgeKind) { self.graph.scopes[source].edges.push(ScopeEdge { dest: ScopeId::new(dest, LocalScopeId::root()), kind, - vis, }); } } From 637211b97feba058fc5984bfe9742f41d610ac24 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 1 May 2023 17:53:19 +0200 Subject: [PATCH 146/678] Add proc macro to generate prefilled keywords --- Cargo.lock | 11 ++ .../src/name_resolution/name_resolver.rs | 57 +++++++-- .../src/name_resolution/visibility_checker.rs | 13 +- crates/hir/Cargo.toml | 5 +- crates/hir/src/hir_def/ident.rs | 45 +++++++ crates/hir/src/hir_def/mod.rs | 43 +------ crates/hir/src/hir_def/scope_graph.rs | 6 +- crates/hir/src/lib.rs | 22 +++- crates/hir/src/lower/path.rs | 10 +- crates/hir/src/lower/use_tree.rs | 8 +- crates/macros/Cargo.toml | 17 +++ crates/macros/src/kw.rs | 111 ++++++++++++++++++ crates/macros/src/lib.rs | 12 ++ 13 files changed, 286 insertions(+), 74 deletions(-) create mode 100644 crates/hir/src/hir_def/ident.rs create mode 100644 crates/macros/Cargo.toml create mode 100644 crates/macros/src/kw.rs create mode 100644 crates/macros/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index d52f40ca28..a4c8864a23 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -910,6 +910,7 @@ dependencies = [ "cranelift-entity", "derive_more", "fe-common2", + "fe-macros", "fe-parser2", "num-bigint", "num-traits", @@ -937,6 +938,16 @@ dependencies = [ "include_dir", ] +[[package]] +name = "fe-macros" +version = "0.1.1" +dependencies = [ + "glob", + "proc-macro2", + "quote", + "syn 2.0.15", +] + [[package]] name = "fe-mir" version = "0.22.0" diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 5db3463572..dc68a2c566 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -25,10 +25,17 @@ pub struct NameResolver<'db, 'a> { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ResolvedPath { FullResolved(ScopeId), + + /// The path is partially resolved; this means that the segments from + /// `unresolved_from` depend on a type. + /// These unresolved parts are resolved in the later type inference and + /// trait solving phases. PartialResolved { resolved: ScopeId, unresolved_from: usize, }, + + /// The path resolution failed at the given segment. 
Failed { failed_at: usize, cause: NameResolutionFailure, @@ -71,7 +78,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { match edge.kind.propagate(self.db, query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { - results.push(ResolvedName::new(edge.dest, None)); + results.push(ResolvedName::scope(edge.dest, None)); } } @@ -89,7 +96,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { match edge.kind.propagate(self.db, query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { - results.push(ResolvedName::new( + results.push(ResolvedName::scope( edge.dest, Some(named_import.span.clone()), )); @@ -110,7 +117,10 @@ impl<'db, 'a> NameResolver<'db, 'a> { match edge.kind.propagate(self.db, query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { - results.push(ResolvedName::new(edge.dest, Some(glob_import.span.clone()))); + results.push(ResolvedName::scope( + edge.dest, + Some(glob_import.span.clone()), + )); } } PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} @@ -132,7 +142,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { if query.domain == NameContext::Item { for (name, root_mod) in scope.top_mod.external_ingots(self.db.upcast()) { if *name == query.name { - results.push(ResolvedName::new(ScopeId::root(*root_mod), None)); + results.push(ResolvedName::scope(ScopeId::root(*root_mod), None)); } } // Ensure that all names of external ingots don't conflict with each other. @@ -165,20 +175,47 @@ pub struct NameQuery { } #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ResolvedName { - pub scope: ScopeId, - pub import_span: Option, +pub enum ResolvedName { + Builtin(BuiltinName), + Scope { + scope: ScopeId, + import_span: Option, + }, +} + +#[derive(Clone, Debug, Copy, PartialEq, Eq, Hash)] +pub enum BuiltinName { + Bool, + U8, + U16, + U32, + U64, + U128, + U256, + I8, + I16, + I32, + I64, + I128, + I256, } impl ResolvedName { - pub fn new(scope: ScopeId, import_span: Option) -> Self { - Self { scope, import_span } + pub fn scope(scope: ScopeId, import_span: Option) -> Self { + Self::Scope { scope, import_span } + } + + pub fn builtin(builtin: BuiltinName) -> Self { + Self::Builtin(builtin) } } impl ResolvedName { pub fn is_valid(&self) -> bool { - self.scope.is_valid() + match self { + Self::Scope { scope, .. } => scope.is_valid(), + Self::Builtin(_) => true, + } } } diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs index a5949ac9dd..e63242f0a0 100644 --- a/crates/hir-analysis/src/name_resolution/visibility_checker.rs +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -14,19 +14,24 @@ pub fn check_visibility( ref_scope: ScopeId, resolved: &ResolvedName, ) -> bool { + let ResolvedName::Scope{scope, .. } = resolved else { + // If resolved is a builtin name, then it's always visible . + return true; + }; + // If resolved is public, then it is visible. - if resolved.scope.data(db.upcast()).vis.is_pub() { + if scope.data(db.upcast()).vis.is_pub() { return true; } - let Some(def_scope) = (match resolved.scope.kind(db.upcast()) { + let Some(def_scope) = (match scope.kind(db.upcast()) { // We treat fields as if they are defined in the parent of the parent scope so // that field can be accessible from the scope where the parent is defined. 
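// For example, for a (hypothetical) `struct Point { x: i32 }` declared in
// module `geo`, field `x` is treated as if it were declared directly in `geo`,
// so any code inside `geo` may refer to `point.x` even though the field itself
// is not public.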
ScopeKind::Field(_) => { - resolved.scope.parent(db.upcast()).and_then(|scope| scope.parent(db.upcast())) + scope.parent(db.upcast()).and_then(|scope| scope.parent(db.upcast())) }, _ => { - resolved.scope.parent(db.upcast()) + scope.parent(db.upcast()) } }) else { return false; diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index cfb4fdac8c..4bed6f466f 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -17,7 +17,8 @@ num-bigint = "0.4.3" num-traits = "0.2.15" camino = "1.1.4" rustc-hash = "1.1.0" +smallvec = "1.10.0" -parser = { path = "../parser2", package = "fe-parser2" } common = { path = "../common2", package = "fe-common2" } -smallvec = "1.10.0" +parser = { path = "../parser2", package = "fe-parser2" } +macros = { path = "../macros", package = "fe-macros" } diff --git a/crates/hir/src/hir_def/ident.rs b/crates/hir/src/hir_def/ident.rs new file mode 100644 index 0000000000..810c7a82aa --- /dev/null +++ b/crates/hir/src/hir_def/ident.rs @@ -0,0 +1,45 @@ +#[salsa::interned] +pub struct IdentId { + data: String, +} +impl IdentId { + pub fn is_super(self) -> bool { + self == kw::SUPER + } + + pub fn is_ingot(self) -> bool { + self == kw::INGOT + } + + pub fn is_self(self) -> bool { + self == kw::SELF + } + + pub fn is_self_ty(self) -> bool { + self == kw::SELF_TY + } +} + +pub mod kw { + use macros::define_keywords; + + define_keywords! { + (INGOT, "ingot"), + (SUPER, "super"), + (SELF, "self"), + (SELF_TY, "Self"), + (BOOL, "bool"), + (U8, "u8"), + (U16, "u16"), + (U32, "u32"), + (U64, "u64"), + (U128, "u128"), + (U256, "u256"), + (I8, "i8"), + (I16, "i16"), + (I32, "i32"), + (I64, "i64"), + (I128, "i128"), + (I256, "i256"), + } +} diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 8b9c10681b..4d739f5eec 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -1,6 +1,7 @@ pub mod attr; pub mod body; pub mod expr; +pub mod ident; pub mod item; pub mod params; pub mod pat; @@ -15,6 +16,7 @@ pub(crate) mod module_tree; pub use attr::*; pub use body::*; pub use expr::*; +pub use ident::*; pub use item::*; use num_bigint::BigUint; pub use params::*; @@ -26,47 +28,6 @@ pub use use_tree::*; pub use module_tree::*; -use crate::HirDb; - -#[salsa::interned] -pub struct IdentId { - data: String, -} -// TODO: Keyword should be prefilled in the database. 
-// ref: https://github.com/salsa-rs/salsa/pull/440 -impl IdentId { - pub fn is_super(self, db: &dyn HirDb) -> bool { - self == Self::super_kw(db) - } - - pub fn is_ingot(self, db: &dyn HirDb) -> bool { - self == Self::ingot_kw(db) - } - - pub fn is_self(self, db: &dyn HirDb) -> bool { - self == Self::self_kw(db) - } - - pub fn is_self_ty(self, db: &dyn HirDb) -> bool { - self == Self::self_ty_kw(db) - } - pub fn super_kw(db: &dyn HirDb) -> Self { - IdentId::new(db, "super".to_string()) - } - - pub fn ingot_kw(db: &dyn HirDb) -> Self { - IdentId::new(db, "ingot".to_string()) - } - - pub fn self_kw(db: &dyn HirDb) -> Self { - IdentId::new(db, "self".to_string()) - } - - pub fn self_ty_kw(db: &dyn HirDb) -> Self { - IdentId::new(db, "Self".to_string()) - } -} - #[salsa::interned] pub struct IntegerId { #[return_ref] diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 62dd979567..04645efe3a 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -1,7 +1,7 @@ use cranelift_entity::{entity_impl, PrimaryMap}; use rustc_hash::{FxHashMap, FxHashSet}; -use crate::{span::DynLazySpan, HirDb}; +use crate::HirDb; use super::{IdentId, ItemKind, TopLevelMod, Use, Visibility}; @@ -122,10 +122,6 @@ impl ScopeId { Self::new(top_mod, LocalScopeId::root()) } - pub fn span(self, _db: &dyn HirDb) -> DynLazySpan { - todo!() - } - pub fn invalid() -> Self { Self { top_mod: TopLevelMod::invalid(), diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index fc15887c55..57901dbab8 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,7 +1,6 @@ use common::{InputDb, InputIngot, Upcast}; use hir_def::{module_tree_impl, IdentId, TopLevelMod}; pub use lower::parse::ParseDiagnostic; - use lower::{ map_file_to_mod_impl, parse::{parse_file_impl, ParseDiagnosticAccumulator}, @@ -76,7 +75,14 @@ pub(crate) fn external_ingots_impl( res } -pub trait HirDb: salsa::DbWithJar + InputDb + Upcast {} +pub trait HirDb: salsa::DbWithJar + InputDb + Upcast { + fn prefill(&self) + where + Self: Sized, + { + IdentId::prefill(self) + } +} impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} /// `LowerHirDb` is a marker trait for lowering AST to HIR items. 
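A minimal sketch of what the prefilled keywords buy callers, assuming `db: &dyn HirDb` points at a database on which `prefill` has already run (as the `TestDb` below does in its `Default` impl):

    let ident = IdentId::new(db, "super".to_string());
    assert_eq!(ident, kw::SUPER); // keywords now compare by their fixed interned id
    assert!(ident.is_super());    // and the `is_*` helpers no longer need a `db`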
@@ -101,6 +107,7 @@ pub trait SpannedHirDb: HirDb + Upcast {} #[cfg(test)] mod test_db { + use super::HirDb; use std::collections::BTreeSet; use common::{ @@ -115,11 +122,20 @@ mod test_db { LowerHirDb, SpannedHirDb, }; - #[derive(Default)] #[salsa::db(common::Jar, crate::Jar)] pub(crate) struct TestDb { storage: salsa::Storage, } + + impl Default for TestDb { + fn default() -> Self { + let db = Self { + storage: Default::default(), + }; + db.prefill(); + db + } + } impl SpannedHirDb for TestDb {} impl LowerHirDb for TestDb {} impl salsa::Database for TestDb { diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs index f32b990752..4cf07d9a7a 100644 --- a/crates/hir/src/lower/path.rs +++ b/crates/hir/src/lower/path.rs @@ -1,6 +1,6 @@ use parser::{ast, SyntaxToken}; -use crate::hir_def::{IdentId, Partial, PathId}; +use crate::hir_def::{kw, IdentId, Partial, PathId}; use super::FileLowerCtxt; @@ -9,10 +9,10 @@ impl PathId { let mut segments = Vec::new(); for seg in ast.into_iter() { let segment = match seg.kind() { - Some(ast::PathSegmentKind::Ingot(_)) => Some(IdentId::ingot_kw(ctxt.db())), - Some(ast::PathSegmentKind::Super(_)) => Some(IdentId::super_kw(ctxt.db())), - Some(ast::PathSegmentKind::SelfTy(_)) => Some(IdentId::self_ty_kw(ctxt.db())), - Some(ast::PathSegmentKind::Self_(_)) => Some(IdentId::self_kw(ctxt.db())), + Some(ast::PathSegmentKind::Ingot(_)) => Some(kw::INGOT), + Some(ast::PathSegmentKind::Super(_)) => Some(kw::SUPER), + Some(ast::PathSegmentKind::SelfTy(_)) => Some(kw::SELF_TY), + Some(ast::PathSegmentKind::Self_(_)) => Some(kw::SELF), Some(ast::PathSegmentKind::Ident(ident)) => Some(IdentId::lower_token(ctxt, ident)), None => None, } diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index 7d0ac3d66f..13459a6feb 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -1,6 +1,6 @@ use parser::ast; -use crate::hir_def::{use_tree::*, IdentId, Partial}; +use crate::hir_def::{kw, use_tree::*, IdentId, Partial}; use super::FileLowerCtxt; @@ -43,12 +43,12 @@ impl UsePathSegment { ) -> Partial { ast.kind() .map(|kind| match kind { - ast::UsePathSegmentKind::Ingot(_) => Self::Ident(IdentId::ingot_kw(ctxt.db())), - ast::UsePathSegmentKind::Super(_) => Self::Ident(IdentId::super_kw(ctxt.db())), + ast::UsePathSegmentKind::Ingot(_) => Self::Ident(kw::INGOT), + ast::UsePathSegmentKind::Super(_) => Self::Ident(kw::SUPER), ast::UsePathSegmentKind::Ident(ident) => { Self::Ident(IdentId::lower_token(ctxt, ident)) } - ast::UsePathSegmentKind::Self_(_) => Self::Ident(IdentId::self_kw(ctxt.db())), + ast::UsePathSegmentKind::Self_(_) => Self::Ident(kw::SELF), ast::UsePathSegmentKind::Glob(_) => Self::Glob, }) .into() diff --git a/crates/macros/Cargo.toml b/crates/macros/Cargo.toml new file mode 100644 index 0000000000..820f28f5e0 --- /dev/null +++ b/crates/macros/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "fe-macros" +authors = ["The Fe Project Developers"] +version = "0.1.1" +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides procudural macros for Fe lang." 
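// A rough sketch of what a one-entry invocation such as `define_keywords! { (SUPER, "super") }`
// is expected to expand to, inferred from the `KeywordDefiner` implementation in
// crates/macros/src/kw.rs below (this expansion is an assumption, not generated output):
// a positional const `IdentId` built from a fixed salsa index, plus a `prefill` method
// that interns the keyword string and asserts the two ids line up.
pub const SUPER: crate::hir_def::ident::IdentId =
    crate::hir_def::ident::IdentId(::salsa::Id::from_u32(0));

impl crate::hir_def::ident::IdentId {
    pub fn prefill(db: &dyn crate::HirDb) {
        // Interning must happen before any other identifier so the index matches.
        let generated_kw = crate::hir_def::ident::IdentId::new(db, "super".to_string());
        assert_eq!(generated_kw, SUPER);
    }
}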
+ +[lib] +proc_macro = true + +[dependencies] +syn = { version = "2.0", features = ["full"] } +proc-macro2 = "1.0" +quote = "1.0" +glob = "0.3" diff --git a/crates/macros/src/kw.rs b/crates/macros/src/kw.rs new file mode 100644 index 0000000000..dd82ac788d --- /dev/null +++ b/crates/macros/src/kw.rs @@ -0,0 +1,111 @@ +use std::collections::HashSet; + +use proc_macro2::TokenStream; +use quote::quote; + +use super::{Error, Result}; + +pub(super) fn define_keywords(attrs: proc_macro::TokenStream) -> Result { + let args = syn::parse::(attrs)?; + + let definer = KeywordDefiner { args }; + Ok(definer.build().into()) +} + +struct KeywordDefiner { + args: Args, +} + +impl KeywordDefiner { + fn build(self) -> TokenStream { + let keywords = self.define_keywords(); + let prefill_method = self.define_prefill_method(); + quote! { + #keywords + #prefill_method + } + } + + fn define_keywords(&self) -> TokenStream { + let mut stream = vec![]; + for (i, (kw, _)) in self.args.0.iter().enumerate() { + let ident_id = Self::ident_id(i); + stream.push(quote!( + pub const #kw: crate::hir_def::ident::IdentId = #ident_id; + )) + } + + quote! { + #(#stream)* + } + } + + fn define_prefill_method(&self) -> TokenStream { + let mut prefills = vec![]; + for (kw, kw_str) in self.args.0.iter() { + let kw_str = kw_str.value(); + prefills.push(quote!( + let generated_kw = crate::hir_def::ident::IdentId::new(db, #kw_str.to_string()); + assert_eq!(generated_kw, #kw); + )); + } + + quote! { + impl crate::hir_def::ident::IdentId { + pub fn prefill(db: &dyn crate::HirDb) { + #(#prefills)* + } + } + } + } + + fn ident_id(index: usize) -> TokenStream { + quote! { + crate::hir_def::ident::IdentId(::salsa::Id::from_u32((#index) as u32)) + } + } +} + +struct Args(Vec<(syn::Ident, syn::LitStr)>); +impl syn::parse::Parse for Args { + fn parse(input: syn::parse::ParseStream) -> Result { + let mut seen_kws = HashSet::new(); + let mut seen_kw_str = HashSet::new(); + let mut kws = vec![]; + + while !input.is_empty() { + let keyword; + syn::parenthesized!(keyword in input); + let kw = keyword.parse::()?; + keyword.parse::()?; + let kw_str = keyword.parse::()?; + + if !seen_kws.insert(kw.to_string()) { + return Err(Error::new_spanned( + kw.clone(), + format!("duplicated keyword `{kw}"), + )); + } + if !seen_kw_str.insert(kw_str.value()) { + return Err(Error::new_spanned( + kw_str.clone(), + format!("duplicated keyword string `{}`", kw_str.value()), + )); + } + kws.push((kw, kw_str)); + + if input.parse::().is_err() { + break; + } + } + + if !input.is_empty() { + return Err(Error::new_spanned( + input.parse::()?, + "unexpected token", + )); + } + + Ok(Args(kws)) + } +} diff --git a/crates/macros/src/lib.rs b/crates/macros/src/lib.rs new file mode 100644 index 0000000000..e8894e362d --- /dev/null +++ b/crates/macros/src/lib.rs @@ -0,0 +1,12 @@ +mod kw; + +#[proc_macro] +pub fn define_keywords(attrs: proc_macro::TokenStream) -> proc_macro::TokenStream { + match kw::define_keywords(attrs) { + Ok(tokens) => tokens, + Err(e) => e.to_compile_error().into(), + } +} + +type Error = syn::Error; +type Result = syn::Result; From 5a0c25c5ef88d05764511e06dfab232a775073cc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 1 May 2023 17:54:04 +0200 Subject: [PATCH 147/678] Integrate builtin types into name resolution --- Cargo.lock | 5 +- crates/hir-analysis/Cargo.toml | 1 + .../src/name_resolution/name_resolver.rs | 169 +++++++++++------- 3 files changed, 112 insertions(+), 63 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
a4c8864a23..0420227122 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -926,6 +926,7 @@ dependencies = [ "either", "fe-common", "fe-hir", + "fe-macros", "rustc-hash", "salsa-2022", "smallvec", @@ -2126,7 +2127,7 @@ dependencies = [ [[package]] name = "salsa-2022" version = "0.1.0" -source = "git+https://github.com/salsa-rs/salsa#67d290dc26f0026d93982f2611b1dc2f6058ebd4" +source = "git+https://github.com/salsa-rs/salsa#d4a94fbf07bb837f3d9d0a4caa5db4d5db29243f" dependencies = [ "arc-swap", "crossbeam", @@ -2144,7 +2145,7 @@ dependencies = [ [[package]] name = "salsa-2022-macros" version = "0.1.0" -source = "git+https://github.com/salsa-rs/salsa#67d290dc26f0026d93982f2611b1dc2f6058ebd4" +source = "git+https://github.com/salsa-rs/salsa#d4a94fbf07bb837f3d9d0a4caa5db4d5db29243f" dependencies = [ "eyre", "heck 0.4.1", diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index 3d4f3d2ca4..a95b5d232b 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -15,3 +15,4 @@ either = "1.8" hir = { path = "../hir", package = "fe-hir" } common = { path = "../common", package = "fe-common" } +macros = { path = "../macros", package = "fe-macros" } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index dc68a2c566..8dd2ff7f56 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -1,6 +1,7 @@ use either::Either; use hir::{ hir_def::{ + kw, scope_graph::{ AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, ScopeEdge, ScopeId, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, @@ -26,8 +27,8 @@ pub struct NameResolver<'db, 'a> { pub enum ResolvedPath { FullResolved(ScopeId), - /// The path is partially resolved; this means that the segments from - /// `unresolved_from` depend on a type. + /// The path is partially resolved; this means that the `resolved` is a type + /// and the following segments depend on type to resolve. /// These unresolved parts are resolved in the later type inference and /// trait solving phases. PartialResolved { @@ -66,34 +67,34 @@ impl<'db, 'a> NameResolver<'db, 'a> { // The shadowing rule is // `$ = NamedImports > GlobImports > Lex > external ingot > builtin types`, - // where `$` means current scope. This ordering means that - // greater scope shadows lower scopes having the same name in the same - // domain and + // where `$` means current scope. + // This ordering means that greater one shadows lower ones having the same name + // in the same domain. - // 1. Look for the name in the current scope and named imports. let mut results = Vec::new(); - let mut found_scopes = FxHashSet::default(); let mut parent = None; + // 1. Look for the name in the current scope and named imports. 
+ let mut found_scopes = FxHashSet::default(); for edge in self.edges(scope) { - match edge.kind.propagate(self.db, query) { + match edge.kind.propagate(query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { results.push(ResolvedName::scope(edge.dest, None)); } } - PropagatedQuery::Continuation => { + PropagatedQuery::Continuation if query.option.allow_lex => { debug_assert!(parent.is_none()); parent = Some(edge.dest); } - PropagatedQuery::UnPropagated => {} + _ => {} } } for named_import in self.importer.named_imports(scope) { let edge = &named_import.data; - match edge.kind.propagate(self.db, query) { + match edge.kind.propagate(query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { results.push(ResolvedName::scope( @@ -114,7 +115,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { // 2. Look for the name in the glob imports. for glob_import in self.importer.glob_imports(scope) { let edge = &glob_import.data; - match edge.kind.propagate(self.db, query) { + match edge.kind.propagate(query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { results.push(ResolvedName::scope( @@ -155,7 +156,9 @@ impl<'db, 'a> NameResolver<'db, 'a> { } // 5. Look for the name in the builtin types. - // TODO: Think about how to handle builtin types. + if let Some(builtin) = BuiltinName::lookup_for(query.name) { + results.push(ResolvedName::Builtin(builtin)); + } self.cache_store .cache_resolved(scope, query, results.clone()); @@ -172,6 +175,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { pub struct NameQuery { name: IdentId, domain: NameContext, + option: QueryOption, } #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -183,6 +187,45 @@ pub enum ResolvedName { }, } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct QueryOption { + allow_lex: bool, +} + +impl QueryOption { + pub fn new() -> Self { + Self { allow_lex: true } + } + + pub fn disallow_lex(&mut self) -> &mut Self { + self.allow_lex = false; + self + } +} + +impl ResolvedName { + pub fn scope(scope: ScopeId, import_span: Option) -> Self { + Self::Scope { scope, import_span } + } + + pub fn builtin(builtin: BuiltinName) -> Self { + Self::Builtin(builtin) + } + + pub fn is_valid(&self) -> bool { + match self { + Self::Scope { scope, .. } => scope.is_valid(), + Self::Builtin(_) => true, + } + } +} + +impl Default for QueryOption { + fn default() -> Self { + Self::new() + } +} + #[derive(Clone, Debug, Copy, PartialEq, Eq, Hash)] pub enum BuiltinName { Bool, @@ -200,22 +243,26 @@ pub enum BuiltinName { I256, } -impl ResolvedName { - pub fn scope(scope: ScopeId, import_span: Option) -> Self { - Self::Scope { scope, import_span } - } - - pub fn builtin(builtin: BuiltinName) -> Self { - Self::Builtin(builtin) - } -} - -impl ResolvedName { - pub fn is_valid(&self) -> bool { - match self { - Self::Scope { scope, .. } => scope.is_valid(), - Self::Builtin(_) => true, - } +impl BuiltinName { + /// Returns the builtin name if the `name` is a builtin name. + pub fn lookup_for(name: IdentId) -> Option { + match name { + kw::BOOL => Self::Bool, + kw::U8 => Self::U8, + kw::U16 => Self::U16, + kw::U32 => Self::U32, + kw::U64 => Self::U64, + kw::U128 => Self::U128, + kw::U256 => Self::U256, + kw::I8 => Self::I8, + kw::I16 => Self::I16, + kw::I32 => Self::I32, + kw::I64 => Self::I64, + kw::I128 => Self::I128, + kw::I256 => Self::I256, + _ => return None, + } + .into() } } @@ -260,7 +307,7 @@ pub enum NameContext { trait QueryPropagator { // TODO: `db` is not necessary if we implement prefilled keywords. 
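// A small sketch, assuming only the items defined above in this file: builtin-type
// lookup is now a pure match over the prefilled keyword ids, so resolution step 5
// ("look for the name in the builtin types") needs no database handle.
// `resolve_builtin` is a hypothetical helper, not an API added by this patch.
fn resolve_builtin(name: IdentId) -> Option<ResolvedName> {
    BuiltinName::lookup_for(name).map(ResolvedName::builtin)
}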
- fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery; + fn propagate(&self, query: NameQuery) -> PropagatedQuery; } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -271,13 +318,13 @@ enum PropagatedQuery { } impl QueryPropagator for LexEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { + fn propagate(&self, _query: NameQuery) -> PropagatedQuery { PropagatedQuery::Continuation } } impl QueryPropagator for ModEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { match query.domain { NameContext::Item if self.0 == query.name => PropagatedQuery::Terminated, _ => PropagatedQuery::UnPropagated, @@ -286,7 +333,7 @@ impl QueryPropagator for ModEdge { } impl QueryPropagator for TypeEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Item | NameContext::Type) { return PropagatedQuery::UnPropagated; } @@ -300,7 +347,7 @@ impl QueryPropagator for TypeEdge { } impl QueryPropagator for TraitEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Item | NameContext::Type) { return PropagatedQuery::UnPropagated; } @@ -314,7 +361,7 @@ impl QueryPropagator for TraitEdge { } impl QueryPropagator for ValueEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Item | NameContext::Value) { return PropagatedQuery::UnPropagated; } @@ -328,7 +375,7 @@ impl QueryPropagator for ValueEdge { } impl QueryPropagator for GenericParamEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Item | NameContext::Type) { return PropagatedQuery::UnPropagated; } @@ -342,7 +389,7 @@ impl QueryPropagator for GenericParamEdge { } impl QueryPropagator for FieldEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Field) { return PropagatedQuery::UnPropagated; } @@ -356,7 +403,7 @@ impl QueryPropagator for FieldEdge { } impl QueryPropagator for VariantEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Variant) { return PropagatedQuery::UnPropagated; } @@ -370,12 +417,12 @@ impl QueryPropagator for VariantEdge { } impl QueryPropagator for SuperEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Item) { return PropagatedQuery::UnPropagated; } - if query.name.is_super(db.upcast()) { + if query.name.is_super() { PropagatedQuery::Terminated } else { PropagatedQuery::UnPropagated @@ -384,12 +431,12 @@ impl QueryPropagator for SuperEdge { } impl QueryPropagator for IngotEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> 
PropagatedQuery { if !matches!(query.domain, NameContext::Item) { return PropagatedQuery::UnPropagated; } - if query.name.is_ingot(db.upcast()) { + if query.name.is_ingot() { PropagatedQuery::Terminated } else { PropagatedQuery::UnPropagated @@ -398,12 +445,12 @@ impl QueryPropagator for IngotEdge { } impl QueryPropagator for SelfTyEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Item | NameContext::Type) { return PropagatedQuery::UnPropagated; } - if query.name.is_self_ty(db.upcast()) { + if query.name.is_self_ty() { PropagatedQuery::Terminated } else { PropagatedQuery::UnPropagated @@ -412,12 +459,12 @@ impl QueryPropagator for SelfTyEdge { } impl QueryPropagator for SelfEdge { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { if !matches!(query.domain, NameContext::Item | NameContext::Value) { return PropagatedQuery::UnPropagated; } - if query.name.is_self(db.upcast()) { + if query.name.is_self() { PropagatedQuery::Terminated } else { PropagatedQuery::UnPropagated @@ -426,27 +473,27 @@ impl QueryPropagator for SelfEdge { } impl QueryPropagator for AnonEdge { - fn propagate(&self, _db: &dyn HirAnalysisDb, _query: NameQuery) -> PropagatedQuery { + fn propagate(&self, _query: NameQuery) -> PropagatedQuery { PropagatedQuery::UnPropagated } } impl QueryPropagator for EdgeKind { - fn propagate(&self, db: &dyn HirAnalysisDb, query: NameQuery) -> PropagatedQuery { + fn propagate(&self, query: NameQuery) -> PropagatedQuery { match self { - EdgeKind::Lex(edge) => edge.propagate(db, query), - EdgeKind::Mod(edge) => edge.propagate(db, query), - EdgeKind::Type(edge) => edge.propagate(db, query), - EdgeKind::Trait(edge) => edge.propagate(db, query), - EdgeKind::GenericParam(edge) => edge.propagate(db, query), - EdgeKind::Value(edge) => edge.propagate(db, query), - EdgeKind::Field(edge) => edge.propagate(db, query), - EdgeKind::Variant(edge) => edge.propagate(db, query), - EdgeKind::Super(edge) => edge.propagate(db, query), - EdgeKind::Ingot(edge) => edge.propagate(db, query), - EdgeKind::Self_(edge) => edge.propagate(db, query), - EdgeKind::SelfTy(edge) => edge.propagate(db, query), - EdgeKind::Anon(edge) => edge.propagate(db, query), + EdgeKind::Lex(edge) => edge.propagate(query), + EdgeKind::Mod(edge) => edge.propagate(query), + EdgeKind::Type(edge) => edge.propagate(query), + EdgeKind::Trait(edge) => edge.propagate(query), + EdgeKind::GenericParam(edge) => edge.propagate(query), + EdgeKind::Value(edge) => edge.propagate(query), + EdgeKind::Field(edge) => edge.propagate(query), + EdgeKind::Variant(edge) => edge.propagate(query), + EdgeKind::Super(edge) => edge.propagate(query), + EdgeKind::Ingot(edge) => edge.propagate(query), + EdgeKind::Self_(edge) => edge.propagate(query), + EdgeKind::SelfTy(edge) => edge.propagate(query), + EdgeKind::Anon(edge) => edge.propagate(query), } } } From cf252f62de1d2f24e5092a5eeff7fc42bb7e7fb1 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 3 May 2023 00:58:00 +0200 Subject: [PATCH 148/678] Implement path resolution --- Cargo.lock | 1 + crates/analyzer/benches/bench.rs | 3 +- crates/analyzer/src/context.rs | 37 +- crates/analyzer/src/db.rs | 24 +- crates/analyzer/src/db/queries/contracts.rs | 22 +- crates/analyzer/src/db/queries/functions.rs | 33 +- crates/analyzer/src/db/queries/impls.rs | 15 +- 
crates/analyzer/src/db/queries/structs.rs | 31 +- crates/analyzer/src/db/queries/traits.rs | 17 +- crates/analyzer/src/db/queries/types.rs | 20 +- crates/analyzer/src/errors.rs | 6 +- crates/analyzer/src/namespace/items.rs | 38 +- crates/analyzer/src/namespace/scopes.rs | 33 +- crates/analyzer/src/namespace/types.rs | 22 +- crates/analyzer/src/operations.rs | 8 +- crates/analyzer/src/traversal/assignments.rs | 23 +- crates/analyzer/src/traversal/borrowck.rs | 12 +- crates/analyzer/src/traversal/call_args.rs | 22 +- crates/analyzer/src/traversal/declarations.rs | 19 +- crates/analyzer/src/traversal/expressions.rs | 49 +- crates/analyzer/src/traversal/functions.rs | 29 +- crates/analyzer/src/traversal/pragma.rs | 3 +- crates/analyzer/src/traversal/types.rs | 36 +- crates/analyzer/src/traversal/utils.rs | 15 +- crates/analyzer/tests/analysis.rs | 20 +- crates/analyzer/tests/errors.rs | 9 +- crates/codegen/src/yul/isel/function.rs | 4 +- crates/common/src/diagnostics.rs | 14 +- crates/common/src/files.rs | 3 +- crates/common/src/span.rs | 8 +- crates/driver/src/lib.rs | 4 +- crates/fe/src/task/build.rs | 11 +- crates/hir-analysis/Cargo.toml | 1 + .../src/name_resolution/import_resolver.rs | 26 +- .../src/name_resolution/name_resolver.rs | 628 +++++++++++++----- .../src/name_resolution/visibility_checker.rs | 16 +- crates/hir/src/hir_def/item.rs | 15 +- crates/hir/src/hir_def/path.rs | 2 +- crates/hir/src/hir_def/scope_graph.rs | 27 +- crates/mir/src/db/queries/function.rs | 8 +- crates/mir/src/ir/function.rs | 3 +- crates/mir/src/ir/types.rs | 3 +- crates/parser/src/ast.rs | 7 +- crates/parser/src/grammar/contracts.rs | 14 +- crates/parser/src/grammar/expressions.rs | 8 +- crates/parser/src/grammar/functions.rs | 17 +- crates/parser/src/grammar/module.rs | 22 +- crates/parser/src/grammar/types.rs | 17 +- crates/parser/src/lexer/token.rs | 3 +- crates/parser/src/lib.rs | 3 +- crates/parser/src/parser.rs | 14 +- crates/parser/tests/cases/errors.rs | 10 +- crates/parser/tests/cases/parse_ast.rs | 16 +- crates/parser/tests/cases/print_ast.rs | 4 +- crates/test-utils/src/lib.rs | 13 +- crates/tests-legacy/src/ingots.rs | 5 +- 56 files changed, 939 insertions(+), 534 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0420227122..0225c30a38 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -923,6 +923,7 @@ dependencies = [ name = "fe-hir-analysis" version = "0.20.0-alpha" dependencies = [ + "derive_more", "either", "fe-common", "fe-hir", diff --git a/crates/analyzer/benches/bench.rs b/crates/analyzer/benches/bench.rs index b9403a89a9..09f5e734d7 100644 --- a/crates/analyzer/benches/bench.rs +++ b/crates/analyzer/benches/bench.rs @@ -1,6 +1,5 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; -use fe_analyzer::namespace::items::ModuleId; -use fe_analyzer::TestDb; +use fe_analyzer::{namespace::items::ModuleId, TestDb}; fn criterion_benchmark(c: &mut Criterion) { let path = "demos/uniswap.fe"; diff --git a/crates/analyzer/src/context.rs b/crates/analyzer/src/context.rs index 80ca7da587..2aa7d57bd6 100644 --- a/crates/analyzer/src/context.rs +++ b/crates/analyzer/src/context.rs @@ -5,33 +5,34 @@ use crate::{ pattern_analysis::PatternMatrix, }; -use crate::namespace::items::{ - ContractId, DiagnosticSink, FunctionId, FunctionSigId, Item, TraitId, -}; -use crate::namespace::types::{Generic, SelfDecl, Type, TypeId}; -use crate::AnalyzerDb; use crate::{ builtins::{ContractTypeMethod, GlobalFunction, Intrinsic, ValueMethod}, - namespace::scopes::BlockScopeType, -}; -use crate::{ 
errors::{self, IncompleteItem, TypeError}, - namespace::items::ModuleId, + namespace::{ + items::{ContractId, DiagnosticSink, FunctionId, FunctionSigId, Item, ModuleId, TraitId}, + scopes::BlockScopeType, + types::{Generic, SelfDecl, Type, TypeId}, + }, + AnalyzerDb, }; -use fe_common::diagnostics::Diagnostic; pub use fe_common::diagnostics::Label; -use fe_common::Span; -use fe_parser::ast; -use fe_parser::node::{Node, NodeId}; +use fe_common::{diagnostics::Diagnostic, Span}; +use fe_parser::{ + ast, + node::{Node, NodeId}, +}; use indexmap::IndexMap; use num_bigint::BigInt; use smol_str::SmolStr; -use std::fmt::{self, Debug}; -use std::hash::Hash; -use std::marker::PhantomData; -use std::rc::Rc; -use std::{cell::RefCell, collections::HashMap}; +use std::{ + cell::RefCell, + collections::HashMap, + fmt::{self, Debug}, + hash::Hash, + marker::PhantomData, + rc::Rc, +}; #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Analysis { diff --git a/crates/analyzer/src/db.rs b/crates/analyzer/src/db.rs index f094a51ecc..a8646e5c51 100644 --- a/crates/analyzer/src/db.rs +++ b/crates/analyzer/src/db.rs @@ -1,19 +1,19 @@ -use crate::namespace::items::{ - self, AttributeId, ContractFieldId, ContractId, DepGraphWrapper, EnumVariantKind, FunctionId, - FunctionSigId, ImplId, IngotId, Item, ModuleConstantId, ModuleId, StructFieldId, StructId, - TraitId, TypeAliasId, -}; -use crate::namespace::types::{self, Type, TypeId}; use crate::{ context::{Analysis, Constant, FunctionBody}, - namespace::items::EnumId, -}; -use crate::{ errors::{ConstEvalError, TypeError}, - namespace::items::EnumVariantId, + namespace::{ + items::{ + self, AttributeId, ContractFieldId, ContractId, DepGraphWrapper, EnumId, EnumVariantId, + EnumVariantKind, FunctionId, FunctionSigId, ImplId, IngotId, Item, ModuleConstantId, + ModuleId, StructFieldId, StructId, TraitId, TypeAliasId, + }, + types::{self, Type, TypeId}, + }, +}; +use fe_common::{ + db::{SourceDb, SourceDbStorage, Upcast, UpcastMut}, + SourceFileId, Span, }; -use fe_common::db::{SourceDb, SourceDbStorage, Upcast, UpcastMut}; -use fe_common::{SourceFileId, Span}; use fe_parser::ast; use indexmap::map::IndexMap; use smol_str::SmolStr; diff --git a/crates/analyzer/src/db/queries/contracts.rs b/crates/analyzer/src/db/queries/contracts.rs index 15a9080c02..dfb5087c57 100644 --- a/crates/analyzer/src/db/queries/contracts.rs +++ b/crates/analyzer/src/db/queries/contracts.rs @@ -1,13 +1,17 @@ -use crate::context::AnalyzerContext; -use crate::db::{Analysis, AnalyzerDb}; -use crate::errors; -use crate::namespace::items::{ - self, ContractFieldId, ContractId, DepGraph, DepGraphWrapper, DepLocality, FunctionId, Item, - TypeDef, +use crate::{ + context::AnalyzerContext, + db::{Analysis, AnalyzerDb}, + errors, + namespace::{ + items::{ + self, ContractFieldId, ContractId, DepGraph, DepGraphWrapper, DepLocality, FunctionId, + Item, TypeDef, + }, + scopes::ItemScope, + types::{self, Type}, + }, + traversal::types::type_desc, }; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::{self, Type}; -use crate::traversal::types::type_desc; use fe_common::diagnostics::Label; use fe_parser::ast; use indexmap::map::{Entry, IndexMap}; diff --git a/crates/analyzer/src/db/queries/functions.rs b/crates/analyzer/src/db/queries/functions.rs index 13d5575d31..8707155f50 100644 --- a/crates/analyzer/src/db/queries/functions.rs +++ b/crates/analyzer/src/db/queries/functions.rs @@ -1,21 +1,26 @@ -use crate::context::{AnalyzerContext, CallType, FunctionBody}; -use crate::db::{Analysis, 
AnalyzerDb}; -use crate::display::Displayable; -use crate::errors::TypeError; -use crate::namespace::items::{ - DepGraph, DepGraphWrapper, DepLocality, FunctionId, FunctionSigId, Item, TypeDef, +use crate::{ + context::{AnalyzerContext, CallType, FunctionBody}, + db::{Analysis, AnalyzerDb}, + display::Displayable, + errors::TypeError, + namespace::{ + items::{DepGraph, DepGraphWrapper, DepLocality, FunctionId, FunctionSigId, Item, TypeDef}, + scopes::{BlockScope, BlockScopeType, FunctionScope, ItemScope}, + types::{self, CtxDecl, Generic, SelfDecl, Type, TypeId}, + }, + traversal::{ + functions::traverse_statements, + types::{type_desc, type_desc_to_trait}, + }, }; -use crate::namespace::scopes::{BlockScope, BlockScopeType, FunctionScope, ItemScope}; -use crate::namespace::types::{self, CtxDecl, Generic, SelfDecl, Type, TypeId}; -use crate::traversal::functions::traverse_statements; -use crate::traversal::types::{type_desc, type_desc_to_trait}; use fe_common::diagnostics::Label; -use fe_parser::ast::{self, GenericParameter}; -use fe_parser::node::Node; +use fe_parser::{ + ast::{self, GenericParameter}, + node::Node, +}; use if_chain::if_chain; use smol_str::SmolStr; -use std::collections::HashMap; -use std::rc::Rc; +use std::{collections::HashMap, rc::Rc}; /// Gather context information for a function definition and check for type /// errors. Does not inspect the function body. diff --git a/crates/analyzer/src/db/queries/impls.rs b/crates/analyzer/src/db/queries/impls.rs index 86b39ac7eb..17336e8bf0 100644 --- a/crates/analyzer/src/db/queries/impls.rs +++ b/crates/analyzer/src/db/queries/impls.rs @@ -1,11 +1,14 @@ -use indexmap::map::Entry; -use indexmap::IndexMap; +use indexmap::{map::Entry, IndexMap}; use smol_str::SmolStr; -use crate::context::{Analysis, AnalyzerContext}; -use crate::namespace::items::{Function, FunctionId, ImplId, Item}; -use crate::namespace::scopes::ItemScope; -use crate::AnalyzerDb; +use crate::{ + context::{Analysis, AnalyzerContext}, + namespace::{ + items::{Function, FunctionId, ImplId, Item}, + scopes::ItemScope, + }, + AnalyzerDb, +}; use std::rc::Rc; pub fn impl_all_functions(db: &dyn AnalyzerDb, impl_: ImplId) -> Rc<[FunctionId]> { diff --git a/crates/analyzer/src/db/queries/structs.rs b/crates/analyzer/src/db/queries/structs.rs index e3786aed27..fe8ac40e97 100644 --- a/crates/analyzer/src/db/queries/structs.rs +++ b/crates/analyzer/src/db/queries/structs.rs @@ -1,22 +1,25 @@ -use crate::builtins; -use crate::constants::MAX_INDEXED_EVENT_FIELDS; -use crate::context::AnalyzerContext; -use crate::db::Analysis; -use crate::errors::TypeError; -use crate::namespace::items::{ - self, DepGraph, DepGraphWrapper, DepLocality, FunctionId, Item, StructField, StructFieldId, - StructId, TypeDef, +use crate::{ + builtins, + constants::MAX_INDEXED_EVENT_FIELDS, + context::AnalyzerContext, + db::Analysis, + errors::TypeError, + namespace::{ + items::{ + self, DepGraph, DepGraphWrapper, DepLocality, FunctionId, Item, StructField, + StructFieldId, StructId, TypeDef, + }, + scopes::ItemScope, + types::{Type, TypeId}, + }, + traversal::types::type_desc, + AnalyzerDb, }; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::{Type, TypeId}; -use crate::traversal::types::type_desc; -use crate::AnalyzerDb; use fe_common::utils::humanize::pluralize_conditionally; use fe_parser::{ast, Label}; use indexmap::map::{Entry, IndexMap}; use smol_str::SmolStr; -use std::rc::Rc; -use std::str::FromStr; +use std::{rc::Rc, str::FromStr}; pub fn struct_all_fields(db: &dyn 
AnalyzerDb, struct_: StructId) -> Rc<[StructFieldId]> { struct_ diff --git a/crates/analyzer/src/db/queries/traits.rs b/crates/analyzer/src/db/queries/traits.rs index 435dc69653..1dcff8c68d 100644 --- a/crates/analyzer/src/db/queries/traits.rs +++ b/crates/analyzer/src/db/queries/traits.rs @@ -1,12 +1,15 @@ -use indexmap::map::Entry; -use indexmap::IndexMap; +use indexmap::{map::Entry, IndexMap}; use smol_str::SmolStr; -use crate::context::{Analysis, AnalyzerContext}; -use crate::namespace::items::{FunctionSig, FunctionSigId, Item, TraitId}; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::TypeId; -use crate::AnalyzerDb; +use crate::{ + context::{Analysis, AnalyzerContext}, + namespace::{ + items::{FunctionSig, FunctionSigId, Item, TraitId}, + scopes::ItemScope, + types::TypeId, + }, + AnalyzerDb, +}; use std::rc::Rc; pub fn trait_all_functions(db: &dyn AnalyzerDb, trait_: TraitId) -> Rc<[FunctionSigId]> { diff --git a/crates/analyzer/src/db/queries/types.rs b/crates/analyzer/src/db/queries/types.rs index eec14d4a45..218f53e91e 100644 --- a/crates/analyzer/src/db/queries/types.rs +++ b/crates/analyzer/src/db/queries/types.rs @@ -2,14 +2,18 @@ use std::rc::Rc; use smol_str::SmolStr; -use crate::context::{AnalyzerContext, TempContext}; -use crate::db::Analysis; -use crate::errors::TypeError; -use crate::namespace::items::{FunctionSigId, ImplId, TraitId, TypeAliasId}; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::{self, TypeId}; -use crate::traversal::types::type_desc; -use crate::AnalyzerDb; +use crate::{ + context::{AnalyzerContext, TempContext}, + db::Analysis, + errors::TypeError, + namespace::{ + items::{FunctionSigId, ImplId, TraitId, TypeAliasId}, + scopes::ItemScope, + types::{self, TypeId}, + }, + traversal::types::type_desc, + AnalyzerDb, +}; /// Returns all `impl` for the given type from the current ingot as well as /// dependency ingots diff --git a/crates/analyzer/src/errors.rs b/crates/analyzer/src/errors.rs index baf4712f7a..f104521d51 100644 --- a/crates/analyzer/src/errors.rs +++ b/crates/analyzer/src/errors.rs @@ -1,8 +1,10 @@ //! Semantic errors. use crate::context::{DiagnosticVoucher, NamedThing}; -use fe_common::diagnostics::{Diagnostic, Label, Severity}; -use fe_common::Span; +use fe_common::{ + diagnostics::{Diagnostic, Label, Severity}, + Span, +}; use std::fmt::Display; /// Error indicating that a type is invalid. 
diff --git a/crates/analyzer/src/namespace/items.rs b/crates/analyzer/src/namespace/items.rs index 4504f824b7..868d609582 100644 --- a/crates/analyzer/src/namespace/items.rs +++ b/crates/analyzer/src/namespace/items.rs @@ -1,23 +1,27 @@ -use crate::constants::{EMITTABLE_TRAIT_NAME, INDEXED}; -use crate::context::{self, Analysis, Constant, NamedThing}; -use crate::display::{DisplayWithDb, Displayable}; -use crate::errors::{self, IncompleteItem, TypeError}; -use crate::namespace::types::{self, GenericType, Type, TypeId}; -use crate::traversal::pragma::check_pragma_version; -use crate::AnalyzerDb; -use crate::{builtins, errors::ConstEvalError}; -use fe_common::diagnostics::Diagnostic; -use fe_common::diagnostics::Label; -use fe_common::files::{common_prefix, Utf8Path}; -use fe_common::{impl_intern_key, FileKind, SourceFileId}; -use fe_parser::ast::GenericParameter; -use fe_parser::node::{Node, Span}; -use fe_parser::{ast, node::NodeId}; +use crate::{ + builtins, + constants::{EMITTABLE_TRAIT_NAME, INDEXED}, + context::{self, Analysis, Constant, NamedThing}, + display::{DisplayWithDb, Displayable}, + errors::{self, ConstEvalError, IncompleteItem, TypeError}, + namespace::types::{self, GenericType, Type, TypeId}, + traversal::pragma::check_pragma_version, + AnalyzerDb, +}; +use fe_common::{ + diagnostics::{Diagnostic, Label}, + files::{common_prefix, Utf8Path}, + impl_intern_key, FileKind, SourceFileId, +}; +use fe_parser::{ + ast, + ast::GenericParameter, + node::{Node, NodeId, Span}, +}; use indexmap::{indexmap, IndexMap}; use smallvec::SmallVec; use smol_str::SmolStr; -use std::rc::Rc; -use std::{fmt, ops::Deref}; +use std::{fmt, ops::Deref, rc::Rc}; use strum::IntoEnumIterator; use super::types::TraitOrType; diff --git a/crates/analyzer/src/namespace/scopes.rs b/crates/analyzer/src/namespace/scopes.rs index 6fc7eb0aba..affaf3da96 100644 --- a/crates/analyzer/src/namespace/scopes.rs +++ b/crates/analyzer/src/namespace/scopes.rs @@ -1,21 +1,26 @@ #![allow(unstable_name_collisions)] // expect_none, which ain't gonna be stabilized -use crate::context::{ - AnalyzerContext, CallType, Constant, ExpressionAttributes, FunctionBody, NamedThing, +use crate::{ + context::{ + AnalyzerContext, CallType, Constant, ExpressionAttributes, FunctionBody, NamedThing, + }, + errors::{AlreadyDefined, FatalError, IncompleteItem, TypeError}, + namespace::{ + items::{FunctionId, Item, ModuleId, TypeDef}, + types::{Type, TypeId}, + }, + pattern_analysis::PatternMatrix, + AnalyzerDb, +}; +use fe_common::{diagnostics::Diagnostic, Span}; +use fe_parser::{ + ast, + ast::Expr, + node::{Node, NodeId}, + Label, }; -use crate::errors::{AlreadyDefined, FatalError, IncompleteItem, TypeError}; -use crate::namespace::items::{FunctionId, ModuleId}; -use crate::namespace::items::{Item, TypeDef}; -use crate::namespace::types::{Type, TypeId}; -use crate::pattern_analysis::PatternMatrix; -use crate::AnalyzerDb; -use fe_common::diagnostics::Diagnostic; -use fe_common::Span; -use fe_parser::{ast, node::NodeId, Label}; -use fe_parser::{ast::Expr, node::Node}; use indexmap::IndexMap; -use std::cell::RefCell; -use std::collections::BTreeMap; +use std::{cell::RefCell, collections::BTreeMap}; pub struct ItemScope<'a> { db: &'a dyn AnalyzerDb, diff --git a/crates/analyzer/src/namespace/types.rs b/crates/analyzer/src/namespace/types.rs index 126b2e822d..74dc89cc57 100644 --- a/crates/analyzer/src/namespace/types.rs +++ b/crates/analyzer/src/namespace/types.rs @@ -1,20 +1,18 @@ -use crate::context::AnalyzerContext; -use 
crate::display::DisplayWithDb; -use crate::display::Displayable; -use crate::errors::TypeError; -use crate::namespace::items::{ - ContractId, EnumId, FunctionId, FunctionSigId, ImplId, Item, StructId, TraitId, +use crate::{ + context::AnalyzerContext, + display::{DisplayWithDb, Displayable}, + errors::TypeError, + namespace::items::{ + ContractId, EnumId, FunctionId, FunctionSigId, ImplId, Item, StructId, TraitId, + }, + AnalyzerDb, }; -use crate::AnalyzerDb; -use fe_common::impl_intern_key; -use fe_common::Span; +use fe_common::{impl_intern_key, Span}; use num_bigint::BigInt; use num_traits::ToPrimitive; use smol_str::SmolStr; -use std::fmt; -use std::rc::Rc; -use std::str::FromStr; +use std::{fmt, rc::Rc, str::FromStr}; use strum::{AsRefStr, EnumIter, EnumString}; pub fn u256_min() -> BigInt { diff --git a/crates/analyzer/src/operations.rs b/crates/analyzer/src/operations.rs index 31305ad854..59fe01d214 100644 --- a/crates/analyzer/src/operations.rs +++ b/crates/analyzer/src/operations.rs @@ -1,6 +1,8 @@ -use crate::context::AnalyzerContext; -use crate::errors::{BinaryOperationError, IndexingError}; -use crate::namespace::types::{Array, Integer, Map, TraitOrType, Type, TypeDowncast, TypeId}; +use crate::{ + context::AnalyzerContext, + errors::{BinaryOperationError, IndexingError}, + namespace::types::{Array, Integer, Map, TraitOrType, Type, TypeDowncast, TypeId}, +}; use crate::traversal::types::{deref_type, try_coerce_type}; use fe_parser::{ast as fe, node::Node}; diff --git a/crates/analyzer/src/traversal/assignments.rs b/crates/analyzer/src/traversal/assignments.rs index 973fa5649a..3ef9ce205e 100644 --- a/crates/analyzer/src/traversal/assignments.rs +++ b/crates/analyzer/src/traversal/assignments.rs @@ -1,13 +1,18 @@ -use crate::context::{AnalyzerContext, DiagnosticVoucher, NamedThing}; -use crate::errors::FatalError; -use crate::namespace::scopes::BlockScope; -use crate::namespace::types::{Type, TypeId}; -use crate::operations; -use crate::traversal::expressions; -use crate::traversal::utils::add_bin_operations_errors; +use crate::{ + context::{AnalyzerContext, DiagnosticVoucher, NamedThing}, + errors::FatalError, + namespace::{ + scopes::BlockScope, + types::{Type, TypeId}, + }, + operations, + traversal::{expressions, utils::add_bin_operations_errors}, +}; use fe_common::diagnostics::Label; -use fe_parser::ast as fe; -use fe_parser::node::{Node, Span}; +use fe_parser::{ + ast as fe, + node::{Node, Span}, +}; use smol_str::SmolStr; /// Gather context information for assignments and check for type errors. 
diff --git a/crates/analyzer/src/traversal/borrowck.rs b/crates/analyzer/src/traversal/borrowck.rs index 4209398642..3c03c53fa1 100644 --- a/crates/analyzer/src/traversal/borrowck.rs +++ b/crates/analyzer/src/traversal/borrowck.rs @@ -1,9 +1,13 @@ use super::call_args::LabeledParameter; -use crate::context::{AnalyzerContext, NamedThing}; -use crate::namespace::types::{Type, TypeId}; +use crate::{ + context::{AnalyzerContext, NamedThing}, + namespace::types::{Type, TypeId}, +}; use fe_common::diagnostics::Label; -use fe_parser::ast; -use fe_parser::node::{Node, Span}; +use fe_parser::{ + ast, + node::{Node, Span}, +}; use smallvec::{smallvec, SmallVec}; // NOTE: This is a temporary solution to the only borrowing bug that's possible diff --git a/crates/analyzer/src/traversal/call_args.rs b/crates/analyzer/src/traversal/call_args.rs index 9df034fdff..76bf7c27cf 100644 --- a/crates/analyzer/src/traversal/call_args.rs +++ b/crates/analyzer/src/traversal/call_args.rs @@ -1,13 +1,15 @@ -use super::expressions::{expr, expr_type}; -use super::types::try_coerce_type; -use crate::context::{AnalyzerContext, DiagnosticVoucher}; -use crate::display::Displayable; -use crate::errors::{self, FatalError, TypeCoercionError, TypeError}; -use crate::namespace::types::{FunctionParam, Generic, Type, TypeId}; -use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally}; -use fe_common::{Span, Spanned}; -use fe_parser::ast as fe; -use fe_parser::node::Node; +use super::{ + expressions::{expr, expr_type}, + types::try_coerce_type, +}; +use crate::{ + context::{AnalyzerContext, DiagnosticVoucher}, + display::Displayable, + errors::{self, FatalError, TypeCoercionError, TypeError}, + namespace::types::{FunctionParam, Generic, Type, TypeId}, +}; +use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally, Span, Spanned}; +use fe_parser::{ast as fe, node::Node}; use smol_str::SmolStr; pub trait LabeledParameter { diff --git a/crates/analyzer/src/traversal/declarations.rs b/crates/analyzer/src/traversal/declarations.rs index 1a57338411..395f8cb1a5 100644 --- a/crates/analyzer/src/traversal/declarations.rs +++ b/crates/analyzer/src/traversal/declarations.rs @@ -1,12 +1,15 @@ -use crate::context::AnalyzerContext; -use crate::display::Displayable; -use crate::errors::{self, FatalError, TypeCoercionError}; -use crate::namespace::scopes::BlockScope; -use crate::namespace::types::{Type, TypeId}; -use crate::traversal::{const_expr, expressions, types}; +use crate::{ + context::AnalyzerContext, + display::Displayable, + errors::{self, FatalError, TypeCoercionError}, + namespace::{ + scopes::BlockScope, + types::{Type, TypeId}, + }, + traversal::{const_expr, expressions, types}, +}; use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally}; -use fe_parser::ast as fe; -use fe_parser::node::Node; +use fe_parser::{ast as fe, node::Node}; /// Gather context information for var declarations and check for type errors. 
pub fn var_decl(scope: &mut BlockScope, stmt: &Node) -> Result<(), FatalError> { diff --git a/crates/analyzer/src/traversal/expressions.rs b/crates/analyzer/src/traversal/expressions.rs index 2c0561d32d..fc0a75a3ae 100644 --- a/crates/analyzer/src/traversal/expressions.rs +++ b/crates/analyzer/src/traversal/expressions.rs @@ -1,33 +1,34 @@ use super::borrowck; -use crate::builtins::{ContractTypeMethod, GlobalFunction, Intrinsic, ValueMethod}; -use crate::context::{AnalyzerContext, CallType, Constant, ExpressionAttributes, NamedThing}; -use crate::display::Displayable; -use crate::errors::{self, FatalError, IndexingError, TypeCoercionError}; -use crate::namespace::items::{ - EnumVariantId, EnumVariantKind, FunctionId, FunctionSigId, ImplId, Item, StructId, TypeDef, -}; -use crate::namespace::scopes::{check_visibility, BlockScopeType}; -use crate::namespace::types::{ - self, Array, Base, FeString, Integer, TraitOrType, Tuple, Type, TypeDowncast, TypeId, -}; -use crate::operations; -use crate::traversal::call_args::{validate_arg_count, validate_named_args}; -use crate::traversal::const_expr::eval_expr; -use crate::traversal::types::{ - apply_generic_type_args, deref_type, try_cast_type, try_coerce_type, +use crate::{ + builtins::{ContractTypeMethod, GlobalFunction, Intrinsic, ValueMethod}, + context::{AnalyzerContext, CallType, Constant, ExpressionAttributes, NamedThing}, + display::Displayable, + errors::{self, FatalError, IndexingError, TypeCoercionError}, + namespace::{ + items::{ + EnumVariantId, EnumVariantKind, FunctionId, FunctionSigId, ImplId, Item, StructId, + TypeDef, + }, + scopes::{check_visibility, BlockScopeType}, + types::{ + self, Array, Base, FeString, Integer, TraitOrType, Tuple, Type, TypeDowncast, TypeId, + }, + }, + operations, + traversal::{ + call_args::{validate_arg_count, validate_named_args}, + const_expr::eval_expr, + types::{apply_generic_type_args, deref_type, try_cast_type, try_coerce_type}, + utils::add_bin_operations_errors, + }, }; -use crate::traversal::utils::add_bin_operations_errors; -use fe_common::diagnostics::Label; -use fe_common::{numeric, Span}; -use fe_parser::ast as fe; -use fe_parser::ast::GenericArg; -use fe_parser::node::Node; +use fe_common::{diagnostics::Label, numeric, Span}; +use fe_parser::{ast as fe, ast::GenericArg, node::Node}; use num_bigint::BigInt; use num_traits::{ToPrimitive, Zero}; use smol_str::SmolStr; -use std::ops::RangeInclusive; -use std::str::FromStr; +use std::{ops::RangeInclusive, str::FromStr}; // TODO: don't fail fatally if expected type is provided diff --git a/crates/analyzer/src/traversal/functions.rs b/crates/analyzer/src/traversal/functions.rs index 2a4781d31d..9e5fbe2b0e 100644 --- a/crates/analyzer/src/traversal/functions.rs +++ b/crates/analyzer/src/traversal/functions.rs @@ -1,16 +1,21 @@ -use crate::context::{AnalyzerContext, ExpressionAttributes, NamedThing}; -use crate::display::Displayable; -use crate::errors::{self, FatalError, TypeCoercionError}; -use crate::namespace::items::{EnumVariantId, EnumVariantKind, Item, StructId, TypeDef}; -use crate::namespace::scopes::{BlockScope, BlockScopeType}; -use crate::namespace::types::{Type, TypeId}; -use crate::pattern_analysis::PatternMatrix; -use crate::traversal::{assignments, declarations, expressions, types}; +use crate::{ + context::{AnalyzerContext, ExpressionAttributes, NamedThing}, + display::Displayable, + errors::{self, FatalError, TypeCoercionError}, + namespace::{ + items::{EnumVariantId, EnumVariantKind, Item, StructId, TypeDef}, + scopes::{BlockScope, 
BlockScopeType}, + types::{Type, TypeId}, + }, + pattern_analysis::PatternMatrix, + traversal::{assignments, declarations, expressions, types}, +}; use fe_common::diagnostics::Label; -use fe_parser::ast::{self as fe, LiteralPattern, Pattern}; -use fe_parser::node::{Node, Span}; -use indexmap::map::Entry; -use indexmap::{IndexMap, IndexSet}; +use fe_parser::{ + ast::{self as fe, LiteralPattern, Pattern}, + node::{Node, Span}, +}; +use indexmap::{map::Entry, IndexMap, IndexSet}; use smol_str::SmolStr; use super::matching_anomaly; diff --git a/crates/analyzer/src/traversal/pragma.rs b/crates/analyzer/src/traversal/pragma.rs index b074784f89..cb0842f824 100644 --- a/crates/analyzer/src/traversal/pragma.rs +++ b/crates/analyzer/src/traversal/pragma.rs @@ -1,7 +1,6 @@ use crate::errors; use fe_common::diagnostics::{Diagnostic, Label}; -use fe_parser::ast; -use fe_parser::node::Node; +use fe_parser::{ast, node::Node}; use semver::{Version, VersionReq}; pub fn check_pragma_version(stmt: &Node) -> Option { diff --git a/crates/analyzer/src/traversal/types.rs b/crates/analyzer/src/traversal/types.rs index 92e548d2a4..c37a28dad2 100644 --- a/crates/analyzer/src/traversal/types.rs +++ b/crates/analyzer/src/traversal/types.rs @@ -1,21 +1,25 @@ -use crate::builtins::ValueMethod; -use crate::context::{ - Adjustment, AdjustmentKind, AnalyzerContext, CallType, Constant, ExpressionAttributes, - NamedThing, +use crate::{ + builtins::ValueMethod, + context::{ + Adjustment, AdjustmentKind, AnalyzerContext, CallType, Constant, ExpressionAttributes, + NamedThing, + }, + display::Displayable, + errors::{TypeCoercionError, TypeError}, + namespace::{ + items::{Item, TraitId}, + types::{ + Base, FeString, GenericArg, GenericParamKind, GenericType, Integer, TraitOrType, Tuple, + Type, TypeId, + }, + }, + traversal::call_args::validate_arg_count, }; -use crate::display::Displayable; -use crate::errors::{TypeCoercionError, TypeError}; -use crate::namespace::items::{Item, TraitId}; -use crate::namespace::types::{ - Base, FeString, GenericArg, GenericParamKind, GenericType, Integer, TraitOrType, Tuple, Type, - TypeId, +use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally, Spanned}; +use fe_parser::{ + ast, + node::{Node, Span}, }; -use crate::traversal::call_args::validate_arg_count; -use fe_common::diagnostics::Label; -use fe_common::utils::humanize::pluralize_conditionally; -use fe_common::Spanned; -use fe_parser::ast; -use fe_parser::node::{Node, Span}; use std::cmp::Ordering; /// Try to perform an explicit type cast, eg `u256(my_address)` or diff --git a/crates/analyzer/src/traversal/utils.rs b/crates/analyzer/src/traversal/utils.rs index cd5af63d91..1b415767c2 100644 --- a/crates/analyzer/src/traversal/utils.rs +++ b/crates/analyzer/src/traversal/utils.rs @@ -1,11 +1,12 @@ -use fe_common::diagnostics::Label; -use fe_common::Span; +use fe_common::{diagnostics::Label, Span}; -use crate::context::{AnalyzerContext, DiagnosticVoucher}; -use crate::display::Displayable; -use crate::errors::BinaryOperationError; -use crate::namespace::types::TypeId; -use crate::AnalyzerDb; +use crate::{ + context::{AnalyzerContext, DiagnosticVoucher}, + display::Displayable, + errors::BinaryOperationError, + namespace::types::TypeId, + AnalyzerDb, +}; use std::fmt::Display; fn type_label(db: &dyn AnalyzerDb, span: Span, typ: TypeId) -> Label { diff --git a/crates/analyzer/tests/analysis.rs b/crates/analyzer/tests/analysis.rs index 9d4ee524d7..2fedbcc0d7 100644 --- a/crates/analyzer/tests/analysis.rs +++ 
b/crates/analyzer/tests/analysis.rs @@ -1,14 +1,20 @@ -use fe_analyzer::display::Displayable; -use fe_analyzer::namespace::items::{self, IngotId, IngotMode, Item, ModuleId, TypeDef}; -use fe_analyzer::{AnalyzerDb, TestDb}; -use fe_common::diagnostics::{diagnostics_string, print_diagnostics, Diagnostic, Label, Severity}; -use fe_common::files::{FileKind, Utf8Path}; +use fe_analyzer::{ + display::Displayable, + namespace::items::{self, IngotId, IngotMode, Item, ModuleId, TypeDef}, + AnalyzerDb, TestDb, +}; +use fe_common::{ + diagnostics::{diagnostics_string, print_diagnostics, Diagnostic, Label, Severity}, + files::{FileKind, Utf8Path}, +}; use fe_parser::node::{NodeId, Span}; use indexmap::{indexmap, IndexMap}; use insta::assert_snapshot; use smallvec::SmallVec; -use std::collections::{HashMap, VecDeque}; -use std::fmt::Display; +use std::{ + collections::{HashMap, VecDeque}, + fmt::Display, +}; use wasm_bindgen_test::wasm_bindgen_test; #[test] diff --git a/crates/analyzer/tests/errors.rs b/crates/analyzer/tests/errors.rs index 1e109116e3..d348c509f2 100644 --- a/crates/analyzer/tests/errors.rs +++ b/crates/analyzer/tests/errors.rs @@ -1,9 +1,10 @@ //! Tests for contracts that should cause compile errors -use fe_analyzer::namespace::items::{IngotId, IngotMode, ModuleId}; -use fe_analyzer::TestDb; -use fe_common::diagnostics::diagnostics_string; -use fe_common::files::FileKind; +use fe_analyzer::{ + namespace::items::{IngotId, IngotMode, ModuleId}, + TestDb, +}; +use fe_common::{diagnostics::diagnostics_string, files::FileKind}; use indexmap::indexmap; use insta::assert_snapshot; use wasm_bindgen_test::wasm_bindgen_test; diff --git a/crates/codegen/src/yul/isel/function.rs b/crates/codegen/src/yul/isel/function.rs index 9170ce2904..78eaecce2a 100644 --- a/crates/codegen/src/yul/isel/function.rs +++ b/crates/codegen/src/yul/isel/function.rs @@ -26,10 +26,10 @@ use yultsur::{ use crate::{ db::CodegenDb, - yul::isel::inst_order::StructuralInst, - yul::slot_size::{function_hash_type, yul_primitive_type, SLOT_SIZE}, yul::{ + isel::inst_order::StructuralInst, runtime::{self, RuntimeProvider}, + slot_size::{function_hash_type, yul_primitive_type, SLOT_SIZE}, YulVariable, }, }; diff --git a/crates/common/src/diagnostics.rs b/crates/common/src/diagnostics.rs index f893a2a22e..265b52ece3 100644 --- a/crates/common/src/diagnostics.rs +++ b/crates/common/src/diagnostics.rs @@ -1,12 +1,12 @@ -use crate::db::SourceDb; -use crate::files::{SourceFileId, Utf8PathBuf}; -use crate::Span; +use crate::{ + db::SourceDb, + files::{SourceFileId, Utf8PathBuf}, + Span, +}; pub use codespan_reporting::diagnostic as cs; -use codespan_reporting::files::Error as CsError; -use codespan_reporting::term; +use codespan_reporting::{files::Error as CsError, term}; pub use cs::Severity; -use std::ops::Range; -use std::rc::Rc; +use std::{ops::Range, rc::Rc}; use term::termcolor::{BufferWriter, ColorChoice}; #[derive(Debug, PartialEq, Eq, Hash, Clone)] diff --git a/crates/common/src/files.rs b/crates/common/src/files.rs index 5cb18269da..c56657fa92 100644 --- a/crates/common/src/files.rs +++ b/crates/common/src/files.rs @@ -1,8 +1,7 @@ use crate::db::SourceDb; pub use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; pub use fe_library::include_dir; -use std::ops::Range; -use std::rc::Rc; +use std::{ops::Range, rc::Rc}; // NOTE: all file paths are stored as utf8 strings. 
// Non-utf8 paths (for user code) should be reported diff --git a/crates/common/src/span.rs b/crates/common/src/span.rs index 5d1c34be97..6092b388e7 100644 --- a/crates/common/src/span.rs +++ b/crates/common/src/span.rs @@ -1,8 +1,10 @@ use crate::files::SourceFileId; use serde::{Deserialize, Serialize}; -use std::cmp; -use std::fmt::{Debug, Formatter}; -use std::ops::{Add, AddAssign, Range}; +use std::{ + cmp, + fmt::{Debug, Formatter}, + ops::{Add, AddAssign, Range}, +}; /// An exclusive span of byte offsets in a source file. #[derive(Serialize, Deserialize, PartialEq, Copy, Clone, Hash, Eq)] diff --git a/crates/driver/src/lib.rs b/crates/driver/src/lib.rs index 91c6a47012..e02b8ec91c 100644 --- a/crates/driver/src/lib.rs +++ b/crates/driver/src/lib.rs @@ -3,9 +3,7 @@ pub use fe_codegen::db::{CodegenDb, Db}; use fe_analyzer::namespace::items::{ContractId, FunctionId, IngotId, IngotMode, ModuleId}; -use fe_common::db::Upcast; -use fe_common::diagnostics::Diagnostic; -use fe_common::files::FileKind; +use fe_common::{db::Upcast, diagnostics::Diagnostic, files::FileKind}; use fe_parser::ast::SmolStr; use fe_test_runner::TestSink; use indexmap::{indexmap, IndexMap}; diff --git a/crates/fe/src/task/build.rs b/crates/fe/src/task/build.rs index 5219ab999b..0b5cdfb07c 100644 --- a/crates/fe/src/task/build.rs +++ b/crates/fe/src/task/build.rs @@ -1,10 +1,11 @@ -use std::fs; -use std::io::{Error, Write}; -use std::path::Path; +use std::{ + fs, + io::{Error, Write}, + path::Path, +}; use clap::{ArgEnum, Args}; -use fe_common::diagnostics::print_diagnostics; -use fe_common::files::SourceFileId; +use fe_common::{diagnostics::print_diagnostics, files::SourceFileId}; use fe_driver::CompiledModule; const DEFAULT_OUTPUT_DIR_NAME: &str = "output"; diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index a95b5d232b..79432bc5f4 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -12,6 +12,7 @@ salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } smallvec = "1.10" rustc-hash = "1.1.0" either = "1.8" +derive_more = "0.99" hir = { path = "../hir", package = "fe-hir" } common = { path = "../common", package = "fe-common" } diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 9102d176a6..b624188267 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -1,24 +1,30 @@ #![allow(dead_code)] -use hir::hir_def::scope_graph::{ScopeEdge, ScopeId}; +use hir::hir_def::{ + scope_graph::{ScopeEdge, ScopeId}, + IdentId, +}; use rustc_hash::FxHashMap; -use crate::Spanned; +use super::name_resolver::{NameResolutionError, ResolvedNameSet}; pub struct ResolvedImports { pub resolved: FxHashMap, } pub struct ImportResolver { - resolved: FxHashMap>>, - glob_resolved: FxHashMap>>, + resolved: FxHashMap, + glob_resolved: FxHashMap, states: FxHashMap, } pub trait Importer { - fn glob_imports(&self, scope: ScopeId) -> &[Spanned]; - fn named_imports(&self, scope: ScopeId) -> &[Spanned]; + fn named_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet>; + fn glob_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet>; } +pub(super) type ResolvedImportSet = + FxHashMap>; + /// This is the state of import resolution for a given scope. 
#[derive(Clone, Copy, Debug, PartialEq, Eq)] enum ScopeState { @@ -29,11 +35,11 @@ enum ScopeState { } impl Importer for ImportResolver { - fn glob_imports(&self, scope: ScopeId) -> &[Spanned] { - &self.glob_resolved[&scope] + fn named_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet> { + self.resolved.get(&scope) } - fn named_imports(&self, scope: ScopeId) -> &[Spanned] { - &self.resolved[&scope] + fn glob_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet> { + self.glob_resolved.get(&scope) } } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 8dd2ff7f56..a08df85895 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -1,13 +1,15 @@ +use std::{collections::hash_map::IntoValues, fmt}; + use either::Either; use hir::{ hir_def::{ kw, scope_graph::{ AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, - ScopeEdge, ScopeId, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, - VariantEdge, + ScopeEdge, ScopeId, ScopeKind, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, + ValueEdge, VariantEdge, }, - IdentId, PathId, + IdentId, ItemKind, Partial, PathId, }, span::DynLazySpan, }; @@ -23,43 +25,101 @@ pub struct NameResolver<'db, 'a> { cache_store: ResolvedQueryCacheStore, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq)] pub enum ResolvedPath { - FullResolved(ScopeId), + Full(ResolvedNameSet), /// The path is partially resolved; this means that the `resolved` is a type /// and the following segments depend on type to resolve. /// These unresolved parts are resolved in the later type inference and /// trait solving phases. - PartialResolved { - resolved: ScopeId, + Partial { + resolved: ResolvedName, unresolved_from: usize, }, +} - /// The path resolution failed at the given segment. - Failed { - failed_at: usize, - cause: NameResolutionFailure, - }, +impl ResolvedPath { + pub fn partial(resolved: ResolvedName, unresolved_from: usize) -> Self { + Self::Partial { + resolved, + unresolved_from, + } + } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum NameResolutionFailure { - Conflict, - Missing, +#[derive(Debug, derive_more::Display, Clone, PartialEq, Eq, Hash, derive_more::Error)] +#[display(fmt = "failed_at: {failed_at}, kind: {kind}")] +pub struct PathResolutionError { + pub kind: NameResolutionError, + pub failed_at: usize, +} + +impl PathResolutionError { + fn new(kind: NameResolutionError, failed_at: usize) -> Self { + Self { kind, failed_at } + } + + fn not_found(failed_at: usize) -> Self { + Self::new(NameResolutionError::NotFound, failed_at) + } + + fn invalid(failed_at: usize) -> Self { + Self::new(NameResolutionError::Invalid, failed_at) + } } impl<'db, 'a> NameResolver<'db, 'a> { + /// Resolve the path to a set of possible resolutions. + /// A path can be resolved to multiple resolutions because we have multiple + /// name domains. 
+ /// + /// For example, the `foo::FOO` can be resolved to both `const + /// FOO` and `struct FOO` in the following code: + /// ```fe + /// use foo::FOO + /// + /// mod foo { + /// const FOO: i32 = 1 + /// struct FOO {} + /// } + /// ``` pub fn resolve_path( &mut self, - _path: PathId, - _scope: ScopeId, - _context: NameContext, - ) -> ResolvedPath { - todo!() + path: PathId, + scope: ScopeId, + ) -> Result { + let segments = path.segments(self.db.upcast()); + if segments.is_empty() { + return Err(PathResolutionError::invalid(0)); + } + + // Set pred segment to the current scope. + let mut pred = ResolvedName::new_scope(scope, None, NameDomain::from_scope(self.db, scope)); + + let seg_len = segments.len(); + for (i, seg) in segments[0..seg_len - 1].iter().enumerate() { + pred = match self.resolve_segment(pred, *seg, i, false)? { + Either::Left(resolved) => { + return Ok(resolved); + } + Either::Right(resolved) => resolved, + } + } + + match self.resolve_segment(pred, *segments.last().unwrap(), seg_len - 1, true)? { + Either::Left(resolved) => Ok(resolved), + Either::Right(_) => { + unreachable!() + } + } } - pub fn resolve_query(&mut self, scope: ScopeId, query: NameQuery) -> Vec { + pub fn resolve_query( + &mut self, + scope: ScopeId, + query: NameQuery, + ) -> Result { // If the query is already resolved, return the cached result. if let Some(resolved) = self.cache_store.get(scope, query) { return resolved; @@ -69,9 +129,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { // `$ = NamedImports > GlobImports > Lex > external ingot > builtin types`, // where `$` means current scope. // This ordering means that greater one shadows lower ones having the same name - // in the same domain. - - let mut results = Vec::new(); + // in the same name context. + let mut resolutions = Vec::new(); let mut parent = None; // 1. Look for the name in the current scope and named imports. let mut found_scopes = FxHashSet::default(); @@ -79,58 +138,54 @@ impl<'db, 'a> NameResolver<'db, 'a> { match edge.kind.propagate(query) { PropagatedQuery::Terminated => { if found_scopes.insert(edge.dest) { - results.push(ResolvedName::scope(edge.dest, None)); + resolutions.push(ResolvedName::new_scope( + edge.dest, + None, + NameDomain::from_scope(self.db, edge.dest), + )); } } - PropagatedQuery::Continuation if query.option.allow_lex => { + PropagatedQuery::Continuation => { debug_assert!(parent.is_none()); parent = Some(edge.dest); } - _ => {} + PropagatedQuery::UnPropagated => {} } } - - for named_import in self.importer.named_imports(scope) { - let edge = &named_import.data; - match edge.kind.propagate(query) { - PropagatedQuery::Terminated => { - if found_scopes.insert(edge.dest) { - results.push(ResolvedName::scope( - edge.dest, - Some(named_import.span.clone()), - )); - } + if let Some(imported) = self + .importer + .named_imports(scope) + .and_then(|imports| imports.get(&query.name)) + { + match imported { + Ok(imported) => { + resolutions.extend(imported.iter().cloned()); + } + Err(_) => { + let err = NameResolutionError::InvalidImport; + self.cache_store.cache_result(scope, query, Err(err)); + return Err(err); } - PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} } } - if !results.is_empty() { - self.cache_store - .cache_resolved(scope, query, results.clone()); - return results; + if let Some(result) = self.store_result_opt(scope, query, resolutions) { + return result; } // 2. Look for the name in the glob imports. 
- for glob_import in self.importer.glob_imports(scope) { - let edge = &glob_import.data; - match edge.kind.propagate(query) { - PropagatedQuery::Terminated => { - if found_scopes.insert(edge.dest) { - results.push(ResolvedName::scope( - edge.dest, - Some(glob_import.span.clone()), - )); - } - } - PropagatedQuery::Continuation | PropagatedQuery::UnPropagated => {} - } - } - if !results.is_empty() { + if let Some(imported) = self + .importer + .named_imports(scope) + .and_then(|imports| imports.get(&query.name)) + { + let imported = imported + .clone() + .map_err(|_| NameResolutionError::InvalidImport); self.cache_store - .cache_resolved(scope, query, results.clone()); - return results; + .cache_result(scope, query, imported.clone()); + return imported; } // 3. Look for the name in the lexical scope if it exists. @@ -140,29 +195,136 @@ impl<'db, 'a> NameResolver<'db, 'a> { } // 4. Look for the name in the external ingots. - if query.domain == NameContext::Item { - for (name, root_mod) in scope.top_mod.external_ingots(self.db.upcast()) { + let resolutions: Vec<_> = scope + .top_mod + .external_ingots(self.db.upcast()) + .iter() + .filter_map(|(name, root_mod)| { if *name == query.name { - results.push(ResolvedName::scope(ScopeId::root(*root_mod), None)); + Some(ResolvedName::new_scope( + ScopeId::root(*root_mod), + None, + NameDomain::Item, + )) + } else { + None } - } - // Ensure that all names of external ingots don't conflict with each other. - debug_assert!(results.len() < 2); - } - if !results.is_empty() { - self.cache_store - .cache_resolved(scope, query, results.clone()); - return results; + }) + .collect(); + + // Ensure that all names of external ingots don't conflict with each other. + debug_assert!(resolutions.len() < 2); + if let Some(result) = self.store_result_opt(scope, query, resolutions) { + return result; } // 5. Look for the name in the builtin types. - if let Some(builtin) = BuiltinName::lookup_for(query.name) { - results.push(ResolvedName::Builtin(builtin)); + let result = if let Some(builtin) = BuiltinName::lookup_for(query.name) { + Ok(ResolvedName::new_builtin(builtin, builtin.domain()).into()) + } else { + Err(NameResolutionError::NotFound) + }; + + self.cache_store.cache_result(scope, query, result.clone()); + result + } + + /// Resolve the `segment`. `pred` is the resolution for the previous + /// segment, and `is_last` indicates the segment is the last segment of the + /// path. + /// + /// If the method returns `Right`, it means the path resolution is work in + /// progress and we need to continue look for the next segment. If the + /// method returns `Left`, that means the resolution for the entire path + /// is done. + /// + /// Even if the `is_last` is `false` the method may return `Left`, this will + /// happen if both 1. and 2. are satisfied: + /// 1. The `pred` is a type. + /// 2. The lookup for the `segment` results in `NotFound`. + /// This indicates we need further resolution for the `segment` in the later + /// trait solving phase. + /// In case the `is_last` is `true`, the function is guaranteed to return + /// `Ok(Left)` or `Error`. + /// + /// + /// We can return an error immediately in case the `is_last` is `false` and + /// multiple resolutions for the `segment` are found. + /// The reasoning is + /// 1. Our language allows only `Item` domain to have associated items. + /// 2. By 1., the middle segments should be resolved to the `Item` + /// domain. Otherwise, the following segment can't be resolved. + /// 3. 
By 2., if we obtain multiple resolutions from a middle segment, this + /// can be divided into two cases: + /// a. Name conflict occurs. We can immediately return `Conflict` error + /// in this case. + /// b. All resolutions belong to different domains. This + /// means that at least one of the resolutions belongs to non-`Item` + /// domain. This case can be regarded as `NotFound` error because the + /// following segment of the non-`Item` domain resolution can't be + /// resolved. + fn resolve_segment( + &mut self, + pred: ResolvedName, + segment: Partial, + seg_idx: usize, + is_last: bool, + ) -> Result, PathResolutionError> { + let Partial::Present(seg) = segment else { + return Err(PathResolutionError::invalid(seg_idx)); + }; + + let Some(scope) = pred.scope() + else { + // If pred is a builtin type, then the path resolution is done. + if pred.is_type(self.db) { + return Ok(Either::Left(ResolvedPath::partial(pred, seg_idx))); + } else { + return Err(PathResolutionError::not_found(seg_idx)); + } + }; + + let query = NameQuery::new(seg); + let resolved_set = match self.resolve_query(scope, query) { + Ok(resolved) => resolved, + Err(NameResolutionError::NotFound) if pred.is_type(self.db) => { + // If the parent scope of the current segment is a type and the segment is not + // found, then it should be resolved in the trait solving phase. + return Ok(Either::Left(ResolvedPath::partial(pred, seg_idx))); + } + Err(e) => { + return Err(PathResolutionError::new(e, seg_idx)); + } + }; + + if is_last { + Ok(Either::Left(ResolvedPath::Full(resolved_set))) + } else if resolved_set.num() > 1 { + // Case a. is already handled above. + // Handles case b. here. + return Err(PathResolutionError::not_found(seg_idx)); + } else { + Ok(Either::Right(resolved_set.into_iter().next().unwrap())) } - self.cache_store - .cache_resolved(scope, query, results.clone()); + } - results + /// Convert the `resolutions` into `ResolvedNameSet` and store it to the + /// cache store. + /// If the `resolutions` is empty return `None` instead of + /// returning an error. + fn store_result_opt( + &mut self, + scope: ScopeId, + query: NameQuery, + resolutions: Vec, + ) -> Option> { + if !resolutions.is_empty() { + let result = ResolvedNameSet::from_resolutions(resolutions); + self.cache_store.cache_result(scope, query, result.clone()); + Some(result) + } else { + None + } } fn edges(&self, scope: ScopeId) -> &'db [ScopeEdge] { @@ -174,21 +336,27 @@ impl<'db, 'a> NameResolver<'db, 'a> { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct NameQuery { name: IdentId, - domain: NameContext, option: QueryOption, } -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum ResolvedName { - Builtin(BuiltinName), - Scope { - scope: ScopeId, - import_span: Option, - }, -} +impl NameQuery { + /// Create a new name query with the default query option. + pub fn new(name: IdentId) -> Self { + Self { + name, + option: Default::default(), + } + } + /// Create a new name query with the given query option. + pub fn with_option(name: IdentId, option: QueryOption) -> Self { + Self { name, option } + } +} #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct QueryOption { + /// If `allow_lex` is true, then the query will be propagated to the lexical + /// scope if the name is not found in the current scope. allow_lex: bool, } @@ -203,29 +371,145 @@ impl QueryOption { } } +impl Default for QueryOption { + fn default() -> Self { + Self::new() + } +} + +/// The struct contains the lookup result of a name query. 
+/// The results can contain more than one resolved name, each belonging to a
+/// different name domain.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct ResolvedNameSet {
+    names: FxHashMap<NameDomain, ResolvedName>,
+}
+
+impl ResolvedNameSet {
+    /// Returns the number of resolved names.
+    pub fn num(&self) -> usize {
+        self.names.len()
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &ResolvedName> {
+        self.names.values()
+    }
+
+    /// Returns the resolved name of the given `domain`.
+    pub fn name_by_domain(&self, domain: NameDomain) -> Option<&ResolvedName> {
+        self.names.get(&domain)
+    }
+
+    fn from_resolutions(resolutions: Vec<ResolvedName>) -> Result<Self, NameResolutionError> {
+        if resolutions.is_empty() {
+            return Err(NameResolutionError::NotFound);
+        }
+        let mut names = FxHashMap::default();
+        for resolution in resolutions {
+            let domain = resolution.domain;
+            if names.insert(domain, resolution).is_some() {
+                return Err(NameResolutionError::Conflict);
+            }
+        }
+
+        Ok(Self { names })
+    }
+}
+
+impl IntoIterator for ResolvedNameSet {
+    type Item = ResolvedName;
+    type IntoIter = IntoValues<NameDomain, ResolvedName>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.names.into_values()
+    }
+}
+
+impl From<ResolvedName> for ResolvedNameSet {
+    fn from(resolution: ResolvedName) -> Self {
+        let mut names = FxHashMap::default();
+        names.insert(resolution.domain, resolution);
+        Self { names }
+    }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct ResolvedName {
+    pub kind: ResolvedNameKind,
+    pub domain: NameDomain,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum NameResolutionError {
+    /// The name is not found.
+    NotFound,
+    /// Multiple candidates are found; no need to report the error from the use
+    /// site because it should be emitted from the def site.
+    /// The associated value is the first candidate.
+    Conflict,
+
+    /// The name is found as an imported name, but the name resolution for the
+    /// import itself failed. No need to report the error from the use site
+    /// because it should be emitted from the import resolution phase.
+    InvalidImport,
+
+    /// The name is invalid in parsing. Basically, no need to report it because
+    /// the error is already emitted in the parsing phase.
+    Invalid,
+}
+
 impl ResolvedName {
-    pub fn scope(scope: ScopeId, import_span: Option<DynLazySpan>) -> Self {
-        Self::Scope { scope, import_span }
+    pub fn is_type(&self, db: &dyn HirAnalysisDb) -> bool {
+        match self.kind {
+            ResolvedNameKind::Builtin(builtin) => builtin.is_type(),
+            ResolvedNameKind::Scope { scope, .. } => scope.is_type(db.upcast()),
+        }
+    }
+
+    pub fn scope(&self) -> Option<ScopeId> {
+        match self.kind {
+            ResolvedNameKind::Builtin(_) => None,
+            ResolvedNameKind::Scope { scope, .. } => Some(scope),
+        }
     }
 
-    pub fn builtin(builtin: BuiltinName) -> Self {
-        Self::Builtin(builtin)
+    fn new_scope(scope: ScopeId, import_span: Option<DynLazySpan>, domain: NameDomain) -> Self {
+        Self {
+            kind: ResolvedNameKind::Scope { scope, import_span },
+            domain,
+        }
     }
 
-    pub fn is_valid(&self) -> bool {
-        match self {
-            Self::Scope { scope, .. } => scope.is_valid(),
-            Self::Builtin(_) => true,
+    fn new_builtin(builtin: BuiltinName, domain: NameDomain) -> Self {
+        Self {
+            kind: ResolvedNameKind::Builtin(builtin),
+            domain,
         }
     }
 }
 
-impl Default for QueryOption {
-    fn default() -> Self {
-        Self::new()
+impl fmt::Display for NameResolutionError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            NameResolutionError::NotFound => write!(f, "name not found"),
+            NameResolutionError::Conflict => write!(f, "multiple candidates found"),
+            NameResolutionError::InvalidImport => write!(f, "invalid import"),
+            NameResolutionError::Invalid => write!(f, "invalid name"),
+        }
     }
 }
 
+impl std::error::Error for NameResolutionError {}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ResolvedNameKind {
+    Builtin(BuiltinName),
+    Scope {
+        scope: ScopeId,
+        import_span: Option<DynLazySpan>,
+    },
+}
+
 #[derive(Clone, Debug, Copy, PartialEq, Eq, Hash)]
 pub enum BuiltinName {
     Bool,
@@ -264,20 +548,66 @@ impl BuiltinName {
         }
         .into()
     }
+
+    pub fn domain(self) -> NameDomain {
+        // Currently all builtins belong to the item domain.
+        match self {
+            Self::Bool
+            | Self::U8
+            | Self::U16
+            | Self::U32
+            | Self::U64
+            | Self::U128
+            | Self::U256
+            | Self::I8
+            | Self::I16
+            | Self::I32
+            | Self::I64
+            | Self::I128
+            | Self::I256 => NameDomain::Item,
+        }
+    }
+
+    pub fn is_type(self) -> bool {
+        // Currently all builtin names are types.
+        match self {
+            Self::Bool
+            | Self::U8
+            | Self::U16
+            | Self::U32
+            | Self::U64
+            | Self::U128
+            | Self::U256
+            | Self::I8
+            | Self::I16
+            | Self::I32
+            | Self::I64
+            | Self::I128
+            | Self::I256 => true,
+        }
+    }
 }
 
 #[derive(Default)]
 struct ResolvedQueryCacheStore {
-    cache: FxHashMap<(ScopeId, NameQuery), Either<Vec<ResolvedName>, ScopeId>>,
+    cache: FxHashMap<
+        (ScopeId, NameQuery),
+        Either<Result<ResolvedNameSet, NameResolutionError>, ScopeId>,
+    >,
     no_cache: bool,
 }
 
 impl ResolvedQueryCacheStore {
-    fn cache_resolved(&mut self, scope: ScopeId, query: NameQuery, resolved: Vec<ResolvedName>) {
+    fn cache_result(
+        &mut self,
+        scope: ScopeId,
+        query: NameQuery,
+        result: Result<ResolvedNameSet, NameResolutionError>,
+    ) {
         if self.no_cache {
             return;
         }
-        self.cache.insert((scope, query), Either::Left(resolved));
+        self.cache.insert((scope, query), Either::Left(result));
     }
 
     fn cache_delegated(&mut self, scope: ScopeId, query: NameQuery, parent: ScopeId) {
@@ -287,7 +617,11 @@ impl ResolvedQueryCacheStore {
         self.cache.insert((scope, query), Either::Right(parent));
    }
 
-    fn get(&self, scope: ScopeId, query: NameQuery) -> Option<Vec<ResolvedName>> {
+    fn get(
+        &self,
+        scope: ScopeId,
+        query: NameQuery,
+    ) -> Option<Result<ResolvedNameSet, NameResolutionError>> {
         match self.cache.get(&(scope, query)) {
             Some(Either::Left(resolved)) => Some(resolved.clone()),
             Some(Either::Right(delegated)) => Some(self.get(*delegated, query)?),
@@ -296,17 +630,48 @@
     }
 }
 
+/// Each resolved name is associated with a domain that indicates which domain
+/// the name belongs to.
+/// Multiple occurrences of the same name can be introduced in the same scope
+/// as long as they are in different domains.
+///
+/// E.g., `Foo` can be introduced in the same scope as both a type and a variant
+/// at the same time. This means the code below is valid.
+/// ```fe
+/// struct Foo {}
+/// enum MyEnum {
+///     Foo
+/// }
+/// use MyEnum::Foo
+/// ```
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum NameContext {
+pub enum NameDomain {
+    /// The domain is associated with all items except for items that belong to
+    /// the `Value` domain.
     Item,
+    /// The domain is associated with local variables and items that are
+    /// guaranteed not to have associated names, e.g., `fn` or `const`.
Value, - Type, + /// The domain is associated with struct fields. Field, + /// The domain is associated with enum variants. Variant, } +impl NameDomain { + fn from_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> Self { + match scope.data(db.upcast()).kind { + ScopeKind::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeKind::FnParam(_) => { + Self::Value + } + ScopeKind::Item(_) | ScopeKind::GenericParam(_) => Self::Item, + ScopeKind::Field(_) => Self::Field, + ScopeKind::Variant(_) => Self::Variant, + } + } +} + trait QueryPropagator { - // TODO: `db` is not necessary if we implement prefilled keywords. fn propagate(&self, query: NameQuery) -> PropagatedQuery; } @@ -318,26 +683,27 @@ enum PropagatedQuery { } impl QueryPropagator for LexEdge { - fn propagate(&self, _query: NameQuery) -> PropagatedQuery { - PropagatedQuery::Continuation + fn propagate(&self, query: NameQuery) -> PropagatedQuery { + if query.option.allow_lex { + PropagatedQuery::Continuation + } else { + PropagatedQuery::UnPropagated + } } } impl QueryPropagator for ModEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - match query.domain { - NameContext::Item if self.0 == query.name => PropagatedQuery::Terminated, - _ => PropagatedQuery::UnPropagated, + if self.0 == query.name { + PropagatedQuery::Terminated + } else { + PropagatedQuery::UnPropagated } } } impl QueryPropagator for TypeEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item | NameContext::Type) { - return PropagatedQuery::UnPropagated; - } - if self.0 == query.name { PropagatedQuery::Terminated } else { @@ -348,10 +714,6 @@ impl QueryPropagator for TypeEdge { impl QueryPropagator for TraitEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item | NameContext::Type) { - return PropagatedQuery::UnPropagated; - } - if self.0 == query.name { PropagatedQuery::Terminated } else { @@ -362,10 +724,6 @@ impl QueryPropagator for TraitEdge { impl QueryPropagator for ValueEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item | NameContext::Value) { - return PropagatedQuery::UnPropagated; - } - if self.0 == query.name { PropagatedQuery::Terminated } else { @@ -376,10 +734,6 @@ impl QueryPropagator for ValueEdge { impl QueryPropagator for GenericParamEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item | NameContext::Type) { - return PropagatedQuery::UnPropagated; - } - if self.0 == query.name { PropagatedQuery::Terminated } else { @@ -390,10 +744,6 @@ impl QueryPropagator for GenericParamEdge { impl QueryPropagator for FieldEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Field) { - return PropagatedQuery::UnPropagated; - } - if self.0 == query.name { PropagatedQuery::Terminated } else { @@ -404,10 +754,6 @@ impl QueryPropagator for FieldEdge { impl QueryPropagator for VariantEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Variant) { - return PropagatedQuery::UnPropagated; - } - if self.0 == query.name { PropagatedQuery::Terminated } else { @@ -418,10 +764,6 @@ impl QueryPropagator for VariantEdge { impl QueryPropagator for SuperEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item) { - return PropagatedQuery::UnPropagated; - } - if 
query.name.is_super() { PropagatedQuery::Terminated } else { @@ -432,10 +774,6 @@ impl QueryPropagator for SuperEdge { impl QueryPropagator for IngotEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item) { - return PropagatedQuery::UnPropagated; - } - if query.name.is_ingot() { PropagatedQuery::Terminated } else { @@ -446,10 +784,6 @@ impl QueryPropagator for IngotEdge { impl QueryPropagator for SelfTyEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item | NameContext::Type) { - return PropagatedQuery::UnPropagated; - } - if query.name.is_self_ty() { PropagatedQuery::Terminated } else { @@ -460,10 +794,6 @@ impl QueryPropagator for SelfTyEdge { impl QueryPropagator for SelfEdge { fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if !matches!(query.domain, NameContext::Item | NameContext::Value) { - return PropagatedQuery::UnPropagated; - } - if query.name.is_self() { PropagatedQuery::Terminated } else { diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs index e63242f0a0..70971b32dc 100644 --- a/crates/hir-analysis/src/name_resolution/visibility_checker.rs +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -1,8 +1,8 @@ -use hir::hir_def::scope_graph::{ScopeId, ScopeKind}; +use hir::hir_def::scope_graph::ScopeId; use crate::HirAnalysisDb; -use super::name_resolver::ResolvedName; +use super::name_resolver::{NameDomain, ResolvedName, ResolvedNameKind}; /// Return `true` if the given `resolved` is visible from the `ref_scope`. /// The resolved name is visible from `ref_scope` if @@ -14,7 +14,7 @@ pub fn check_visibility( ref_scope: ScopeId, resolved: &ResolvedName, ) -> bool { - let ResolvedName::Scope{scope, .. } = resolved else { + let ResolvedNameKind::Scope{scope, .. } = resolved.kind else { // If resolved is a builtin name, then it's always visible . return true; }; @@ -24,16 +24,14 @@ pub fn check_visibility( return true; } - let Some(def_scope) = (match scope.kind(db.upcast()) { + let Some(def_scope) = (if resolved.domain == NameDomain::Field { // We treat fields as if they are defined in the parent of the parent scope so // that field can be accessible from the scope where the parent is defined. - ScopeKind::Field(_) => { scope.parent(db.upcast()).and_then(|scope| scope.parent(db.upcast())) - }, - _ => { + } else { scope.parent(db.upcast()) - } - }) else { + }) + else { return false; }; diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index ab82d1bf1c..14045fd2aa 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -73,6 +73,13 @@ impl ItemKind { Impl(_) | ImplTrait(_) | Body(_) => Visibility::Private, } } + + pub fn is_type(self) -> bool { + matches!( + self, + Self::Struct(_) | Self::Enum(_) | Self::Contract(_) | Self::TypeAlias(_) + ) + } } #[salsa::tracked] @@ -111,14 +118,6 @@ impl TopLevelMod { module_tree.children(self) } - pub fn invalid() -> Self { - Self(salsa::Id::from_u32(u32::MAX - 1)) - } - - pub fn is_valid(self) -> bool { - self != Self::invalid() - } - pub fn vis(self, _db: &dyn HirDb) -> Visibility { // We don't have a way to specify visibility of a top level module. // Please change here if we introduce it. 
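// The `check_visibility` change above treats a field as if it were declared in
// the grandparent of its own scope, so that code sitting next to the struct's
// parent can still reach the field. The snippet below is a minimal,
// self-contained sketch of that rule on toy types (`Scope`, `ScopeTree`,
// `is_visible`); it is an illustration under assumed semantics, not the crate's
// real `ScopeId`/`ResolvedName`/`HirAnalysisDb` API.

#[derive(Clone, Copy, PartialEq, Eq)]
struct Scope(usize);

struct ScopeTree {
    /// `parent[i]` is the parent of scope `i`, if any.
    parent: Vec<Option<usize>>,
}

impl ScopeTree {
    fn parent_of(&self, s: Scope) -> Option<Scope> {
        self.parent[s.0].map(Scope)
    }

    /// Returns `true` if `inner` is `outer` itself or is lexically nested inside it.
    fn is_inside(&self, outer: Scope, mut inner: Scope) -> bool {
        loop {
            if inner == outer {
                return true;
            }
            match self.parent_of(inner) {
                Some(p) => inner = p,
                None => return false,
            }
        }
    }
}

/// Toy check: public names are always visible; a private name is visible only
/// from inside its defining scope, where a field's defining scope is taken to
/// be the grandparent of the scope the field itself lives in.
fn is_visible(
    tree: &ScopeTree,
    name_scope: Scope,
    is_pub: bool,
    is_field: bool,
    ref_scope: Scope,
) -> bool {
    if is_pub {
        return true;
    }
    let def_scope = if is_field {
        tree.parent_of(name_scope).and_then(|p| tree.parent_of(p))
    } else {
        tree.parent_of(name_scope)
    };
    match def_scope {
        Some(scope) => tree.is_inside(scope, ref_scope),
        None => false,
    }
}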
diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index bdf7232e44..418f51a4f0 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -4,5 +4,5 @@ use super::IdentId; #[salsa::interned] pub struct PathId { - segments: Vec>, + pub segments: Vec>, } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 04645efe3a..3675c03ffd 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -122,17 +122,6 @@ impl ScopeId { Self::new(top_mod, LocalScopeId::root()) } - pub fn invalid() -> Self { - Self { - top_mod: TopLevelMod::invalid(), - local_id: LocalScopeId::invalid(), - } - } - - pub fn is_valid(self) -> bool { - self != Self::invalid() - } - pub fn data(self, db: &dyn HirDb) -> &LocalScope { self.top_mod .module_scope_graph(db) @@ -154,6 +143,14 @@ impl ScopeId { pub fn parent_module(self, db: &dyn HirDb) -> Option { self.data(db).parent_module } + + pub fn is_type(self, db: &dyn HirDb) -> bool { + match self.data(db).kind { + ScopeKind::Item(item) => item.is_type(), + ScopeKind::GenericParam(_) => true, + _ => false, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] @@ -274,14 +271,6 @@ impl LocalScopeId { pub(crate) fn root() -> Self { LocalScopeId(0) } - - pub fn invalid() -> Self { - LocalScopeId(u32::MAX) - } - - pub fn is_valid(self) -> bool { - self != Self::invalid() - } } #[cfg(test)] diff --git a/crates/mir/src/db/queries/function.rs b/crates/mir/src/db/queries/function.rs index e5b33d30b2..e9f0e9f282 100644 --- a/crates/mir/src/db/queries/function.rs +++ b/crates/mir/src/db/queries/function.rs @@ -1,9 +1,9 @@ use std::{collections::BTreeMap, rc::Rc}; -use fe_analyzer::display::Displayable; -use fe_analyzer::namespace::items as analyzer_items; -use fe_analyzer::namespace::items::Item; -use fe_analyzer::namespace::types as analyzer_types; +use fe_analyzer::{ + display::Displayable, + namespace::{items as analyzer_items, items::Item, types as analyzer_types}, +}; use smol_str::SmolStr; diff --git a/crates/mir/src/ir/function.rs b/crates/mir/src/ir/function.rs index 026b7fb0ca..c359f20f71 100644 --- a/crates/mir/src/ir/function.rs +++ b/crates/mir/src/ir/function.rs @@ -1,5 +1,4 @@ -use fe_analyzer::namespace::items as analyzer_items; -use fe_analyzer::namespace::types as analyzer_types; +use fe_analyzer::namespace::{items as analyzer_items, types as analyzer_types}; use fe_common::impl_intern_key; use fxhash::FxHashMap; use id_arena::Arena; diff --git a/crates/mir/src/ir/types.rs b/crates/mir/src/ir/types.rs index 9692d858c5..8bdd9995c2 100644 --- a/crates/mir/src/ir/types.rs +++ b/crates/mir/src/ir/types.rs @@ -1,5 +1,4 @@ -use fe_analyzer::namespace::items as analyzer_items; -use fe_analyzer::namespace::types as analyzer_types; +use fe_analyzer::namespace::{items as analyzer_items, types as analyzer_types}; use fe_common::{impl_intern_key, Span}; use smol_str::SmolStr; diff --git a/crates/parser/src/ast.rs b/crates/parser/src/ast.rs index 5273440f74..5f619588ad 100644 --- a/crates/parser/src/ast.rs +++ b/crates/parser/src/ast.rs @@ -3,9 +3,10 @@ use fe_common::{Span, Spanned}; use indenter::indented; use serde::{Deserialize, Serialize}; pub use smol_str::SmolStr; -use std::fmt; -use std::fmt::Formatter; -use std::fmt::Write; +use std::{ + fmt, + fmt::{Formatter, Write}, +}; use vec1::Vec1; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)] diff --git a/crates/parser/src/grammar/contracts.rs 
b/crates/parser/src/grammar/contracts.rs index ed234aab57..acc38a4591 100644 --- a/crates/parser/src/grammar/contracts.rs +++ b/crates/parser/src/grammar/contracts.rs @@ -1,9 +1,13 @@ -use super::functions::parse_fn_def; -use super::types::{parse_field, parse_opt_qualifier}; +use super::{ + functions::parse_fn_def, + types::{parse_field, parse_opt_qualifier}, +}; -use crate::ast::{Contract, ContractStmt}; -use crate::node::{Node, Span}; -use crate::{ParseFailed, ParseResult, Parser, TokenKind}; +use crate::{ + ast::{Contract, ContractStmt}, + node::{Node, Span}, + ParseFailed, ParseResult, Parser, TokenKind, +}; // Rule: all "statement" level parse functions consume their trailing // newline(s), either directly or via a function they call. diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index 10a752163d..82b6873b85 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -1,6 +1,8 @@ -use crate::ast::{self, CallArg, Expr, GenericArg, Path}; -use crate::node::Node; -use crate::{Label, ParseFailed, ParseResult, Parser, Token, TokenKind}; +use crate::{ + ast::{self, CallArg, Expr, GenericArg, Path}, + node::Node, + Label, ParseFailed, ParseResult, Parser, Token, TokenKind, +}; use super::types::parse_generic_args; diff --git a/crates/parser/src/grammar/functions.rs b/crates/parser/src/grammar/functions.rs index 3b2a8dae99..77344ae432 100644 --- a/crates/parser/src/grammar/functions.rs +++ b/crates/parser/src/grammar/functions.rs @@ -1,12 +1,13 @@ -use super::expressions::parse_expr; -use super::types::parse_type_desc; - -use crate::ast::{ - BinOperator, Expr, FuncStmt, Function, FunctionArg, FunctionSignature, GenericParameter, - LiteralPattern, MatchArm, Path, Pattern, TypeDesc, VarDeclTarget, +use super::{expressions::parse_expr, types::parse_type_desc}; + +use crate::{ + ast::{ + BinOperator, Expr, FuncStmt, Function, FunctionArg, FunctionSignature, GenericParameter, + LiteralPattern, MatchArm, Path, Pattern, TypeDesc, VarDeclTarget, + }, + node::{Node, Span}, + Label, ParseFailed, ParseResult, Parser, TokenKind, }; -use crate::node::{Node, Span}; -use crate::{Label, ParseFailed, ParseResult, Parser, TokenKind}; /// Parse a function definition without a body. The optional `pub` qualifier /// must be parsed by the caller, and passed in. 
Next token must be `unsafe` or diff --git a/crates/parser/src/grammar/module.rs b/crates/parser/src/grammar/module.rs index e6c1561695..9abd2d4d9e 100644 --- a/crates/parser/src/grammar/module.rs +++ b/crates/parser/src/grammar/module.rs @@ -1,13 +1,17 @@ -use super::expressions::parse_expr; -use super::functions::parse_fn_def; -use super::types::{ - parse_impl_def, parse_path_tail, parse_struct_def, parse_trait_def, parse_type_alias, - parse_type_desc, +use super::{ + contracts::parse_contract_def, + expressions::parse_expr, + functions::parse_fn_def, + types::{ + parse_enum_def, parse_impl_def, parse_path_tail, parse_struct_def, parse_trait_def, + parse_type_alias, parse_type_desc, + }, +}; +use crate::{ + ast::{ConstantDecl, Module, ModuleStmt, Pragma, Use, UseTree}, + node::{Node, Span}, + Label, ParseFailed, ParseResult, Parser, TokenKind, }; -use super::{contracts::parse_contract_def, types::parse_enum_def}; -use crate::ast::{ConstantDecl, Module, ModuleStmt, Pragma, Use, UseTree}; -use crate::node::{Node, Span}; -use crate::{Label, ParseFailed, ParseResult, Parser, TokenKind}; use semver::VersionReq; diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs index 54adf78a28..f4b8c9c169 100644 --- a/crates/parser/src/grammar/types.rs +++ b/crates/parser/src/grammar/types.rs @@ -1,11 +1,14 @@ -use crate::ast::{ - self, Enum, Field, GenericArg, Impl, Path, Trait, TypeAlias, TypeDesc, Variant, VariantKind, +use crate::{ + ast::{ + self, Enum, Field, GenericArg, Impl, Path, Trait, TypeAlias, TypeDesc, Variant, VariantKind, + }, + grammar::{ + expressions::parse_expr, + functions::{parse_fn_def, parse_fn_sig}, + }, + node::{Node, Span}, + ParseFailed, ParseResult, Parser, Token, TokenKind, }; -use crate::grammar::expressions::parse_expr; -use crate::grammar::functions::{parse_fn_def, parse_fn_sig}; -use crate::node::{Node, Span}; -use crate::Token; -use crate::{ParseFailed, ParseResult, Parser, TokenKind}; use fe_common::diagnostics::Label; use if_chain::if_chain; use smol_str::SmolStr; diff --git a/crates/parser/src/lexer/token.rs b/crates/parser/src/lexer/token.rs index 643ffe74d4..5400982bfc 100644 --- a/crates/parser/src/lexer/token.rs +++ b/crates/parser/src/lexer/token.rs @@ -1,5 +1,4 @@ -use crate::node::Node; -use crate::node::Span; +use crate::node::{Node, Span}; use logos::Logos; use smol_str::SmolStr; use std::ops::Add; diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index a11e77aed8..5556e195e5 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -7,8 +7,7 @@ pub use parser::{Label, ParseFailed, ParseResult, Parser}; pub mod node; use ast::Module; -use fe_common::diagnostics::Diagnostic; -use fe_common::files::SourceFileId; +use fe_common::{diagnostics::Diagnostic, files::SourceFileId}; /// Parse a [`Module`] from the file content string. 
/// diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs index 56436d4097..7ef39aca16 100644 --- a/crates/parser/src/parser.rs +++ b/crates/parser/src/parser.rs @@ -1,9 +1,13 @@ pub use fe_common::diagnostics::Label; -use fe_common::diagnostics::{Diagnostic, Severity}; -use fe_common::files::SourceFileId; - -use crate::lexer::{Lexer, Token, TokenKind}; -use crate::node::Span; +use fe_common::{ + diagnostics::{Diagnostic, Severity}, + files::SourceFileId, +}; + +use crate::{ + lexer::{Lexer, Token, TokenKind}, + node::Span, +}; use std::{error, fmt}; #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] diff --git a/crates/parser/tests/cases/errors.rs b/crates/parser/tests/cases/errors.rs index becc803558..dd7fa7b6ea 100644 --- a/crates/parser/tests/cases/errors.rs +++ b/crates/parser/tests/cases/errors.rs @@ -1,8 +1,8 @@ -use fe_common::db::TestDb; -use fe_common::diagnostics::diagnostics_string; -use fe_common::SourceFileId; -use fe_parser::grammar::{expressions, functions, module}; -use fe_parser::Parser; +use fe_common::{db::TestDb, diagnostics::diagnostics_string, SourceFileId}; +use fe_parser::{ + grammar::{expressions, functions, module}, + Parser, +}; use insta::assert_snapshot; pub fn err_string(test_name: &str, mut parse_fn: F, src: &str) -> String diff --git a/crates/parser/tests/cases/parse_ast.rs b/crates/parser/tests/cases/parse_ast.rs index 78a849d58e..8102fe5857 100644 --- a/crates/parser/tests/cases/parse_ast.rs +++ b/crates/parser/tests/cases/parse_ast.rs @@ -1,10 +1,12 @@ -use fe_common::db::TestDb; -use fe_common::diagnostics::print_diagnostics; -use fe_common::utils::ron::to_ron_string_pretty; -use fe_common::SourceFileId; -use fe_parser::grammar::{expressions, functions, module, types}; -use fe_parser::node::Node; -use fe_parser::{ast, ParseResult, Parser}; +use fe_common::{ + db::TestDb, diagnostics::print_diagnostics, utils::ron::to_ron_string_pretty, SourceFileId, +}; +use fe_parser::{ + ast, + grammar::{expressions, functions, module, types}, + node::Node, + ParseResult, Parser, +}; use insta::assert_snapshot; use serde::Serialize; use wasm_bindgen_test::wasm_bindgen_test; diff --git a/crates/parser/tests/cases/print_ast.rs b/crates/parser/tests/cases/print_ast.rs index 900f3472d1..d6292efd84 100644 --- a/crates/parser/tests/cases/print_ast.rs +++ b/crates/parser/tests/cases/print_ast.rs @@ -1,6 +1,4 @@ -use fe_common::db::TestDb; -use fe_common::diagnostics::print_diagnostics; -use fe_common::SourceFileId; +use fe_common::{db::TestDb, diagnostics::print_diagnostics, SourceFileId}; use fe_parser::parse_file; use fe_test_files::fixture; use insta::assert_snapshot; diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 8ba1bcfea1..3cb1fa94c2 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -2,15 +2,16 @@ pub mod _macro_support; use evm_runtime::{ExitReason, Handler}; -use fe_common::diagnostics::print_diagnostics; -use fe_common::utils::keccak; +use fe_common::{diagnostics::print_diagnostics, utils::keccak}; use fe_driver as driver; use primitive_types::{H160, U256}; -use std::cell::RefCell; -use std::collections::BTreeMap; -use std::fmt::{Display, Formatter}; -use std::str::FromStr; +use std::{ + cell::RefCell, + collections::BTreeMap, + fmt::{Display, Formatter}, + str::FromStr, +}; use yultsur::*; #[macro_export] diff --git a/crates/tests-legacy/src/ingots.rs b/crates/tests-legacy/src/ingots.rs index 99dfe80d58..f5475a73de 100644 --- a/crates/tests-legacy/src/ingots.rs +++ 
b/crates/tests-legacy/src/ingots.rs @@ -1,6 +1,7 @@ #![cfg(feature = "solc-backend")] -use fe_compiler_test_utils::*; -use fe_compiler_test_utils::{self as test_utils}; +use fe_compiler_test_utils::{ + *, {self as test_utils}, +}; pub fn deploy_ingot( executor: &mut Executor, From 051cc2169cd25f32d4ac32cbabb6e49ea8f2eb74 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 3 May 2023 16:52:38 +0200 Subject: [PATCH 149/678] Add `propagate_glob` method to `QueryPropagator` --- .../src/name_resolution/name_resolver.rs | 69 +++++++++++++++++++ 1 file changed, 69 insertions(+) diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index a08df85895..2f0c1d2a2d 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -673,6 +673,7 @@ impl NameDomain { trait QueryPropagator { fn propagate(&self, query: NameQuery) -> PropagatedQuery; + fn propagate_glob(&self) -> PropagatedQuery; } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -690,6 +691,10 @@ impl QueryPropagator for LexEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for ModEdge { @@ -700,6 +705,10 @@ impl QueryPropagator for ModEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::Terminated + } } impl QueryPropagator for TypeEdge { @@ -710,6 +719,10 @@ impl QueryPropagator for TypeEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::Terminated + } } impl QueryPropagator for TraitEdge { @@ -720,6 +733,9 @@ impl QueryPropagator for TraitEdge { PropagatedQuery::UnPropagated } } + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::Terminated + } } impl QueryPropagator for ValueEdge { @@ -730,6 +746,9 @@ impl QueryPropagator for ValueEdge { PropagatedQuery::UnPropagated } } + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::Terminated + } } impl QueryPropagator for GenericParamEdge { @@ -740,6 +759,10 @@ impl QueryPropagator for GenericParamEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for FieldEdge { @@ -750,6 +773,10 @@ impl QueryPropagator for FieldEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for VariantEdge { @@ -760,6 +787,10 @@ impl QueryPropagator for VariantEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::Terminated + } } impl QueryPropagator for SuperEdge { @@ -770,6 +801,10 @@ impl QueryPropagator for SuperEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for IngotEdge { @@ -780,6 +815,10 @@ impl QueryPropagator for IngotEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for SelfTyEdge { @@ -790,6 +829,10 @@ impl QueryPropagator for SelfTyEdge { PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for SelfEdge { @@ -800,12 +843,20 @@ impl QueryPropagator for SelfEdge { 
PropagatedQuery::UnPropagated } } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for AnonEdge { fn propagate(&self, _query: NameQuery) -> PropagatedQuery { PropagatedQuery::UnPropagated } + + fn propagate_glob(&self) -> PropagatedQuery { + PropagatedQuery::UnPropagated + } } impl QueryPropagator for EdgeKind { @@ -826,4 +877,22 @@ impl QueryPropagator for EdgeKind { EdgeKind::Anon(edge) => edge.propagate(query), } } + + fn propagate_glob(&self) -> PropagatedQuery { + match self { + EdgeKind::Lex(edge) => edge.propagate_glob(), + EdgeKind::Mod(edge) => edge.propagate_glob(), + EdgeKind::Type(edge) => edge.propagate_glob(), + EdgeKind::Trait(edge) => edge.propagate_glob(), + EdgeKind::GenericParam(edge) => edge.propagate_glob(), + EdgeKind::Value(edge) => edge.propagate_glob(), + EdgeKind::Field(edge) => edge.propagate_glob(), + EdgeKind::Variant(edge) => edge.propagate_glob(), + EdgeKind::Super(edge) => edge.propagate_glob(), + EdgeKind::Ingot(edge) => edge.propagate_glob(), + EdgeKind::Self_(edge) => edge.propagate_glob(), + EdgeKind::SelfTy(edge) => edge.propagate_glob(), + EdgeKind::Anon(edge) => edge.propagate_glob(), + } + } } From d5499d16845592087050b301609e6f573121a333 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 4 May 2023 10:45:10 -0500 Subject: [PATCH 150/678] Initial language-server crate --- crates/language-server/Cargo.toml | 14 ++++++++++++++ crates/language-server/src/main.rs | 3 +++ 2 files changed, 17 insertions(+) create mode 100644 crates/language-server/Cargo.toml create mode 100644 crates/language-server/src/main.rs diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml new file mode 100644 index 0000000000..942c33d02b --- /dev/null +++ b/crates/language-server/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "fe-language-server" +version = "0.22.0" +edition = "2021" +authors = ["The Fe Developers "] +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "An LSP language server for Fe lang" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +lsp-server = "0.7.0" +lsp-types = "0.94.0" diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs new file mode 100644 index 0000000000..e7a11a969c --- /dev/null +++ b/crates/language-server/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, world!"); +} From b5af320aab96a14bc6069a0ff0ee36d5864ec146 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 5 May 2023 01:10:49 +0200 Subject: [PATCH 151/678] Change HIR Use definition to have flat use path instead of nested use tree --- crates/hir/src/hir_def/item.rs | 7 +- crates/hir/src/hir_def/use_tree.rs | 19 +--- crates/hir/src/lib.rs | 12 +- crates/hir/src/lower/item.rs | 22 +--- crates/hir/src/lower/use_tree.rs | 157 +++++++++++++++++++++----- crates/hir/src/span/item.rs | 124 ++++++++++++++++---- crates/hir/src/span/mod.rs | 38 +++++++ crates/hir/src/span/transition.rs | 85 +++++++++++--- crates/hir/src/span/use_tree.rs | 94 ++++++++++----- crates/parser2/src/ast/item.rs | 4 + crates/parser2/src/ast/use_tree.rs | 14 ++- crates/parser2/src/parser/use_tree.rs | 6 +- 12 files changed, 436 insertions(+), 146 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 14045fd2aa..5304e788ec 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -23,7 +23,7 @@ use crate::{ use super::{ 
module_tree_impl, scope_graph::ScopeGraph, AttrListId, Body, FnParamListId, GenericParamListId, - IdentId, ModuleTree, Partial, TypeId, WhereClauseId, + IdentId, ModuleTree, Partial, TypeId, UseAlias, WhereClauseId, }; #[derive( @@ -353,7 +353,8 @@ pub struct Use { #[id] id: TrackedItemId, - pub tree: Partial, + pub path: Partial, + pub alias: Option>, pub vis: Visibility, pub top_mod: TopLevelMod, @@ -443,7 +444,7 @@ pub enum TrackedItemId { Trait(Partial), ImplTrait(Partial, Partial), Const(Partial), - Use(Partial), + Use(Partial), Extern, Joined(Box, Box), } diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index 643c8047a9..f1ea7c367d 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -3,24 +3,9 @@ use crate::hir_def::Partial; use super::IdentId; #[salsa::interned] -pub struct UseTreeId { - /// The base path of the use tree. - /// `Foo::Foo2` in `Foo::Foo2::{Bar::*, Baz::{x, y}}` - /// - /// NOTE: If the tree root is started with `{}`, then the `path` is `None`. +pub struct UsePathId { #[return_ref] - pub path: Vec>, - - /// The subtree of the use tree. - /// - /// `Bar::*` and `Baz::{x, y}` in `Foo::Foo2::{Bar::*, Baz::{x, y}}`. - #[return_ref] - pub subtree: Vec, - - //// The alias of this use tree. - /// `Bar` in `Foo as Bar;` - #[return_ref] - pub alias: Option>, + pub segments: Vec>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 57901dbab8..a87fe584e1 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -42,7 +42,7 @@ pub struct Jar( hir_def::EnumVariantListId, hir_def::ImplItemListId, hir_def::TypeId, - hir_def::UseTreeId, + hir_def::UsePathId, /// Accumulated diagnostics. ParseDiagnosticAccumulator, /// Private tracked functions. 
These are not part of the public API, and @@ -178,6 +178,16 @@ mod test_db { tree.items_dfs().find_map(|it| it.try_into().ok()).unwrap() } + pub fn expect_items(&mut self, text: &str) -> Vec + where + ItemKind: TryInto, + { + let tree = self.parse_source(text); + tree.items_dfs() + .filter_map(|it| it.try_into().ok()) + .collect() + } + pub fn text_at(&self, top_mod: TopLevelMod, span: &impl LazySpan) -> &str { let range = span.resolve(self).range; let file = top_mod.file(self.upcast()); diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index dce3ad41ee..e228ed82bf 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -3,7 +3,7 @@ use parser::ast::{self, prelude::*}; use crate::{ hir_def::{ item::*, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TraitRef, TypeId, - UseTreeId, WhereClauseId, + WhereClauseId, }, span::HirOrigin, }; @@ -399,26 +399,8 @@ impl Const { } } -impl Use { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Use, - ) -> Self { - ctxt.enter_scope(false); - - let tree = UseTreeId::lower_ast_partial(ctxt, ast.use_tree()); - let id = TrackedItemId::Use(tree).join(parent_id); - - let origin = HirOrigin::raw(&ast); - let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); - let use_ = Self::new(ctxt.db(), id, tree, vis, ctxt.top_mod(), origin); - ctxt.leave_scope(use_) - } -} - impl ItemModifier { - fn lower_ast(ast: Option) -> Self { + pub(super) fn lower_ast(ast: Option) -> Self { let Some(ast) = ast else { return Self::None; }; diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index 13459a6feb..502ec0ae1b 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -1,46 +1,94 @@ -use parser::ast; +use parser::ast::{self, prelude::*}; -use crate::hir_def::{kw, use_tree::*, IdentId, Partial}; +use crate::{ + hir_def::{kw, use_tree::*, IdentId, ItemModifier, Partial, TrackedItemId, Use}, + span::{HirOrigin, UseDesugared}, +}; use super::FileLowerCtxt; -impl UseTreeId { - pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::UseTree) -> Self { - let path = if let Some(path) = ast.path() { - path.into_iter() - .map(|ast| UsePathSegment::lower_ast_partial(ctxt, ast)) - .collect() - } else { - vec![] +impl Use { + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'_>, + parent_id: TrackedItemId, + ast: ast::Use, + ) -> Vec { + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + + let Some(use_tree) = ast.use_tree() else { + ctxt.enter_scope(false); + let id = TrackedItemId::Use(Partial::Absent).join(parent_id); + let path = Partial::Absent; + let alias = None; + let top_mod = ctxt.top_mod(); + let origin = HirOrigin::raw(&ast); + let use_ = Self::new(ctxt.db(),id, path, alias, vis, top_mod, origin); + ctxt.leave_scope(use_); + return vec![use_]; }; - let subtree = if let Some(children) = ast.children() { - children - .into_iter() - .map(|ast| UseTreeId::lower_ast(ctxt, ast)) - .collect() - } else { - vec![] + + // If the use tree has no subtree, then there is no need to decompose it. 
+ if !use_tree.has_subtree() { + ctxt.enter_scope(false); + let path = UsePathId::lower_ast_partial(ctxt, use_tree.path()); + let id = TrackedItemId::Use(path).join(parent_id); + let alias = use_tree + .alias() + .map(|alias| UseAlias::lower_ast_partial(ctxt, alias)); + let top_mod = ctxt.top_mod(); + let origin = HirOrigin::raw(&ast); + let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); + ctxt.leave_scope(use_); + return vec![use_]; + } + + let decomposed_paths = decompose_tree(ctxt, ast, use_tree); + decomposed_paths + .into_iter() + .map(|(path, alias, origin)| { + ctxt.enter_scope(false); + let id = TrackedItemId::Use(path).join(parent_id.clone()); + let top_mod = ctxt.top_mod(); + let alias = alias; + let origin = HirOrigin::desugared(origin); + let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); + ctxt.leave_scope(use_) + }) + .collect() + } +} + +impl UsePathId { + fn lower_ast_partial(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Partial { + let Some(ast) = ast else { + return Partial::Absent; }; - let alias = ast - .alias() - .map(|ast| UseAlias::lower_ast_partial(ctxt, ast)); - Self::new(ctxt.db(), path, subtree, alias) + let segments = ast + .into_iter() + .map(|ast| UsePathSegment::lower_ast_partial(ctxt, ast)) + .collect(); + Some(Self::new(ctxt.db(), segments)).into() } - pub(super) fn lower_ast_partial( + fn from_segments( ctxt: &mut FileLowerCtxt<'_>, - ast: Option, + ast_segs: Vec, ) -> Partial { - ast.map(|ast| Self::lower_ast(ctxt, ast)).into() + if ast_segs.is_empty() { + Partial::Absent + } else { + let segs = ast_segs + .into_iter() + .map(|seg| UsePathSegment::lower_ast_partial(ctxt, seg)) + .collect(); + Partial::Present(Self::new(ctxt.db(), segs)) + } } } impl UsePathSegment { - pub(super) fn lower_ast_partial( - ctxt: &mut FileLowerCtxt<'_>, - ast: ast::UsePathSegment, - ) -> Partial { + fn lower_ast_partial(ctxt: &mut FileLowerCtxt<'_>, ast: ast::UsePathSegment) -> Partial { ast.kind() .map(|kind| match kind { ast::UsePathSegmentKind::Ingot(_) => Self::Ident(kw::INGOT), @@ -58,7 +106,7 @@ impl UsePathSegment { impl UseAlias { pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, - ast: ast::UseTreeAlias, + ast: ast::UseAlias, ) -> Partial { if let Some(ident) = ast.ident() { Some(Self::Ident(IdentId::lower_token(ctxt, ident))) @@ -70,3 +118,54 @@ impl UseAlias { .into() } } + +fn decompose_tree( + ctxt: &mut FileLowerCtxt<'_>, + ast: ast::Use, + use_tree: ast::UseTree, +) -> Vec<(Partial, Option>, UseDesugared)> { + let use_desugared = UseDesugared::new(&ast); + decompose_subtree(ctxt, use_tree, (vec![], use_desugared)) + .into_iter() + .map(|(ast_segs, alias, desugared)| { + let path = UsePathId::from_segments(ctxt, ast_segs); + (path, alias, desugared) + }) + .collect() +} + +fn decompose_subtree( + ctxt: &mut FileLowerCtxt, + subtree: ast::UseTree, + succ: (Vec, UseDesugared), +) -> Vec<( + Vec, + Option>, + UseDesugared, +)> { + let (mut succ_path, mut succ_desugared) = succ; + if let Some(path) = subtree.path() { + for seg in path { + succ_desugared.push_seg(&seg); + succ_path.push(seg.clone()); + } + } + + if let Some(alias) = subtree.alias() { + succ_desugared.add_alias(&alias); + let alias = UseAlias::lower_ast_partial(ctxt, alias); + assert!(subtree.children().is_none()); + return vec![(succ_path, Some(alias), succ_desugared)]; + } + + let Some(children) = subtree.children() else { + return vec![(succ_path, None, succ_desugared)]; + }; + + children + .into_iter() + .flat_map(|subtree| { + 
decompose_subtree(ctxt, subtree, (succ_path.clone(), succ_desugared.clone())) + }) + .collect() +} diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 15636dcc65..a0e658dcfa 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -1,8 +1,15 @@ -use parser::ast; - -use crate::hir_def::{ - Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, TypeAlias, - Use, +use parser::ast::{self, prelude::AstNode}; + +use crate::{ + hir_def::{ + Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, + TypeAlias, Use, + }, + span::{ + transition::{LazyArg, LazyTransitionFn, ResolvedOrigin, ResolvedOriginKind}, + use_tree::LazyUsePathSpan, + DesugaredOrigin, DesugaredUseFocus, + }, }; use super::{ @@ -10,7 +17,7 @@ use super::{ define_lazy_span_node, params::{LazyFnParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, types::{LazyPathTypeSpan, LazyTypeSpan}, - use_tree::LazyUseTreeSpan, + use_tree::LazyUseAliasSpan, }; define_lazy_span_node!(LazyTopLevelModSpan, ast::Root, new(TopLevelMod),); @@ -167,10 +174,63 @@ define_lazy_span_node!( new(Use), @node { (attributes, attr_list, LazyAttrListSpan), - (use_tree, use_tree, LazyUseTreeSpan), } ); +impl LazyUseSpan { + pub fn path(&self) -> LazyUsePathSpan { + fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { + origin + .map(|node| { + ast::Use::cast(node) + .and_then(|use_| use_.use_tree()) + .and_then(|tree| tree.path()) + .map(|n| n.syntax().clone().into()) + }) + .map_desugared(|root, desugared| match desugared { + DesugaredOrigin::Use(mut use_) => { + use_.focus = DesugaredUseFocus::Path; + ResolvedOriginKind::Desugared(root, DesugaredOrigin::Use(use_)) + } + _ => ResolvedOriginKind::None, + }) + } + + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::None, + }; + + LazyUsePathSpan(self.0.push_transition(lazy_transition)) + } + + pub fn alias(&self) -> LazyUseAliasSpan { + fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { + origin + .map(|node| { + ast::Use::cast(node) + .and_then(|use_| use_.use_tree()) + .and_then(|tree| tree.alias()) + .map(|n| n.syntax().clone().into()) + }) + .map_desugared(|root, desugared| match desugared { + DesugaredOrigin::Use(mut use_) => { + use_.focus = DesugaredUseFocus::Alias; + ResolvedOriginKind::Desugared(root, DesugaredOrigin::Use(use_)) + } + _ => ResolvedOriginKind::None, + }) + } + + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::None, + }; + + LazyUseAliasSpan(self.0.push_transition(lazy_transition)) + } +} + define_lazy_span_node!(LazyBodySpan, ast::Expr, new(Body),); define_lazy_span_node!( @@ -394,24 +454,48 @@ mod tests { let mut db = TestDb::default(); let text = r#" - use foo::bar::{baz::*, qux as Alias} + use foo::bar::baz::Trait as _ "#; let use_ = db.expect_item::(text); + let top_mod = use_.top_mod(db.upcast()); - let use_tree = use_.lazy_span().use_tree(); + let use_span = use_.lazy_span(); + let use_path_span = use_span.path(); + assert_eq!("foo", db.text_at(top_mod, &use_path_span.segment(0))); + assert_eq!("bar", db.text_at(top_mod, &use_path_span.segment(1))); + assert_eq!("baz", db.text_at(top_mod, &use_path_span.segment(2))); + assert_eq!("Trait", db.text_at(top_mod, &use_path_span.segment(3))); + assert_eq!("as _", db.text_at(top_mod, &use_span.alias())); + assert_eq!("_", db.text_at(top_mod, &use_span.alias().name())); + } - assert_eq!("foo::bar", db.text_at(top_mod, &use_tree.path())); - let use_tree_list = use_tree.subtree(); - let 
use_tree_1 = use_tree_list.tree(0); - let use_tree_2 = use_tree_list.tree(1); + #[test] + fn use_span_desugared() { + let mut db = TestDb::default(); - assert_eq!("baz::*", db.text_at(top_mod, &use_tree_1.path())); - assert_eq!("qux", db.text_at(top_mod, &use_tree_2.path())); - assert_eq!("as Alias", db.text_at(top_mod, &use_tree_2.alias())); - assert_eq!( - "Alias", - db.text_at(top_mod, &use_tree_2.alias().alias_name()) - ); + let text = r#" + use foo::bar::{baz::*, qux as Alias} + "#; + + let uses = db.expect_items::(text); + assert_eq!(uses.len(), 2); + + let top_mod = uses[0].top_mod(db.upcast()); + + let use_span = uses[0].lazy_span(); + let use_path_span = use_span.path(); + assert_eq!("foo", db.text_at(top_mod, &use_path_span.segment(0))); + assert_eq!("bar", db.text_at(top_mod, &use_path_span.segment(1))); + assert_eq!("qux", db.text_at(top_mod, &use_path_span.segment(2))); + assert_eq!("as Alias", db.text_at(top_mod, &use_span.alias())); + assert_eq!("Alias", db.text_at(top_mod, &use_span.alias().name())); + + let use_span = uses[1].lazy_span(); + let use_path_span = use_span.path(); + assert_eq!("foo", db.text_at(top_mod, &use_path_span.segment(0))); + assert_eq!("bar", db.text_at(top_mod, &use_path_span.segment(1))); + assert_eq!("baz", db.text_at(top_mod, &use_path_span.segment(2))); + assert_eq!("*", db.text_at(top_mod, &use_path_span.segment(3))); } } diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 053b96c932..578f0bc4de 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -157,6 +157,10 @@ pub enum DesugaredOrigin { /// The HIR node is the result of desugaring an augmented assignment /// statement. AugAssign(AugAssignDesugared), + + /// The HIR node is the result of desugaring a AST use. + /// In HIR lowering, nested use tree is flattened into a single use path. 
+ Use(UseDesugared), } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] @@ -176,6 +180,40 @@ impl AugAssignDesugared { } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct UseDesugared { + pub root: AstPtr, + pub path: Vec>, + pub alias: Option>, + focus: DesugaredUseFocus, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum DesugaredUseFocus { + Root, + Path, + Alias, +} + +impl UseDesugared { + pub(super) fn new(ast: &ast::Use) -> Self { + Self { + root: AstPtr::new(ast), + path: vec![], + alias: None, + focus: DesugaredUseFocus::Root, + } + } + + pub(super) fn add_alias(&mut self, alias: &ast::UseAlias) { + self.alias = Some(AstPtr::new(alias)) + } + + pub(super) fn push_seg(&mut self, seg: &ast::UsePathSegment) { + self.path.push(AstPtr::new(seg)); + } +} + use transition::define_lazy_span_node; use self::transition::SpanTransitionChain; diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 13902f52f5..a493539580 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -11,14 +11,15 @@ use crate::{ Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, TypeAlias, Use, }, - lower::{map_file_to_mod_impl, top_mod_ast}, + lower::top_mod_ast, SpannedHirDb, }; use super::{ body_ast, const_ast, contract_ast, enum_ast, expr::ExprRoot, func_ast, impl_ast, impl_trait_ast, mod_ast, pat::PatRoot, stmt::StmtRoot, struct_ast, trait_ast, type_alias_ast, - use_ast, AugAssignDesugared, DesugaredOrigin, HirOrigin, LazySpan, + use_ast, AugAssignDesugared, DesugaredOrigin, DesugaredUseFocus, HirOrigin, LazySpan, + UseDesugared, }; /// This type represents function from the hir origin to another hir origin to @@ -88,7 +89,9 @@ impl ResolvedOrigin { let kind = match origin { HirOrigin::Raw(ptr) => ResolvedOriginKind::Node(ptr.syntax_node_ptr().to_node(&root)), HirOrigin::Expanded(ptr) => ResolvedOriginKind::Expanded(ptr.to_node(&root)), - HirOrigin::Desugared(desugared) => ResolvedOriginKind::Desugared(desugared.clone()), + HirOrigin::Desugared(desugared) => { + ResolvedOriginKind::Desugared(root, desugared.clone()) + } HirOrigin::None => ResolvedOriginKind::None, }; @@ -113,13 +116,28 @@ impl ResolvedOrigin { kind, } } + + pub(crate) fn map_desugared(self, f: F) -> Self + where + F: FnOnce(SyntaxNode, DesugaredOrigin) -> ResolvedOriginKind, + { + let kind = match self.kind { + ResolvedOriginKind::Desugared(root, desugared) => f(root, desugared), + kind => kind, + }; + + ResolvedOrigin { + file: self.file, + kind, + } + } } pub(crate) enum ResolvedOriginKind { Node(SyntaxNode), Token(SyntaxToken), Expanded(SyntaxNode), - Desugared(DesugaredOrigin), + Desugared(SyntaxNode, DesugaredOrigin), None, } @@ -179,7 +197,9 @@ impl LazySpan for SpanTransitionChain { ResolvedOriginKind::Expanded(node) => { Span::new(resolved.file, node.text_range(), SpanKind::Expanded) } - ResolvedOriginKind::Desugared(desugared) => desugared.resolve(db, resolved.file), + ResolvedOriginKind::Desugared(root, desugared) => { + desugared.resolve(db, root, resolved.file) + } ResolvedOriginKind::None => Span::new( resolved.file, TextRange::new(0.into(), 0.into()), @@ -333,25 +353,54 @@ macro_rules! 
define_lazy_span_node { } impl DesugaredOrigin { - fn resolve(self, db: &dyn SpannedHirDb, file: InputFile) -> Span { + fn resolve(self, _db: &dyn SpannedHirDb, root: SyntaxNode, file: InputFile) -> Span { let range = match self { Self::AugAssign(AugAssignDesugared::Stmt(ptr)) => { - let top_mod = map_file_to_mod_impl(db.upcast(), file); - let top_mod_ast = top_mod_ast(db.upcast(), top_mod); - ptr.syntax_node_ptr() - .to_node(top_mod_ast.syntax()) - .text_range() + ptr.syntax_node_ptr().to_node(&root).text_range() } - Self::AugAssign(AugAssignDesugared::Lhs(range)) => range, - Self::AugAssign(AugAssignDesugared::Rhs(ptr)) => { - let top_mod = map_file_to_mod_impl(db.upcast(), file); - let top_mod_ast = top_mod_ast(db.upcast(), top_mod); - ptr.syntax_node_ptr() - .to_node(top_mod_ast.syntax()) - .text_range() + ptr.syntax_node_ptr().to_node(&root).text_range() } + + Self::Use(UseDesugared { + root: use_root, + path, + alias, + focus, + }) => match focus { + DesugaredUseFocus::Root => use_root.syntax_node_ptr().to_node(&root).text_range(), + DesugaredUseFocus::Path => { + if let Some(first_seg) = path.first() { + let last_seg = path.last().unwrap(); + TextRange::new( + first_seg + .syntax_node_ptr() + .to_node(&root) + .text_range() + .start(), + last_seg.syntax_node_ptr().to_node(&root).text_range().end(), + ) + } else { + return Span::new( + file, + TextRange::new(0.into(), 0.into()), + SpanKind::NotFound, + ); + } + } + DesugaredUseFocus::Alias => { + if let Some(alias) = alias { + alias.syntax_node_ptr().to_node(&root).text_range() + } else { + return Span::new( + file, + TextRange::new(0.into(), 0.into()), + SpanKind::NotFound, + ); + } + } + }, }; Span::new(file, range, SpanKind::Original) diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index 2d9c3c3d46..a4ba2975ec 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -1,40 +1,72 @@ -use parser::ast; +use parser::ast::{self, prelude::*}; -use super::define_lazy_span_node; +use crate::span::{ + transition::{LazyArg, LazyTransitionFn, ResolvedOrigin, ResolvedOriginKind}, + DesugaredOrigin, +}; -define_lazy_span_node!( - LazyUseTreeSpan, - ast::UseTree, - @node { - (path, path, LazyUsePathSpan), - (subtree, children, LazySubUseTreeSpan), - (alias, alias, LazyUseTreeAliasSpan), - } -); +use super::{define_lazy_span_node, LazySpanAtom}; -define_lazy_span_node!( - LazyUsePathSpan, - ast::UsePath, - @idx { - (segment, LazyUsePathSegmentSpan), - } +define_lazy_span_node!(LazyUsePathSpan); +impl LazyUsePathSpan { + pub fn segment(&self, idx: usize) -> LazyUsePathSegmentSpan { + fn f(origin: ResolvedOrigin, arg: LazyArg) -> ResolvedOrigin { + let LazyArg::Idx(idx) = arg else { + unreachable!() + }; -); + origin + .map(|node| { + ast::UsePath::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }) + .map_desugared(|root, desugared| match desugared { + DesugaredOrigin::Use(use_) => use_ + .path + .get(idx) + .map(|ptr| ResolvedOriginKind::Node(ptr.syntax_node_ptr().to_node(&root))) + .unwrap_or_else(|| ResolvedOriginKind::None), + _ => ResolvedOriginKind::None, + }) + } -define_lazy_span_node!(LazyUsePathSegmentSpan); + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::Idx(idx), + }; -define_lazy_span_node!( - LazySubUseTreeSpan, - ast::UseTreeList, - @idx { - (tree, LazyUseTreeSpan), + LazyUsePathSegmentSpan(self.0.push_transition(lazy_transition)) } -); +} + +define_lazy_span_node!(LazyUsePathSegmentSpan); + 
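// (Editorial sketch, not part of the patch: how the lazy use-path spans above are
// meant to be consumed, mirroring the `TestDb` helpers (`expect_item`, `text_at`)
// used by the tests in `span/item.rs`, for a hypothetical source line
// `use foo::bar as Alias`. For desugared uses, `segment(idx)` and the alias span
// fall back to the AST pointers recorded in `UseDesugared`.)
//
//     let use_ = db.expect_item::<Use>(text);
//     let top_mod = use_.top_mod(db.upcast());
//     let use_span = use_.lazy_span();
//     assert_eq!("foo", db.text_at(top_mod, &use_span.path().segment(0)));
//     assert_eq!("bar", db.text_at(top_mod, &use_span.path().segment(1)));
//     assert_eq!("Alias", db.text_at(top_mod, &use_span.alias().name()));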
+define_lazy_span_node!(LazyUseAliasSpan, ast::UseAlias,); + +impl LazyUseAliasSpan { + pub fn name(&self) -> LazySpanAtom { + fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { + origin + .map(|node| { + ast::UseAlias::cast(node) + .and_then(|a| a.alias()) + .map(|n| n.into()) + }) + .map_desugared(|root, desugared| match desugared { + DesugaredOrigin::Use(use_) => use_ + .alias + .and_then(|ptr| ptr.to_node(&root).alias().map(ResolvedOriginKind::Token)) + .unwrap_or_else(|| ResolvedOriginKind::None), + _ => ResolvedOriginKind::None, + }) + } + + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::None, + }; -define_lazy_span_node!( - LazyUseTreeAliasSpan, - ast::UseTreeAlias, - @token { - (alias_name, ident), + LazySpanAtom(self.0.push_transition(lazy_transition)) } -); +} diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 628ea370f4..ea521573c7 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -304,6 +304,10 @@ impl Use { pub fn use_tree(&self) -> Option { support::child(self.syntax()) } + + pub fn has_sub_tree(&self) -> bool { + self.use_tree().map_or(false, |it| it.has_subtree()) + } } ast_node! { diff --git a/crates/parser2/src/ast/use_tree.rs b/crates/parser2/src/ast/use_tree.rs index 975a0f9080..1ea0bf33d6 100644 --- a/crates/parser2/src/ast/use_tree.rs +++ b/crates/parser2/src/ast/use_tree.rs @@ -27,9 +27,14 @@ impl UseTree { support::child(self.syntax()) } + /// Returns `true` if this use tree has children tree. + pub fn has_subtree(&self) -> bool { + self.children().is_some() + } + //// Returns the alias of this use tree. /// `Bar` in `Foo as Bar;` - pub fn alias(&self) -> Option { + pub fn alias(&self) -> Option { support::child(self.syntax()) } } @@ -87,10 +92,10 @@ impl UsePathSegment { } ast_node! { - pub struct UseTreeAlias, + pub struct UseAlias, SK::UseTreeRename, } -impl UseTreeAlias { +impl UseAlias { //// Returns `Some` if the alias is specified as an ident. pub fn ident(&self) -> Option { support::token(self.syntax(), SK::Ident) @@ -101,7 +106,8 @@ impl UseTreeAlias { support::token(self.syntax(), SK::Underscore) } - pub fn alias_syntax(&self) -> Option { + /// Returns `Some` if the alias has a name or `_`. + pub fn alias(&self) -> Option { self.ident().or_else(|| self.underscore()) } } diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs index 3bb8cc82c5..0d5caf54fe 100644 --- a/crates/parser2/src/parser/use_tree.rs +++ b/crates/parser2/src/parser/use_tree.rs @@ -26,7 +26,7 @@ impl super::Parse for UseTreeScope { parser.error_and_recover("can't use `as` with `*`", None); } if parser.current_kind() == Some(SyntaxKind::AsKw) { - parser.parse(UseTreeRenameScope::default(), None); + parser.parse(UseTreeAliasScope::default(), None); } return; } @@ -122,11 +122,11 @@ impl super::Parse for UsePathSegmentScope { } define_scope! 
{ - UseTreeRenameScope, + UseTreeAliasScope, UseTreeRename, Inheritance } -impl super::Parse for UseTreeRenameScope { +impl super::Parse for UseTreeAliasScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::AsKw); From 3470086972705577c0215bd3e5767e2b9cbfeef3 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 9 May 2023 09:48:36 -0500 Subject: [PATCH 152/678] Some initial language server scaffolding --- crates/language-server/Cargo.toml | 5 ++++ crates/language-server/src/config.rs | 0 crates/language-server/src/main.rs | 7 ++++- crates/language-server/src/server.rs | 14 ++++++++++ crates/language-server/src/state.rs | 40 ++++++++++++++++++++++++++++ 5 files changed, 65 insertions(+), 1 deletion(-) create mode 100644 crates/language-server/src/config.rs create mode 100644 crates/language-server/src/server.rs create mode 100644 crates/language-server/src/state.rs diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 942c33d02b..d6a8df34da 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -10,5 +10,10 @@ description = "An LSP language server for Fe lang" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +anyhow = "1.0.71" +clap = "4.2.7" +crossbeam-channel = "0.5.8" lsp-server = "0.7.0" lsp-types = "0.94.0" +serde = "1.0.162" +serde_json = "1.0.96" diff --git a/crates/language-server/src/config.rs b/crates/language-server/src/config.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index e7a11a969c..8d82f94f2a 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,3 +1,8 @@ +mod server; +mod state; + +use server::run_server; + fn main() { - println!("Hello, world!"); + let _ = run_server(); } diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs new file mode 100644 index 0000000000..f9a2014ed8 --- /dev/null +++ b/crates/language-server/src/server.rs @@ -0,0 +1,14 @@ +use anyhow::Result; +use lsp_server::Connection; +use super::state::ServerState; + +pub fn run_server() -> Result<()> { + let (connection, io_threads) = Connection::stdio(); + + let result = ServerState::new(connection.sender) + .run(connection.receiver); + + io_threads.join().unwrap(); + + result +} \ No newline at end of file diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs new file mode 100644 index 0000000000..630b264d3f --- /dev/null +++ b/crates/language-server/src/state.rs @@ -0,0 +1,40 @@ +use anyhow::Result; +use crossbeam_channel::{Receiver, Sender}; +use lsp_server::Message; +use lsp_types::{notification::Notification}; + +pub struct ServerState { + sender: Sender, +} + +impl ServerState { + pub fn new(sender: Sender) -> Self { + ServerState { + sender + } + } + + pub fn run(&mut self, receiver: Receiver) -> Result<()> { + while let Some(msg) = self.next_message(&receiver) { + if let lsp_server::Message::Notification(notification) = &msg { + if notification.method == lsp_types::notification::Exit::METHOD { + return Ok(()); + } + } + + self.handle_message(msg)?; + } + Ok(()) + } + + fn next_message(&self, receiver: &Receiver) -> Option { + crossbeam_channel::select! 
{ + recv(receiver) -> msg => msg.ok() + } + } + + fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { + // unimplemented!() + Ok(()) + } +} From 18c102996a6608dbbce76dca5c737c3f222679ca Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 12 May 2023 14:07:22 -0500 Subject: [PATCH 153/678] initialization --- crates/language-server/src/server.rs | 26 +++++++++++++++++++++++--- crates/language-server/src/state.rs | 1 - 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index f9a2014ed8..1cdaa7432f 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,14 +1,34 @@ use anyhow::Result; use lsp_server::Connection; +use lsp_types::ServerCapabilities; use super::state::ServerState; +fn server_capabilities() -> ServerCapabilities { + ServerCapabilities { + ..Default::default() + } +} + pub fn run_server() -> Result<()> { let (connection, io_threads) = Connection::stdio(); - let result = ServerState::new(connection.sender) - .run(connection.receiver); + let (request_id, initialize_params) = connection.initialize_start()?; + let capabilities = server_capabilities(); + + let initialize_result = lsp_types::InitializeResult { + capabilities: capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + + let initialize_result = serde_json::to_value(initialize_result).unwrap(); + + connection.initialize_finish(request_id, initialize_result)?; io_threads.join().unwrap(); - result + ServerState::new(connection.sender) + .run(connection.receiver) } \ No newline at end of file diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 630b264d3f..902ba87ac5 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -34,7 +34,6 @@ impl ServerState { } fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { - // unimplemented!() Ok(()) } } From 2a001d6ac692007d3cfd5f8647b6e6e94193ebdd Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 15 May 2023 17:46:43 -0500 Subject: [PATCH 154/678] Initial vscode client extension for language server --- .../editors/vscode/.eslintrc.json | 24 + .../language-server/editors/vscode/.gitignore | 14 + .../editors/vscode/.vscodeignore | 10 + .../editors/vscode/CHANGELOG.md | 9 + .../language-server/editors/vscode/README.md | 71 + .../editors/vscode/out/extension.js | 36 + .../editors/vscode/out/extension.js.map | 1 + .../editors/vscode/out/test/runTest.js | 22 + .../editors/vscode/out/test/runTest.js.map | 1 + .../vscode/out/test/suite/extension.test.js | 15 + .../out/test/suite/extension.test.js.map | 1 + .../editors/vscode/out/test/suite/index.js | 40 + .../vscode/out/test/suite/index.js.map | 1 + .../editors/vscode/package-lock.json | 4141 +++++++++++++++++ .../editors/vscode/package.json | 56 + .../editors/vscode/src/extension.ts | 52 + .../editors/vscode/src/test/runTest.ts | 23 + .../vscode/src/test/suite/extension.test.ts | 15 + .../editors/vscode/src/test/suite/index.ts | 38 + .../editors/vscode/tsconfig.json | 17 + .../vscode/vsc-extension-quickstart.md | 42 + 21 files changed, 4629 insertions(+) create mode 100644 crates/language-server/editors/vscode/.eslintrc.json create mode 100644 crates/language-server/editors/vscode/.gitignore create mode 100644 crates/language-server/editors/vscode/.vscodeignore create mode 100644 
crates/language-server/editors/vscode/CHANGELOG.md create mode 100644 crates/language-server/editors/vscode/README.md create mode 100644 crates/language-server/editors/vscode/out/extension.js create mode 100644 crates/language-server/editors/vscode/out/extension.js.map create mode 100644 crates/language-server/editors/vscode/out/test/runTest.js create mode 100644 crates/language-server/editors/vscode/out/test/runTest.js.map create mode 100644 crates/language-server/editors/vscode/out/test/suite/extension.test.js create mode 100644 crates/language-server/editors/vscode/out/test/suite/extension.test.js.map create mode 100644 crates/language-server/editors/vscode/out/test/suite/index.js create mode 100644 crates/language-server/editors/vscode/out/test/suite/index.js.map create mode 100644 crates/language-server/editors/vscode/package-lock.json create mode 100644 crates/language-server/editors/vscode/package.json create mode 100644 crates/language-server/editors/vscode/src/extension.ts create mode 100644 crates/language-server/editors/vscode/src/test/runTest.ts create mode 100644 crates/language-server/editors/vscode/src/test/suite/extension.test.ts create mode 100644 crates/language-server/editors/vscode/src/test/suite/index.ts create mode 100644 crates/language-server/editors/vscode/tsconfig.json create mode 100644 crates/language-server/editors/vscode/vsc-extension-quickstart.md diff --git a/crates/language-server/editors/vscode/.eslintrc.json b/crates/language-server/editors/vscode/.eslintrc.json new file mode 100644 index 0000000000..f9b22b793c --- /dev/null +++ b/crates/language-server/editors/vscode/.eslintrc.json @@ -0,0 +1,24 @@ +{ + "root": true, + "parser": "@typescript-eslint/parser", + "parserOptions": { + "ecmaVersion": 6, + "sourceType": "module" + }, + "plugins": [ + "@typescript-eslint" + ], + "rules": { + "@typescript-eslint/naming-convention": "warn", + "@typescript-eslint/semi": "warn", + "curly": "warn", + "eqeqeq": "warn", + "no-throw-literal": "warn", + "semi": "off" + }, + "ignorePatterns": [ + "out", + "dist", + "**/*.d.ts" + ] +} diff --git a/crates/language-server/editors/vscode/.gitignore b/crates/language-server/editors/vscode/.gitignore new file mode 100644 index 0000000000..434f2389b0 --- /dev/null +++ b/crates/language-server/editors/vscode/.gitignore @@ -0,0 +1,14 @@ +# Logs +logs +*.log + +# Compiled output +# dist +*.tsbuildinfo +node_modules/ + +# Optional npm cache directory +.npm + +# Environment variables +.env \ No newline at end of file diff --git a/crates/language-server/editors/vscode/.vscodeignore b/crates/language-server/editors/vscode/.vscodeignore new file mode 100644 index 0000000000..389996760c --- /dev/null +++ b/crates/language-server/editors/vscode/.vscodeignore @@ -0,0 +1,10 @@ +.vscode/** +.vscode-test/** +src/** +.gitignore +.yarnrc +vsc-extension-quickstart.md +**/tsconfig.json +**/.eslintrc.json +**/*.map +**/*.ts diff --git a/crates/language-server/editors/vscode/CHANGELOG.md b/crates/language-server/editors/vscode/CHANGELOG.md new file mode 100644 index 0000000000..d3504f3ab5 --- /dev/null +++ b/crates/language-server/editors/vscode/CHANGELOG.md @@ -0,0 +1,9 @@ +# Change Log + +All notable changes to the "fe-analyzer" extension will be documented in this file. + +Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file. 
+ +## [Unreleased] + +- Initial release \ No newline at end of file diff --git a/crates/language-server/editors/vscode/README.md b/crates/language-server/editors/vscode/README.md new file mode 100644 index 0000000000..5a9b0e116a --- /dev/null +++ b/crates/language-server/editors/vscode/README.md @@ -0,0 +1,71 @@ +# fe-analyzer README + +This is the README for your extension "fe-analyzer". After writing up a brief description, we recommend including the following sections. + +## Features + +Describe specific features of your extension including screenshots of your extension in action. Image paths are relative to this README file. + +For example if there is an image subfolder under your extension project workspace: + +\!\[feature X\]\(images/feature-x.png\) + +> Tip: Many popular extensions utilize animations. This is an excellent way to show off your extension! We recommend short, focused animations that are easy to follow. + +## Requirements + +If you have any requirements or dependencies, add a section describing those and how to install and configure them. + +## Extension Settings + +Include if your extension adds any VS Code settings through the `contributes.configuration` extension point. + +For example: + +This extension contributes the following settings: + +* `myExtension.enable`: Enable/disable this extension. +* `myExtension.thing`: Set to `blah` to do something. + +## Known Issues + +Calling out known issues can help limit users opening duplicate issues against your extension. + +## Release Notes + +Users appreciate release notes as you update your extension. + +### 1.0.0 + +Initial release of ... + +### 1.0.1 + +Fixed issue #. + +### 1.1.0 + +Added features X, Y, and Z. + +--- + +## Following extension guidelines + +Ensure that you've read through the extensions guidelines and follow the best practices for creating your extension. + +* [Extension Guidelines](https://code.visualstudio.com/api/references/extension-guidelines) + +## Working with Markdown + +You can author your README using Visual Studio Code. Here are some useful editor keyboard shortcuts: + +* Split the editor (`Cmd+\` on macOS or `Ctrl+\` on Windows and Linux). +* Toggle preview (`Shift+Cmd+V` on macOS or `Shift+Ctrl+V` on Windows and Linux). +* Press `Ctrl+Space` (Windows, Linux, macOS) to see a list of Markdown snippets. 
+ +## For more information + +* [Visual Studio Code's Markdown Support](http://code.visualstudio.com/docs/languages/markdown) +* [Markdown Syntax Reference](https://help.github.com/articles/markdown-basics/) + +**Enjoy!** diff --git a/crates/language-server/editors/vscode/out/extension.js b/crates/language-server/editors/vscode/out/extension.js new file mode 100644 index 0000000000..7f0d360705 --- /dev/null +++ b/crates/language-server/editors/vscode/out/extension.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.deactivate = exports.activate = void 0; +const vscode = require("vscode"); +const node_1 = require("vscode-languageclient/node"); +const path_1 = require("path"); +vscode.commands.registerCommand('fe-analyzer.helloWorld', () => { + vscode.window.showInformationMessage('Hello World from fe-language-server!'); +}); +let client; +async function activate(context) { + // todo: bundle binary with extension + const serverPath = (0, path_1.join)(__dirname, '..', '..', '..', '..', '..', 'target', 'debug', 'fe-language-server'); + const serverExecutable = { + command: serverPath, + }; + const serverOptions = { + run: serverExecutable, + debug: serverExecutable, + }; + const clientOptions = { + documentSelector: [{ scheme: "file", language: "Fe" }], + }; + client = new node_1.LanguageClient("fe-language-server", "Fe Language Server", serverOptions, clientOptions); + // Start the client. This will also launch the server + client.start(); +} +exports.activate = activate; +function deactivate() { + if (!client) { + return undefined; + } + return client.stop(); +} +exports.deactivate = deactivate; +//# sourceMappingURL=extension.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/extension.js.map b/crates/language-server/editors/vscode/out/extension.js.map new file mode 100644 index 0000000000..498351c5ec --- /dev/null +++ b/crates/language-server/editors/vscode/out/extension.js.map @@ -0,0 +1 @@ +{"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;AAAA,iCAAiC;AACjC,qDAKoC;AAEpC,+BAA4B;AAE5B,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,wBAAwB,EAAE,GAAG,EAAE;IAC3D,MAAM,CAAC,MAAM,CAAC,sBAAsB,CAAC,sCAAsC,CAAC,CAAC;AACjF,CAAC,CAAC,CAAC;AAEH,IAAI,MAAsB,CAAC;AAEpB,KAAK,UAAU,QAAQ,CAC5B,OAAgC;IAEhC,uCAAuC;IACvC,MAAM,UAAU,GAAG,IAAA,WAAI,EAAC,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,oBAAoB,CAAC,CAAA;IAEzG,MAAM,gBAAgB,GAAe;QACnC,OAAO,EAAE,UAAU;KACpB,CAAC;IAEF,MAAM,aAAa,GAAkB;QACnC,GAAG,EAAE,gBAAgB;QACrB,KAAK,EAAE,gBAAgB;KACxB,CAAC;IAEF,MAAM,aAAa,GAA0B;QAC3C,gBAAgB,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC;KACvD,CAAC;IAEF,MAAM,GAAG,IAAI,qBAAc,CACzB,oBAAoB,EACpB,oBAAoB,EACpB,aAAa,EACb,aAAa,CACd,CAAC;IAEF,qDAAqD;IACrD,MAAM,CAAC,KAAK,EAAE,CAAC;AACjB,CAAC;AA5BD,4BA4BC;AAED,SAAgB,UAAU;IACxB,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;AACvB,CAAC;AALD,gCAKC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/runTest.js b/crates/language-server/editors/vscode/out/test/runTest.js new file mode 100644 index 0000000000..783f8f39fb --- /dev/null +++ b/crates/language-server/editors/vscode/out/test/runTest.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const test_electron_1 = require("@vscode/test-electron"); +async function main() { + try { + // The folder containing 
the Extension Manifest package.json + // Passed to `--extensionDevelopmentPath` + const extensionDevelopmentPath = path.resolve(__dirname, '../../'); + // The path to test runner + // Passed to --extensionTestsPath + const extensionTestsPath = path.resolve(__dirname, './suite/index'); + // Download VS Code, unzip it and run the integration test + await (0, test_electron_1.runTests)({ extensionDevelopmentPath, extensionTestsPath }); + } + catch (err) { + console.error('Failed to run tests', err); + process.exit(1); + } +} +main(); +//# sourceMappingURL=runTest.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/runTest.js.map b/crates/language-server/editors/vscode/out/test/runTest.js.map new file mode 100644 index 0000000000..a813f5fbf5 --- /dev/null +++ b/crates/language-server/editors/vscode/out/test/runTest.js.map @@ -0,0 +1 @@ +{"version":3,"file":"runTest.js","sourceRoot":"","sources":["../../src/test/runTest.ts"],"names":[],"mappings":";;AAAA,6BAA6B;AAE7B,yDAAiD;AAEjD,KAAK,UAAU,IAAI;IAClB,IAAI;QACH,4DAA4D;QAC5D,yCAAyC;QACzC,MAAM,wBAAwB,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;QAEnE,0BAA0B;QAC1B,iCAAiC;QACjC,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;QAEpE,0DAA0D;QAC1D,MAAM,IAAA,wBAAQ,EAAC,EAAE,wBAAwB,EAAE,kBAAkB,EAAE,CAAC,CAAC;KACjE;IAAC,OAAO,GAAG,EAAE;QACb,OAAO,CAAC,KAAK,CAAC,qBAAqB,EAAE,GAAG,CAAC,CAAC;QAC1C,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KAChB;AACF,CAAC;AAED,IAAI,EAAE,CAAC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/extension.test.js b/crates/language-server/editors/vscode/out/test/suite/extension.test.js new file mode 100644 index 0000000000..d88089ebf7 --- /dev/null +++ b/crates/language-server/editors/vscode/out/test/suite/extension.test.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert = require("assert"); +// You can import and use all API from the 'vscode' module +// as well as import your extension to test it +const vscode = require("vscode"); +// import * as myExtension from '../../extension'; +suite('Extension Test Suite', () => { + vscode.window.showInformationMessage('Start all tests.'); + test('Sample test', () => { + assert.strictEqual(-1, [1, 2, 3].indexOf(5)); + assert.strictEqual(-1, [1, 2, 3].indexOf(0)); + }); +}); +//# sourceMappingURL=extension.test.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/extension.test.js.map b/crates/language-server/editors/vscode/out/test/suite/extension.test.js.map new file mode 100644 index 0000000000..26e2c09ba4 --- /dev/null +++ b/crates/language-server/editors/vscode/out/test/suite/extension.test.js.map @@ -0,0 +1 @@ +{"version":3,"file":"extension.test.js","sourceRoot":"","sources":["../../../src/test/suite/extension.test.ts"],"names":[],"mappings":";;AAAA,iCAAiC;AAEjC,0DAA0D;AAC1D,8CAA8C;AAC9C,iCAAiC;AACjC,kDAAkD;AAElD,KAAK,CAAC,sBAAsB,EAAE,GAAG,EAAE;IAClC,MAAM,CAAC,MAAM,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC;IAEzD,IAAI,CAAC,aAAa,EAAE,GAAG,EAAE;QACxB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9C,CAAC,CAAC,CAAC;AACJ,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/index.js b/crates/language-server/editors/vscode/out/test/suite/index.js new file mode 100644 index 
0000000000..f0da16d94a --- /dev/null +++ b/crates/language-server/editors/vscode/out/test/suite/index.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.run = void 0; +const path = require("path"); +const Mocha = require("mocha"); +const glob = require("glob"); +function run() { + // Create the mocha test + const mocha = new Mocha({ + ui: 'tdd', + color: true + }); + const testsRoot = path.resolve(__dirname, '..'); + return new Promise((c, e) => { + glob('**/**.test.js', { cwd: testsRoot }, (err, files) => { + if (err) { + return e(err); + } + // Add files to the test suite + files.forEach(f => mocha.addFile(path.resolve(testsRoot, f))); + try { + // Run the mocha test + mocha.run(failures => { + if (failures > 0) { + e(new Error(`${failures} tests failed.`)); + } + else { + c(); + } + }); + } + catch (err) { + console.error(err); + e(err); + } + }); + }); +} +exports.run = run; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/index.js.map b/crates/language-server/editors/vscode/out/test/suite/index.js.map new file mode 100644 index 0000000000..dfd0c62e4c --- /dev/null +++ b/crates/language-server/editors/vscode/out/test/suite/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/test/suite/index.ts"],"names":[],"mappings":";;;AAAA,6BAA6B;AAC7B,+BAA+B;AAC/B,6BAA6B;AAE7B,SAAgB,GAAG;IAClB,wBAAwB;IACxB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC;QACvB,EAAE,EAAE,KAAK;QACT,KAAK,EAAE,IAAI;KACX,CAAC,CAAC;IAEH,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;IAEhD,OAAO,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QAC3B,IAAI,CAAC,eAAe,EAAE,EAAE,GAAG,EAAE,SAAS,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;YACxD,IAAI,GAAG,EAAE;gBACR,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;YAED,8BAA8B;YAC9B,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;YAE9D,IAAI;gBACH,qBAAqB;gBACrB,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;oBACpB,IAAI,QAAQ,GAAG,CAAC,EAAE;wBACjB,CAAC,CAAC,IAAI,KAAK,CAAC,GAAG,QAAQ,gBAAgB,CAAC,CAAC,CAAC;qBAC1C;yBAAM;wBACN,CAAC,EAAE,CAAC;qBACJ;gBACF,CAAC,CAAC,CAAC;aACH;YAAC,OAAO,GAAG,EAAE;gBACb,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;gBACnB,CAAC,CAAC,GAAG,CAAC,CAAC;aACP;QACF,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC,CAAC;AACJ,CAAC;AAjCD,kBAiCC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/package-lock.json b/crates/language-server/editors/vscode/package-lock.json new file mode 100644 index 0000000000..3a8d23e9a0 --- /dev/null +++ b/crates/language-server/editors/vscode/package-lock.json @@ -0,0 +1,4141 @@ +{ + "name": "fe-analyzer", + "version": "0.0.1", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "fe-analyzer", + "version": "0.0.1", + "dependencies": { + "vscode-languageclient": "^8.1.0", + "vscode-languageserver": "^8.1.0" + }, + "devDependencies": { + "@types/glob": "^8.1.0", + "@types/mocha": "^10.0.1", + "@types/node": "16.x", + "@types/vscode": "^1.78.0", + "@typescript-eslint/eslint-plugin": "^5.59.1", + "@typescript-eslint/parser": "^5.59.1", + "@vscode/test-electron": "^2.3.0", + "eslint": "^8.39.0", + "glob": "^8.1.0", + "mocha": "^10.2.0", + "typescript": "^5.0.4" + }, + "engines": { + "vscode": "^1.78.0" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": 
"sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.1.tgz", + "integrity": "sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.3.tgz", + "integrity": "sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.5.2", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "8.40.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.40.0.tgz", + "integrity": "sha512-ElyB54bJIhXQYVKjDSvCkPO1iU1tSAeVQJbllWJq1XQSmmA4dgFk8CbiBGpiOPxleE48vDogxCtmMYku4HSVLA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", + "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + 
"node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==", + "dev": true, + "dependencies": { + "@types/minimatch": "^5.1.2", + "@types/node": "*" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true + }, + "node_modules/@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + "node_modules/@types/mocha": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.1.tgz", + "integrity": "sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==", + "dev": true + }, + "node_modules/@types/node": { + "version": "16.18.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.30.tgz", + "integrity": "sha512-Kmp/wBZk19Dn7uRiol8kF8agnf8m0+TU9qIwyfPmXglVxMlmiIz0VQSMw5oFgwhmD2aKTlfBIO5FtsVj3y7hKQ==", + "dev": true + }, + "node_modules/@types/semver": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", + "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==", + "dev": true + }, + "node_modules/@types/vscode": { + "version": "1.78.0", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.78.0.tgz", + "integrity": "sha512-LJZIJpPvKJ0HVQDqfOy6W4sNKUBBwyDu1Bs8chHBZOe9MNuKTJtidgZ2bqjhmmWpUb0TIIqv47BFUcVmAsgaVA==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.6.tgz", + "integrity": "sha512-sXtOgJNEuRU5RLwPUb1jxtToZbgvq3M6FPpY4QENxoOggK+UpTxUBpj6tD8+Qh2g46Pi9We87E+eHnUw8YcGsw==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.4.0", + "@typescript-eslint/scope-manager": "5.59.6", + "@typescript-eslint/type-utils": "5.59.6", + "@typescript-eslint/utils": "5.59.6", + "debug": "^4.3.4", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "natural-compare-lite": "^1.4.0", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": 
"^5.0.0", + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.6.tgz", + "integrity": "sha512-7pCa6al03Pv1yf/dUg/s1pXz/yGMUBAw5EeWqNTFiSueKvRNonze3hma3lhdsOrQcaOXhbk5gKu2Fludiho9VA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "5.59.6", + "@typescript-eslint/types": "5.59.6", + "@typescript-eslint/typescript-estree": "5.59.6", + "debug": "^4.3.4" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.6.tgz", + "integrity": "sha512-gLbY3Le9Dxcb8KdpF0+SJr6EQ+hFGYFl6tVY8VxLPFDfUZC7BHFw+Vq7bM5lE9DwWPfx4vMWWTLGXgpc0mAYyQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.59.6", + "@typescript-eslint/visitor-keys": "5.59.6" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.6.tgz", + "integrity": "sha512-A4tms2Mp5yNvLDlySF+kAThV9VTBPCvGf0Rp8nl/eoDX9Okun8byTKoj3fJ52IJitjWOk0fKPNQhXEB++eNozQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "5.59.6", + "@typescript-eslint/utils": "5.59.6", + "debug": "^4.3.4", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.6.tgz", + "integrity": "sha512-tH5lBXZI7T2MOUgOWFdVNUILsI02shyQvfzG9EJkoONWugCG77NDDa1EeDGw7oJ5IvsTAAGVV8I3Tk2PNu9QfA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.6.tgz", + "integrity": "sha512-vW6JP3lMAs/Tq4KjdI/RiHaaJSO7IUsbkz17it/Rl9Q+WkQ77EOuOnlbaU8kKfVIOJxMhnRiBG+olE7f3M16DA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.59.6", + "@typescript-eslint/visitor-keys": "5.59.6", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + 
"node_modules/@typescript-eslint/utils": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.6.tgz", + "integrity": "sha512-vzaaD6EXbTS29cVH0JjXBdzMt6VBlv+hE31XktDRMX1j3462wZCJa7VzO2AxXEXcIl8GQqZPcOPuW/Z1tZVogg==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@types/json-schema": "^7.0.9", + "@types/semver": "^7.3.12", + "@typescript-eslint/scope-manager": "5.59.6", + "@typescript-eslint/types": "5.59.6", + "@typescript-eslint/typescript-estree": "5.59.6", + "eslint-scope": "^5.1.1", + "semver": "^7.3.7" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.6.tgz", + "integrity": "sha512-zEfbFLzB9ETcEJ4HZEEsCR9HHeNku5/Qw1jSS5McYJv5BR+ftYXwFFAH5Al+xkGaZEqowMwl7uoJjQb1YSPF8Q==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.59.6", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@vscode/test-electron": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.2.tgz", + "integrity": "sha512-CRfQIs5Wi5Ok5SUCC3PTvRRXa74LD43cSXHC8EuNlmHHEPaJa/AGrv76brcA1hVSxrdja9tiYwp95Lq8kwY0tw==", + "dev": true, + "dependencies": { + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "jszip": "^3.10.1", + "semver": "^7.3.8" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/acorn": { + "version": "8.8.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", + "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": 
"sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": 
"sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": 
"7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.40.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.40.0.tgz", + "integrity": 
"sha512-bvR+TsP9EHL3TqNtj9sCNJVAFK3fBN8Q7g5waghxyRsPLIMwL73XSKnZFK0hk/O2ANC+iAoq6PWMQ+IfBAJIiQ==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.4.0", + "@eslint/eslintrc": "^2.0.3", + "@eslint/js": "8.40.0", + "@humanwhocodes/config-array": "^0.11.8", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.0", + "eslint-visitor-keys": "^3.4.1", + "espree": "^9.5.2", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", + "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/eslint-scope": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", + "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/espree": { + "version": "9.5.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.2.tgz", + "integrity": "sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw==", + "dev": true, + "dependencies": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esquery/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dev": true, + "dependencies": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", + "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", + "dev": true + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + 
"engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/globals": { + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ignore": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", + "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "dev": true + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-sdsl": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.4.0.tgz", + "integrity": "sha512-FfVSdx6pJ41Oa+CF7RDaFmTnCaFhua+SNYQX74riGOpl96x+2jQCqEfQ2bnXu/5DPCqlRuiqyvTJM0Qjz26IVg==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": 
"sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "dev": true, + "dependencies": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "dev": true, + "dependencies": { + "immediate": "~3.0.5" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + 
"dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "dependencies": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/mocha/node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mocha/node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mocha/node_modules/minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/minimatch/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/mocha/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/mocha/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/natural-compare-lite": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", + "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + 
"dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/punycode": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", + "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + 
"integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/semver": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", + "integrity": 
"sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "dependencies": { + "tslib": "^1.8.1" + }, + "engines": { + "node": ">= 6" + }, + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", + "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=12.20" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/vscode-jsonrpc": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.1.0.tgz", + "integrity": "sha512-6TDy/abTQk+zDGYazgbIPc+4JoXdwC8NHU9Pbn4UJP1fehUyZmM4RHp5IthX7A6L5KS30PRui+j+tbbMMMafdw==", + "engines": { + 
"node": ">=14.0.0" + } + }, + "node_modules/vscode-languageclient": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.1.0.tgz", + "integrity": "sha512-GL4QdbYUF/XxQlAsvYWZRV3V34kOkpRlvV60/72ghHfsYFnS/v2MANZ9P6sHmxFcZKOse8O+L9G7Czg0NUWing==", + "dependencies": { + "minimatch": "^5.1.0", + "semver": "^7.3.7", + "vscode-languageserver-protocol": "3.17.3" + }, + "engines": { + "vscode": "^1.67.0" + } + }, + "node_modules/vscode-languageclient/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/vscode-languageclient/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/vscode-languageserver": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-8.1.0.tgz", + "integrity": "sha512-eUt8f1z2N2IEUDBsKaNapkz7jl5QpskN2Y0G01T/ItMxBxw1fJwvtySGB9QMecatne8jFIWJGWI61dWjyTLQsw==", + "dependencies": { + "vscode-languageserver-protocol": "3.17.3" + }, + "bin": { + "installServerIntoExtension": "bin/installServerIntoExtension" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.3", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.3.tgz", + "integrity": "sha512-924/h0AqsMtA5yK22GgMtCYiMdCOtWTSGgUOkgEDX+wk2b0x4sAfLiO4NxBxqbiVtz7K7/1/RgVrVI0NClZwqA==", + "dependencies": { + "vscode-jsonrpc": "8.1.0", + "vscode-languageserver-types": "3.17.3" + } + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.3", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.3.tgz", + "integrity": "sha512-SYU4z1dL0PyIMd4Vj8YOqFvHu7Hz/enbWtpfnVbJHU4Nd1YNYx8u0ennumc6h48GQNeOLxmwySmnADouT/AuZA==" + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + 
"dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, + "dependencies": { + "@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^3.3.0" + } + }, + "@eslint-community/regexpp": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.1.tgz", + "integrity": "sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==", + "dev": true + }, + "@eslint/eslintrc": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.3.tgz", + "integrity": "sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ==", + "dev": true, + "requires": { + "ajv": 
"^6.12.4", + "debug": "^4.3.2", + "espree": "^9.5.2", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + } + }, + "@eslint/js": { + "version": "8.40.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.40.0.tgz", + "integrity": "sha512-ElyB54bJIhXQYVKjDSvCkPO1iU1tSAeVQJbllWJq1XQSmmA4dgFk8CbiBGpiOPxleE48vDogxCtmMYku4HSVLA==", + "dev": true + }, + "@humanwhocodes/config-array": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", + "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + } + }, + "@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true + }, + "@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "dev": true + }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true + }, + "@types/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==", + "dev": true, + "requires": { + "@types/minimatch": "^5.1.2", + "@types/node": "*" + } + }, + "@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true + }, + "@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + "@types/mocha": { + "version": "10.0.1", + 
"resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.1.tgz", + "integrity": "sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==", + "dev": true + }, + "@types/node": { + "version": "16.18.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.30.tgz", + "integrity": "sha512-Kmp/wBZk19Dn7uRiol8kF8agnf8m0+TU9qIwyfPmXglVxMlmiIz0VQSMw5oFgwhmD2aKTlfBIO5FtsVj3y7hKQ==", + "dev": true + }, + "@types/semver": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", + "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==", + "dev": true + }, + "@types/vscode": { + "version": "1.78.0", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.78.0.tgz", + "integrity": "sha512-LJZIJpPvKJ0HVQDqfOy6W4sNKUBBwyDu1Bs8chHBZOe9MNuKTJtidgZ2bqjhmmWpUb0TIIqv47BFUcVmAsgaVA==", + "dev": true + }, + "@typescript-eslint/eslint-plugin": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.6.tgz", + "integrity": "sha512-sXtOgJNEuRU5RLwPUb1jxtToZbgvq3M6FPpY4QENxoOggK+UpTxUBpj6tD8+Qh2g46Pi9We87E+eHnUw8YcGsw==", + "dev": true, + "requires": { + "@eslint-community/regexpp": "^4.4.0", + "@typescript-eslint/scope-manager": "5.59.6", + "@typescript-eslint/type-utils": "5.59.6", + "@typescript-eslint/utils": "5.59.6", + "debug": "^4.3.4", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "natural-compare-lite": "^1.4.0", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/parser": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.6.tgz", + "integrity": "sha512-7pCa6al03Pv1yf/dUg/s1pXz/yGMUBAw5EeWqNTFiSueKvRNonze3hma3lhdsOrQcaOXhbk5gKu2Fludiho9VA==", + "dev": true, + "requires": { + "@typescript-eslint/scope-manager": "5.59.6", + "@typescript-eslint/types": "5.59.6", + "@typescript-eslint/typescript-estree": "5.59.6", + "debug": "^4.3.4" + } + }, + "@typescript-eslint/scope-manager": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.6.tgz", + "integrity": "sha512-gLbY3Le9Dxcb8KdpF0+SJr6EQ+hFGYFl6tVY8VxLPFDfUZC7BHFw+Vq7bM5lE9DwWPfx4vMWWTLGXgpc0mAYyQ==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.59.6", + "@typescript-eslint/visitor-keys": "5.59.6" + } + }, + "@typescript-eslint/type-utils": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.6.tgz", + "integrity": "sha512-A4tms2Mp5yNvLDlySF+kAThV9VTBPCvGf0Rp8nl/eoDX9Okun8byTKoj3fJ52IJitjWOk0fKPNQhXEB++eNozQ==", + "dev": true, + "requires": { + "@typescript-eslint/typescript-estree": "5.59.6", + "@typescript-eslint/utils": "5.59.6", + "debug": "^4.3.4", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/types": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.6.tgz", + "integrity": "sha512-tH5lBXZI7T2MOUgOWFdVNUILsI02shyQvfzG9EJkoONWugCG77NDDa1EeDGw7oJ5IvsTAAGVV8I3Tk2PNu9QfA==", + "dev": true + }, + "@typescript-eslint/typescript-estree": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.6.tgz", + "integrity": "sha512-vW6JP3lMAs/Tq4KjdI/RiHaaJSO7IUsbkz17it/Rl9Q+WkQ77EOuOnlbaU8kKfVIOJxMhnRiBG+olE7f3M16DA==", + "dev": true, + "requires": { + 
"@typescript-eslint/types": "5.59.6", + "@typescript-eslint/visitor-keys": "5.59.6", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/utils": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.6.tgz", + "integrity": "sha512-vzaaD6EXbTS29cVH0JjXBdzMt6VBlv+hE31XktDRMX1j3462wZCJa7VzO2AxXEXcIl8GQqZPcOPuW/Z1tZVogg==", + "dev": true, + "requires": { + "@eslint-community/eslint-utils": "^4.2.0", + "@types/json-schema": "^7.0.9", + "@types/semver": "^7.3.12", + "@typescript-eslint/scope-manager": "5.59.6", + "@typescript-eslint/types": "5.59.6", + "@typescript-eslint/typescript-estree": "5.59.6", + "eslint-scope": "^5.1.1", + "semver": "^7.3.7" + } + }, + "@typescript-eslint/visitor-keys": { + "version": "5.59.6", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.6.tgz", + "integrity": "sha512-zEfbFLzB9ETcEJ4HZEEsCR9HHeNku5/Qw1jSS5McYJv5BR+ftYXwFFAH5Al+xkGaZEqowMwl7uoJjQb1YSPF8Q==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.59.6", + "eslint-visitor-keys": "^3.3.0" + } + }, + "@vscode/test-electron": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.2.tgz", + "integrity": "sha512-CRfQIs5Wi5Ok5SUCC3PTvRRXa74LD43cSXHC8EuNlmHHEPaJa/AGrv76brcA1hVSxrdja9tiYwp95Lq8kwY0tw==", + "dev": true, + "requires": { + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "jszip": "^3.10.1", + "semver": "^7.3.8" + } + }, + "acorn": { + "version": "8.8.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", + "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==", + "dev": true + }, + "acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "requires": {} + }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "requires": { + "debug": "4" + } + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + 
"color-convert": "^2.0.1" + } + }, + "anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, + "camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + 
"dependencies": { + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + } + } + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true + }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true + }, + "dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "requires": { + "path-type": "^4.0.0" + } + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": 
"sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "eslint": { + "version": "8.40.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.40.0.tgz", + "integrity": "sha512-bvR+TsP9EHL3TqNtj9sCNJVAFK3fBN8Q7g5waghxyRsPLIMwL73XSKnZFK0hk/O2ANC+iAoq6PWMQ+IfBAJIiQ==", + "dev": true, + "requires": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.4.0", + "@eslint/eslintrc": "^2.0.3", + "@eslint/js": "8.40.0", + "@humanwhocodes/config-array": "^0.11.8", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.0", + "eslint-visitor-keys": "^3.4.1", + "espree": "^9.5.2", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + }, + "dependencies": { + "eslint-scope": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", + "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + } + }, + "eslint-visitor-keys": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", + "integrity": 
"sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", + "dev": true + }, + "espree": { + "version": "9.5.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.2.tgz", + "integrity": "sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw==", + "dev": true, + "requires": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + } + }, + "esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "requires": { + "estraverse": "^5.1.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "requires": { + "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + } + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "dependencies": { + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + } + } + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": 
"sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "dev": true, + "requires": { + "reusify": "^1.0.4" + } + }, + "file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "requires": { + "flat-cache": "^3.0.4" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, + "flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dev": true, + "requires": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + } + }, + "flatted": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", + "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + 
"requires": { + "balanced-match": "^1.0.0" + } + }, + "minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "requires": { + "is-glob": "^4.0.3" + } + }, + "globals": { + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } + }, + "globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "requires": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + } + }, + "grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "requires": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + } + }, + "https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "ignore": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", + "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", + "dev": true + }, + "immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "dev": true + }, + "import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "requires": { + "parent-module": 
"^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true + }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, + "is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "js-sdsl": { + "version": 
"4.4.0", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.4.0.tgz", + "integrity": "sha512-FfVSdx6pJ41Oa+CF7RDaFmTnCaFhua+SNYQX74riGOpl96x+2jQCqEfQ2bnXu/5DPCqlRuiqyvTJM0Qjz26IVg==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "dev": true, + "requires": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, + "levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + } + }, + "lie": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "dev": true, + "requires": { + "immediate": "~3.0.5" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true + }, + "micromatch": { + "version": "4.0.5", + "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "requires": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "requires": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "dependencies": { + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "dependencies": { + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + } + } + }, + "minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + } + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "nanoid": { + "version": "3.3.3", + "resolved": 
"https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true + }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "natural-compare-lite": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", + "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + } + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true + }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": 
"sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "punycode": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", + "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "dev": true + }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true + }, + "rimraf": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + }, + "dependencies": { + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } + } + }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "semver": { + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", + "integrity": "sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } 
+ }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "requires": { + "tslib": "^1.8.1" + } + }, + "type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "requires": { + "prelude-ls": "^1.2.1" + } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + }, + "typescript": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", + "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", + "dev": true + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "vscode-jsonrpc": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.1.0.tgz", + "integrity": 
"sha512-6TDy/abTQk+zDGYazgbIPc+4JoXdwC8NHU9Pbn4UJP1fehUyZmM4RHp5IthX7A6L5KS30PRui+j+tbbMMMafdw==" + }, + "vscode-languageclient": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.1.0.tgz", + "integrity": "sha512-GL4QdbYUF/XxQlAsvYWZRV3V34kOkpRlvV60/72ghHfsYFnS/v2MANZ9P6sHmxFcZKOse8O+L9G7Czg0NUWing==", + "requires": { + "minimatch": "^5.1.0", + "semver": "^7.3.7", + "vscode-languageserver-protocol": "3.17.3" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "requires": { + "balanced-match": "^1.0.0" + } + }, + "minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "vscode-languageserver": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-8.1.0.tgz", + "integrity": "sha512-eUt8f1z2N2IEUDBsKaNapkz7jl5QpskN2Y0G01T/ItMxBxw1fJwvtySGB9QMecatne8jFIWJGWI61dWjyTLQsw==", + "requires": { + "vscode-languageserver-protocol": "3.17.3" + } + }, + "vscode-languageserver-protocol": { + "version": "3.17.3", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.3.tgz", + "integrity": "sha512-924/h0AqsMtA5yK22GgMtCYiMdCOtWTSGgUOkgEDX+wk2b0x4sAfLiO4NxBxqbiVtz7K7/1/RgVrVI0NClZwqA==", + "requires": { + "vscode-jsonrpc": "8.1.0", + "vscode-languageserver-types": "3.17.3" + } + }, + "vscode-languageserver-types": { + "version": "3.17.3", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.3.tgz", + "integrity": "sha512-SYU4z1dL0PyIMd4Vj8YOqFvHu7Hz/enbWtpfnVbJHU4Nd1YNYx8u0ennumc6h48GQNeOLxmwySmnADouT/AuZA==" + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "y18n": { + "version": "5.0.8", + "resolved": 
"https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true + }, + "yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "requires": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + } + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } + } +} diff --git a/crates/language-server/editors/vscode/package.json b/crates/language-server/editors/vscode/package.json new file mode 100644 index 0000000000..aea5cee424 --- /dev/null +++ b/crates/language-server/editors/vscode/package.json @@ -0,0 +1,56 @@ +{ + "name": "fe-analyzer", + "displayName": "fe-analyzer", + "description": "Support for the Fe language.", + "version": "0.0.1", + "engines": { + "vscode": "^1.78.0" + }, + "categories": [ + "Other" + ], + "main": "./out/extension.js", + "activationEvents": [ + "onLanguage:fe" + ], + "contributes": { + "commands": [ + { + "command": "fe-analyzer.helloWorld", + "title": "Hello World" + } + ], + "languages": [ + { + "id": "fe", + "extensions": [ + ".fe" + ] + } + ] + }, + "scripts": { + "vscode:prepublish": "npm run compile", + "compile": "tsc -p ./", + "watch": "tsc -watch -p ./", + "pretest": "npm run compile && npm run lint", + "lint": "eslint src --ext ts", + "test": "node ./out/test/runTest.js" + }, + "devDependencies": { + "@types/glob": "^8.1.0", + "@types/mocha": "^10.0.1", + "@types/node": "16.x", + "@types/vscode": "^1.78.0", + "@typescript-eslint/eslint-plugin": "^5.59.1", + "@typescript-eslint/parser": "^5.59.1", + "@vscode/test-electron": "^2.3.0", + "eslint": "^8.39.0", + "glob": "^8.1.0", + "mocha": "^10.2.0", + "typescript": "^5.0.4" + }, + "dependencies": { + "vscode-languageclient": "^8.1.0" + } +} diff --git a/crates/language-server/editors/vscode/src/extension.ts b/crates/language-server/editors/vscode/src/extension.ts new file mode 100644 index 0000000000..8c8afdf3c9 --- /dev/null +++ b/crates/language-server/editors/vscode/src/extension.ts @@ -0,0 +1,52 @@ +import * as vscode from "vscode"; +import { + Executable, + LanguageClient, + LanguageClientOptions, + ServerOptions, +} from 
"vscode-languageclient/node"; + +import { join } from 'path'; + +vscode.commands.registerCommand('fe-analyzer.helloWorld', () => { + vscode.window.showInformationMessage('Hello World from fe-language-server!'); +}); + +let client: LanguageClient; + +export async function activate( + context: vscode.ExtensionContext +): Promise { + // todo: bundle binary with extension + const serverPath = join(__dirname, '..', '..', '..', '..', '..', 'target', 'debug', 'fe-language-server') + + const serverExecutable: Executable = { + command: serverPath, + }; + + const serverOptions: ServerOptions = { + run: serverExecutable, + debug: serverExecutable, + }; + + const clientOptions: LanguageClientOptions = { + documentSelector: [{ scheme: "file", language: "Fe" }], + }; + + client = new LanguageClient( + "fe-language-server", + "Fe Language Server", + serverOptions, + clientOptions + ); + + // Start the client. This will also launch the server + client.start(); +} + +export function deactivate(): Thenable | undefined { + if (!client) { + return undefined; + } + return client.stop(); +} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/src/test/runTest.ts b/crates/language-server/editors/vscode/src/test/runTest.ts new file mode 100644 index 0000000000..93a4441de8 --- /dev/null +++ b/crates/language-server/editors/vscode/src/test/runTest.ts @@ -0,0 +1,23 @@ +import * as path from 'path'; + +import { runTests } from '@vscode/test-electron'; + +async function main() { + try { + // The folder containing the Extension Manifest package.json + // Passed to `--extensionDevelopmentPath` + const extensionDevelopmentPath = path.resolve(__dirname, '../../'); + + // The path to test runner + // Passed to --extensionTestsPath + const extensionTestsPath = path.resolve(__dirname, './suite/index'); + + // Download VS Code, unzip it and run the integration test + await runTests({ extensionDevelopmentPath, extensionTestsPath }); + } catch (err) { + console.error('Failed to run tests', err); + process.exit(1); + } +} + +main(); diff --git a/crates/language-server/editors/vscode/src/test/suite/extension.test.ts b/crates/language-server/editors/vscode/src/test/suite/extension.test.ts new file mode 100644 index 0000000000..4ca0ab4198 --- /dev/null +++ b/crates/language-server/editors/vscode/src/test/suite/extension.test.ts @@ -0,0 +1,15 @@ +import * as assert from 'assert'; + +// You can import and use all API from the 'vscode' module +// as well as import your extension to test it +import * as vscode from 'vscode'; +// import * as myExtension from '../../extension'; + +suite('Extension Test Suite', () => { + vscode.window.showInformationMessage('Start all tests.'); + + test('Sample test', () => { + assert.strictEqual(-1, [1, 2, 3].indexOf(5)); + assert.strictEqual(-1, [1, 2, 3].indexOf(0)); + }); +}); diff --git a/crates/language-server/editors/vscode/src/test/suite/index.ts b/crates/language-server/editors/vscode/src/test/suite/index.ts new file mode 100644 index 0000000000..7029e38ed3 --- /dev/null +++ b/crates/language-server/editors/vscode/src/test/suite/index.ts @@ -0,0 +1,38 @@ +import * as path from 'path'; +import * as Mocha from 'mocha'; +import * as glob from 'glob'; + +export function run(): Promise { + // Create the mocha test + const mocha = new Mocha({ + ui: 'tdd', + color: true + }); + + const testsRoot = path.resolve(__dirname, '..'); + + return new Promise((c, e) => { + glob('**/**.test.js', { cwd: testsRoot }, (err, files) => { + if (err) { + return e(err); + } + + // Add files to 
the test suite + files.forEach(f => mocha.addFile(path.resolve(testsRoot, f))); + + try { + // Run the mocha test + mocha.run(failures => { + if (failures > 0) { + e(new Error(`${failures} tests failed.`)); + } else { + c(); + } + }); + } catch (err) { + console.error(err); + e(err); + } + }); + }); +} diff --git a/crates/language-server/editors/vscode/tsconfig.json b/crates/language-server/editors/vscode/tsconfig.json new file mode 100644 index 0000000000..315af7ec73 --- /dev/null +++ b/crates/language-server/editors/vscode/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "ES2020", + "outDir": "out", + "lib": [ + "ES2020" + ], + "sourceMap": true, + "rootDir": "src", + "strict": true /* enable all strict type-checking options */ + /* Additional Checks */ + // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ + // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ + // "noUnusedParameters": true, /* Report errors on unused parameters. */ + } +} diff --git a/crates/language-server/editors/vscode/vsc-extension-quickstart.md b/crates/language-server/editors/vscode/vsc-extension-quickstart.md new file mode 100644 index 0000000000..04a57fc6a3 --- /dev/null +++ b/crates/language-server/editors/vscode/vsc-extension-quickstart.md @@ -0,0 +1,42 @@ +# Welcome to your VS Code Extension + +## What's in the folder + +* This folder contains all of the files necessary for your extension. +* `package.json` - this is the manifest file in which you declare your extension and command. + * The sample plugin registers a command and defines its title and command name. With this information VS Code can show the command in the command palette. It doesn’t yet need to load the plugin. +* `src/extension.ts` - this is the main file where you will provide the implementation of your command. + * The file exports one function, `activate`, which is called the very first time your extension is activated (in this case by executing the command). Inside the `activate` function we call `registerCommand`. + * We pass the function containing the implementation of the command as the second parameter to `registerCommand`. + +## Get up and running straight away + +* Press `F5` to open a new window with your extension loaded. +* Run your command from the command palette by pressing (`Ctrl+Shift+P` or `Cmd+Shift+P` on Mac) and typing `Hello World`. +* Set breakpoints in your code inside `src/extension.ts` to debug your extension. +* Find output from your extension in the debug console. + +## Make changes + +* You can relaunch the extension from the debug toolbar after changing code in `src/extension.ts`. +* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes. + +## Explore the API + +* You can open the full set of our API when you open the file `node_modules/@types/vscode/index.d.ts`. + +## Run tests + +* Open the debug viewlet (`Ctrl+Shift+D` or `Cmd+Shift+D` on Mac) and from the launch configuration dropdown pick `Extension Tests`. +* Press `F5` to run the tests in a new window with your extension loaded. +* See the output of the test result in the debug console. +* Make changes to `src/test/suite/extension.test.ts` or create new test files inside the `test/suite` folder. + * The provided test runner will only consider files matching the name pattern `**.test.ts`. 
+ * You can create folders inside the `test` folder to structure your tests any way you want. + +## Go further + +* [Follow UX guidelines](https://code.visualstudio.com/api/ux-guidelines/overview) to create extensions that seamlessly integrate with VS Code's native interface and patterns. + * Reduce the extension size and improve the startup time by [bundling your extension](https://code.visualstudio.com/api/working-with-extensions/bundling-extension). + * [Publish your extension](https://code.visualstudio.com/api/working-with-extensions/publishing-extension) on the VS Code extension marketplace. + * Automate builds by setting up [Continuous Integration](https://code.visualstudio.com/api/working-with-extensions/continuous-integration). From 1e8feda1dbf2b97ea411c61c67cec08b239bfb2e Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 15 May 2023 18:20:02 -0500 Subject: [PATCH 155/678] LSP client vscode extension debug workspace --- crates/language-server/editors/vscode/out/extension.js | 2 +- crates/language-server/editors/vscode/out/extension.js.map | 2 +- crates/language-server/editors/vscode/src/extension.ts | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/language-server/editors/vscode/out/extension.js b/crates/language-server/editors/vscode/out/extension.js index 7f0d360705..914db25f6e 100644 --- a/crates/language-server/editors/vscode/out/extension.js +++ b/crates/language-server/editors/vscode/out/extension.js @@ -5,7 +5,7 @@ const vscode = require("vscode"); const node_1 = require("vscode-languageclient/node"); const path_1 = require("path"); vscode.commands.registerCommand('fe-analyzer.helloWorld', () => { - vscode.window.showInformationMessage('Hello World from fe-language-server!'); + vscode.window.showInformationMessage('Hello World from fe-analyzer extension!'); }); let client; async function activate(context) { diff --git a/crates/language-server/editors/vscode/out/extension.js.map b/crates/language-server/editors/vscode/out/extension.js.map index 498351c5ec..92b69f1dee 100644 --- a/crates/language-server/editors/vscode/out/extension.js.map +++ b/crates/language-server/editors/vscode/out/extension.js.map @@ -1 +1 @@ -{"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;AAAA,iCAAiC;AACjC,qDAKoC;AAEpC,+BAA4B;AAE5B,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,wBAAwB,EAAE,GAAG,EAAE;IAC3D,MAAM,CAAC,MAAM,CAAC,sBAAsB,CAAC,sCAAsC,CAAC,CAAC;AACjF,CAAC,CAAC,CAAC;AAEH,IAAI,MAAsB,CAAC;AAEpB,KAAK,UAAU,QAAQ,CAC5B,OAAgC;IAEhC,uCAAuC;IACvC,MAAM,UAAU,GAAG,IAAA,WAAI,EAAC,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,oBAAoB,CAAC,CAAA;IAEzG,MAAM,gBAAgB,GAAe;QACnC,OAAO,EAAE,UAAU;KACpB,CAAC;IAEF,MAAM,aAAa,GAAkB;QACnC,GAAG,EAAE,gBAAgB;QACrB,KAAK,EAAE,gBAAgB;KACxB,CAAC;IAEF,MAAM,aAAa,GAA0B;QAC3C,gBAAgB,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC;KACvD,CAAC;IAEF,MAAM,GAAG,IAAI,qBAAc,CACzB,oBAAoB,EACpB,oBAAoB,EACpB,aAAa,EACb,aAAa,CACd,CAAC;IAEF,qDAAqD;IACrD,MAAM,CAAC,KAAK,EAAE,CAAC;AACjB,CAAC;AA5BD,4BA4BC;AAED,SAAgB,UAAU;IACxB,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;AACvB,CAAC;AALD,gCAKC"} \ No newline at end of file 
+{"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;AAAA,iCAAiC;AACjC,qDAKoC;AAEpC,+BAA4B;AAE5B,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,wBAAwB,EAAE,GAAG,EAAE;IAC3D,MAAM,CAAC,MAAM,CAAC,sBAAsB,CAAC,yCAAyC,CAAC,CAAC;AACpF,CAAC,CAAC,CAAC;AAEH,IAAI,MAAsB,CAAC;AAEpB,KAAK,UAAU,QAAQ,CAC5B,OAAgC;IAEhC,uCAAuC;IACvC,MAAM,UAAU,GAAG,IAAA,WAAI,EAAC,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,oBAAoB,CAAC,CAAC;IAE1G,MAAM,gBAAgB,GAAe;QACnC,OAAO,EAAE,UAAU;KACpB,CAAC;IAEF,MAAM,aAAa,GAAkB;QACnC,GAAG,EAAE,gBAAgB;QACrB,KAAK,EAAE,gBAAgB;KACxB,CAAC;IAEF,MAAM,aAAa,GAA0B;QAC3C,gBAAgB,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC;KACvD,CAAC;IAEF,MAAM,GAAG,IAAI,qBAAc,CACzB,oBAAoB,EACpB,oBAAoB,EACpB,aAAa,EACb,aAAa,CACd,CAAC;IAEF,qDAAqD;IACrD,MAAM,CAAC,KAAK,EAAE,CAAC;AACjB,CAAC;AA5BD,4BA4BC;AAED,SAAgB,UAAU;IACxB,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;AACvB,CAAC;AALD,gCAKC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/src/extension.ts b/crates/language-server/editors/vscode/src/extension.ts index 8c8afdf3c9..3e584ce845 100644 --- a/crates/language-server/editors/vscode/src/extension.ts +++ b/crates/language-server/editors/vscode/src/extension.ts @@ -9,7 +9,7 @@ import { import { join } from 'path'; vscode.commands.registerCommand('fe-analyzer.helloWorld', () => { - vscode.window.showInformationMessage('Hello World from fe-language-server!'); + vscode.window.showInformationMessage('Hello World from fe-analyzer extension!'); }); let client: LanguageClient; @@ -18,7 +18,7 @@ export async function activate( context: vscode.ExtensionContext ): Promise { // todo: bundle binary with extension - const serverPath = join(__dirname, '..', '..', '..', '..', '..', 'target', 'debug', 'fe-language-server') + const serverPath = join(__dirname, '..', '..', '..', '..', '..', 'target', 'debug', 'fe-language-server'); const serverExecutable: Executable = { command: serverPath, From 4c612717a0d3707410086b08aff218a31c816cdc Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 15 May 2023 19:20:16 -0500 Subject: [PATCH 156/678] attempt to send hover info --- .../language-server/src/handlers/request.rs | 0 crates/language-server/src/main.rs | 7 +++++ crates/language-server/src/server.rs | 19 ++++++++------ crates/language-server/src/state.rs | 26 +++++++++++++++++-- 4 files changed, 42 insertions(+), 10 deletions(-) create mode 100644 crates/language-server/src/handlers/request.rs diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 8d82f94f2a..3e5fa0d79c 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,8 +1,15 @@ mod server; mod state; +mod handlers { + // pub(crate) mod notification; + pub(crate) mod request; +} + use server::run_server; fn main() { let _ = run_server(); + // log "hello world" to the console + println!("Hello, world!"); } diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 1cdaa7432f..54f0452180 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,18 +1,22 @@ +use super::state::ServerState; use anyhow::Result; use lsp_server::Connection; -use lsp_types::ServerCapabilities; -use super::state::ServerState; +use 
lsp_types::{ServerCapabilities, HoverProviderCapability}; fn server_capabilities() -> ServerCapabilities { ServerCapabilities { + hover_provider: Some(HoverProviderCapability::Simple(true)), ..Default::default() } } pub fn run_server() -> Result<()> { let (connection, io_threads) = Connection::stdio(); - + let (request_id, initialize_params) = connection.initialize_start()?; + // + // todo: actually use initialization params + let capabilities = server_capabilities(); let initialize_result = lsp_types::InitializeResult { @@ -22,13 +26,12 @@ pub fn run_server() -> Result<()> { version: Some(String::from(env!("CARGO_PKG_VERSION"))), }), }; - + let initialize_result = serde_json::to_value(initialize_result).unwrap(); connection.initialize_finish(request_id, initialize_result)?; io_threads.join().unwrap(); - - ServerState::new(connection.sender) - .run(connection.receiver) -} \ No newline at end of file + + ServerState::new(connection.sender).run(connection.receiver) +} diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 902ba87ac5..7513894dcd 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,7 +1,8 @@ use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; -use lsp_server::Message; -use lsp_types::{notification::Notification}; +use lsp_server::{Message, Response}; +use lsp_types::{notification::Notification, request::Request}; +use serde::Deserialize; pub struct ServerState { sender: Sender, @@ -34,6 +35,27 @@ impl ServerState { } fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { + // handle hover request + if let lsp_server::Message::Request(req) = msg { + if req.method == lsp_types::request::HoverRequest::METHOD { + let params = lsp_types::HoverParams::deserialize(req.params)?; + // for now let's just return "hi" + + let result = lsp_types::Hover { + contents: lsp_types::HoverContents::Scalar(lsp_types::MarkedString::String(String::from("hi"))), + range: None, + }; + + let response_message = lsp_server::Message::Response(Response { + id: req.id, + result: Some(serde_json::to_value(result)?), + error: None, + }); + + self.sender.send(response_message)?; + } + } + Ok(()) } } From 010bcea10daf4e27e0a8b7190d4daa27fab44b54 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 17 May 2023 08:20:14 -0500 Subject: [PATCH 157/678] language server/client test notifications --- .../language-server/editors/vscode/.gitignore | 3 +- .../editors/vscode/out/extension.js | 36 ------------ .../editors/vscode/out/extension.js.map | 1 - .../editors/vscode/out/test/runTest.js | 22 -------- .../editors/vscode/out/test/runTest.js.map | 1 - .../vscode/out/test/suite/extension.test.js | 15 ----- .../out/test/suite/extension.test.js.map | 1 - .../editors/vscode/out/test/suite/index.js | 40 -------------- .../vscode/out/test/suite/index.js.map | 1 - .../editors/vscode/src/extension.ts | 10 +++- .../language-server/editors/vscode/src/log.ts | 55 +++++++++++++++++++ crates/language-server/src/server.rs | 23 +++++++- crates/language-server/src/state.rs | 5 +- 13 files changed, 91 insertions(+), 122 deletions(-) delete mode 100644 crates/language-server/editors/vscode/out/extension.js delete mode 100644 crates/language-server/editors/vscode/out/extension.js.map delete mode 100644 crates/language-server/editors/vscode/out/test/runTest.js delete mode 100644 crates/language-server/editors/vscode/out/test/runTest.js.map delete mode 100644 crates/language-server/editors/vscode/out/test/suite/extension.test.js delete 
mode 100644 crates/language-server/editors/vscode/out/test/suite/extension.test.js.map delete mode 100644 crates/language-server/editors/vscode/out/test/suite/index.js delete mode 100644 crates/language-server/editors/vscode/out/test/suite/index.js.map create mode 100644 crates/language-server/editors/vscode/src/log.ts diff --git a/crates/language-server/editors/vscode/.gitignore b/crates/language-server/editors/vscode/.gitignore index 434f2389b0..beebc08577 100644 --- a/crates/language-server/editors/vscode/.gitignore +++ b/crates/language-server/editors/vscode/.gitignore @@ -6,9 +6,10 @@ logs # dist *.tsbuildinfo node_modules/ +out/ # Optional npm cache directory .npm # Environment variables -.env \ No newline at end of file +.env diff --git a/crates/language-server/editors/vscode/out/extension.js b/crates/language-server/editors/vscode/out/extension.js deleted file mode 100644 index 914db25f6e..0000000000 --- a/crates/language-server/editors/vscode/out/extension.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.deactivate = exports.activate = void 0; -const vscode = require("vscode"); -const node_1 = require("vscode-languageclient/node"); -const path_1 = require("path"); -vscode.commands.registerCommand('fe-analyzer.helloWorld', () => { - vscode.window.showInformationMessage('Hello World from fe-analyzer extension!'); -}); -let client; -async function activate(context) { - // todo: bundle binary with extension - const serverPath = (0, path_1.join)(__dirname, '..', '..', '..', '..', '..', 'target', 'debug', 'fe-language-server'); - const serverExecutable = { - command: serverPath, - }; - const serverOptions = { - run: serverExecutable, - debug: serverExecutable, - }; - const clientOptions = { - documentSelector: [{ scheme: "file", language: "Fe" }], - }; - client = new node_1.LanguageClient("fe-language-server", "Fe Language Server", serverOptions, clientOptions); - // Start the client. 
This will also launch the server - client.start(); -} -exports.activate = activate; -function deactivate() { - if (!client) { - return undefined; - } - return client.stop(); -} -exports.deactivate = deactivate; -//# sourceMappingURL=extension.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/extension.js.map b/crates/language-server/editors/vscode/out/extension.js.map deleted file mode 100644 index 92b69f1dee..0000000000 --- a/crates/language-server/editors/vscode/out/extension.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;AAAA,iCAAiC;AACjC,qDAKoC;AAEpC,+BAA4B;AAE5B,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,wBAAwB,EAAE,GAAG,EAAE;IAC3D,MAAM,CAAC,MAAM,CAAC,sBAAsB,CAAC,yCAAyC,CAAC,CAAC;AACpF,CAAC,CAAC,CAAC;AAEH,IAAI,MAAsB,CAAC;AAEpB,KAAK,UAAU,QAAQ,CAC5B,OAAgC;IAEhC,uCAAuC;IACvC,MAAM,UAAU,GAAG,IAAA,WAAI,EAAC,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,oBAAoB,CAAC,CAAC;IAE1G,MAAM,gBAAgB,GAAe;QACnC,OAAO,EAAE,UAAU;KACpB,CAAC;IAEF,MAAM,aAAa,GAAkB;QACnC,GAAG,EAAE,gBAAgB;QACrB,KAAK,EAAE,gBAAgB;KACxB,CAAC;IAEF,MAAM,aAAa,GAA0B;QAC3C,gBAAgB,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC;KACvD,CAAC;IAEF,MAAM,GAAG,IAAI,qBAAc,CACzB,oBAAoB,EACpB,oBAAoB,EACpB,aAAa,EACb,aAAa,CACd,CAAC;IAEF,qDAAqD;IACrD,MAAM,CAAC,KAAK,EAAE,CAAC;AACjB,CAAC;AA5BD,4BA4BC;AAED,SAAgB,UAAU;IACxB,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;AACvB,CAAC;AALD,gCAKC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/runTest.js b/crates/language-server/editors/vscode/out/test/runTest.js deleted file mode 100644 index 783f8f39fb..0000000000 --- a/crates/language-server/editors/vscode/out/test/runTest.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const test_electron_1 = require("@vscode/test-electron"); -async function main() { - try { - // The folder containing the Extension Manifest package.json - // Passed to `--extensionDevelopmentPath` - const extensionDevelopmentPath = path.resolve(__dirname, '../../'); - // The path to test runner - // Passed to --extensionTestsPath - const extensionTestsPath = path.resolve(__dirname, './suite/index'); - // Download VS Code, unzip it and run the integration test - await (0, test_electron_1.runTests)({ extensionDevelopmentPath, extensionTestsPath }); - } - catch (err) { - console.error('Failed to run tests', err); - process.exit(1); - } -} -main(); -//# sourceMappingURL=runTest.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/runTest.js.map b/crates/language-server/editors/vscode/out/test/runTest.js.map deleted file mode 100644 index a813f5fbf5..0000000000 --- a/crates/language-server/editors/vscode/out/test/runTest.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"runTest.js","sourceRoot":"","sources":["../../src/test/runTest.ts"],"names":[],"mappings":";;AAAA,6BAA6B;AAE7B,yDAAiD;AAEjD,KAAK,UAAU,IAAI;IAClB,IAAI;QACH,4DAA4D;QAC5D,yCAAyC;QACzC,MAAM,wBAAwB,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;QAEnE,0BAA0B;QAC1B,iCAAiC;QACjC,MAAM,kBAAkB,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;QAEpE,0DAA0D;QAC1D,MAAM,IAAA,wBAAQ,EAAC,EAAE,wBAAwB,EAAE,kBAAkB,EAAE,CAAC,CAAC;KACjE;IAAC,OAAO,GAAG,EAAE;QACb,OAAO,CAAC,KAAK,CAAC,qBAAqB,EAAE,GAAG,CAAC,CAAC;QAC1C,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KAChB;AACF,CAAC;AAED,IAAI,EAAE,CAAC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/extension.test.js b/crates/language-server/editors/vscode/out/test/suite/extension.test.js deleted file mode 100644 index d88089ebf7..0000000000 --- a/crates/language-server/editors/vscode/out/test/suite/extension.test.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = require("assert"); -// You can import and use all API from the 'vscode' module -// as well as import your extension to test it -const vscode = require("vscode"); -// import * as myExtension from '../../extension'; -suite('Extension Test Suite', () => { - vscode.window.showInformationMessage('Start all tests.'); - test('Sample test', () => { - assert.strictEqual(-1, [1, 2, 3].indexOf(5)); - assert.strictEqual(-1, [1, 2, 3].indexOf(0)); - }); -}); -//# sourceMappingURL=extension.test.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/extension.test.js.map b/crates/language-server/editors/vscode/out/test/suite/extension.test.js.map deleted file mode 100644 index 26e2c09ba4..0000000000 --- a/crates/language-server/editors/vscode/out/test/suite/extension.test.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"extension.test.js","sourceRoot":"","sources":["../../../src/test/suite/extension.test.ts"],"names":[],"mappings":";;AAAA,iCAAiC;AAEjC,0DAA0D;AAC1D,8CAA8C;AAC9C,iCAAiC;AACjC,kDAAkD;AAElD,KAAK,CAAC,sBAAsB,EAAE,GAAG,EAAE;IAClC,MAAM,CAAC,MAAM,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC;IAEzD,IAAI,CAAC,aAAa,EAAE,GAAG,EAAE;QACxB,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9C,CAAC,CAAC,CAAC;AACJ,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/index.js b/crates/language-server/editors/vscode/out/test/suite/index.js deleted file mode 100644 index f0da16d94a..0000000000 --- a/crates/language-server/editors/vscode/out/test/suite/index.js +++ /dev/null @@ -1,40 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.run = void 0; -const path = require("path"); -const Mocha = require("mocha"); -const glob = require("glob"); -function run() { - // Create the mocha test - const mocha = new Mocha({ - ui: 'tdd', - color: true - }); - const testsRoot = path.resolve(__dirname, '..'); - return new Promise((c, e) => { - glob('**/**.test.js', { cwd: testsRoot }, (err, files) => { - if (err) { - return e(err); - } - // Add files to the test suite - files.forEach(f => mocha.addFile(path.resolve(testsRoot, f))); - try { - // Run the mocha test - mocha.run(failures => { - if (failures > 0) { - e(new Error(`${failures} tests failed.`)); - } - else { - c(); - } - }); - } - catch 
(err) { - console.error(err); - e(err); - } - }); - }); -} -exports.run = run; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/crates/language-server/editors/vscode/out/test/suite/index.js.map b/crates/language-server/editors/vscode/out/test/suite/index.js.map deleted file mode 100644 index dfd0c62e4c..0000000000 --- a/crates/language-server/editors/vscode/out/test/suite/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/test/suite/index.ts"],"names":[],"mappings":";;;AAAA,6BAA6B;AAC7B,+BAA+B;AAC/B,6BAA6B;AAE7B,SAAgB,GAAG;IAClB,wBAAwB;IACxB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC;QACvB,EAAE,EAAE,KAAK;QACT,KAAK,EAAE,IAAI;KACX,CAAC,CAAC;IAEH,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;IAEhD,OAAO,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QAC3B,IAAI,CAAC,eAAe,EAAE,EAAE,GAAG,EAAE,SAAS,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;YACxD,IAAI,GAAG,EAAE;gBACR,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;YAED,8BAA8B;YAC9B,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;YAE9D,IAAI;gBACH,qBAAqB;gBACrB,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;oBACpB,IAAI,QAAQ,GAAG,CAAC,EAAE;wBACjB,CAAC,CAAC,IAAI,KAAK,CAAC,GAAG,QAAQ,gBAAgB,CAAC,CAAC,CAAC;qBAC1C;yBAAM;wBACN,CAAC,EAAE,CAAC;qBACJ;gBACF,CAAC,CAAC,CAAC;aACH;YAAC,OAAO,GAAG,EAAE;gBACb,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;gBACnB,CAAC,CAAC,GAAG,CAAC,CAAC;aACP;QACF,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC,CAAC;AACJ,CAAC;AAjCD,kBAiCC"} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/src/extension.ts b/crates/language-server/editors/vscode/src/extension.ts index 3e584ce845..f4ac081950 100644 --- a/crates/language-server/editors/vscode/src/extension.ts +++ b/crates/language-server/editors/vscode/src/extension.ts @@ -7,6 +7,7 @@ import { } from "vscode-languageclient/node"; import { join } from 'path'; +import { clientLog, serverOutputChannel } from "./log"; vscode.commands.registerCommand('fe-analyzer.helloWorld', () => { vscode.window.showInformationMessage('Hello World from fe-analyzer extension!'); @@ -30,7 +31,10 @@ export async function activate( }; const clientOptions: LanguageClientOptions = { - documentSelector: [{ scheme: "file", language: "Fe" }], + documentSelector: [{ scheme: "file", language: "fe" }], + traceOutputChannel: serverOutputChannel, + // @ts-ignore + outputChannel: clientLog.output, }; client = new LanguageClient( @@ -41,7 +45,9 @@ export async function activate( ); // Start the client. 
This will also launch the server - client.start(); + await client.start(); + + // console log all messages from the server } export function deactivate(): Thenable | undefined { diff --git a/crates/language-server/editors/vscode/src/log.ts b/crates/language-server/editors/vscode/src/log.ts new file mode 100644 index 0000000000..3d6fd43146 --- /dev/null +++ b/crates/language-server/editors/vscode/src/log.ts @@ -0,0 +1,55 @@ +// adapted from rust-analyzer /editors/code/src/util.ts + +import { inspect } from 'util'; +import * as vscode from 'vscode'; + +const clientLog = new (class { + private enabled = true; + private readonly output = vscode.window.createOutputChannel("Fe Analyzer Client"); + + setEnabled(yes: boolean): void { + clientLog.enabled = yes; + } + + // Hint: the type [T, ...T[]] means a non-empty array + debug(...msg: [unknown, ...unknown[]]): void { + if (!clientLog.enabled) return; + clientLog.write("DEBUG", ...msg); + } + + info(...msg: [unknown, ...unknown[]]): void { + clientLog.write("INFO", ...msg); + } + + warn(...msg: [unknown, ...unknown[]]): void { + debugger; + clientLog.write("WARN", ...msg); + } + + error(...msg: [unknown, ...unknown[]]): void { + debugger; + clientLog.write("ERROR", ...msg); + clientLog.output.show(true); + } + + private write(label: string, ...messageParts: unknown[]): void { + const message = messageParts.map(clientLog.stringify).join(" "); + const dateTime = new Date().toLocaleString(); + clientLog.output.appendLine(`${label} [${dateTime}]: ${message}`); + } + + private stringify(val: unknown): string { + if (typeof val === "string") return val; + return inspect(val, { + colors: false, + depth: 6, // heuristic + }); + } +})(); + +const serverOutputChannel = vscode.window.createOutputChannel("Fe Analyzer Server"); + +export { + clientLog, + serverOutputChannel, +}; \ No newline at end of file diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 54f0452180..de694c22df 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,6 +1,6 @@ use super::state::ServerState; use anyhow::Result; -use lsp_server::Connection; +use lsp_server::{Connection, Notification}; use lsp_types::{ServerCapabilities, HoverProviderCapability}; fn server_capabilities() -> ServerCapabilities { @@ -30,6 +30,27 @@ pub fn run_server() -> Result<()> { let initialize_result = serde_json::to_value(initialize_result).unwrap(); connection.initialize_finish(request_id, initialize_result)?; + // send a "hello" message to the client + connection.sender.send( + lsp_server::Message::Notification(Notification { + method: String::from("window/showMessage"), + params: serde_json::to_value(lsp_types::ShowMessageParams { + typ: lsp_types::MessageType::INFO, + message: String::from("hello from the Fe language server"), + }).unwrap() + }) + )?; + + // log a startup message + connection.sender.send( + lsp_server::Message::Notification(Notification { + method: String::from("window/logMessage"), + params: serde_json::to_value(lsp_types::LogMessageParams { + typ: lsp_types::MessageType::INFO, + message: String::from("Fe language server started"), + }).unwrap() + }) + )?; io_threads.join().unwrap(); diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 7513894dcd..3baf562923 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -35,8 +35,11 @@ impl ServerState { } fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { - // 
handle hover request if let lsp_server::Message::Request(req) = msg { + // log the request to the console + println!("request: {:?}", req); + + // handle hover request if req.method == lsp_types::request::HoverRequest::METHOD { let params = lsp_types::HoverParams::deserialize(req.params)?; // for now let's just return "hi" From 14199029aa79ee0952ed7136112a462515508435 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 17 May 2023 10:40:24 -0500 Subject: [PATCH 158/678] language server dummy hover notifications --- crates/language-server/src/server.rs | 6 +++--- crates/language-server/src/state.rs | 25 +++++++++++++++++++++++-- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index de694c22df..5777f0f88d 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -14,7 +14,6 @@ pub fn run_server() -> Result<()> { let (connection, io_threads) = Connection::stdio(); let (request_id, initialize_params) = connection.initialize_start()?; - // // todo: actually use initialization params let capabilities = server_capabilities(); @@ -52,7 +51,8 @@ pub fn run_server() -> Result<()> { }) )?; + let result = ServerState::new(connection.sender).run(connection.receiver); io_threads.join().unwrap(); - - ServerState::new(connection.sender).run(connection.receiver) + + result } diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 3baf562923..1568427933 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -24,6 +24,11 @@ impl ServerState { } self.handle_message(msg)?; + + // debugging spam + // if (std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_secs() % 1) == 0 { + // self.log_info(String::from("hi"))?; + // } } Ok(()) } @@ -35,17 +40,20 @@ impl ServerState { } fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { + // log the message with `self.log_info` + self.log_info(format!("MESSAGE: {:?}", msg))?; + if let lsp_server::Message::Request(req) = msg { // log the request to the console - println!("request: {:?}", req); // handle hover request if req.method == lsp_types::request::HoverRequest::METHOD { + // log the hover request to the console let params = lsp_types::HoverParams::deserialize(req.params)?; // for now let's just return "hi" let result = lsp_types::Hover { - contents: lsp_types::HoverContents::Scalar(lsp_types::MarkedString::String(String::from("hi"))), + contents: lsp_types::HoverContents::Scalar(lsp_types::MarkedString::String(format!("{:?}", params))), range: None, }; @@ -61,4 +69,17 @@ impl ServerState { Ok(()) } + + fn log_info(&mut self, message: String) -> Result<()> { + self.sender.send( + lsp_server::Message::Notification(lsp_server::Notification { + method: String::from("window/logMessage"), + params: serde_json::to_value(lsp_types::LogMessageParams { + typ: lsp_types::MessageType::INFO, + message: message, + }).unwrap() + }) + )?; + Ok(()) + } } From 80cea27d40144cd380bec4da4aa251138564390f Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 17 May 2023 10:44:51 -0500 Subject: [PATCH 159/678] basic vscode extension dev notes --- .../language-server/editors/vscode/README.md | 76 +++---------------- 1 file changed, 10 insertions(+), 66 deletions(-) diff --git a/crates/language-server/editors/vscode/README.md b/crates/language-server/editors/vscode/README.md index 5a9b0e116a..d00336e7e6 100644 --- 
a/crates/language-server/editors/vscode/README.md +++ b/crates/language-server/editors/vscode/README.md @@ -1,71 +1,15 @@ # fe-analyzer README -This is the README for your extension "fe-analyzer". After writing up a brief description, we recommend including the following sections. +## Development and debugging +Run: +```bash +npm install +npm run build +``` -## Features +Then open this directory in VSCode and press `F5` to start debugging. -Describe specific features of your extension including screenshots of your extension in action. Image paths are relative to this README file. +A new VSCode window will open with the Fe test fixtures directory and this extension loaded. -For example if there is an image subfolder under your extension project workspace: - -\!\[feature X\]\(images/feature-x.png\) - -> Tip: Many popular extensions utilize animations. This is an excellent way to show off your extension! We recommend short, focused animations that are easy to follow. - -## Requirements - -If you have any requirements or dependencies, add a section describing those and how to install and configure them. - -## Extension Settings - -Include if your extension adds any VS Code settings through the `contributes.configuration` extension point. - -For example: - -This extension contributes the following settings: - -* `myExtension.enable`: Enable/disable this extension. -* `myExtension.thing`: Set to `blah` to do something. - -## Known Issues - -Calling out known issues can help limit users opening duplicate issues against your extension. - -## Release Notes - -Users appreciate release notes as you update your extension. - -### 1.0.0 - -Initial release of ... - -### 1.0.1 - -Fixed issue #. - -### 1.1.0 - -Added features X, Y, and Z. - ---- - -## Following extension guidelines - -Ensure that you've read through the extensions guidelines and follow the best practices for creating your extension. - -* [Extension Guidelines](https://code.visualstudio.com/api/references/extension-guidelines) - -## Working with Markdown - -You can author your README using Visual Studio Code. Here are some useful editor keyboard shortcuts: - -* Split the editor (`Cmd+\` on macOS or `Ctrl+\` on Windows and Linux). -* Toggle preview (`Shift+Cmd+V` on macOS or `Shift+Ctrl+V` on Windows and Linux). -* Press `Ctrl+Space` (Windows, Linux, macOS) to see a list of Markdown snippets. 
- -## For more information - -* [Visual Studio Code's Markdown Support](http://code.visualstudio.com/docs/languages/markdown) -* [Markdown Syntax Reference](https://help.github.com/articles/markdown-basics/) - -**Enjoy!** +## Building releases +### TODO \ No newline at end of file From d9977a7e19027bc01cdf0d79c1811f94dd36f307 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 17 May 2023 11:49:54 -0500 Subject: [PATCH 160/678] basic language-server readme --- crates/language-server/README.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 crates/language-server/README.md diff --git a/crates/language-server/README.md b/crates/language-server/README.md new file mode 100644 index 0000000000..8346a3e6c2 --- /dev/null +++ b/crates/language-server/README.md @@ -0,0 +1,8 @@ +# `fe-language-server` +### An LSP server for the Fe programming language + +## Development +To build the language server binary, run: +```bash +cargo build +``` \ No newline at end of file From c3aee0635cb2fb862f513976c95b7168d5a8e6ef Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 17 May 2023 11:50:03 -0500 Subject: [PATCH 161/678] hover formatting --- .../language-server/editors/vscode/README.md | 11 ++-- crates/language-server/src/state.rs | 52 +++++++++++++------ 2 files changed, 43 insertions(+), 20 deletions(-) diff --git a/crates/language-server/editors/vscode/README.md b/crates/language-server/editors/vscode/README.md index d00336e7e6..c3cc1bd79d 100644 --- a/crates/language-server/editors/vscode/README.md +++ b/crates/language-server/editors/vscode/README.md @@ -1,13 +1,16 @@ -# fe-analyzer README +# Fe LSP client VSCode extension +This needs a better name. -## Development and debugging -Run: +## Development/Debugging +Before running the VSCode extension, ensure the language server is built by following the instructions in [the `language-server` crate's README.md](../../README.md). + +Once you've built the language server binary, run: ```bash npm install npm run build ``` -Then open this directory in VSCode and press `F5` to start debugging. +Then open this directory in VSCode and press `F5` to run the extension and start the debugger. A new VSCode window will open with the Fe test fixtures directory and this extension loaded. diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 1568427933..826964bd87 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,3 +1,5 @@ +use std::io::BufRead; + use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; use lsp_server::{Message, Response}; @@ -10,11 +12,9 @@ pub struct ServerState { impl ServerState { pub fn new(sender: Sender) -> Self { - ServerState { - sender - } + ServerState { sender } } - + pub fn run(&mut self, receiver: Receiver) -> Result<()> { while let Some(msg) = self.next_message(&receiver) { if let lsp_server::Message::Notification(notification) = &msg { @@ -24,7 +24,7 @@ impl ServerState { } self.handle_message(msg)?; - + // debugging spam // if (std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_secs() % 1) == 0 { // self.log_info(String::from("hi"))?; @@ -32,13 +32,13 @@ impl ServerState { } Ok(()) } - + fn next_message(&self, receiver: &Receiver) -> Option { crossbeam_channel::select! 
{ recv(receiver) -> msg => msg.ok() } } - + fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { // log the message with `self.log_info` self.log_info(format!("MESSAGE: {:?}", msg))?; @@ -50,10 +50,29 @@ impl ServerState { if req.method == lsp_types::request::HoverRequest::METHOD { // log the hover request to the console let params = lsp_types::HoverParams::deserialize(req.params)?; - // for now let's just return "hi" - + + // open the file and read the line at the given position + let file = std::fs::File::open( + ¶ms + .text_document_position_params + .text_document + .uri + .path(), + )?; + let reader = std::io::BufReader::new(file); + let line = reader + .lines() + .nth(params.text_document_position_params.position.line as usize) + .unwrap() + .unwrap(); + let result = lsp_types::Hover { - contents: lsp_types::HoverContents::Scalar(lsp_types::MarkedString::String(format!("{:?}", params))), + contents: lsp_types::HoverContents::Markup( + lsp_types::MarkupContent::from(lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: format!("### Hovering over:\n```{}```\n\n{}", &line, serde_json::to_string_pretty(¶ms).unwrap()), + }), + ), range: None, }; @@ -69,17 +88,18 @@ impl ServerState { Ok(()) } - + fn log_info(&mut self, message: String) -> Result<()> { - self.sender.send( - lsp_server::Message::Notification(lsp_server::Notification { + self.sender.send(lsp_server::Message::Notification( + lsp_server::Notification { method: String::from("window/logMessage"), params: serde_json::to_value(lsp_types::LogMessageParams { typ: lsp_types::MessageType::INFO, message: message, - }).unwrap() - }) - )?; + }) + .unwrap(), + }, + ))?; Ok(()) } } From b82d8c93e226111930aa2b8fe8fd3c97e8eb4f37 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 5 May 2023 01:11:44 +0200 Subject: [PATCH 162/678] Implement import resolution --- Cargo.lock | 2 +- crates/common2/src/diagnostics.rs | 19 +- crates/common2/src/lib.rs | 4 - crates/hir-analysis/Cargo.toml | 2 +- crates/hir-analysis/src/lib.rs | 14 +- .../src/name_resolution/diagnostics.rs | 126 ++ .../src/name_resolution/import_resolver.rs | 963 +++++++++++++- .../hir-analysis/src/name_resolution/mod.rs | 27 + .../src/name_resolution/name_resolver.rs | 1177 +++++++++++------ .../src/name_resolution/visibility_checker.rs | 53 +- crates/hir/src/hir_def/ident.rs | 2 +- crates/hir/src/hir_def/item.rs | 188 ++- crates/hir/src/hir_def/mod.rs | 29 +- crates/hir/src/hir_def/module_tree.rs | 17 +- crates/hir/src/hir_def/params.rs | 19 +- crates/hir/src/hir_def/scope_graph.rs | 191 ++- crates/hir/src/hir_def/use_tree.rs | 40 +- crates/hir/src/lib.rs | 60 +- crates/hir/src/lower/mod.rs | 26 +- crates/hir/src/lower/params.rs | 4 +- crates/hir/src/lower/parse.rs | 18 +- crates/hir/src/lower/scope_builder.rs | 50 +- crates/hir/src/span/expr.rs | 9 +- crates/hir/src/span/item.rs | 16 +- crates/hir/src/span/mod.rs | 45 +- crates/hir/src/span/pat.rs | 2 +- crates/hir/src/span/stmt.rs | 7 +- crates/hir/src/span/transition.rs | 26 +- 28 files changed, 2509 insertions(+), 627 deletions(-) create mode 100644 crates/hir-analysis/src/name_resolution/diagnostics.rs diff --git a/Cargo.lock b/Cargo.lock index 0225c30a38..3fc191b50e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -925,7 +925,7 @@ version = "0.20.0-alpha" dependencies = [ "derive_more", "either", - "fe-common", + "fe-common2", "fe-hir", "fe-macros", "rustc-hash", diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 3fed6dd8e0..3cbc974358 100644 
--- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -6,7 +6,7 @@ use crate::InputFile; pub struct CompleteDiagnostic { pub severity: Severity, pub message: String, - pub span: Span, + pub span: Option, pub sub_diagnostics: Vec, pub error_code: GlobalErrorCode, } @@ -15,7 +15,7 @@ impl CompleteDiagnostic { pub fn new( severity: Severity, message: String, - span: Span, + span: Option, sub_diagnostics: Vec, error_code: GlobalErrorCode, ) -> Self { @@ -45,7 +45,17 @@ impl GlobalErrorCode { pub struct SubDiagnostic { pub severity: Severity, pub message: String, - pub span: Span, + pub span: Option, +} + +impl SubDiagnostic { + pub fn new(severity: Severity, message: String, span: Option) -> Self { + Self { + severity, + message, + span, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -93,7 +103,10 @@ pub enum Severity { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum AnalysisPass { Parse = 1, + + ImportResolution, NameResolution, + TyCheck, ExternalAnalysis(ExternalAnalysisKey) = u16::MAX, diff --git a/crates/common2/src/lib.rs b/crates/common2/src/lib.rs index f0e991e7e5..203c0ad41f 100644 --- a/crates/common2/src/lib.rs +++ b/crates/common2/src/lib.rs @@ -8,7 +8,3 @@ pub struct Jar(InputIngot, InputFile); pub trait InputDb: salsa::DbWithJar {} impl InputDb for DB where DB: ?Sized + salsa::DbWithJar {} - -pub trait Upcast { - fn upcast(&self) -> &T; -} diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index 79432bc5f4..170cecd66f 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -15,5 +15,5 @@ either = "1.8" derive_more = "0.99" hir = { path = "../hir", package = "fe-hir" } -common = { path = "../common", package = "fe-common" } +common = { path = "../common2", package = "fe-common2" } macros = { path = "../macros", package = "fe-macros" } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index feee0eb92d..5ed46f1cd6 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -1,11 +1,17 @@ -use common::db::Upcast; use hir::{span::DynLazySpan, HirDb}; #[salsa::jar(db = HirAnalysisDb)] -pub struct Jar(); +pub struct Jar( + name_resolution::resolve_imports, + name_resolution::diagnostics::ImportErrorAccumulator, +); -pub trait HirAnalysisDb: salsa::DbWithJar + Upcast {} -impl HirAnalysisDb for DB where DB: ?Sized + salsa::DbWithJar + Upcast {} +pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { + fn as_hir_db(&self) -> &dyn HirDb { + >::as_jar_db::<'_>(self) + } +} +impl HirAnalysisDb for DB where DB: ?Sized + salsa::DbWithJar + HirDb {} pub mod name_resolution; diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs new file mode 100644 index 0000000000..3f71a216ae --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -0,0 +1,126 @@ +use common::diagnostics::{ + AnalysisPass, CompleteDiagnostic, GlobalErrorCode, Severity, SubDiagnostic, +}; +use hir::{ + diagnostics::DiagnosticVoucher, + hir_def::IdentId, + span::{DynLazySpan, LazySpan}, + HirDb, +}; + +use super::name_resolver::NameRes; + +#[salsa::accumulator] +pub struct ImportErrorAccumulator(ImportError); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ImportError { + span: DynLazySpan, + kind: ImportErrorKind, +} + +impl ImportError { + pub fn new(span: DynLazySpan, kind: ImportErrorKind) -> Self { + Self { span, kind } + } + + pub fn conflict(span: DynLazySpan, conflict_with: 
DynLazySpan) -> Self { + Self::new(span, ImportErrorKind::Conflict(conflict_with)) + } + + pub fn not_found(span: DynLazySpan, ident: IdentId) -> Self { + Self::new(span, ImportErrorKind::NotFound(ident)) + } + + pub fn invisible(span: DynLazySpan, resolved: NameRes) -> Self { + Self::new(span, ImportErrorKind::Invisible(resolved)) + } + + pub fn ambiguous(span: DynLazySpan, ident: IdentId) -> Self { + Self::new(span, ImportErrorKind::Ambiguous(ident)) + } +} + +impl DiagnosticVoucher for ImportError { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(AnalysisPass::ImportResolution, self.kind.local_code()) + } + + fn to_complete(self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + let span = self.span.resolve(db); + let message = self.kind.message(db.as_hir_db()); + let sub_diags = self.kind.sub_diagnostics(db); + + CompleteDiagnostic::new( + self.kind.severity(), + message, + span, + sub_diags, + self.error_code(), + ) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ImportErrorKind { + /// The import conflicts with another import. + Conflict(DynLazySpan), + /// The import path segment is not found. + NotFound(IdentId), + + /// The import path segment is not visible. + Invisible(NameRes), + + /// The import path segment is ambiguous. + Ambiguous(IdentId), +} + +impl ImportErrorKind { + fn local_code(&self) -> u16 { + match self { + ImportErrorKind::Conflict(_) => 0, + ImportErrorKind::NotFound(_) => 1, + ImportErrorKind::Invisible(..) => 2, + ImportErrorKind::Ambiguous(_) => 3, + } + } + + fn severity(&self) -> Severity { + Severity::Error + } + + fn message(&self, db: &dyn HirDb) -> String { + match self { + ImportErrorKind::Conflict(_) => "import conflicts with another import".to_string(), + ImportErrorKind::NotFound(name) => format!("{} is not found", name.data(db)), + ImportErrorKind::Invisible(resolved) => { + format!( + "{} is not visible", + resolved.scope.name(db).unwrap().data(db) + ) + } + ImportErrorKind::Ambiguous(name) => format!("{} is ambiguous", name.data(db)), + } + } + + fn sub_diagnostics(&self, db: &dyn hir::SpannedHirDb) -> Vec { + match self { + ImportErrorKind::Conflict(conflict_with) => vec![SubDiagnostic::new( + Severity::Note, + "conflicts with this import".to_string(), + conflict_with.resolve(db), + )], + + ImportErrorKind::NotFound(_) | ImportErrorKind::Ambiguous(_) => vec![], + + ImportErrorKind::Invisible(resolved) => { + let span = resolved.scope.name_span(db.as_hir_db()).unwrap(); + vec![SubDiagnostic::new( + Severity::Note, + "not visible because of this declaration".to_string(), + span.resolve(db), + )] + } + } + } +} diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index b624188267..4c7c5636e2 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -1,45 +1,958 @@ -#![allow(dead_code)] -use hir::hir_def::{ - scope_graph::{ScopeEdge, ScopeId}, - IdentId, +//! This module implements import and export resolution for HIR. 
+use std::{ + collections::{hash_map::Entry, VecDeque}, + mem, }; -use rustc_hash::FxHashMap; -use super::name_resolver::{NameResolutionError, ResolvedNameSet}; +use hir::{ + hir_def::{scope_graph::ScopeId, IdentId, IngotId, Use}, + span::DynLazySpan, +}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::{name_resolution::visibility_checker::is_use_visible, HirAnalysisDb}; + +use super::{ + diagnostics::ImportError, + name_resolver::{ + NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, NameResolutionError, + NameResolver, QueryDirective, + }, +}; + +pub struct ImportResolver<'db> { + db: &'db dyn HirAnalysisDb, + + /// The ingot that is being resolved. + ingot: IngotId, + + /// The set of imports that have been resolved. + resolved_imports: IntermediateResolvedImports, + + /// The uses that have resolution is work in progress. + intermediate_uses: FxHashMap>, + + /// The errors that have been accumulated during the import resolution. + accumulated_errors: Vec, + + /// The number of imported bindings. + /// This is used to judge if a import resolution doesn't change in each + /// iteration of fixed point calculation. + /// This check rely on the fact that the number of bindings is monotonically + /// increasing. + num_imported_res: FxHashMap, + + /// The set of imports that its resolution starts with an external ingot. + /// In this case, the use will turns out to be ambiguous after the import + /// resolution reaches the fixed point. + suspicious_imports: FxHashSet, +} +impl<'db> ImportResolver<'db> { + pub(crate) fn new(db: &'db dyn HirAnalysisDb, ingot: IngotId) -> Self { + Self { + db, + ingot, + resolved_imports: IntermediateResolvedImports::new(ingot), + intermediate_uses: FxHashMap::default(), + accumulated_errors: Vec::default(), + num_imported_res: FxHashMap::default(), + suspicious_imports: FxHashSet::default(), + } + } + + pub(crate) fn resolve_imports(mut self) -> (ResolvedImports, Vec) { + self.initialize_i_uses(); + + let mut changed = true; + let mut unresolved_scope: VecDeque<_> = self.intermediate_uses.keys().copied().collect(); + while changed { + changed = false; + let n_unresolved_scope = unresolved_scope.len(); + let mut scope_counter = 0; + + while scope_counter < n_unresolved_scope { + scope_counter += 1; + let scope = unresolved_scope.pop_front().unwrap(); + + let n_i_uses = self.intermediate_uses[&scope].len(); + let mut i_use_counter = 0; + while i_use_counter < n_i_uses { + i_use_counter += 1; + let i_use = self + .intermediate_uses + .get_mut(&scope) + .unwrap() + .pop_front() + .unwrap(); + + match self.resolve_i_use(i_use) { + (Some(updated_i_use), resolved) => { + changed = changed || resolved; + self.intermediate_uses + .get_mut(&scope) + .unwrap() + .push_back(updated_i_use); + } + + (None, resolved) => { + changed = changed || resolved; + } + } + } + + if self.scope_state(scope).is_closed() { + unresolved_scope.push_back(scope); + } + } + } + + for i_use in std::mem::take(&mut self.intermediate_uses) + .into_values() + .flatten() + { + // If the unresolved use is a glob and the base path is fully resolved, then we + // can regard the resolution for the glob as completed. + // This happens if the scope that glob is referring to is not closed, e.g., if + // there is a cycle in the import graph. + if i_use.is_glob(self.db) && i_use.is_base_resolved(self.db) { + continue; + } + + // If the unresolved use is not a glob and the number of imported bindings is + // not 0, then we can regard the resolution for the use as completed. 
+ // This happens if the scope that the use is referring to is not closed. + if !i_use.is_glob(self.db) && *self.num_imported_res.entry(i_use.use_).or_default() != 0 + { + continue; + } + + self.register_error(&i_use, NameResolutionError::NotFound); + } + + for suspicious in mem::take(&mut self.suspicious_imports) { + self.verify_ambiguity(suspicious); + } + + ( + self.resolved_imports.resolved_imports, + self.accumulated_errors, + ) + } + + /// Try to resolve the given `IntermediateUse`. + /// + /// The first value of the returned tuple is the updated `IntermediateUse` + /// if the resolution is not fully completed. + /// + /// The second value of the returned tuple indicates whether the resolution + /// is progressed from the passed `IntermediateUse`. + fn resolve_i_use(&mut self, i_use: IntermediateUse) -> (Option, bool) { + if i_use.is_glob(self.db) { + self.resolve_glob(i_use) + } else { + self.resolve_named(i_use) + } + } + + /// Try to resolve the given named `IntermediateUse`. + fn resolve_named(&mut self, i_use: IntermediateUse) -> (Option, bool) { + let Some(i_use_res) = self.resolve_base_path(i_use.clone()) else { + return (None, true); + }; + + match i_use_res { + IUseResolution::Full(_) => unreachable!(), + + IUseResolution::BasePath(base_path_resolved) => { + if self.try_finalize_named_use(base_path_resolved) { + (None, true) + } else { + let changed = !i_use.is_base_resolved(self.db); + (Some(i_use), changed) + } + } + + IUseResolution::Partial(i_use) => (Some(i_use), true), + + IUseResolution::Unchanged(i_use) => (Some(i_use), false), + } + } + + /// Try to resolve the given glob `IntermediateUse`. + fn resolve_glob(&mut self, i_use: IntermediateUse) -> (Option, bool) { + let (base_path_resolved, changed) = { + if i_use.is_base_resolved(self.db) { + (i_use, false) + } else { + let Some(i_use_res) = self.resolve_base_path(i_use) else { + return (None, true); + }; + match i_use_res { + IUseResolution::Full(_) => unreachable!(), + + IUseResolution::BasePath(resolved) => (resolved, true), + + IUseResolution::Partial(i_use) => { + return (Some(i_use), true); + } + + IUseResolution::Unchanged(i_use) => { + return (Some(i_use), false); + } + } + } + }; + + let target_scope = base_path_resolved.current_scope(); + let original_scope = base_path_resolved.original_scope; + let use_ = base_path_resolved.use_; + + // Collect all unresolved named imports in the target scope to avoid binding a + // name to a wrong resolution being brought by a glob. + let unresolved_named_imports = match self.intermediate_uses.get(&target_scope) { + Some(i_uses) => i_uses + .iter() + .filter_map(|i_use_in_target| { + if !i_use_in_target.is_glob(self.db) + && is_use_visible(self.db, original_scope, use_) + { + i_use_in_target.imported_name(self.db) + } else { + None + } + }) + .collect(), + + None => FxHashSet::default(), + }; + + // Collect all bindings in the target scope. 
+ let mut resolver = NameResolver::new(self.db, &self.resolved_imports); + let mut directive = QueryDirective::default(); + directive + .add_domain(NameDomain::Value) + .disallow_lex() + .disallow_external(); + let resolutions = resolver.collect_all_resolutions_for_glob( + target_scope, + original_scope, + directive, + unresolved_named_imports, + ); + + let is_decidable = self.is_decidable(&base_path_resolved); + let n_res = resolutions.iter().fold(0, |acc, bind| acc + bind.1.len()); + if *self.num_imported_res.entry(use_).or_default() == n_res { + if is_decidable { + return (None, true); + } else { + return (Some(base_path_resolved), changed); + } + } + + self.num_imported_res.insert(base_path_resolved.use_, n_res); + self.resolved_imports + .set_glob_resolutions(&base_path_resolved, resolutions); + + if is_decidable { + (None, true) + } else { + (Some(base_path_resolved), true) + } + } + + /// Resolves all segments of the given `IntermediateUse` except for the last + /// segment. + /// NOTE: `IUseResolution::Full` is never returned from this function. + /// + /// # Returns + /// - `Some(IUseResolution::BasePath(_))` if the base path is fully + /// resolved. + /// - `Some(IUseResolution::Partial(_))` if the base path is partially + /// resolved and the `IntermediateUse` is updated. + /// - `Some(IUseResolution::Unchanged(_))` if the resulted `IntermediateUse` + /// is unchanged. + /// - `None` if the error happens during the resolution, the error is + /// accumulated in the function. + fn resolve_base_path(&mut self, mut i_use: IntermediateUse) -> Option { + let mut changed = true; + if i_use.is_base_resolved(self.db) { + return Some(IUseResolution::BasePath(i_use)); + } + + loop { + match self.resolve_segment(&i_use)? { + IUseResolution::Full(_) => unreachable!(), + + IUseResolution::BasePath(resolved) => { + return Some(IUseResolution::BasePath(resolved)); + } + + IUseResolution::Partial(updated_i_use) => { + changed = true; + i_use = updated_i_use; + } + + IUseResolution::Unchanged(i_use) => { + return if changed { + Some(IUseResolution::Partial(i_use)) + } else { + Some(IUseResolution::Unchanged(i_use)) + }; + } + } + } + } + + /// Resolves the segments of the given `IntermediateUse` one by one. + /// + /// # Returns + /// - `Some(IUseResolution::Full(_))` if the given use is fully resolved. + /// - `Some(IUseResolution::BasePath(_))` if the base path is fully + /// resolved. + /// - `Some(IUseResolution::Partial(_))` if the base path is partially + /// resolved and the `IntermediateUse` is updated. + /// - `Some(IUseResolution::Unchanged(_))` if the resulted `IntermediateUse` + /// is unchanged. + /// - `None` if the error happens during the resolution, the error is + /// accumulated in the function. + fn resolve_segment(&mut self, i_use: &IntermediateUse) -> Option { + // The segment is syntactically invalid. We can't perform name resolution + // anymore. + // We don't need to report the error here because the parser should have already + // reported it. 
+ let Some(query) = self.make_query(i_use) else { + return None; + }; + + let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); + let resolved = match resolver.resolve_query(i_use.current_scope(), query) { + Ok(resolved) => resolved, + + Err(NameResolutionError::NotFound) if !self.is_decidable(i_use) => { + return Some(IUseResolution::Unchanged(i_use.clone())) + } + + Err(err) => { + self.register_error(i_use, err); + return None; + } + }; + + if i_use.is_base_resolved(self.db) { + return Some(IUseResolution::Full(resolved)); + } + + if resolved.contains_external_ingot(self.db, i_use) || resolved.contains_glob_imported() { + self.suspicious_imports.insert(i_use.use_); + } + + let next_i_use = i_use.proceed(resolved); + if next_i_use.is_base_resolved(self.db) { + Some(IUseResolution::BasePath(next_i_use)) + } else { + Some(IUseResolution::Partial(next_i_use)) + } + } + + fn initialize_i_uses(&mut self) { + let m_tree = self.ingot.module_tree(self.db.as_hir_db()); + + for top_mod in m_tree.all_modules() { + let s_graph = top_mod.scope_graph(self.db.as_hir_db()); + for &use_ in &s_graph.unresolved_uses { + let i_use = IntermediateUse::new(self.db, use_); + self.intermediate_uses + .entry(i_use.current_scope()) + .or_default() + .push_back(i_use); + } + } + } + + /// Returns `true` if the given `IntermediateUse` reaches the fixed point. + fn try_finalize_named_use(&mut self, i_use: IntermediateUse) -> bool { + debug_assert!(i_use.is_base_resolved(self.db)); + + let binding = match self.resolve_segment(&i_use) { + Some(IUseResolution::Full(binding)) => binding, + Some(IUseResolution::Unchanged(_)) => { + return false; + } + + Some(_) => unreachable!(), + + None => { + return true; + } + }; + + let filtered = binding.filter_by_visibility(self.db, i_use.original_scope); + let n_res = filtered.len(); + let is_decidable = self.is_decidable(&i_use); + + if n_res == 0 && is_decidable { + self.register_error(&i_use, NameResolutionError::NotFound); + return true; + } + + if *self.num_imported_res.entry(i_use.use_).or_default() == n_res { + return is_decidable; + } + + self.num_imported_res.insert(i_use.use_, n_res); + if let Err(err) = self + .resolved_imports + .set_named_binds(self.db, &i_use, filtered) + { + self.accumulated_errors.push(err); + } + + is_decidable + } + + /// Check the ambiguity of the given suspicious `IntermediateUse` and report + /// an error if it is ambiguous. + /// An additional ambiguity check should be performed after the import + /// resolution reaches a fixed point. + fn verify_ambiguity(&mut self, use_: Use) { + let i_use = IntermediateUse::new(self.db, use_); + let first_segment_ident = i_use.current_segment_ident(self.db).unwrap(); + let scope = i_use.current_scope(); + let ingot = scope.ingot(self.db.as_hir_db()); + + // The ambiguity in the first segment possibly occurs when the segment is + // resolved to either a glob imported binding or an external ingot in the + // `i_use` resolution. + // + // This is because: + // 1. the resolution of the first segment changes depending on whether the + // dependent glob is resolved or not at the time of `i_use` resolution, + // 2. the order in which uses are resolved is nondeterministic. + // + // In normal name resolution rules, the name brought in by a glob always shadows + // the external ingot, so this ambiguity is inherent in import resolution. + // As a result, we need to add additional verification to check this kind of + // ambiguity. 
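A minimal Fe sketch of this first-segment ambiguity, assuming a hypothetical external ingot named `evm` and assuming `foo` glob-exports an item that is also named `evm` (both names are purely illustrative):

```fe
use foo::*
use evm::Address
```

Whether `evm` in the second use refers to the external ingot or to the glob-imported `foo::evm` depends on whether the first glob has already been resolved, so the `match` below re-resolves the first segment after the fixed point and reports `NameResolutionError::Ambiguous` exactly when the segment was resolved through a glob import while an external ingot of the same name exists.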
+        match self.resolve_segment(&i_use) {
+            Some(IUseResolution::Full(_)) => {
+                // The ambiguity about the final segment of the path is already verified during
+                // the fixed point calculation, so verification is not
+                // necessary.
+                return;
+            }
+
+            Some(IUseResolution::BasePath(resolved) | IUseResolution::Partial(resolved)) => {
+                if matches!(
+                    resolved.current_res.unwrap().derivation,
+                    NameDerivation::GlobImported(_)
+                ) && ingot
+                    .external_ingots(self.db.as_hir_db())
+                    .iter()
+                    .any(|(ingot_name, _)| ingot_name == &first_segment_ident)
+                {
+                    // The resolved scope is shadowed by a glob import, while the use might
+                    // originally have resolved to an external ingot. This means there is an
+                    // ambiguity between the external ingot and the name
+                    // imported by the glob import.
+                    self.register_error(&i_use, NameResolutionError::Ambiguous);
+                }
+            }
+
+            Some(IUseResolution::Unchanged(_)) => {}
+
+            None => {
+                return;
+            }
+        }
+
+        // The ambiguity in the base path arises when multiple items of the same name
+        // are glob imported into the same scope. It is necessary to verify this
+        // after the fixed point is reached, since it cannot be assumed that all
+        // globs in that scope have been resolved at the time of `i_use` name
+        // resolution.
+        //
+        // This ambiguity can be detected by the normal shadowing rules, so it can be
+        // verified by calling `resolve_base_path`.
+        //
+        // The ambiguity about the final segment of the path can be verified during the
+        // fixed point calculation, so verification is not necessary.
+        self.resolve_base_path(i_use);
+    }
+
+    fn register_error(&mut self, i_use: &IntermediateUse, err: NameResolutionError) {
+        match err {
+            // We treat `Conflict` as the same as `NotFound`.
+            // NOTE: The conflict error happens in the `resolve_query` method, which means that the
+            // name conflict happens in the scope that is being imported.
+            NameResolutionError::NotFound | NameResolutionError::Conflict => {
+                self.accumulated_errors.push(ImportError::not_found(
+                    i_use.current_segment_span(),
+                    i_use.current_segment_ident(self.db).unwrap(),
+                ));
+            }
+
+            NameResolutionError::Invalid => {
+                // Do nothing because the error is already reported in the
+                // parsing phase.
+            }
+
+            NameResolutionError::Ambiguous => {
+                self.accumulated_errors.push(ImportError::ambiguous(
+                    i_use.current_segment_span(),
+                    i_use.current_segment_ident(self.db).unwrap(),
+                ));
+            }
+
+            // `Invisible` is not expected to be returned from `resolve_query` since `NameResolver`
+            // doesn't care about visibility.
+            NameResolutionError::Invisible => {
+                unreachable!()
+            }
+        }
+    }
+
+    /// Makes a query for the current segment of the intermediate use to be
+    /// resolved.
+    fn make_query(&self, i_use: &IntermediateUse) -> Option {
+        let seg_name = i_use.current_segment_ident(self.db)?;
+        let mut directive = QueryDirective::new();
+
+        // In the middle of the use path, disallow lexically scoped names and
+        // external names.
+        if !i_use.is_first_segment() {
+            directive.disallow_lex().disallow_external();
+        }
+
+        if self.does_named_import_exist_for(
+            seg_name,
+            i_use.current_scope(),
+            i_use.is_first_segment(),
+        ) {
+            directive.disallow_glob().disallow_external();
+        }
+
+        if i_use.is_base_resolved(self.db) {
+            directive.add_domain(NameDomain::Value);
+        }
+
+        Some(NameQuery::with_directive(seg_name, directive))
+    }
+
+    /// Returns `true` if there is an unresolved named import for the given name
+    /// in the given scope or one of its lexical parent scopes.
+    fn does_named_import_exist_for(&self, name: IdentId, scope: ScopeId, allow_lex: bool) -> bool {
+        let mut current_scope = Some(scope);
+
+        while let Some(scope) = current_scope {
+            for i_use in self.intermediate_uses.get(&scope).into_iter().flatten() {
+                if i_use.imported_name(self.db) == Some(name) {
+                    return true;
+                }
+            }
+            if !allow_lex {
+                break;
+            }
+            current_scope = scope.lex_parent(self.db.as_hir_db());
+        }
+
+        false
+    }
+
+    /// Returns the current state of the scope.
+    fn scope_state(&self, scope: ScopeId) -> ScopeState {
+        if scope.ingot(self.db.as_hir_db()) != self.ingot {
+            return ScopeState::Closed;
+        }
+        let Some(i_uses) = self.intermediate_uses.get(&scope) else {
+            return ScopeState::Closed;
+        };
+
+        if i_uses.is_empty() {
+            return ScopeState::Closed;
+        }
+        for i_use in i_uses {
+            if i_use.is_exported(self.db) {
+                return ScopeState::Open;
+            }
+        }
+
+        ScopeState::Semi
+    }
+
+    /// Returns `true` if the current segment of the `i_use` can be decided now,
+    /// i.e., the scopes it consults are resolved far enough that no new
+    /// bindings for the segment can appear.
+    fn is_decidable(&self, i_use: &IntermediateUse) -> bool {
+        let target_scope = i_use.current_scope();
+
+        if i_use.is_first_segment() {
+            let mut target_scope = Some(target_scope);
+            while let Some(scope) = target_scope {
+                if self.scope_state(scope) != ScopeState::Closed {
+                    return false;
+                }
+                target_scope = scope.lex_parent(self.db.as_hir_db());
+            }
+            true
+        } else {
+            self.scope_state(target_scope) != ScopeState::Open
+        }
+    }
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub struct ResolvedImports {
-    pub resolved: FxHashMap,
+    pub named_resolved: FxHashMap,
+    pub glob_resolved: FxHashMap,
+    pub unnamed_resolved: Vec,
 }
 
-pub struct ImportResolver {
-    resolved: FxHashMap,
-    glob_resolved: FxHashMap,
-    states: FxHashMap,
+pub(super) trait Importer {
+    fn named_imports<'a>(
+        &'a self,
+        db: &'a dyn HirAnalysisDb,
+        scope: ScopeId,
+    ) -> Option<&'a NamedImportSet>;
+
+    fn glob_imports<'a>(
+        &'a self,
+        db: &'a dyn HirAnalysisDb,
+        scope: ScopeId,
+    ) -> Option<&'a GlobImportSet>;
+
+    fn unnamed_imports<'a>(
+        &'a self,
+        db: &'a dyn HirAnalysisDb,
+        scope: ScopeId,
+    ) -> &'a [NameBinding];
 }
 
-pub trait Importer {
-    fn named_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet>;
-    fn glob_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet>;
+pub(super) struct DefaultImporter;
+
+impl Importer for DefaultImporter {
+    fn named_imports<'a>(
+        &'a self,
+        db: &'a dyn HirAnalysisDb,
+        scope: ScopeId,
+    ) -> Option<&'a NamedImportSet> {
+        resolved_imports_for_scope(db, scope)
+            .named_resolved
+            .get(&scope)
+    }
+
+    fn glob_imports<'a>(
+        &'a self,
+        db: &'a dyn HirAnalysisDb,
+        scope: ScopeId,
+    ) -> Option<&'a GlobImportSet> {
+        resolved_imports_for_scope(db, scope)
+            .glob_resolved
+            .get(&scope)
+    }
+
+    fn unnamed_imports<'a>(
+        &'a self,
+        db: &'a dyn HirAnalysisDb,
+        scope: ScopeId,
+    ) -> &'a [NameBinding] {
+        &resolved_imports_for_scope(db, scope).unnamed_resolved
+    }
 }
 
-pub(super) type ResolvedImportSet =
-    FxHashMap>;
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct ImportedBinding {
+    pub binding: NameBinding,
+    pub use_: Use,
+}
+
+pub type NamedImportSet = FxHashMap;
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+pub struct GlobImportSet {
+    imported: FxHashMap>>,
+}
+impl GlobImportSet {
+    /// Returns imported resolutions for the given `name`.
+    pub fn name_res_for(&self, name: IdentId) -> impl Iterator {
+        self.imported
+            .values()
+            .flat_map(move |v| v.get(&name).into_iter().flatten())
+    }
+
+    pub fn iter(&self) -> impl Iterator>)> {
+        self.imported.iter()
+    }
+}
 
 /// This is the state of import resolution for a given scope.
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, Copy, PartialEq, Eq)]
 enum ScopeState {
-    // The scope is open, meaning that the scope needs further processing.
+    // The scope is open, meaning that the scope needs further resolution.
     Open,
-    // The scope is closed, meaning that the scope is fully resolved.
-    Close,
+
+    /// The scope is partially resolved, meaning that the exports in the scope
+    /// are fully resolved but the imports are only partially resolved.
+    Semi,
+
+    /// The scope is closed, meaning that all imports in the scope are fully
+    /// resolved.
+    Closed,
+}
+
+impl ScopeState {
+    fn is_closed(self) -> bool {
+        matches!(self, ScopeState::Closed)
+    }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+struct IntermediateUse {
+    use_: Use,
+    current_res: Option,
+    original_scope: ScopeId,
+    resolved_until: usize,
+}
+
+impl IntermediateUse {
+    fn new(db: &dyn HirAnalysisDb, use_: Use) -> Self {
+        let scope = ScopeId::from_item(db.as_hir_db(), use_.into())
+            .lex_parent(db.as_hir_db())
+            .unwrap();
+        Self {
+            use_,
+            current_res: None,
+            original_scope: scope,
+            resolved_until: 0,
+        }
+    }
+
+    /// Returns the scope that this intermediate use is contained in.
+    fn current_scope(&self) -> ScopeId {
+        if let Some(current_res) = self.current_res.as_ref() {
+            current_res.scope
+        } else {
+            self.original_scope
+        }
+    }
+
+    fn is_exported(&self, db: &dyn HirAnalysisDb) -> bool {
+        self.use_.vis(db.as_hir_db()).is_pub()
+    }
+
+    fn is_glob(&self, db: &dyn HirAnalysisDb) -> bool {
+        self.use_.is_glob(db.as_hir_db())
+    }
+
+    /// Proceeds the resolution of the use path to the next segment.
+    /// The binding must contain exactly one resolution.
+    fn proceed(&self, binding: NameBinding) -> Self {
+        debug_assert_eq!(binding.len(), 1);
+        let current_res = binding.into_iter().next();
+        Self {
+            use_: self.use_,
+            current_res,
+            original_scope: self.original_scope,
+            resolved_until: self.resolved_until + 1,
+        }
+    }
+
+    /// Returns the span of the current segment of the use.
+    fn current_segment_span(&self) -> DynLazySpan {
+        self.use_
+            .lazy_span()
+            .path()
+            .segment(self.resolved_until)
+            .into()
+    }
+
+    fn current_segment_ident(&self, db: &dyn HirAnalysisDb) -> Option {
+        let segments = self
+            .use_
+            .path(db.as_hir_db())
+            .to_opt()?
+            .segments(db.as_hir_db());
+
+        let seg_idx = self.resolved_until;
+        let segment = segments[seg_idx].to_opt()?;
+        segment.ident()
+    }
+
+    fn imported_name(&self, db: &dyn HirAnalysisDb) -> Option {
+        self.use_.imported_name(db.as_hir_db())
+    }
+
+    fn segment_len(&self, db: &dyn HirAnalysisDb) -> Option {
+        self.use_
+            .path(db.as_hir_db())
+            .to_opt()
+            .map(|p| p.segment_len(db.as_hir_db()))
+    }
+
+    fn is_first_segment(&self) -> bool {
+        self.resolved_until == 0
+    }
+
+    /// Returns `true` if the use path, except for the last segment, is fully
+    /// resolved.
+    fn is_base_resolved(&self, db: &dyn HirAnalysisDb) -> bool {
+        let Some(segment_len) = self.segment_len(db) else {
+            return false;
+        };
+
+        self.resolved_until + 1 == segment_len
+    }
+}
+
+enum IUseResolution {
+    /// All segments are resolved.
+    Full(NameBinding),
+
+    /// All path segments except the last one are resolved.
+ BasePath(IntermediateUse), + + /// The intermediate use was partially resolved, but still needs further + /// resolution. + Partial(IntermediateUse), + + /// There was no change to the intermediate use. + Unchanged(IntermediateUse), +} + +struct IntermediateResolvedImports { + resolved_imports: ResolvedImports, + ingot: IngotId, +} + +impl IntermediateResolvedImports { + fn new(ingot: IngotId) -> Self { + Self { + resolved_imports: ResolvedImports::default(), + ingot, + } + } + + fn set_named_binds( + &mut self, + db: &dyn HirAnalysisDb, + i_use: &IntermediateUse, + mut bind: NameBinding, + ) -> Result<(), ImportError> { + let scope = i_use.original_scope; + bind.set_derivation(NameDerivation::NamedImported(i_use.use_)); + + let imported_name = match i_use.imported_name(db) { + Some(name) => name, + None => { + self.resolved_imports.unnamed_resolved.push(bind); + return Ok(()); + } + }; + + let imported_set = self + .resolved_imports + .named_resolved + .entry(scope) + .or_default(); + + match imported_set.entry(imported_name) { + Entry::Occupied(mut e) => match e.get_mut().binding.merge(bind.iter()) { + Some(already_found) => { + return Err(ImportError::conflict( + i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), + already_found.derived_from(db).unwrap(), + )) + } + None => Ok(()), + }, + + Entry::Vacant(e) => { + let import_bind = ImportedBinding { + binding: bind, + use_: i_use.use_, + }; + e.insert(import_bind); + Ok(()) + } + } + } + + fn set_glob_resolutions( + &mut self, + i_use: &IntermediateUse, + mut resolutions: FxHashMap>, + ) { + let scope = i_use.original_scope; + for res in resolutions.values_mut().flatten() { + res.derivation = NameDerivation::GlobImported(i_use.use_); + } + + self.resolved_imports + .glob_resolved + .entry(scope) + .or_default() + .imported + .insert(i_use.use_, resolutions); + } +} + +impl Importer for IntermediateResolvedImports { + fn named_imports<'a>( + &'a self, + db: &'a dyn HirAnalysisDb, + scope: ScopeId, + ) -> Option<&'a NamedImportSet> { + if scope.top_mod().ingot(db.as_hir_db()) != self.ingot { + resolved_imports_for_scope(db, scope) + .named_resolved + .get(&scope) + } else { + self.resolved_imports.named_resolved.get(&scope) + } + } + + fn glob_imports<'a>( + &'a self, + db: &'a dyn HirAnalysisDb, + scope: ScopeId, + ) -> Option<&'a GlobImportSet> { + if scope.top_mod().ingot(db.as_hir_db()) != self.ingot { + resolved_imports_for_scope(db, scope) + .glob_resolved + .get(&scope) + } else { + self.resolved_imports.glob_resolved.get(&scope) + } + } + + fn unnamed_imports<'a>( + &'a self, + db: &'a dyn HirAnalysisDb, + scope: ScopeId, + ) -> &'a [NameBinding] { + if scope.top_mod().ingot(db.as_hir_db()) != self.ingot { + &resolved_imports_for_scope(db, scope).unnamed_resolved + } else { + &self.resolved_imports.unnamed_resolved + } + } +} + +fn resolved_imports_for_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> &ResolvedImports { + let ingot = scope.ingot(db.as_hir_db()); + super::resolve_imports(db, ingot) } -impl Importer for ImportResolver { - fn named_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet> { - self.resolved.get(&scope) +impl NameBinding { + fn contains_external_ingot(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { + let current_ingot = i_use.current_scope().ingot(db.as_hir_db()); + self.resolutions + .values() + .any(|r| r.scope.ingot(db.as_hir_db()) != current_ingot) } - fn glob_imports(&self, scope: ScopeId) -> Option<&ResolvedImportSet> { - self.glob_resolved.get(&scope) + fn 
contains_glob_imported(&self) -> bool { + self.resolutions + .values() + .any(|r| matches!(r.derivation, NameDerivation::GlobImported(_))) } } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index ede8568475..dfda5dea45 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -1,3 +1,30 @@ +use hir::hir_def::IngotId; + +use crate::HirAnalysisDb; + +use self::{diagnostics::ImportErrorAccumulator, import_resolver::ResolvedImports}; + +pub mod diagnostics; pub mod import_resolver; pub mod name_resolver; pub mod visibility_checker; + +#[salsa::tracked(return_ref)] +pub fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> ResolvedImports { + let resolver = import_resolver::ImportResolver::new(db, ingot); + let (imports, import_error) = resolver.resolve_imports(); + for error in import_error { + ImportErrorAccumulator::push(db, error); + } + + imports +} + +pub fn resolve_imports_with_diag( + db: &dyn HirAnalysisDb, + ingot: IngotId, +) -> (&ResolvedImports, Vec) { + let imports = resolve_imports(db, ingot); + let diagnostics = resolve_imports::accumulated::(db, ingot); + (imports, diagnostics) +} diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 2f0c1d2a2d..6b01a784b8 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -1,15 +1,18 @@ -use std::{collections::hash_map::IntoValues, fmt}; +use std::{ + cmp, + collections::hash_map::{Entry, IntoValues}, + fmt, mem, +}; use either::Either; use hir::{ hir_def::{ - kw, scope_graph::{ - AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, - ScopeEdge, ScopeId, ScopeKind, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, - ValueEdge, VariantEdge, + AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, ScopeId, + ScopeKind, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, + VariantEdge, }, - IdentId, ItemKind, Partial, PathId, + IdentId, ItemKind, Partial, PathId, Use, }, span::DynLazySpan, }; @@ -17,7 +20,10 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::HirAnalysisDb; -use super::import_resolver::Importer; +use super::{ + import_resolver::Importer, + visibility_checker::{is_scope_visible, is_use_visible}, +}; pub struct NameResolver<'db, 'a> { db: &'db dyn HirAnalysisDb, @@ -25,22 +31,44 @@ pub struct NameResolver<'db, 'a> { cache_store: ResolvedQueryCacheStore, } +impl<'db, 'a> NameResolver<'db, 'a> { + pub(super) fn new(db: &'db dyn HirAnalysisDb, importer: &'a dyn Importer) -> Self { + Self { + db, + importer, + cache_store: Default::default(), + } + } + + pub(super) fn new_no_cache(db: &'db dyn HirAnalysisDb, importer: &'a dyn Importer) -> Self { + let cache_store = ResolvedQueryCacheStore { + no_cache: true, + ..Default::default() + }; + Self { + db, + importer, + cache_store, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq)] pub enum ResolvedPath { - Full(ResolvedNameSet), + Full(NameBinding), /// The path is partially resolved; this means that the `resolved` is a type /// and the following segments depend on type to resolve. /// These unresolved parts are resolved in the later type inference and /// trait solving phases. 
Partial { - resolved: ResolvedName, + resolved: NameRes, unresolved_from: usize, }, } impl ResolvedPath { - pub fn partial(resolved: ResolvedName, unresolved_from: usize) -> Self { + pub fn partial(resolved: NameRes, unresolved_from: usize) -> Self { Self::Partial { resolved, unresolved_from, @@ -54,7 +82,6 @@ pub struct PathResolutionError { pub kind: NameResolutionError, pub failed_at: usize, } - impl PathResolutionError { fn new(kind: NameResolutionError, failed_at: usize) -> Self { Self { kind, failed_at } @@ -75,7 +102,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// name domains. /// /// For example, the `foo::FOO` can be resolved to both `const - /// FOO` and `struct FOO` in the following code: + /// FOO` and `struct FOO` in the following code without any error: /// ```fe /// use foo::FOO /// @@ -89,13 +116,17 @@ impl<'db, 'a> NameResolver<'db, 'a> { path: PathId, scope: ScopeId, ) -> Result { - let segments = path.segments(self.db.upcast()); + let segments = path.segments(self.db.as_hir_db()); if segments.is_empty() { return Err(PathResolutionError::invalid(0)); } // Set pred segment to the current scope. - let mut pred = ResolvedName::new_scope(scope, None, NameDomain::from_scope(self.db, scope)); + let mut pred = NameRes::new_scope( + scope, + NameDomain::from_scope(self.db, scope), + NameDerivation::Def, + ); let seg_len = segments.len(); for (i, seg) in segments[0..seg_len - 1].iter().enumerate() { @@ -119,112 +150,246 @@ impl<'db, 'a> NameResolver<'db, 'a> { &mut self, scope: ScopeId, query: NameQuery, - ) -> Result { + ) -> Result { // If the query is already resolved, return the cached result. if let Some(resolved) = self.cache_store.get(scope, query) { - return resolved; + return resolved.clone(); }; + let mut binding = NameBinding::default(); + // The shadowing rule is - // `$ = NamedImports > GlobImports > Lex > external ingot > builtin types`, + // `$ > NamedImports > GlobImports > Lex > external ingot > builtin types`, // where `$` means current scope. - // This ordering means that greater one shadows lower ones having the same name - // in the same name context. - let mut resolutions = Vec::new(); + // This ordering means that greater one shadows lower ones in the same domain. let mut parent = None; - // 1. Look for the name in the current scope and named imports. + + // 1. Look for the name in the current scope. let mut found_scopes = FxHashSet::default(); - for edge in self.edges(scope) { + for edge in scope.edges(self.db.as_hir_db()) { match edge.kind.propagate(query) { - PropagatedQuery::Terminated => { + PropagationResult::Terminated => { if found_scopes.insert(edge.dest) { - resolutions.push(ResolvedName::new_scope( + let res = NameRes::new_scope( edge.dest, - None, NameDomain::from_scope(self.db, edge.dest), - )); + NameDerivation::Def, + ); + if binding.push(&res).is_some() { + return Err(NameResolutionError::Ambiguous); + } } } - PropagatedQuery::Continuation => { + PropagationResult::Continuation => { debug_assert!(parent.is_none()); parent = Some(edge.dest); } - PropagatedQuery::UnPropagated => {} + PropagationResult::UnPropagated => {} } } + + // 2. Look for the name in the named imports of the current scope. if let Some(imported) = self .importer - .named_imports(scope) + .named_imports(self.db, scope) .and_then(|imports| imports.get(&query.name)) { - match imported { - Ok(imported) => { - resolutions.extend(imported.iter().cloned()); + self.try_merge(&mut binding, &imported.binding, scope, query)?; + } + + // 3. Look for the name in the glob imports. 
+ if query.directive.allow_glob { + if let Some(imported) = self.importer.glob_imports(self.db, scope) { + for res in imported.name_res_for(query.name) { + self.try_push(&mut binding, res, scope, query)?; + } + } + } + + // 4. Look for the name in the lexical scope if it exists. + if let Some(parent) = parent { + match self.resolve_query(parent, query) { + Ok(mut resolved) => { + resolved.lexed(); + self.try_merge(&mut binding, &resolved, scope, query)?; } - Err(_) => { - let err = NameResolutionError::InvalidImport; - self.cache_store.cache_result(scope, query, Err(err)); + + Err(NameResolutionError::NotFound) => {} + Err(err) => { + self.cache_store + .cache_result(scope, query, Err(err.clone())); return Err(err); } } } - if let Some(result) = self.store_result_opt(scope, query, resolutions) { - return result; + + if !query.directive.allow_external { + return self.finalize_query_result(scope, query, binding); } - // 2. Look for the name in the glob imports. - if let Some(imported) = self - .importer - .named_imports(scope) - .and_then(|imports| imports.get(&query.name)) - { - let imported = imported - .clone() - .map_err(|_| NameResolutionError::InvalidImport); - self.cache_store - .cache_result(scope, query, imported.clone()); - return imported; + // 5. Look for the name in the external ingots. + if query.directive.is_allowed_domain(NameDomain::Item as u8) { + scope + .top_mod() + .ingot(self.db.as_hir_db()) + .external_ingots(self.db.as_hir_db()) + .iter() + .for_each(|(name, root_mod)| { + if *name == query.name { + binding.push(&NameRes::new_scope( + ScopeId::root(*root_mod), + NameDomain::Item, + NameDerivation::External, + )); + } + }); } - // 3. Look for the name in the lexical scope if it exists. - if let Some(parent) = parent { - self.cache_store.cache_delegated(scope, query, parent); - return self.resolve_query(parent, query); - } - - // 4. Look for the name in the external ingots. - let resolutions: Vec<_> = scope - .top_mod - .external_ingots(self.db.upcast()) - .iter() - .filter_map(|(name, root_mod)| { - if *name == query.name { - Some(ResolvedName::new_scope( - ScopeId::root(*root_mod), - None, - NameDomain::Item, - )) - } else { - None + // 6. Look for the name in the builtin types. + // todo: Add db.builtin_scopes() and use it here. + // if let Some(builtin) = BuiltinName::lookup_for(query.name) { + // resolved_set.push_name(ResolvedName::new_builtin(builtin, + // builtin.domain())) }; + + self.finalize_query_result(scope, query, binding) + } + + /// Collect all visible resolutions in the given `target` scope. + /// + /// The function follows the shadowing rule, meaning the same name in the + /// same domain is properly shadowed. Also, this function guarantees that + /// the collected resolutions are unique in terms of its name and resolved + /// scope. + /// + /// On the other hand, the function doesn't cause any error and collect all + /// resolutions even if they are in the same domain. The reason + /// for this is + /// - Ambiguous error should be reported lazily, meaning it should be + /// reported when the resolution is actually used. + /// - The function is used for glob imports, so it's necessary to return + /// monotonously increasing results. Also, we can't arbitrarily choose the + /// possible resolution from multiple candidates to avoid hiding + /// ambiguity. That's why we can't use `NameBinding` and + /// `NameBinding::merge` in this function. + /// + /// The below examples demonstrates the second point. 
+ /// We need to report ambiguous error at `const C: S = S` because `S` is + /// ambiguous. + /// + /// ```fe + /// use foo::* + /// const C: S = S + /// + /// mod foo { + /// pub use inner1::*; + /// pub use inner2::*; + /// mod inner1 { + /// pub struct S {} + /// } + /// mod inner2 { + /// pub struct S {} + /// } + /// } + /// ``` + pub(super) fn collect_all_resolutions_for_glob( + &mut self, + target: ScopeId, + ref_scope: ScopeId, + directive: QueryDirective, + unresolved_named_imports: FxHashSet, + ) -> FxHashMap> { + let mut res_collection: FxHashMap> = FxHashMap::default(); + let mut seen_domains: FxHashMap = FxHashMap::default(); + let mut seen_scope: FxHashSet<(IdentId, ScopeId)> = FxHashSet::default(); + + for edge in target.edges(self.db.as_hir_db()) { + let scope = match edge.kind.propagate_glob(directive) { + PropagationResult::Terminated => edge.dest, + _ => { + continue; } - }) - .collect(); + }; - // Ensure that all names of external ingots don't conflict with each other. - debug_assert!(resolutions.len() < 2); - if let Some(result) = self.store_result_opt(scope, query, resolutions) { - return result; + let name = scope.name(self.db.as_hir_db()).unwrap(); + if !seen_scope.insert((name, scope)) { + continue; + } + let res = NameRes::new_scope( + scope, + NameDomain::from_scope(self.db, scope), + NameDerivation::Def, + ); + + *seen_domains.entry(name).or_default() |= res.domain as u8; + res_collection.entry(name).or_default().push(res); } - // 5. Look for the name in the builtin types. - let result = if let Some(builtin) = BuiltinName::lookup_for(query.name) { - Ok(ResolvedName::new_builtin(builtin, builtin.domain()).into()) - } else { + let mut seen_domains_after_named = seen_domains.clone(); + if let Some(named_imports) = self.importer.named_imports(self.db, target) { + for (&name, import) in named_imports { + if !is_use_visible(self.db, ref_scope, import.use_) { + continue; + } + + let seen_domain = seen_domains.get(&name).copied().unwrap_or_default(); + for res in import.binding.iter() { + if (seen_domain & res.domain as u8 != 0) + || !seen_scope.insert((name, res.scope)) + { + continue; + } + + *seen_domains_after_named.entry(name).or_default() |= res.domain as u8; + + res_collection.entry(name).or_default().push(res.clone()); + } + } + } + + if let Some(glob_imports) = self.importer.glob_imports(self.db, target) { + for (&use_, resolutions) in glob_imports.iter() { + if !is_use_visible(self.db, ref_scope, use_) { + continue; + } + for (&name, res_for_name) in resolutions.iter() { + if unresolved_named_imports.contains(&name) { + continue; + } + + for res in res_for_name.iter() { + let seen_domain = seen_domains_after_named + .get(&name) + .copied() + .unwrap_or_default(); + + if (seen_domain & res.domain as u8 != 0) + || !seen_scope.insert((name, res.scope)) + { + continue; + } + res_collection.entry(name).or_default().push(res.clone()); + } + } + } + } + + res_collection + } + + /// Finalize the query result and cache it to the cache store. + fn finalize_query_result( + &mut self, + scope: ScopeId, + query: NameQuery, + resolved_set: NameBinding, + ) -> Result { + let result = if resolved_set.is_empty() { Err(NameResolutionError::NotFound) + } else { + Ok(resolved_set) }; - self.cache_store.cache_result(scope, query, result.clone()); result } @@ -265,25 +430,16 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// resolved. 
fn resolve_segment( &mut self, - pred: ResolvedName, + pred: NameRes, segment: Partial, seg_idx: usize, is_last: bool, - ) -> Result, PathResolutionError> { + ) -> Result, PathResolutionError> { let Partial::Present(seg) = segment else { return Err(PathResolutionError::invalid(seg_idx)); }; - let Some(scope) = pred.scope() - else { - // If pred is a builtin type, then the path resolution is done. - if pred.is_type(self.db) { - return Ok(Either::Left(ResolvedPath::partial(pred, seg_idx))); - } else { - return Err(PathResolutionError::not_found(seg_idx)); - } - }; - + let scope = pred.scope; let query = NameQuery::new(seg); let resolved_set = match self.resolve_query(scope, query) { Ok(resolved) => resolved, @@ -299,7 +455,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { if is_last { Ok(Either::Left(ResolvedPath::Full(resolved_set))) - } else if resolved_set.num() > 1 { + } else if resolved_set.len() > 1 { // Case a. is already handled above. // Handles case b. here. return Err(PathResolutionError::not_found(seg_idx)); @@ -308,292 +464,487 @@ impl<'db, 'a> NameResolver<'db, 'a> { } } - /// Convert the `resolutions` into `ResolvedNameSet` and store it to the - /// cache store. - /// If the `resolutions` is empty return `None` instead of - /// returning an error. - fn store_result_opt( + fn try_merge( &mut self, + target: &mut NameBinding, + from: &NameBinding, scope: ScopeId, query: NameQuery, - resolutions: Vec, - ) -> Option> { - if !resolutions.is_empty() { - let result = ResolvedNameSet::from_resolutions(resolutions); - self.cache_store.cache_result(scope, query, result.clone()); - Some(result) + ) -> Result<(), NameResolutionError> { + if target + .merge(from.filter_by_domain(query.directive.domain)) + .is_none() + { + Ok(()) } else { - None + let err = NameResolutionError::Ambiguous; + self.cache_store + .cache_result(scope, query, Err(err.clone())); + Err(err) } } - fn edges(&self, scope: ScopeId) -> &'db [ScopeEdge] { - let graph = scope.top_mod.module_scope_graph(self.db.upcast()); - graph.edges(scope.local_id) + fn try_push( + &mut self, + target: &mut NameBinding, + res: &NameRes, + scope: ScopeId, + query: NameQuery, + ) -> Result<(), NameResolutionError> { + if target.push(res).is_none() { + Ok(()) + } else { + let err = NameResolutionError::Ambiguous; + self.cache_store + .cache_result(scope, query, Err(err.clone())); + Err(err) + } } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct NameQuery { name: IdentId, - option: QueryOption, + directive: QueryDirective, } impl NameQuery { - /// Create a new name query with the default query option. + /// Create a new name query with the default query directive. pub fn new(name: IdentId) -> Self { Self { name, - option: Default::default(), + directive: Default::default(), } } - /// Create a new name query with the given query option. - pub fn with_option(name: IdentId, option: QueryOption) -> Self { - Self { name, option } + /// Create a new name query with the given query directive. + pub fn with_directive(name: IdentId, directive: QueryDirective) -> Self { + Self { name, directive } + } + + pub fn name(&self) -> IdentId { + self.name } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct QueryOption { - /// If `allow_lex` is true, then the query will be propagated to the lexical - /// scope if the name is not found in the current scope. +pub struct QueryDirective { + /// If `allow_lex` is `true`, then the query will be propagated to the + /// lexical scope if the name is not found in the current scope. 
allow_lex: bool, + + /// If `allow_external` is `true`, then the query will be propagated to the + /// external ingot and builtin types as well. + allow_external: bool, + + /// If `allow_glob` is `true`, then the resolver uses the glob import to + /// resolve the name. + allow_glob: bool, + + domain: u8, } -impl QueryOption { +impl QueryDirective { + /// Make a new query directive with the default settings. + /// The default setting is to allow lexical scope lookup and look up names + /// in the `Item` domain. pub fn new() -> Self { - Self { allow_lex: true } + Self { + allow_lex: true, + allow_external: true, + allow_glob: true, + domain: NameDomain::Item as u8, + } + } + + /// Set the `domain` to lookup, the allowed domain set that are already set + /// will be overwritten. + pub fn set_domain(&mut self, domain: NameDomain) -> &mut Self { + self.domain = domain as u8; + self + } + + /// Append the `domain` to the allowed domain set. + pub fn add_domain(&mut self, domain: NameDomain) -> &mut Self { + self.domain |= domain as u8; + self } + /// Disallow lexical scope lookup. pub fn disallow_lex(&mut self) -> &mut Self { self.allow_lex = false; self } + + pub(super) fn disallow_external(&mut self) -> &mut Self { + self.allow_external = false; + self + } + + pub(super) fn disallow_glob(&mut self) -> &mut Self { + self.allow_glob = false; + self + } + + /// Returns true if the `domain` is allowed to lookup in the current + /// setting. + pub(super) fn is_allowed_domain(&self, domain: u8) -> bool { + self.domain & domain != 0 + } } -impl Default for QueryOption { +impl Default for QueryDirective { fn default() -> Self { Self::new() } } /// The struct contains the lookup result of a name query. -/// The results can contain more than one resolved names which belong to +/// The results can contain more than one resolved items which belong to /// different name domains. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct ResolvedNameSet { - names: FxHashMap, +#[derive(Clone, Debug, PartialEq, Eq, Default)] +pub struct NameBinding { + pub(super) resolutions: FxHashMap, } -impl ResolvedNameSet { - /// Returns the number of resolved names. - pub fn num(&self) -> usize { - self.names.len() +impl NameBinding { + /// Returns the number of resolutions. + pub fn len(&self) -> usize { + self.resolutions.len() } - pub fn iter(&self) -> impl Iterator { - self.names.values() + pub fn is_empty(&self) -> bool { + self.resolutions.is_empty() } - /// Returns the resolved name of the given `domain`. - pub fn name_by_domain(&self, domain: NameDomain) -> Option<&ResolvedName> { - self.names.get(&domain) + pub fn iter(&self) -> impl Iterator { + self.resolutions.values() } - fn from_resolutions(resolutions: Vec) -> Result { - if resolutions.is_empty() { - return Err(NameResolutionError::NotFound); + pub fn filter_by_visibility(&self, db: &dyn HirAnalysisDb, from: ScopeId) -> Self { + let mut resolutions = FxHashMap::default(); + for (domain, res) in &self.resolutions { + if res.is_visible(db, from) { + resolutions.insert(*domain, res.clone()); + } } - let mut names = FxHashMap::default(); - for resolution in resolutions { - let domain = resolution.domain; - if names.insert(domain, resolution).is_some() { - return Err(NameResolutionError::Conflict); + Self { resolutions } + } + + /// Returns the resolution of the given `domain`. + pub fn name_by_domain(&self, domain: NameDomain) -> Option<&NameRes> { + self.resolutions.get(&domain) + } + + /// Merge the `resolutions` into the set. 
If name conflict happens, the old + /// resolution will be returned, otherwise `None` will be returned. + pub(super) fn merge<'a>( + &mut self, + resolutions: impl Iterator, + ) -> Option { + for res in resolutions { + if let Some(conflict) = self.push(res) { + return Some(conflict); } } - Ok(Self { names }) + None + } + + pub(super) fn set_derivation(&mut self, derivation: NameDerivation) { + for res in self.resolutions.values_mut() { + res.derivation = derivation.clone(); + } + } + + /// Push the `res` into the set. If name conflict happens, the old + /// resolution will be returned, otherwise `None` will be returned. + fn push(&mut self, res: &NameRes) -> Option { + let domain = res.domain; + match self.resolutions.entry(domain) { + Entry::Occupied(mut e) => { + let old_derivation = e.get().derivation.clone(); + if old_derivation < res.derivation { + e.insert(res.clone()); + None + } else if res.derivation < old_derivation { + None + } else { + let old = e.get().clone(); + e.insert(res.clone()); + Some(old) + } + } + + Entry::Vacant(e) => { + e.insert(res.clone()); + None + } + } + } + + fn filter_by_domain(&self, domain: u8) -> impl Iterator { + self.resolutions + .values() + .filter(move |res| ((res.domain as u8) & domain) != 0) + } + + fn lexed(&mut self) { + for res in self.resolutions.values_mut() { + res.derivation.lexed() + } } } -impl IntoIterator for ResolvedNameSet { - type Item = ResolvedName; - type IntoIter = IntoValues; +impl IntoIterator for NameBinding { + type Item = NameRes; + type IntoIter = IntoValues; fn into_iter(self) -> Self::IntoIter { - self.names.into_values() + self.resolutions.into_values() } } -impl From for ResolvedNameSet { - fn from(resolution: ResolvedName) -> Self { +impl From for NameBinding { + fn from(resolution: NameRes) -> Self { let mut names = FxHashMap::default(); names.insert(resolution.domain, resolution); - Self { names } + Self { resolutions: names } } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ResolvedName { - pub kind: ResolvedNameKind, +pub struct NameRes { + pub scope: ScopeId, pub domain: NameDomain, + pub derivation: NameDerivation, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum NameResolutionError { - /// The name is not found. - NotFound, - /// Multiple candidates are found, no need to report the error from the use - /// site because it's should be emitted from a def site. - /// The associated value is the first candidate. - Conflict, +impl NameRes { + pub fn is_type(&self, db: &dyn HirAnalysisDb) -> bool { + self.scope.is_type(db.as_hir_db()) + } - /// The name is found as the imported name, but the name resolution for the - /// import itself failed. No need to report the error from the use site - /// because it's should be emitted from a import resolution phase. - InvalidImport, + pub fn is_visible(&self, db: &dyn HirAnalysisDb, from: ScopeId) -> bool { + let scope_or_use = match self.derivation { + NameDerivation::Def | NameDerivation::Builtin | NameDerivation::External => { + Either::Left(self.scope) + } + NameDerivation::NamedImported(use_) | NameDerivation::GlobImported(use_) => { + Either::Right(use_) + } + NameDerivation::Lex(ref inner) => { + let mut inner = inner; + while let NameDerivation::Lex(parent) = inner.as_ref() { + inner = parent; + } - /// The name is invalid in parsing. Basically, no need to report it because - /// the error is already emitted from parsing phase. 
- Invalid, -} + return Self { + derivation: inner.as_ref().clone(), + ..self.clone() + } + .is_visible(db, from); + } + }; -impl ResolvedName { - pub fn is_type(&self, db: &dyn HirAnalysisDb) -> bool { - match self.kind { - ResolvedNameKind::Builtin(builtin) => builtin.is_type(), - ResolvedNameKind::Scope { scope, .. } => scope.is_type(db.upcast()), + match scope_or_use { + Either::Left(target_scope) => is_scope_visible(db, from, target_scope), + Either::Right(use_) => is_use_visible(db, from, use_), } } - pub fn scope(&self) -> Option { - match self.kind { - ResolvedNameKind::Builtin(_) => None, - ResolvedNameKind::Scope { scope, .. } => Some(scope), + pub(super) fn derived_from(&self, db: &dyn HirAnalysisDb) -> Option { + match self.derivation { + NameDerivation::Def | NameDerivation::Builtin | NameDerivation::External => { + self.scope.name_span(db.as_hir_db()) + } + NameDerivation::NamedImported(use_) => use_.imported_name_span(db.as_hir_db()), + NameDerivation::GlobImported(use_) => use_.glob_span(db.as_hir_db()), + NameDerivation::Lex(ref inner) => { + let mut inner = inner; + while let NameDerivation::Lex(parent) = inner.as_ref() { + inner = parent; + } + Self { + derivation: inner.as_ref().clone(), + ..self.clone() + } + .derived_from(db) + } } } - fn new_scope(scope: ScopeId, import_span: Option, domain: NameDomain) -> Self { + fn new_scope(scope: ScopeId, domain: NameDomain, derivation: NameDerivation) -> Self { Self { - kind: ResolvedNameKind::Scope { scope, import_span }, + scope, + derivation, domain, } } +} - fn new_builtin(builtin: BuiltinName, domain: NameDomain) -> Self { - Self { - kind: ResolvedNameKind::Builtin(builtin), - domain, +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum NameDerivation { + Def, + NamedImported(Use), + GlobImported(Use), + Lex(Box), + External, + Builtin, +} + +impl NameDerivation { + fn lexed(&mut self) { + let inner = mem::replace(self, NameDerivation::Def); + *self = NameDerivation::Lex(Box::new(inner)); + } +} + +impl PartialOrd for NameDerivation { + fn partial_cmp(&self, other: &Self) -> Option { + match (self, other) { + (NameDerivation::Def, NameDerivation::Def) => Some(cmp::Ordering::Equal), + (NameDerivation::Def, _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::Def) => Some(cmp::Ordering::Less), + + (NameDerivation::NamedImported(_), NameDerivation::NamedImported(_)) => { + Some(cmp::Ordering::Equal) + } + (NameDerivation::NamedImported(_), _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::NamedImported(_)) => Some(cmp::Ordering::Less), + + (NameDerivation::GlobImported(_), NameDerivation::GlobImported(_)) => { + Some(cmp::Ordering::Equal) + } + (NameDerivation::GlobImported(_), _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::GlobImported(_)) => Some(cmp::Ordering::Less), + + (NameDerivation::Lex(lhs), NameDerivation::Lex(rhs)) => lhs.partial_cmp(rhs), + (NameDerivation::Lex(_), _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::Lex(_)) => Some(cmp::Ordering::Less), + + (NameDerivation::External, NameDerivation::External) => Some(cmp::Ordering::Equal), + (NameDerivation::External, _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::External) => Some(cmp::Ordering::Less), + + (NameDerivation::Builtin, NameDerivation::Builtin) => Some(cmp::Ordering::Equal), } } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum NameResolutionError { + /// The name is not found. + NotFound, + + /// Multiple candidates are found. + Conflict, + + /// The name is invalid in parsing. 
Basically, no need to report it because + /// the error is already emitted from parsing phase. + Invalid, + + /// The name is found, but it's not visible from the reference site. + Invisible, + + /// The name is found, but it's ambiguous. + Ambiguous, +} + impl fmt::Display for NameResolutionError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { NameResolutionError::NotFound => write!(f, "name not found"), NameResolutionError::Conflict => write!(f, "multiple candidates found"), - NameResolutionError::InvalidImport => write!(f, "invalid import"), NameResolutionError::Invalid => write!(f, "invalid name"), + NameResolutionError::Invisible => write!(f, "name is not visible"), + NameResolutionError::Ambiguous => write!(f, "name is ambiguous"), } } } impl std::error::Error for NameResolutionError {} -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum ResolvedNameKind { - Builtin(BuiltinName), - Scope { - scope: ScopeId, - import_span: Option, - }, -} - -#[derive(Clone, Debug, Copy, PartialEq, Eq, Hash)] -pub enum BuiltinName { - Bool, - U8, - U16, - U32, - U64, - U128, - U256, - I8, - I16, - I32, - I64, - I128, - I256, -} - -impl BuiltinName { - /// Returns the builtin name if the `name` is a builtin name. - pub fn lookup_for(name: IdentId) -> Option { - match name { - kw::BOOL => Self::Bool, - kw::U8 => Self::U8, - kw::U16 => Self::U16, - kw::U32 => Self::U32, - kw::U64 => Self::U64, - kw::U128 => Self::U128, - kw::U256 => Self::U256, - kw::I8 => Self::I8, - kw::I16 => Self::I16, - kw::I32 => Self::I32, - kw::I64 => Self::I64, - kw::I128 => Self::I128, - kw::I256 => Self::I256, - _ => return None, - } - .into() - } - - pub fn domain(self) -> NameDomain { - // Currently all builtin belong to the item domain. - match self { - Self::Bool - | Self::U8 - | Self::U16 - | Self::U32 - | Self::U64 - | Self::U128 - | Self::U256 - | Self::I8 - | Self::I16 - | Self::I32 - | Self::I64 - | Self::I128 - | Self::I256 => NameDomain::Item, - } - } - - pub fn is_type(self) -> bool { - // Currently all builtin names are types. - match self { - Self::Bool - | Self::U8 - | Self::U16 - | Self::U32 - | Self::U64 - | Self::U128 - | Self::U256 - | Self::I8 - | Self::I16 - | Self::I32 - | Self::I64 - | Self::I128 - | Self::I256 => true, - } - } -} +// #[derive(Clone, Debug, Copy, PartialEq, Eq, Hash)] +// pub enum BuiltinName { +// Bool, +// U8, +// U16, +// U32, +// U64, +// U128, +// U256, +// I8, +// I16, +// I32, +// I64, +// I128, +// I256, +// } +// +// impl BuiltinName { +// /// Returns the builtin name if the `name` is a builtin name. +// pub fn lookup_for(name: IdentId) -> Option { +// match name { +// kw::BOOL => Self::Bool, +// kw::U8 => Self::U8, +// kw::U16 => Self::U16, +// kw::U32 => Self::U32, +// kw::U64 => Self::U64, +// kw::U128 => Self::U128, +// kw::U256 => Self::U256, +// kw::I8 => Self::I8, +// kw::I16 => Self::I16, +// kw::I32 => Self::I32, +// kw::I64 => Self::I64, +// kw::I128 => Self::I128, +// kw::I256 => Self::I256, +// _ => return None, +// } +// .into() +// } +// +// pub fn domain(self) -> NameDomain { +// // Currently all builtin belong to the item domain. +// match self { +// Self::Bool +// | Self::U8 +// | Self::U16 +// | Self::U32 +// | Self::U64 +// | Self::U128 +// | Self::U256 +// | Self::I8 +// | Self::I16 +// | Self::I32 +// | Self::I64 +// | Self::I128 +// | Self::I256 => NameDomain::Item, +// } +// } +// +// pub fn is_type(self) -> bool { +// // Currently all builtin names are types. 
+// match self { +// Self::Bool +// | Self::U8 +// | Self::U16 +// | Self::U32 +// | Self::U64 +// | Self::U128 +// | Self::U256 +// | Self::I8 +// | Self::I16 +// | Self::I32 +// | Self::I64 +// | Self::I128 +// | Self::I256 => true, +// } +// } +// } #[derive(Default)] struct ResolvedQueryCacheStore { - cache: FxHashMap< - (ScopeId, NameQuery), - Either, ScopeId>, - >, + cache: FxHashMap<(ScopeId, NameQuery), Result>, no_cache: bool, } @@ -602,31 +953,20 @@ impl ResolvedQueryCacheStore { &mut self, scope: ScopeId, query: NameQuery, - result: Result, + result: Result, ) { if self.no_cache { return; } - self.cache.insert((scope, query), Either::Left(result)); - } - - fn cache_delegated(&mut self, scope: ScopeId, query: NameQuery, parent: ScopeId) { - if self.no_cache { - return; - } - self.cache.insert((scope, query), Either::Right(parent)); + self.cache.insert((scope, query), result); } fn get( &self, scope: ScopeId, query: NameQuery, - ) -> Option> { - match self.cache.get(&(scope, query)) { - Some(Either::Left(resolved)) => Some(resolved.clone()), - Some(Either::Right(delegated)) => Some(self.get(*delegated, query)?), - _ => None, - } + ) -> Option<&Result> { + self.cache.get(&(scope, query)) } } @@ -648,251 +988,298 @@ impl ResolvedQueryCacheStore { pub enum NameDomain { /// The domain is associated with all items except for items that belongs to /// the `Value` domain. - Item, + Item = 0b1, /// The domain is associated with a local variable and items that are - /// guaranteed not to have associated names. e.g., `fn` or `const`. - Value, + /// guaranteed not to have associated names. e.g., `fn`, `const` or enum + /// variables. + Value = 0b10, /// The domain is associated with struct fields. - Field, - /// The domain is associated with enum variants. 
- Variant, + Field = 0b100, } impl NameDomain { fn from_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> Self { - match scope.data(db.upcast()).kind { + match scope.data(db.as_hir_db()).kind { ScopeKind::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeKind::FnParam(_) => { Self::Value } ScopeKind::Item(_) | ScopeKind::GenericParam(_) => Self::Item, ScopeKind::Field(_) => Self::Field, - ScopeKind::Variant(_) => Self::Variant, + ScopeKind::Variant(_) => Self::Value, } } } trait QueryPropagator { - fn propagate(&self, query: NameQuery) -> PropagatedQuery; - fn propagate_glob(&self) -> PropagatedQuery; + fn propagate(&self, query: NameQuery) -> PropagationResult { + if query.directive.is_allowed_domain(Self::ALLOWED_DOMAIN) { + self.propagate_impl(query) + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(&self, directive: QueryDirective) -> PropagationResult { + if directive.is_allowed_domain(Self::ALLOWED_DOMAIN) { + self.propagate_glob_impl() + } else { + PropagationResult::UnPropagated + } + } + + const ALLOWED_DOMAIN: u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult; + fn propagate_glob_impl(&self) -> PropagationResult; } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -enum PropagatedQuery { +enum PropagationResult { Terminated, Continuation, UnPropagated, } impl QueryPropagator for LexEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { - if query.option.allow_lex { - PropagatedQuery::Continuation + const ALLOWED_DOMAIN: u8 = ALL_DOMAINS; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + if query.directive.allow_lex { + PropagationResult::Continuation } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for ModEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if self.0 == query.name { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::Terminated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::Terminated } } impl QueryPropagator for TypeEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if self.0 == query.name { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::Terminated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::Terminated } } impl QueryPropagator for TraitEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if self.0 == query.name { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::Terminated + fn propagate_glob_impl(&self) -> PropagationResult { + 
PropagationResult::Terminated } } impl QueryPropagator for ValueEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Value as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if self.0 == query.name { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::Terminated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::Terminated } } impl QueryPropagator for GenericParamEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if self.0 == query.name { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for FieldEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Field as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if self.0 == query.name { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for VariantEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if self.0 == query.name { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::Terminated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::Terminated } } impl QueryPropagator for SuperEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if query.name.is_super() { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for IngotEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if query.name.is_ingot() { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for SelfTyEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as 
u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if query.name.is_self_ty() { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for SelfEdge { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { if query.name.is_self() { - PropagatedQuery::Terminated + PropagationResult::Terminated } else { - PropagatedQuery::UnPropagated + PropagationResult::UnPropagated } } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for AnonEdge { - fn propagate(&self, _query: NameQuery) -> PropagatedQuery { - PropagatedQuery::UnPropagated + const ALLOWED_DOMAIN: u8 = 0; + + fn propagate_impl(&self, _query: NameQuery) -> PropagationResult { + PropagationResult::UnPropagated } - fn propagate_glob(&self) -> PropagatedQuery { - PropagatedQuery::UnPropagated + fn propagate_glob_impl(&self) -> PropagationResult { + PropagationResult::UnPropagated } } impl QueryPropagator for EdgeKind { - fn propagate(&self, query: NameQuery) -> PropagatedQuery { + const ALLOWED_DOMAIN: u8 = ALL_DOMAINS; + + fn propagate_impl(&self, query: NameQuery) -> PropagationResult { match self { - EdgeKind::Lex(edge) => edge.propagate(query), - EdgeKind::Mod(edge) => edge.propagate(query), - EdgeKind::Type(edge) => edge.propagate(query), - EdgeKind::Trait(edge) => edge.propagate(query), - EdgeKind::GenericParam(edge) => edge.propagate(query), - EdgeKind::Value(edge) => edge.propagate(query), - EdgeKind::Field(edge) => edge.propagate(query), - EdgeKind::Variant(edge) => edge.propagate(query), - EdgeKind::Super(edge) => edge.propagate(query), - EdgeKind::Ingot(edge) => edge.propagate(query), - EdgeKind::Self_(edge) => edge.propagate(query), - EdgeKind::SelfTy(edge) => edge.propagate(query), - EdgeKind::Anon(edge) => edge.propagate(query), - } - } - - fn propagate_glob(&self) -> PropagatedQuery { + EdgeKind::Lex(edge) => edge.propagate_impl(query), + EdgeKind::Mod(edge) => edge.propagate_impl(query), + EdgeKind::Type(edge) => edge.propagate_impl(query), + EdgeKind::Trait(edge) => edge.propagate_impl(query), + EdgeKind::GenericParam(edge) => edge.propagate_impl(query), + EdgeKind::Value(edge) => edge.propagate_impl(query), + EdgeKind::Field(edge) => edge.propagate_impl(query), + EdgeKind::Variant(edge) => edge.propagate_impl(query), + EdgeKind::Super(edge) => edge.propagate_impl(query), + EdgeKind::Ingot(edge) => edge.propagate_impl(query), + EdgeKind::Self_(edge) => edge.propagate_impl(query), + EdgeKind::SelfTy(edge) => edge.propagate_impl(query), + EdgeKind::Anon(edge) => edge.propagate_impl(query), + } + } + + fn propagate_glob_impl(&self) -> PropagationResult { match self { - EdgeKind::Lex(edge) => edge.propagate_glob(), - EdgeKind::Mod(edge) => edge.propagate_glob(), - EdgeKind::Type(edge) => edge.propagate_glob(), - EdgeKind::Trait(edge) => edge.propagate_glob(), - EdgeKind::GenericParam(edge) => edge.propagate_glob(), - EdgeKind::Value(edge) => edge.propagate_glob(), - EdgeKind::Field(edge) => edge.propagate_glob(), - EdgeKind::Variant(edge) => edge.propagate_glob(), 
- EdgeKind::Super(edge) => edge.propagate_glob(), - EdgeKind::Ingot(edge) => edge.propagate_glob(), - EdgeKind::Self_(edge) => edge.propagate_glob(), - EdgeKind::SelfTy(edge) => edge.propagate_glob(), - EdgeKind::Anon(edge) => edge.propagate_glob(), + EdgeKind::Lex(edge) => edge.propagate_glob_impl(), + EdgeKind::Mod(edge) => edge.propagate_glob_impl(), + EdgeKind::Type(edge) => edge.propagate_glob_impl(), + EdgeKind::Trait(edge) => edge.propagate_glob_impl(), + EdgeKind::GenericParam(edge) => edge.propagate_glob_impl(), + EdgeKind::Value(edge) => edge.propagate_glob_impl(), + EdgeKind::Field(edge) => edge.propagate_glob_impl(), + EdgeKind::Variant(edge) => edge.propagate_glob_impl(), + EdgeKind::Super(edge) => edge.propagate_glob_impl(), + EdgeKind::Ingot(edge) => edge.propagate_glob_impl(), + EdgeKind::Self_(edge) => edge.propagate_glob_impl(), + EdgeKind::SelfTy(edge) => edge.propagate_glob_impl(), + EdgeKind::Anon(edge) => edge.propagate_glob_impl(), } } } + +const ALL_DOMAINS: u8 = NameDomain::Item as u8 | NameDomain::Value as u8 | NameDomain::Field as u8; diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs index 70971b32dc..6aad8fa021 100644 --- a/crates/hir-analysis/src/name_resolution/visibility_checker.rs +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -1,50 +1,43 @@ -use hir::hir_def::scope_graph::ScopeId; +use hir::hir_def::{ + scope_graph::{ScopeId, ScopeKind}, + Use, +}; use crate::HirAnalysisDb; -use super::name_resolver::{NameDomain, ResolvedName, ResolvedNameKind}; - -/// Return `true` if the given `resolved` is visible from the `ref_scope`. +/// Return `true` if the given `target_scope` is visible from the `ref_scope`. /// The resolved name is visible from `ref_scope` if /// 1. It is declared as public, or -/// 2. The `ref_scope` is a child or the same scope of the scope where the -/// resolved name is defined. -pub fn check_visibility( - db: &dyn HirAnalysisDb, - ref_scope: ScopeId, - resolved: &ResolvedName, -) -> bool { - let ResolvedNameKind::Scope{scope, .. } = resolved.kind else { - // If resolved is a builtin name, then it's always visible . - return true; - }; - +/// 2. The `ref_scope` is a transitive reflexive child of the scope where the +/// name is defined. +pub fn is_scope_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, target_scope: ScopeId) -> bool { // If the target scope is public, then it is visible. - if scope.data(db.upcast()).vis.is_pub() { + if target_scope.data(db.as_hir_db()).vis.is_pub() { return true; } - let Some(def_scope) = (if resolved.domain == NameDomain::Field { + let Some(def_scope) = (if matches!(target_scope.kind(db.as_hir_db()), ScopeKind::Field(_)) { // We treat fields as if they are defined in the parent of the parent scope so // that fields are accessible from the scope where the parent is defined. - scope.parent(db.upcast()).and_then(|scope| scope.parent(db.upcast())) + target_scope.parent(db.as_hir_db()).and_then(|scope| scope.parent(db.as_hir_db())) } else { - scope.parent(db.upcast()) + target_scope.parent(db.as_hir_db()) }) else { return false; }; - // If ref scope is a child scope or the same scope of the def scope, then it is - // visible.
- let mut parent = Some(ref_scope); - while let Some(scope) = parent { - if scope == def_scope { - return true; - } else { - parent = scope.parent(db.upcast()); - } + ref_scope.is_transitive_child_of(db.as_hir_db(), def_scope) +} + +/// Return `true` if the given `use_` is visible from the `ref_scope`. +pub(super) fn is_use_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, use_: Use) -> bool { + let use_scope = ScopeId::from_item(db.as_hir_db(), use_.into()); + + if use_scope.data(db.as_hir_db()).vis.is_pub() { + return true; } - false + let use_def_scope = use_scope.parent(db.as_hir_db()).unwrap(); + ref_scope.is_transitive_child_of(db.as_hir_db(), use_def_scope) } diff --git a/crates/hir/src/hir_def/ident.rs b/crates/hir/src/hir_def/ident.rs index 810c7a82aa..ef4bb73d02 100644 --- a/crates/hir/src/hir_def/ident.rs +++ b/crates/hir/src/hir_def/ident.rs @@ -1,6 +1,6 @@ #[salsa::interned] pub struct IdentId { - data: String, + pub data: String, } impl IdentId { pub fn is_super(self) -> bool { diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 5304e788ec..3d40e64880 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -3,11 +3,10 @@ // that may take many arguments depending on the number of fields in the struct. #![allow(clippy::too_many_arguments)] -use common::{InputFile, InputIngot}; +use common::InputFile; use parser::ast; use crate::{ - external_ingots_impl, hir_def::TraitRef, lower, span::{ @@ -16,14 +15,15 @@ use crate::{ LazyImplTraitSpan, LazyModSpan, LazyStructSpan, LazyTopLevelModSpan, LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, }, - HirOrigin, + params::LazyGenericParamListSpan, + DynLazySpan, HirOrigin, }, HirDb, }; use super::{ - module_tree_impl, scope_graph::ScopeGraph, AttrListId, Body, FnParamListId, GenericParamListId, - IdentId, ModuleTree, Partial, TypeId, UseAlias, WhereClauseId, + scope_graph::ScopeGraph, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, IngotId, + Partial, TypeId, UseAlias, WhereClauseId, }; #[derive( @@ -56,7 +56,88 @@ pub enum ItemKind { Body(Body), } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] +pub enum GenericParamOwner { + Func(Func), + Struct(Struct), + Enum(Enum), + TypeAlias(TypeAlias), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), +} + +impl GenericParamOwner { + pub fn params(&self, db: &dyn HirDb) -> GenericParamListId { + match self { + GenericParamOwner::Func(func) => func.generic_params(db), + GenericParamOwner::Struct(struct_) => struct_.generic_params(db), + GenericParamOwner::Enum(enum_) => enum_.generic_params(db), + GenericParamOwner::TypeAlias(type_alias) => type_alias.generic_params(db), + GenericParamOwner::Impl(impl_) => impl_.generic_params(db), + GenericParamOwner::Trait(trait_) => trait_.generic_params(db), + GenericParamOwner::ImplTrait(impl_trait) => impl_trait.generic_params(db), + } + } + + pub fn params_span(&self) -> LazyGenericParamListSpan { + match self { + GenericParamOwner::Func(func) => func.lazy_span().generic_params(), + GenericParamOwner::Struct(struct_) => struct_.lazy_span().generic_params(), + GenericParamOwner::Enum(enum_) => enum_.lazy_span().generic_params(), + GenericParamOwner::TypeAlias(type_alias) => type_alias.lazy_span().generic_params(), + GenericParamOwner::Impl(impl_) => impl_.lazy_span().generic_params(), + GenericParamOwner::Trait(trait_) => trait_.lazy_span().generic_params(), + GenericParamOwner::ImplTrait(impl_trait) => impl_trait.lazy_span().generic_params(), + 
} + } + + pub fn from_item_opt(item: ItemKind) -> Option { + match item { + ItemKind::Func(func) => Some(GenericParamOwner::Func(func)), + ItemKind::Struct(struct_) => Some(GenericParamOwner::Struct(struct_)), + ItemKind::Enum(enum_) => Some(GenericParamOwner::Enum(enum_)), + ItemKind::TypeAlias(type_alias) => Some(GenericParamOwner::TypeAlias(type_alias)), + ItemKind::Impl(impl_) => Some(GenericParamOwner::Impl(impl_)), + ItemKind::Trait(trait_) => Some(GenericParamOwner::Trait(trait_)), + ItemKind::ImplTrait(impl_trait) => Some(GenericParamOwner::ImplTrait(impl_trait)), + _ => None, + } + } +} + impl ItemKind { + pub fn name(self, db: &dyn HirDb) -> Option { + use ItemKind::*; + match self { + TopMod(top_mod) => Some(top_mod.name(db)), + Mod(mod_) => mod_.name(db).to_opt(), + Func(func_) => func_.name(db).to_opt(), + Struct(struct_) => struct_.name(db).to_opt(), + Contract(contract_) => contract_.name(db).to_opt(), + Enum(enum_) => enum_.name(db).to_opt(), + TypeAlias(alias) => alias.name(db).to_opt(), + Trait(trait_) => trait_.name(db).to_opt(), + Const(const_) => const_.name(db).to_opt(), + Use(_) | Body(_) | Impl(_) | ImplTrait(_) => None, + } + } + + pub fn name_span(self) -> Option { + use ItemKind::*; + match self { + Mod(mod_) => Some(mod_.lazy_span().name().into()), + Func(func_) => Some(func_.lazy_span().name().into()), + Struct(struct_) => Some(struct_.lazy_span().name().into()), + Contract(contract_) => Some(contract_.lazy_span().name().into()), + Enum(enum_) => Some(enum_.lazy_span().name().into()), + TypeAlias(alias) => Some(alias.lazy_span().alias().into()), + Trait(trait_) => Some(trait_.lazy_span().name().into()), + Const(const_) => Some(const_.lazy_span().name().into()), + TopMod(_) | Use(_) | Body(_) | Impl(_) | ImplTrait(_) => None, + } + } + pub fn vis(self, db: &dyn HirDb) -> Visibility { use ItemKind::*; match self { @@ -74,6 +155,29 @@ impl ItemKind { } } + pub fn ingot(self, db: &dyn HirDb) -> IngotId { + let top_mod = self.top_mod(db); + top_mod.ingot(db) + } + + pub fn top_mod(self, db: &dyn HirDb) -> TopLevelMod { + match self { + ItemKind::TopMod(top_mod) => top_mod, + ItemKind::Mod(mod_) => mod_.top_mod(db), + ItemKind::Func(func) => func.top_mod(db), + ItemKind::Struct(struct_) => struct_.top_mod(db), + ItemKind::Contract(contract) => contract.top_mod(db), + ItemKind::Enum(enum_) => enum_.top_mod(db), + ItemKind::TypeAlias(type_) => type_.top_mod(db), + ItemKind::Trait(trait_) => trait_.top_mod(db), + ItemKind::Impl(impl_) => impl_.top_mod(db), + ItemKind::ImplTrait(impl_trait) => impl_trait.top_mod(db), + ItemKind::Const(const_) => const_.top_mod(db), + ItemKind::Use(use_) => use_.top_mod(db), + ItemKind::Body(body) => body.top_mod(db), + } + } + pub fn is_type(self) -> bool { matches!( self, @@ -88,7 +192,7 @@ pub struct TopLevelMod { // of `module_scope_graph`. 
pub name: IdentId, - pub(crate) ingot: InputIngot, + pub ingot: IngotId, pub(crate) file: InputFile, } impl TopLevelMod { @@ -96,25 +200,17 @@ impl TopLevelMod { LazyTopLevelModSpan::new(self) } - pub fn module_scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { + pub fn scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { lower::scope_graph_impl(db, self) } - pub fn module_tree(self, db: &dyn HirDb) -> &ModuleTree { - module_tree_impl(db, self.ingot(db)) - } - - pub fn ingot_root(self, db: &dyn HirDb) -> TopLevelMod { - self.module_tree(db).root_data().top_mod - } - pub fn parent(self, db: &dyn HirDb) -> Option { - let module_tree = self.module_tree(db); + let module_tree = self.ingot(db).module_tree(db); module_tree.parent(self) } pub fn children(self, db: &dyn HirDb) -> impl Iterator + '_ { - let module_tree = self.module_tree(db); + let module_tree = self.ingot(db).module_tree(db); module_tree.children(self) } @@ -123,12 +219,6 @@ impl TopLevelMod { // Please change here if we introduce it. Visibility::Public } - - /// Returns the root modules and names of external ingots that this module - /// depends on. - pub fn external_ingots(self, db: &dyn HirDb) -> &[(IdentId, TopLevelMod)] { - external_ingots_impl(db, self.ingot(db)).as_slice() - } } #[salsa::tracked] @@ -365,6 +455,58 @@ impl Use { pub fn lazy_span(self) -> LazyUseSpan { LazyUseSpan::new(self) } + + /// Returns the imported name if it is present and not a glob. + pub fn imported_name(&self, db: &dyn HirDb) -> Option { + if let Some(alias) = self.alias(db) { + return match alias { + Partial::Present(UseAlias::Ident(name)) => Some(name), + _ => None, + }; + } + + self.path(db).to_opt()?.last_ident(db) + } + + /// Returns the span of the imported name if the use is not a glob. + /// The returned span is + /// 1. If the use has an alias, the span of the alias. + /// 2. If the use has no alias, the span of the last segment of the path.
+ pub fn imported_name_span(&self, db: &dyn HirDb) -> Option { + if self.is_glob(db) { + return None; + } + + if self.alias(db).is_some() { + Some(self.lazy_span().alias().into()) + } else { + let segment_len = self.path(db).to_opt()?.segment_len(db); + Some(self.lazy_span().path().segment(segment_len - 1).into()) + } + } + + pub fn glob_span(&self, db: &dyn HirDb) -> Option { + if !self.is_glob(db) { + return None; + } + + let segment_len = self.path(db).to_opt()?.segment_len(db); + Some(self.lazy_span().path().segment(segment_len - 1).into()) + } + + pub fn is_glob(&self, db: &dyn HirDb) -> bool { + self.path(db) + .to_opt() + .map_or(false, |path| path.is_glob(db)) + } + + pub fn is_unnamed(&self, db: &dyn HirDb) -> bool { + if let Some(alias) = self.alias(db) { + !matches!(alias, Partial::Present(UseAlias::Ident(_))) + } else { + false + } + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 4d739f5eec..af5e440164 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -15,10 +15,11 @@ pub(crate) mod module_tree; pub use attr::*; pub use body::*; +use common::{input::IngotKind, InputIngot}; pub use expr::*; pub use ident::*; pub use item::*; -use num_bigint::BigUint; +pub use module_tree::*; pub use params::*; pub use pat::*; pub use path::*; @@ -26,7 +27,31 @@ pub use stmt::*; pub use types::*; pub use use_tree::*; -pub use module_tree::*; +use num_bigint::BigUint; + +use crate::{external_ingots_impl, HirDb}; + +#[salsa::tracked] +pub struct IngotId { + inner: InputIngot, +} +impl IngotId { + pub fn module_tree(self, db: &dyn HirDb) -> &ModuleTree { + module_tree_impl(db, self.inner(db)) + } + + pub fn root_mod(self, db: &dyn HirDb) -> TopLevelMod { + self.module_tree(db).root_data().top_mod + } + + pub fn external_ingots(self, db: &dyn HirDb) -> &[(IdentId, TopLevelMod)] { + external_ingots_impl(db, self.inner(db)).as_slice() + } + + pub fn kind(self, db: &dyn HirDb) -> IngotKind { + self.inner(db).kind(db.as_input_db()) + } +} #[salsa::interned] pub struct IntegerId { diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index f8c0f8d917..e28be7a98d 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -171,7 +171,7 @@ impl<'db> ModuleTreeBuilder<'db> { self.set_modules(); self.build_tree(); - let root_mod = map_file_to_mod_impl(self.db, self.ingot.root_file(self.db.upcast())); + let root_mod = map_file_to_mod_impl(self.db, self.ingot.root_file(self.db.as_input_db())); let root = self.mod_map[&root_mod]; ModuleTree { root, @@ -182,27 +182,28 @@ impl<'db> ModuleTreeBuilder<'db> { } fn set_modules(&mut self) { - for &file in self.ingot.files(self.db.upcast()) { + for &file in self.ingot.files(self.db.as_input_db()) { let top_mod = map_file_to_mod_impl(self.db, file); let module_id = self.module_tree.push(ModuleTreeNode::new(top_mod)); - self.path_map.insert(file.path(self.db.upcast()), module_id); + self.path_map + .insert(file.path(self.db.as_input_db()), module_id); self.mod_map.insert(top_mod, module_id); } } fn build_tree(&mut self) { - let root = self.ingot.root_file(self.db.upcast()); + let root = self.ingot.root_file(self.db.as_input_db()); - for &child in self.ingot.files(self.db.upcast()) { + for &child in self.ingot.files(self.db.as_input_db()) { // Ignore the root file because it has no parent. 
if child == root { continue; } - let root_path = root.path(self.db.upcast()); + let root_path = root.path(self.db.as_input_db()); let root_mod = map_file_to_mod_impl(self.db, root); - let child_path = child.path(self.db.upcast()); + let child_path = child.path(self.db.as_input_db()); let child_mod = map_file_to_mod_impl(self.db, child); // If the file is in the same directory as the root file, the file is a direct @@ -227,7 +228,7 @@ impl<'db> ModuleTreeBuilder<'db> { } fn parent_module(&self, file: InputFile) -> Option { - let file_path = file.path(self.db.upcast()); + let file_path = file.path(self.db.as_input_db()); let file_dir = file_path.parent()?; let parent_dir = file_dir.parent()?; diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 173e3f8414..69cc7a49b0 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -76,6 +76,15 @@ pub struct FnParam { pub ty: Partial, } +impl FnParam { + pub fn name(&self) -> Option { + match self.name.to_opt()? { + FnParamName::Ident(name) => Some(name), + _ => None, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct WherePredicate { pub ty: Partial, @@ -91,11 +100,19 @@ pub enum FnParamLabel { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum FnParamName { /// `self` parameter. - Self_, Ident(IdentId), Underscore, } +impl FnParamName { + pub fn as_name(&self) -> Option { + match self { + FnParamName::Ident(name) => Some(*name), + _ => None, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeBound { /// The path to the trait. diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 3675c03ffd..3db546e9d1 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -1,17 +1,16 @@ use cranelift_entity::{entity_impl, PrimaryMap}; use rustc_hash::{FxHashMap, FxHashSet}; -use crate::HirDb; +use crate::{hir_def::GenericParamOwner, span::DynLazySpan, HirDb}; -use super::{IdentId, ItemKind, TopLevelMod, Use, Visibility}; +use super::{Enum, Func, IdentId, IngotId, ItemKind, Struct, TopLevelMod, Use, Visibility}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct ScopeGraph { pub top_mod: TopLevelMod, pub scopes: PrimaryMap, pub item_map: FxHashMap, - pub unresolved_imports: FxHashMap>, - pub unresolved_exports: FxHashMap>, + pub unresolved_uses: Vec, } impl ScopeGraph { @@ -78,15 +77,22 @@ pub struct LocalScope { pub kind: ScopeKind, pub edges: Vec, pub parent_module: Option, + pub parent_scope: Option, pub vis: Visibility, } impl LocalScope { - pub fn new(kind: ScopeKind, parent_module: Option, vis: Visibility) -> Self { + pub fn new( + kind: ScopeKind, + parent_module: Option, + parent_scope: Option, + vis: Visibility, + ) -> Self { Self { kind, edges: vec![], parent_module, + parent_scope, vis, } } @@ -109,8 +115,8 @@ pub struct ScopeEdge { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ScopeId { - pub top_mod: TopLevelMod, - pub local_id: LocalScopeId, + top_mod: TopLevelMod, + local_id: LocalScopeId, } impl ScopeId { @@ -118,14 +124,67 @@ impl ScopeId { Self { top_mod, local_id } } + pub fn from_item(db: &dyn HirDb, item: ItemKind) -> Self { + let top_mod = item.top_mod(db); + let scope_graph = top_mod.scope_graph(db); + Self::new(top_mod, scope_graph.item_scope(item)) + } + pub fn root(top_mod: TopLevelMod) -> Self { Self::new(top_mod, LocalScopeId::root()) } - pub fn data(self, db: &dyn HirDb) -> &LocalScope { + /// Returns the scope graph 
containing this scope. + pub fn scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { + self.top_mod.scope_graph(db) + } + + /// Returns the local id of the scope graph. + pub fn to_local(self) -> LocalScopeId { + self.local_id + } + + pub fn edges(self, db: &dyn HirDb) -> &[ScopeEdge] { + self.scope_graph(db).edges(self.local_id) + } + + /// Returns `true` if `scope` is reachable from `self` by following only + /// lexical edges. + pub fn is_lex_child(self, db: &dyn HirDb, parent: ScopeId) -> bool { + if self.top_mod != parent.top_mod { + return false; + } + + let scope_graph = self.scope_graph(db); + self.local_id.is_lex_child(scope_graph, parent.local_id) + } + + /// Returns true if `self` is a transitive reflexive child of `of`. + pub fn is_transitive_child_of(self, db: &dyn HirDb, of: ScopeId) -> bool { + let mut current = Some(self); + + while let Some(scope) = current { + if scope == of { + return true; + } + current = scope.parent(db); + } + + false + } + + /// Returns the `TopLevelMod` containing the scope . + pub fn top_mod(self) -> TopLevelMod { self.top_mod - .module_scope_graph(db) - .scope_data(self.local_id) + } + + /// Return the `IngotId` containing the scope. + pub fn ingot(self, db: &dyn HirDb) -> IngotId { + self.top_mod.ingot(db) + } + + pub fn data(self, db: &dyn HirDb) -> &LocalScope { + self.top_mod.scope_graph(db).scope_data(self.local_id) } pub fn kind(self, db: &dyn HirDb) -> ScopeKind { @@ -140,6 +199,14 @@ impl ScopeId { .map(|e| e.dest) } + pub fn lex_parent(self, db: &dyn HirDb) -> Option { + self.data(db) + .edges + .iter() + .find(|e| matches!(e.kind, EdgeKind::Lex(_))) + .map(|e| e.dest) + } + pub fn parent_module(self, db: &dyn HirDb) -> Option { self.data(db).parent_module } @@ -151,6 +218,16 @@ impl ScopeId { _ => false, } } + + pub fn name(self, db: &dyn HirDb) -> Option { + let s_graph = self.top_mod.scope_graph(db); + self.local_id.name(db, s_graph) + } + + pub fn name_span(self, db: &dyn HirDb) -> Option { + let s_graph = self.top_mod.scope_graph(db); + self.local_id.name_span(s_graph) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] @@ -268,6 +345,100 @@ pub struct LocalScopeId(u32); entity_impl!(LocalScopeId); impl LocalScopeId { + pub fn to_global(self, top_mod: TopLevelMod) -> ScopeId { + ScopeId::new(top_mod, self) + } + + /// Returns `true` if `scope` is reachable from `self` by following only + /// lexical edges. 
+ pub fn is_lex_child(self, s_graph: &ScopeGraph, scope: LocalScopeId) -> bool { + let data = self.data(s_graph); + match data.parent_scope { + Some(parent) => { + if parent == scope { + return true; + } + parent.is_lex_child(s_graph, scope) + } + None => false, + } + } + + pub fn data(self, s_graph: &ScopeGraph) -> &LocalScope { + &s_graph.scopes[self] + } + + pub fn name(self, db: &dyn HirDb, s_graph: &ScopeGraph) -> Option { + match self.data(s_graph).kind { + ScopeKind::Item(item) => item.name(db), + + ScopeKind::Variant(idx) => { + let parent: Enum = self.parent_item(s_graph).unwrap().try_into().unwrap(); + parent.variants(db).data(db)[idx].name.to_opt() + } + + ScopeKind::Field(idx) => { + let parent: Struct = self.parent_item(s_graph).unwrap().try_into().unwrap(); + parent.fields(db).data(db)[idx].name.to_opt() + } + + ScopeKind::FnParam(idx) => { + let parent: Func = self.parent_item(s_graph).unwrap().try_into().unwrap(); + parent.params(db).to_opt()?.data(db)[idx].name() + } + + ScopeKind::GenericParam(idx) => { + let parent = + GenericParamOwner::from_item_opt(self.parent_item(s_graph).unwrap()).unwrap(); + + let params = &parent.params(db).data(db)[idx]; + params.name().to_opt() + } + } + } + + pub fn name_span(self, s_graph: &ScopeGraph) -> Option { + match self.data(s_graph).kind { + ScopeKind::Item(item) => item.name_span(), + + ScopeKind::Variant(idx) => { + let parent: Enum = self.parent_item(s_graph).unwrap().try_into().unwrap(); + Some(parent.lazy_span().variants().variant(idx).name().into()) + } + + ScopeKind::Field(idx) => { + let parent: Struct = self.parent_item(s_graph).unwrap().try_into().unwrap(); + Some(parent.lazy_span().fields().field(idx).name().into()) + } + + ScopeKind::FnParam(idx) => { + let parent: Func = self.parent_item(s_graph).unwrap().try_into().unwrap(); + Some(parent.lazy_span().params().param(idx).name().into()) + } + + ScopeKind::GenericParam(idx) => { + let parent = + GenericParamOwner::from_item_opt(self.parent_item(s_graph).unwrap()).unwrap(); + + Some(parent.params_span().param(idx).into()) + } + } + } + + pub fn parent(self, s_graph: &ScopeGraph) -> Option { + self.data(s_graph).parent_scope + } + + pub fn parent_item(self, s_graph: &ScopeGraph) -> Option { + match self.data(s_graph).kind { + ScopeKind::Item(item) => Some(item), + _ => { + let parent = self.parent(s_graph)?; + parent.parent_item(s_graph) + } + } + } + pub(crate) fn root() -> Self { LocalScopeId(0) } diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index f1ea7c367d..59a9290a86 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -1,4 +1,4 @@ -use crate::hir_def::Partial; +use crate::{hir_def::Partial, HirDb}; use super::IdentId; @@ -8,6 +8,26 @@ pub struct UsePathId { pub segments: Vec>, } +impl UsePathId { + pub fn is_glob(&self, db: &dyn HirDb) -> bool { + self.segments(db) + .last() + .and_then(|seg| seg.to_opt()) + .map_or(false, |seg| seg.is_glob()) + } + + pub fn last_ident(&self, db: &dyn HirDb) -> Option { + self.segments(db) + .last() + .and_then(|seg| seg.to_opt()) + .and_then(|seg| seg.ident()) + } + + pub fn segment_len(&self, db: &dyn HirDb) -> usize { + self.segments(db).len() + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum UsePathSegment { Ident(IdentId), @@ -15,6 +35,24 @@ pub enum UsePathSegment { Glob, } +impl UsePathSegment { + /// Returns the ident of the last path segment. + /// If the last segment is a glob, returns `None`. 
+ pub fn ident(self) -> Option { + match self { + UsePathSegment::Ident(ident) => Some(ident), + UsePathSegment::Glob => None, + } + } + + pub fn is_glob(self) -> bool { + match self { + UsePathSegment::Ident(_) => false, + UsePathSegment::Glob => true, + } + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum UseAlias { Ident(IdentId), diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index a87fe584e1..e1fa8a56e3 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,9 +1,9 @@ -use common::{InputDb, InputIngot, Upcast}; +use common::{InputDb, InputIngot}; use hir_def::{module_tree_impl, IdentId, TopLevelMod}; -pub use lower::parse::ParseDiagnostic; +pub use lower::parse::ParserError; use lower::{ map_file_to_mod_impl, - parse::{parse_file_impl, ParseDiagnosticAccumulator}, + parse::{parse_file_impl, ParseErrorAccumulator}, scope_graph_impl, }; @@ -14,6 +14,7 @@ pub mod span; #[salsa::jar(db = HirDb)] pub struct Jar( + hir_def::IngotId, // Tracked Hir items. hir_def::TopLevelMod, hir_def::Mod, @@ -44,7 +45,7 @@ pub struct Jar( hir_def::TypeId, hir_def::UsePathId, /// Accumulated diagnostics. - ParseDiagnosticAccumulator, + ParseErrorAccumulator, /// Private tracked functions. These are not part of the public API, and /// thus, can't be accessed from outside of the crate without implementing /// [`LowerHirDb`] marker trait. @@ -67,7 +68,7 @@ pub(crate) fn external_ingots_impl( ingot: InputIngot, ) -> Vec<(IdentId, TopLevelMod)> { let mut res = Vec::new(); - for dep in ingot.external_ingots(db.upcast()) { + for dep in ingot.external_ingots(db.as_input_db()) { let name = IdentId::new(db, dep.name.to_string()); let root = module_tree_impl(db, dep.ingot).root_data().top_mod; res.push((name, root)) @@ -75,22 +76,37 @@ pub(crate) fn external_ingots_impl( res } -pub trait HirDb: salsa::DbWithJar + InputDb + Upcast { +pub trait HirDb: salsa::DbWithJar + InputDb { fn prefill(&self) where Self: Sized, { IdentId::prefill(self) } + + fn as_input_db(&self) -> &dyn InputDb { + >::as_jar_db::<'_>(self) + } +} +impl HirDb for DB +where + DB: ?Sized + salsa::DbWithJar + InputDb, +{ + fn as_input_db(&self) -> &dyn InputDb { + >::as_jar_db::<'_>(self) + } } -impl HirDb for DB where DB: ?Sized + salsa::DbWithJar + InputDb + Upcast {} /// `LowerHirDb` is a marker trait for lowering AST to HIR items. /// All code that requires [`LowerHirDb`] is considered have a possibility to /// invalidate the cache in salsa when a revision is updated. Therefore, /// implementations relying on `LowerHirDb` are prohibited in all /// Analysis phases. -pub trait LowerHirDb: HirDb + Upcast {} +pub trait LowerHirDb: HirDb { + fn as_hir_db(&self) -> &dyn HirDb { + >::as_jar_db::<'_>(self) + } +} /// `SpannedHirDb` is a marker trait for extracting span-dependent information /// from HIR Items. @@ -103,7 +119,11 @@ pub trait LowerHirDb: HirDb + Upcast {} /// generate [CompleteDiagnostic](common::diagnostics::CompleteDiagnostic) from /// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). /// See also `[LazySpan]`[`crate::span::LazySpan`] for more details. 
-pub trait SpannedHirDb: HirDb + Upcast {} +pub trait SpannedHirDb: HirDb { + fn as_hir_db(&self) -> &dyn HirDb { + >::as_jar_db::<'_>(self) + } +} #[cfg(test)] mod test_db { @@ -112,7 +132,7 @@ mod test_db { use common::{ input::{IngotKind, Version}, - InputFile, InputIngot, Upcast, + InputFile, InputIngot, }; use crate::{ @@ -150,18 +170,12 @@ mod test_db { }) } } - impl Upcast for TestDb { - fn upcast(&self) -> &(dyn common::InputDb + 'static) { - self - } - } - impl Upcast for TestDb { - fn upcast(&self) -> &(dyn crate::HirDb + 'static) { - self - } - } impl TestDb { + pub fn as_hir_db(&self) -> &dyn HirDb { + >::as_jar_db::<'_>(self) + } + pub fn parse_source(&mut self, text: &str) -> &ScopeGraph { let file = self.standalone_file(text); let top_mod = map_file_to_mod(self, file); @@ -189,9 +203,9 @@ mod test_db { } pub fn text_at(&self, top_mod: TopLevelMod, span: &impl LazySpan) -> &str { - let range = span.resolve(self).range; - let file = top_mod.file(self.upcast()); - let text = file.text(self.upcast()); + let range = span.resolve(self).unwrap().range; + let file = top_mod.file(self.as_hir_db()); + let text = file.text(self.as_hir_db().as_input_db()); &text[range.start().into()..range.end().into()] } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index ef289d7f13..c303bb6e10 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -8,15 +8,15 @@ use parser::{ use crate::{ hir_def::{ - module_tree_impl, scope_graph::ScopeGraph, IdentId, IntegerId, ItemKind, LitKind, + module_tree_impl, scope_graph::ScopeGraph, IdentId, IngotId, IntegerId, ItemKind, LitKind, ModuleTree, Partial, StringId, TopLevelMod, TrackedItemId, }, - HirDb, LowerHirDb, ParseDiagnostic, + HirDb, LowerHirDb, ParserError, }; use self::{ item::lower_module_items, - parse::{parse_file_impl, ParseDiagnosticAccumulator}, + parse::{parse_file_impl, ParseErrorAccumulator}, scope_builder::ScopeGraphBuilder, }; @@ -39,12 +39,12 @@ mod use_tree; /// any parsing or lowering. /// To perform the actual lowering, use `module_item_tree` function. pub fn map_file_to_mod(db: &dyn LowerHirDb, file: InputFile) -> TopLevelMod { - map_file_to_mod_impl(db.upcast(), file) + map_file_to_mod_impl(db.as_hir_db(), file) } /// Returns the item tree of the given top-level module. pub fn scope_graph(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ScopeGraph { - scope_graph_impl(db.upcast(), top_mod) + scope_graph_impl(db.as_hir_db(), top_mod) } /// Returns the root node of the given top-level module. @@ -52,31 +52,31 @@ pub fn scope_graph(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ScopeGraph { pub fn parse_file_with_diag( db: &dyn LowerHirDb, top_mod: TopLevelMod, -) -> (GreenNode, Vec) { +) -> (GreenNode, Vec) { ( - parse_file_impl(db.upcast(), top_mod), - parse_file_impl::accumulated::(db.upcast(), top_mod), + parse_file_impl(db.as_hir_db(), top_mod), + parse_file_impl::accumulated::(db.as_hir_db(), top_mod), ) } /// Returns the root node of the given top-level module. /// If diagnostics are needed, use [`parse_file_with_diag`] instead. pub fn parse_file(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> GreenNode { - parse_file_impl(db.upcast(), top_mod) + parse_file_impl(db.as_hir_db(), top_mod) } /// Returns the ingot module tree of the given ingot. 
pub fn module_tree(db: &dyn LowerHirDb, ingot: InputIngot) -> &ModuleTree { - module_tree_impl(db.upcast(), ingot) + module_tree_impl(db.as_hir_db(), ingot) } #[salsa::tracked] pub(crate) fn map_file_to_mod_impl(db: &dyn HirDb, file: InputFile) -> TopLevelMod { - let path = file.path(db.upcast()); + let path = file.path(db.as_input_db()); let name = path.file_stem().unwrap(); let mod_name = IdentId::new(db, name.to_string()); - let ingot = file.ingot(db.upcast()); - TopLevelMod::new(db, mod_name, ingot, file) + let ingot = file.ingot(db.as_input_db()); + TopLevelMod::new(db, mod_name, IngotId::new(db, ingot), file) } #[salsa::tracked(return_ref)] diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 4e63cf2bf0..b6ff20b3b8 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -1,6 +1,6 @@ use parser::ast::{self}; -use crate::hir_def::{params::*, Body, IdentId, PathId, TypeId}; +use crate::hir_def::{kw, params::*, Body, IdentId, PathId, TypeId}; use super::FileLowerCtxt; @@ -186,7 +186,7 @@ impl FnParamName { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamName) -> Self { match ast { ast::FnParamName::Ident(name) => FnParamName::Ident(IdentId::lower_token(ctxt, name)), - ast::FnParamName::SelfParam(_) => FnParamName::Self_, + ast::FnParamName::SelfParam(_) => FnParamName::Ident(kw::SELF), ast::FnParamName::Underscore(_) => FnParamName::Underscore, } } diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index 93b43bb68a..b4ceec444f 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -9,21 +9,21 @@ use crate::{diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, HirDb, Spanned #[salsa::tracked] pub(crate) fn parse_file_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> GreenNode { let file = top_mod.file(db); - let text = file.text(db.upcast()); + let text = file.text(db.as_input_db()); let (node, parse_errors) = parser::parse_source_file(text); for error in parse_errors { - ParseDiagnosticAccumulator::push(db, ParseDiagnostic { file, error }); + ParseErrorAccumulator::push(db, ParserError { file, error }); } node } #[doc(hidden)] #[salsa::accumulator] -pub struct ParseDiagnosticAccumulator(ParseDiagnostic); +pub struct ParseErrorAccumulator(ParserError); #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ParseDiagnostic { +pub struct ParserError { file: InputFile, error: parser::ParseError, } @@ -31,7 +31,7 @@ pub struct ParseDiagnostic { // `ParseError` has span information, but this is not a problem because the // parsing procedure itself depends on the file content, and thus span // information. 
-impl DiagnosticVoucher for ParseDiagnostic { +impl DiagnosticVoucher for ParserError { fn error_code(&self) -> GlobalErrorCode { GlobalErrorCode::new(AnalysisPass::Parse, 0) } @@ -39,6 +39,12 @@ impl DiagnosticVoucher for ParseDiagnostic { fn to_complete(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { let error_code = self.error_code(); let span = Span::new(self.file, self.error.range, SpanKind::Original); - CompleteDiagnostic::new(Severity::Error, self.error.msg, span, vec![], error_code) + CompleteDiagnostic::new( + Severity::Error, + self.error.msg, + span.into(), + vec![], + error_code, + ) } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 8f2e1f05b9..e656ffa6a7 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -26,8 +26,7 @@ impl<'db> ScopeGraphBuilder<'db> { top_mod, scopes: Default::default(), item_map: Default::default(), - unresolved_imports: Default::default(), - unresolved_exports: Default::default(), + unresolved_uses: Default::default(), }, scope_stack: Default::default(), module_stack: Default::default(), @@ -66,7 +65,11 @@ impl<'db> ScopeGraphBuilder<'db> { debug_assert!(self.scope_stack.is_empty()); self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); - self.add_global_edge(item_scope, top_mod.ingot_root(self.db), EdgeKind::ingot()); + self.add_global_edge( + item_scope, + top_mod.ingot(self.db).root_mod(self.db), + EdgeKind::ingot(), + ); for child in top_mod.children(self.db) { let child_name = child.name(self.db); let edge = EdgeKind::mod_(child_name); @@ -92,7 +95,7 @@ impl<'db> ScopeGraphBuilder<'db> { ); self.add_global_edge( item_scope, - self.top_mod.ingot_root(self.db), + self.top_mod.ingot(self.db).root_mod(self.db), EdgeKind::ingot(), ); self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); @@ -195,12 +198,7 @@ impl<'db> ScopeGraphBuilder<'db> { } Use(use_) => { - let import_map = if use_.vis(self.db).is_pub() { - &mut self.graph.unresolved_exports - } else { - &mut self.graph.unresolved_imports - }; - import_map.entry(parent_scope).or_default().push(use_); + self.graph.unresolved_uses.push(use_); self.add_lex_edge(item_scope, parent_scope); EdgeKind::anon() @@ -219,7 +217,12 @@ impl<'db> ScopeGraphBuilder<'db> { fn add_field_scope(&mut self, current_scope: LocalScopeId, fields: RecordFieldListId) { for (i, field) in fields.data(self.db).iter().enumerate() { - let scope = LocalScope::new(ScopeKind::Field(i), self.parent_module_id(), field.vis); + let scope = LocalScope::new( + ScopeKind::Field(i), + self.parent_module_id(), + Some(current_scope), + field.vis, + ); let field_scope = self.graph.scopes.push(scope); self.add_lex_edge(field_scope, current_scope); let kind = field @@ -236,6 +239,7 @@ impl<'db> ScopeGraphBuilder<'db> { let scope = LocalScope::new( ScopeKind::Variant(i), self.parent_module_id(), + Some(current_scope), Visibility::Public, ); let variant_scope = self.graph.scopes.push(scope); @@ -254,6 +258,7 @@ impl<'db> ScopeGraphBuilder<'db> { let scope = LocalScope::new( ScopeKind::FnParam(i), self.parent_module_id(), + Some(current_scope), Visibility::Private, ); let generic_param_scope = self.graph.scopes.push(scope); @@ -262,7 +267,6 @@ impl<'db> ScopeGraphBuilder<'db> { .name .to_opt() .map(|name| match name { - FnParamName::Self_ => EdgeKind::self_(), FnParamName::Ident(ident) => EdgeKind::value(ident), FnParamName::Underscore => EdgeKind::anon(), }) @@ -276,6 +280,7 @@ impl<'db> ScopeGraphBuilder<'db> { let scope = 
LocalScope::new( ScopeKind::GenericParam(i), self.parent_module_id(), + Some(current_scope), Visibility::Private, ); let generic_param_scope = self.graph.scopes.push(scope); @@ -290,12 +295,12 @@ impl<'db> ScopeGraphBuilder<'db> { } fn dummy_scope(&self) -> LocalScope { - LocalScope { - kind: ScopeKind::Item(self.top_mod.into()), - edges: Vec::new(), - parent_module: self.parent_module_id(), - vis: Visibility::Public, - } + LocalScope::new( + ScopeKind::Item(self.top_mod.into()), + self.parent_module_id(), + None, + Visibility::Public, + ) } fn parent_module_id(&self) -> Option { @@ -308,6 +313,11 @@ impl<'db> ScopeGraphBuilder<'db> { } } + fn add_lex_edge(&mut self, child: LocalScopeId, parent: LocalScopeId) { + self.add_local_edge(child, parent, EdgeKind::lex()); + self.graph.scopes[child].parent_scope = Some(parent); + } + fn add_local_edge(&mut self, source: LocalScopeId, dest: LocalScopeId, kind: EdgeKind) { self.graph.scopes[source].edges.push(ScopeEdge { dest: ScopeId::new(self.top_mod, dest), @@ -315,10 +325,6 @@ impl<'db> ScopeGraphBuilder<'db> { }); } - fn add_lex_edge(&mut self, source: LocalScopeId, dest: LocalScopeId) { - self.add_local_edge(source, dest, EdgeKind::lex()); - } - fn add_global_edge(&mut self, source: LocalScopeId, dest: TopLevelMod, kind: EdgeKind) { self.graph.scopes[source].edges.push(ScopeEdge { dest: ScopeId::new(dest, LocalScopeId::root()), diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index ae559cd313..94cf9d7d19 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -171,7 +171,7 @@ impl ChainInitiator for ExprRoot { fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { let source_map = body_source_map(db, self.body); let origin = source_map.expr_map.node_to_source(self.expr); - let top_mod = self.body.top_mod(db.upcast()); + let top_mod = self.body.top_mod(db.as_hir_db()); ResolvedOrigin::resolve(db, top_mod, origin) } } @@ -182,7 +182,6 @@ mod tests { hir_def::{Body, Expr, Stmt}, test_db::TestDb, }; - use common::Upcast; #[test] fn aug_assign() { @@ -195,16 +194,16 @@ mod tests { }"#; let body: Body = db.expect_item::(text); - let bin_expr = match body.stmts(db.upcast()).values().next().unwrap().unwrap() { + let bin_expr = match body.stmts(db.as_hir_db()).values().next().unwrap().unwrap() { Stmt::Assign(_, rhs) => *rhs, _ => unreachable!(), }; - let (lhs, rhs) = match body.exprs(db.upcast())[bin_expr].unwrap() { + let (lhs, rhs) = match body.exprs(db.as_hir_db())[bin_expr].unwrap() { Expr::Bin(lhs, rhs, _) => (lhs, rhs), _ => unreachable!(), }; - let top_mod = body.top_mod(db.upcast()); + let top_mod = body.top_mod(db.as_hir_db()); assert_eq!("x += 1", db.text_at(top_mod, &bin_expr.lazy_span(body))); assert_eq!("x", db.text_at(top_mod, &lhs.lazy_span(body))); assert_eq!("1", db.text_at(top_mod, &rhs.lazy_span(body))); diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index a0e658dcfa..856ba41174 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -283,8 +283,6 @@ define_lazy_span_node!( #[cfg(test)] mod tests { - use common::Upcast; - use crate::{ hir_def::{Enum, Func, Mod, Struct, TypeAlias, Use}, test_db::TestDb, @@ -321,7 +319,7 @@ mod tests { "#; let mod_ = db.expect_item::(text); - let top_mod = mod_.top_mod(db.upcast()); + let top_mod = mod_.top_mod(db.as_hir_db()); let mod_span = mod_.lazy_span(); assert_eq!( r#"mod foo { @@ -342,7 +340,7 @@ mod tests { "#; let fn_ = db.expect_item::(text); - let top_mod = fn_.top_mod(db.upcast()); + let top_mod = 
fn_.top_mod(db.as_hir_db()); let fn_span = fn_.lazy_span(); assert_eq!("my_func", db.text_at(top_mod, &fn_span.name())); @@ -393,7 +391,7 @@ mod tests { }"#; let struct_ = db.expect_item::(text); - let top_mod = struct_.top_mod(db.upcast()); + let top_mod = struct_.top_mod(db.as_hir_db()); let struct_span = struct_.lazy_span(); assert_eq!("Foo", db.text_at(top_mod, &struct_span.name())); @@ -420,7 +418,7 @@ mod tests { }"#; let enum_ = db.expect_item::(text); - let top_mod = enum_.top_mod(db.upcast()); + let top_mod = enum_.top_mod(db.as_hir_db()); let enum_span = enum_.lazy_span(); assert_eq!("Foo", db.text_at(top_mod, &enum_span.name())); @@ -442,7 +440,7 @@ mod tests { "#; let type_alias = db.expect_item::(text); - let top_mod = type_alias.top_mod(db.upcast()); + let top_mod = type_alias.top_mod(db.as_hir_db()); let type_alias_span = type_alias.lazy_span(); assert_eq!("Foo", db.text_at(top_mod, &type_alias_span.alias())); assert_eq!("u32", db.text_at(top_mod, &type_alias_span.ty())); @@ -459,7 +457,7 @@ mod tests { let use_ = db.expect_item::(text); - let top_mod = use_.top_mod(db.upcast()); + let top_mod = use_.top_mod(db.as_hir_db()); let use_span = use_.lazy_span(); let use_path_span = use_span.path(); assert_eq!("foo", db.text_at(top_mod, &use_path_span.segment(0))); @@ -481,7 +479,7 @@ mod tests { let uses = db.expect_items::(text); assert_eq!(uses.len(), 2); - let top_mod = uses[0].top_mod(db.upcast()); + let top_mod = uses[0].top_mod(db.as_hir_db()); let use_span = uses[0].lazy_span(); let use_path_span = use_span.path(); diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 578f0bc4de..152a123989 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -31,10 +31,19 @@ mod transition; /// LazySpan` usage because it doesn't implement `Clone` and `Eq` which leads to /// a lot of difficulties in salsa integration #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct DynLazySpan(SpanTransitionChain); +pub struct DynLazySpan(Option); +impl DynLazySpan { + pub fn invalid_span() -> Self { + Self(None) + } +} impl LazySpan for DynLazySpan { - fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span { - self.0.resolve(db) + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { + if let Some(chain) = &self.0 { + chain.resolve(db) + } else { + None + } } } @@ -42,63 +51,63 @@ impl LazySpan for DynLazySpan { /// types which don't have a span information directly, but can be resolved into /// a span lazily. 
pub trait LazySpan { - fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span; + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option; } pub fn toplevel_ast(db: &dyn SpannedHirDb, item: TopLevelMod) -> HirOrigin { - HirOrigin::raw(&top_mod_ast(db.upcast(), item)) + HirOrigin::raw(&top_mod_ast(db.as_hir_db(), item)) } pub fn mod_ast(db: &dyn SpannedHirDb, item: Mod) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn func_ast(db: &dyn SpannedHirDb, item: Func) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn struct_ast(db: &dyn SpannedHirDb, item: Struct) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn contract_ast(db: &dyn SpannedHirDb, item: Contract) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn enum_ast(db: &dyn SpannedHirDb, item: Enum) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn type_alias_ast(db: &dyn SpannedHirDb, item: TypeAlias) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn impl_ast(db: &dyn SpannedHirDb, item: Impl) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn trait_ast(db: &dyn SpannedHirDb, item: Trait) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn impl_trait_ast(db: &dyn SpannedHirDb, item: ImplTrait) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn const_ast(db: &dyn SpannedHirDb, item: Const) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn use_ast(db: &dyn SpannedHirDb, item: Use) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn body_ast(db: &dyn SpannedHirDb, item: Body) -> &HirOrigin { - item.origin(db.upcast()) + item.origin(db.as_hir_db()) } pub fn body_source_map(db: &dyn SpannedHirDb, item: Body) -> &crate::hir_def::BodySourceMap { - item.source_map(db.upcast()) + item.source_map(db.as_hir_db()) } /// This enum represents the origin of the HIR node in a file. 
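For context on the `LazySpan` change above: `resolve` now returns `Option<Span>` instead of a zero-length `SpanKind::NotFound` placeholder, so each call site decides what to do when a node has no resolvable source location. A minimal sketch of such a call site, assuming the module paths used in this patch series; the helper name `primary_span` is illustrative and not part of the patch:

    use common::diagnostics::Span;
    use hir::{
        span::{DynLazySpan, LazySpan},
        SpannedHirDb,
    };

    // Illustrative only: forwards the new `Option<Span>` result so that callers
    // (e.g. diagnostic rendering) can choose their own fallback on `None`.
    fn primary_span(db: &dyn SpannedHirDb, span: DynLazySpan) -> Option<Span> {
        span.resolve(db)
    }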
diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index 61b559bd21..02a6774398 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -82,7 +82,7 @@ impl ChainInitiator for PatRoot { fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { let source_map = body_source_map(db, self.body); let origin = source_map.pat_map.node_to_source(self.pat); - let top_mod = self.body.top_mod(db.upcast()); + let top_mod = self.body.top_mod(db.as_hir_db()); ResolvedOrigin::resolve(db, top_mod, origin) } } diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index 9f7570f8ba..dbf23c32db 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -41,7 +41,7 @@ impl ChainInitiator for StmtRoot { fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { let source_map = body_source_map(db, self.body); let origin = source_map.stmt_map.node_to_source(self.stmt); - let top_mod = self.body.top_mod(db.upcast()); + let top_mod = self.body.top_mod(db.as_hir_db()); ResolvedOrigin::resolve(db, top_mod, origin) } } @@ -49,7 +49,6 @@ impl ChainInitiator for StmtRoot { #[cfg(test)] mod tests { use crate::{hir_def::Body, test_db::TestDb}; - use common::Upcast; #[test] fn aug_assign() { @@ -63,8 +62,8 @@ mod tests { }"#; let body: Body = db.expect_item::(text); - let top_mod = body.top_mod(db.upcast()); - for (i, stmt) in body.stmts(db.upcast()).keys().enumerate() { + let top_mod = body.top_mod(db.as_hir_db()); + for (i, stmt) in body.stmts(db.as_hir_db()).keys().enumerate() { match i { 0 => { let span = stmt.lazy_span(body); diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index a493539580..561715fb24 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -85,7 +85,7 @@ impl ResolvedOrigin { where T: AstNode, { - let root = top_mod_ast(db.upcast(), top_mod).syntax().clone(); + let root = top_mod_ast(db.as_hir_db(), top_mod).syntax().clone(); let kind = match origin { HirOrigin::Raw(ptr) => ResolvedOriginKind::Node(ptr.syntax_node_ptr().to_node(&root)), HirOrigin::Expanded(ptr) => ResolvedOriginKind::Expanded(ptr.to_node(&root)), @@ -95,7 +95,7 @@ impl ResolvedOrigin { HirOrigin::None => ResolvedOriginKind::None, }; - ResolvedOrigin::new(top_mod.file(db.upcast()), kind) + ResolvedOrigin::new(top_mod.file(db.as_hir_db()), kind) } pub(crate) fn map(self, f: F) -> Self @@ -180,14 +180,14 @@ impl SpanTransitionChain { } impl LazySpan for SpanTransitionChain { - fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Span { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { let mut resolved = self.root.init(db); for LazyTransitionFn { f, arg } in &self.chain { resolved = f(resolved, *arg); } - match resolved.kind { + Some(match resolved.kind { ResolvedOriginKind::Node(node) => { Span::new(resolved.file, node.text_range(), SpanKind::Original) } @@ -200,12 +200,8 @@ impl LazySpan for SpanTransitionChain { ResolvedOriginKind::Desugared(root, desugared) => { desugared.resolve(db, root, resolved.file) } - ResolvedOriginKind::None => Span::new( - resolved.file, - TextRange::new(0.into(), 0.into()), - SpanKind::NotFound, - ), - } + ResolvedOriginKind::None => return None, + }) } } @@ -217,8 +213,8 @@ pub(crate) trait ChainInitiator { impl ChainInitiator for TopLevelMod { fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { - let file = self.file(db.upcast()); - let ast = top_mod_ast(db.upcast(), *self); + let file = self.file(db.as_hir_db()); + let ast = 
top_mod_ast(db.as_hir_db(), *self); ResolvedOrigin::new(file, ResolvedOriginKind::Node(ast.syntax().clone())) } } @@ -228,7 +224,7 @@ macro_rules! impl_chain_root { $( impl ChainInitiator for $ty { fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { - let top_mod = self.top_mod(db.upcast()); + let top_mod = self.top_mod(db.as_hir_db()); let origin = $fn(db, *self); ResolvedOrigin::resolve(db, top_mod, origin) } @@ -339,14 +335,14 @@ macro_rules! define_lazy_span_node { impl crate::span::LazySpan for $name { - fn resolve(&self, db: &dyn crate::SpannedHirDb) -> common::diagnostics::Span { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { self.0.resolve(db) } } impl From<$name> for crate::span::DynLazySpan { fn from(val: $name) -> Self { - Self(val.0) + Self(val.0.into()) } } }; From 030280cabf9ba062a235cb45a16dfa4c8b7e1ca9 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 20 May 2023 13:20:18 +0200 Subject: [PATCH 163/678] Change query to have the scope where the query is resolved --- .../src/name_resolution/import_resolver.rs | 16 ++- .../src/name_resolution/name_resolver.rs | 116 +++++++++--------- 2 files changed, 68 insertions(+), 64 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 4c7c5636e2..978eb71da8 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -35,14 +35,14 @@ pub struct ImportResolver<'db> { /// The errors that have been accumulated during the import resolution. accumulated_errors: Vec, - /// The number of imported bindings. + /// The number of imported resolutions. /// This is used to judge if an import resolution doesn't change in each /// iteration of fixed point calculation. - /// This check rely on the fact that the number of bindings is monotonically - /// increasing. + /// This check relies on the fact that the number of resolutions is + /// monotonically increasing. num_imported_res: FxHashMap, - /// The set of imports that its resolution starts with an external ingot. + /// The set of imports that are suspected to be ambiguous. /// In this case, the use will turn out to be ambiguous after the import /// resolution reaches the fixed point.
suspicious_imports: FxHashSet, @@ -326,7 +326,7 @@ impl<'db> ImportResolver<'db> { }; let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); - let resolved = match resolver.resolve_query(i_use.current_scope(), query) { + let resolved = match resolver.resolve_query(query) { Ok(resolved) => resolved, Err(NameResolutionError::NotFound) if !self.is_decidable(i_use) => { @@ -536,7 +536,11 @@ impl<'db> ImportResolver<'db> { directive.add_domain(NameDomain::Value); } - Some(NameQuery::with_directive(seg_name, directive)) + Some(NameQuery::with_directive( + seg_name, + i_use.current_scope(), + directive, + )) } /// Returns `true` if there is an unresolved named import for the given name diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 6b01a784b8..0faeecf057 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -146,13 +146,9 @@ impl<'db, 'a> NameResolver<'db, 'a> { } } - pub fn resolve_query( - &mut self, - scope: ScopeId, - query: NameQuery, - ) -> Result { + pub fn resolve_query(&mut self, query: NameQuery) -> Result { // If the query is already resolved, return the cached result. - if let Some(resolved) = self.cache_store.get(scope, query) { + if let Some(resolved) = self.cache_store.get(query) { return resolved.clone(); }; @@ -166,7 +162,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { // 1. Look for the name in the current scope. let mut found_scopes = FxHashSet::default(); - for edge in scope.edges(self.db.as_hir_db()) { + for edge in query.scope.edges(self.db.as_hir_db()) { match edge.kind.propagate(query) { PropagationResult::Terminated => { if found_scopes.insert(edge.dest) { @@ -193,45 +189,45 @@ impl<'db, 'a> NameResolver<'db, 'a> { // 2. Look for the name in the named imports of the current scope. if let Some(imported) = self .importer - .named_imports(self.db, scope) + .named_imports(self.db, query.scope) .and_then(|imports| imports.get(&query.name)) { - self.try_merge(&mut binding, &imported.binding, scope, query)?; + self.try_merge(&mut binding, &imported.binding, query)?; } // 3. Look for the name in the glob imports. if query.directive.allow_glob { - if let Some(imported) = self.importer.glob_imports(self.db, scope) { + if let Some(imported) = self.importer.glob_imports(self.db, query.scope) { for res in imported.name_res_for(query.name) { - self.try_push(&mut binding, res, scope, query)?; + self.try_push(&mut binding, res, query)?; } } } // 4. Look for the name in the lexical scope if it exists. if let Some(parent) = parent { - match self.resolve_query(parent, query) { + match self.resolve_query(query.clone_with_scope(parent)) { Ok(mut resolved) => { resolved.lexed(); - self.try_merge(&mut binding, &resolved, scope, query)?; + self.try_merge(&mut binding, &resolved, query)?; } Err(NameResolutionError::NotFound) => {} Err(err) => { - self.cache_store - .cache_result(scope, query, Err(err.clone())); + self.cache_store.cache_result(query, Err(err.clone())); return Err(err); } } } if !query.directive.allow_external { - return self.finalize_query_result(scope, query, binding); + return self.finalize_query_result(query, binding); } // 5. Look for the name in the external ingots. 
if query.directive.is_allowed_domain(NameDomain::Item as u8) { - scope + query + .scope .top_mod() .ingot(self.db.as_hir_db()) .external_ingots(self.db.as_hir_db()) @@ -253,7 +249,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { // resolved_set.push_name(ResolvedName::new_builtin(builtin, // builtin.domain())) }; - self.finalize_query_result(scope, query, binding) + self.finalize_query_result(query, binding) } /// Collect all visible resolutions in the given `target` scope. @@ -276,15 +272,17 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// /// The below examples demonstrates the second point. /// We need to report ambiguous error at `const C: S = S` because `S` is - /// ambiguous. + /// ambiguous, on the other hand, we need NOT to report ambiguous error in + /// `foo` modules because `S` is not referred to in the module. /// /// ```fe /// use foo::* /// const C: S = S /// /// mod foo { - /// pub use inner1::*; - /// pub use inner2::*; + /// pub use inner1::* + /// pub use inner2::* + /// /// mod inner1 { /// pub struct S {} /// } @@ -301,8 +299,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { unresolved_named_imports: FxHashSet, ) -> FxHashMap> { let mut res_collection: FxHashMap> = FxHashMap::default(); - let mut seen_domains: FxHashMap = FxHashMap::default(); - let mut seen_scope: FxHashSet<(IdentId, ScopeId)> = FxHashSet::default(); + let mut found_domains: FxHashMap = FxHashMap::default(); + let mut found_scopes: FxHashSet<(IdentId, ScopeId)> = FxHashSet::default(); for edge in target.edges(self.db.as_hir_db()) { let scope = match edge.kind.propagate_glob(directive) { @@ -313,7 +311,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { }; let name = scope.name(self.db.as_hir_db()).unwrap(); - if !seen_scope.insert((name, scope)) { + if !found_scopes.insert((name, scope)) { continue; } let res = NameRes::new_scope( @@ -322,26 +320,26 @@ impl<'db, 'a> NameResolver<'db, 'a> { NameDerivation::Def, ); - *seen_domains.entry(name).or_default() |= res.domain as u8; + *found_domains.entry(name).or_default() |= res.domain as u8; res_collection.entry(name).or_default().push(res); } - let mut seen_domains_after_named = seen_domains.clone(); + let mut found_domains_after_named = found_domains.clone(); if let Some(named_imports) = self.importer.named_imports(self.db, target) { for (&name, import) in named_imports { if !is_use_visible(self.db, ref_scope, import.use_) { continue; } - let seen_domain = seen_domains.get(&name).copied().unwrap_or_default(); + let seen_domain = found_domains.get(&name).copied().unwrap_or_default(); for res in import.binding.iter() { if (seen_domain & res.domain as u8 != 0) - || !seen_scope.insert((name, res.scope)) + || !found_scopes.insert((name, res.scope)) { continue; } - *seen_domains_after_named.entry(name).or_default() |= res.domain as u8; + *found_domains_after_named.entry(name).or_default() |= res.domain as u8; res_collection.entry(name).or_default().push(res.clone()); } @@ -359,13 +357,13 @@ impl<'db, 'a> NameResolver<'db, 'a> { } for res in res_for_name.iter() { - let seen_domain = seen_domains_after_named + let seen_domain = found_domains_after_named .get(&name) .copied() .unwrap_or_default(); if (seen_domain & res.domain as u8 != 0) - || !seen_scope.insert((name, res.scope)) + || !found_scopes.insert((name, res.scope)) { continue; } @@ -381,7 +379,6 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// Finalize the query result and cache it to the cache store. 
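    // (A rough sketch of the cache shape after this change: because the scope
    // is now folded into `NameQuery`, the cache store further down is keyed by
    // the query alone. The generic parameters below are inferred from how the
    // cache is read and written, not copied from the patch:
    //
    //     cache: FxHashMap<NameQuery, Result<NameBinding, NameResolutionError>>
    // )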
fn finalize_query_result( &mut self, - scope: ScopeId, query: NameQuery, resolved_set: NameBinding, ) -> Result { @@ -390,7 +387,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { } else { Ok(resolved_set) }; - self.cache_store.cache_result(scope, query, result.clone()); + self.cache_store.cache_result(query, result.clone()); result } @@ -440,8 +437,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { }; let scope = pred.scope; - let query = NameQuery::new(seg); - let resolved_set = match self.resolve_query(scope, query) { + let query = NameQuery::new(seg, scope); + let resolved_set = match self.resolve_query(query) { Ok(resolved) => resolved, Err(NameResolutionError::NotFound) if pred.is_type(self.db) => { // If the parent scope of the current segment is a type and the segment is not @@ -468,7 +465,6 @@ impl<'db, 'a> NameResolver<'db, 'a> { &mut self, target: &mut NameBinding, from: &NameBinding, - scope: ScopeId, query: NameQuery, ) -> Result<(), NameResolutionError> { if target @@ -478,8 +474,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { Ok(()) } else { let err = NameResolutionError::Ambiguous; - self.cache_store - .cache_result(scope, query, Err(err.clone())); + self.cache_store.cache_result(query, Err(err.clone())); Err(err) } } @@ -488,15 +483,13 @@ impl<'db, 'a> NameResolver<'db, 'a> { &mut self, target: &mut NameBinding, res: &NameRes, - scope: ScopeId, query: NameQuery, ) -> Result<(), NameResolutionError> { if target.push(res).is_none() { Ok(()) } else { let err = NameResolutionError::Ambiguous; - self.cache_store - .cache_result(scope, query, Err(err.clone())); + self.cache_store.cache_result(query, Err(err.clone())); Err(err) } } @@ -504,27 +497,43 @@ impl<'db, 'a> NameResolver<'db, 'a> { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct NameQuery { + /// The name to be resolved. name: IdentId, + /// The scope where the name is resolved. + scope: ScopeId, directive: QueryDirective, } impl NameQuery { /// Create a new name query with the default query directive. - pub fn new(name: IdentId) -> Self { + pub fn new(name: IdentId, scope: ScopeId) -> Self { Self { name, + scope, directive: Default::default(), } } /// Create a new name query with the given query directive. 
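    // (A minimal usage sketch of the two constructors; `name`, `scope`, and
    // the directive are placeholders rather than values taken from this crate:
    //
    //     let plain = NameQuery::new(name, scope);
    //     let mut directive = QueryDirective::new();
    //     directive.disallow_glob().disallow_external();
    //     let restricted = NameQuery::with_directive(name, scope, directive);
    //
    // Either query is then handed to `NameResolver::resolve_query`, which no
    // longer takes a separate `ScopeId` argument.)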
- pub fn with_directive(name: IdentId, directive: QueryDirective) -> Self { - Self { name, directive } + pub fn with_directive(name: IdentId, scope: ScopeId, directive: QueryDirective) -> Self { + Self { + name, + scope, + directive, + } } pub fn name(&self) -> IdentId { self.name } + + fn clone_with_scope(self, scope: ScopeId) -> Self { + Self { + name: self.name, + scope, + directive: self.directive, + } + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct QueryDirective { @@ -944,29 +953,20 @@ impl std::error::Error for NameResolutionError {} #[derive(Default)] struct ResolvedQueryCacheStore { - cache: FxHashMap<(ScopeId, NameQuery), Result>, + cache: FxHashMap>, no_cache: bool, } impl ResolvedQueryCacheStore { - fn cache_result( - &mut self, - scope: ScopeId, - query: NameQuery, - result: Result, - ) { + fn cache_result(&mut self, query: NameQuery, result: Result) { if self.no_cache { return; } - self.cache.insert((scope, query), result); + self.cache.insert(query, result); } - fn get( - &self, - scope: ScopeId, - query: NameQuery, - ) -> Option<&Result> { - self.cache.get(&(scope, query)) + fn get(&self, query: NameQuery) -> Option<&Result> { + self.cache.get(&query) } } From 7b1082f02dd002e756e39618647d44d59a368d8b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 20 May 2023 18:35:54 +0200 Subject: [PATCH 164/678] Add `PrimTy` to HIR definition --- .../src/name_resolution/diagnostics.rs | 35 +-- .../src/name_resolution/import_resolver.rs | 97 ++++--- .../src/name_resolution/name_resolver.rs | 268 ++++++++---------- crates/hir/src/hir_def/mod.rs | 1 + crates/hir/src/hir_def/prim_ty.rs | 82 ++++++ 5 files changed, 277 insertions(+), 206 deletions(-) create mode 100644 crates/hir/src/hir_def/prim_ty.rs diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 3f71a216ae..b0786f13e7 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -8,6 +8,8 @@ use hir::{ HirDb, }; +use crate::HirAnalysisDb; + use super::name_resolver::NameRes; #[salsa::accumulator] @@ -32,8 +34,10 @@ impl ImportError { Self::new(span, ImportErrorKind::NotFound(ident)) } - pub fn invisible(span: DynLazySpan, resolved: NameRes) -> Self { - Self::new(span, ImportErrorKind::Invisible(resolved)) + pub fn invisible(db: &dyn HirAnalysisDb, span: DynLazySpan, resolved: NameRes) -> Self { + let name = resolved.kind.name(db).unwrap(); + let name_span = resolved.kind.name_span(db); + Self::new(span, ImportErrorKind::Invisible(name, name_span)) } pub fn ambiguous(span: DynLazySpan, ident: IdentId) -> Self { @@ -69,7 +73,7 @@ pub enum ImportErrorKind { NotFound(IdentId), /// The import path segment is not visible. - Invisible(NameRes), + Invisible(IdentId, Option), /// The import path segment is ambiguous. 
Ambiguous(IdentId), @@ -93,11 +97,8 @@ impl ImportErrorKind { match self { ImportErrorKind::Conflict(_) => "import conflicts with another import".to_string(), ImportErrorKind::NotFound(name) => format!("{} is not found", name.data(db)), - ImportErrorKind::Invisible(resolved) => { - format!( - "{} is not visible", - resolved.scope.name(db).unwrap().data(db) - ) + ImportErrorKind::Invisible(name, _) => { + format!("{} is not visible", name.data(db),) } ImportErrorKind::Ambiguous(name) => format!("{} is ambiguous", name.data(db)), } @@ -113,14 +114,16 @@ impl ImportErrorKind { ImportErrorKind::NotFound(_) | ImportErrorKind::Ambiguous(_) => vec![], - ImportErrorKind::Invisible(resolved) => { - let span = resolved.scope.name_span(db.as_hir_db()).unwrap(); - vec![SubDiagnostic::new( - Severity::Note, - "not visible because of this declaration".to_string(), - span.resolve(db), - )] - } + ImportErrorKind::Invisible(_, span) => span + .as_ref() + .map(|span| { + vec![SubDiagnostic::new( + Severity::Note, + "not visible because of this declaration".to_string(), + span.resolve(db), + )] + }) + .unwrap_or_default(), } } } diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 978eb71da8..961b9e8821 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -15,8 +15,8 @@ use crate::{name_resolution::visibility_checker::is_use_visible, HirAnalysisDb}; use super::{ diagnostics::ImportError, name_resolver::{ - NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, NameResolutionError, - NameResolver, QueryDirective, + NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, NameResKind, + NameResolutionError, NameResolver, QueryDirective, }, }; @@ -203,7 +203,10 @@ impl<'db> ImportResolver<'db> { } }; - let target_scope = base_path_resolved.current_scope(); + let Some(target_scope) = base_path_resolved.current_scope() else { + return (None, true); + }; + let original_scope = base_path_resolved.original_scope; let use_ = base_path_resolved.use_; @@ -321,8 +324,12 @@ impl<'db> ImportResolver<'db> { // anymore. // We don't need to report the error here because the parser should have already // reported it. 
- let Some(query) = self.make_query(i_use) else { - return None; + let query = match self.make_query(i_use) { + Ok(query) => query, + Err(err) => { + self.register_error(i_use, err); + return None; + } }; let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); @@ -343,7 +350,7 @@ impl<'db> ImportResolver<'db> { return Some(IUseResolution::Full(resolved)); } - if resolved.contains_external_ingot(self.db, i_use) || resolved.contains_glob_imported() { + if resolved.contains_external(self.db, i_use) || resolved.contains_glob_imported() { self.suspicious_imports.insert(i_use.use_); } @@ -363,7 +370,7 @@ impl<'db> ImportResolver<'db> { for &use_ in &s_graph.unresolved_uses { let i_use = IntermediateUse::new(self.db, use_); self.intermediate_uses - .entry(i_use.current_scope()) + .entry(i_use.original_scope) .or_default() .push_back(i_use); } @@ -418,7 +425,7 @@ impl<'db> ImportResolver<'db> { fn verify_ambiguity(&mut self, use_: Use) { let i_use = IntermediateUse::new(self.db, use_); let first_segment_ident = i_use.current_segment_ident(self.db).unwrap(); - let scope = i_use.current_scope(); + let scope = i_use.original_scope; let ingot = scope.ingot(self.db.as_hir_db()); // The ambiguity in the first segment possibly occurs when the segment is @@ -475,7 +482,7 @@ impl<'db> ImportResolver<'db> { // This ambiguity can be detected by the normal shadowing rules , so it can be // verified by calling `resolve_base_path`. // - // The ambiguity about the final segment of the PATH can be verified during the + // The ambiguity about the final segment of the path can be verified during the // fixed point calculation, so verification is not necessary. self.resolve_base_path(i_use); } @@ -514,9 +521,15 @@ impl<'db> ImportResolver<'db> { /// Makes a query for the current segment of the intermediate use to be /// resolved. - fn make_query(&self, i_use: &IntermediateUse) -> Option { - let seg_name = i_use.current_segment_ident(self.db)?; + fn make_query(&self, i_use: &IntermediateUse) -> Result { + let Some(seg_name) = i_use.current_segment_ident(self.db) else { + return Err(NameResolutionError::Invalid); + }; + let mut directive = QueryDirective::new(); + let Some(current_scope) = i_use.current_scope() else { + return Err(NameResolutionError::NotFound); + }; // In the middle of the use path, disallow lexically scoped names and // external names. @@ -524,11 +537,7 @@ impl<'db> ImportResolver<'db> { directive.disallow_lex().disallow_external(); } - if self.does_named_import_exist_for( - seg_name, - i_use.current_scope(), - i_use.is_first_segment(), - ) { + if self.does_named_import_exist_for(seg_name, current_scope, i_use.is_first_segment()) { directive.disallow_glob().disallow_external(); } @@ -536,15 +545,15 @@ impl<'db> ImportResolver<'db> { directive.add_domain(NameDomain::Value); } - Some(NameQuery::with_directive( + Ok(NameQuery::with_directive( seg_name, - i_use.current_scope(), + current_scope, directive, )) } /// Returns `true` if there is an unresolved named import for the given name - /// in the given scope or its lexical parent scope. + /// in the given scope or its lexical parents(if `allow_lex` is `true`). fn does_named_import_exist_for(&self, name: IdentId, scope: ScopeId, allow_lex: bool) -> bool { let mut current_scope = Some(scope); @@ -585,9 +594,12 @@ impl<'db> ImportResolver<'db> { ScopeState::Semi } - /// Returns `true` if the `i_use` can be proceed further in + /// Returns `true` if the next segment of the intermediate use is + /// deterministically resolvable. 
fn is_decidable(&self, i_use: &IntermediateUse) -> bool { - let target_scope = i_use.current_scope(); + let Some(target_scope) = i_use.current_scope() else { + return true; + }; if i_use.is_first_segment() { let mut target_scope = Some(target_scope); @@ -714,7 +726,7 @@ struct IntermediateUse { use_: Use, current_res: Option, original_scope: ScopeId, - resolved_until: usize, + unresolved_from: usize, } impl IntermediateUse { @@ -726,16 +738,19 @@ impl IntermediateUse { use_, current_res: None, original_scope: scope, - resolved_until: 0, + unresolved_from: 0, } } - /// Returns the scope that this intermediate use is contained. - fn current_scope(&self) -> ScopeId { + /// Returns the scope that the current resolution is pointed to. + fn current_scope(&self) -> Option { if let Some(current_res) = self.current_res.as_ref() { - current_res.scope + match current_res.kind { + NameResKind::Scope(scope) => Some(scope), + NameResKind::Prim(_) => None, + } } else { - self.original_scope + self.original_scope.into() } } @@ -756,7 +771,7 @@ impl IntermediateUse { use_: self.use_, current_res, original_scope: self.original_scope, - resolved_until: self.resolved_until + 1, + unresolved_from: self.unresolved_from + 1, } } @@ -765,7 +780,7 @@ impl IntermediateUse { self.use_ .lazy_span() .path() - .segment(self.resolved_until) + .segment(self.unresolved_from) .into() } @@ -776,7 +791,7 @@ impl IntermediateUse { .to_opt()? .segments(db.as_hir_db()); - let seg_idx = self.resolved_until; + let seg_idx = self.unresolved_from; let segment = segments[seg_idx].to_opt()?; segment.ident() } @@ -792,8 +807,10 @@ impl IntermediateUse { .map(|p| p.segment_len(db.as_hir_db())) } + /// Returns `true` if the segment that should be resolved next is the first + /// segment. fn is_first_segment(&self) -> bool { - self.resolved_until == 0 + self.unresolved_from == 0 } /// Returns `true` if the use path except the last segment is fully @@ -803,7 +820,7 @@ impl IntermediateUse { return false; }; - self.resolved_until + 1 == segment_len + self.unresolved_from + 1 == segment_len } } @@ -947,13 +964,19 @@ fn resolved_imports_for_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> &Resolv } impl NameBinding { - fn contains_external_ingot(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { - let current_ingot = i_use.current_scope().ingot(db.as_hir_db()); - self.resolutions - .values() - .any(|r| r.scope.ingot(db.as_hir_db()) != current_ingot) + /// Returns true if the binding contains an resolution that is not in the + /// same ingot as the current resolution of the `i_use`. + fn contains_external(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { + let Some(current_ingot) = i_use.current_scope().map(|scope| scope.ingot(db.as_hir_db())) else { + return false; + }; + self.resolutions.values().any(|r| match r.kind { + NameResKind::Scope(scope) => scope.ingot(db.as_hir_db()) != current_ingot, + NameResKind::Prim(_) => true, + }) } + /// Returns true if the binding contains a glob import. 
fn contains_glob_imported(&self) -> bool { self.resolutions .values() diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 0faeecf057..d8e24d0a4d 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -7,6 +7,7 @@ use std::{ use either::Either; use hir::{ hir_def::{ + prim_ty::PrimTy, scope_graph::{ AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, ScopeId, ScopeKind, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, @@ -87,10 +88,6 @@ impl PathResolutionError { Self { kind, failed_at } } - fn not_found(failed_at: usize) -> Self { - Self::new(NameResolutionError::NotFound, failed_at) - } - fn invalid(failed_at: usize) -> Self { Self::new(NameResolutionError::Invalid, failed_at) } @@ -206,7 +203,11 @@ impl<'db, 'a> NameResolver<'db, 'a> { // 4. Look for the name in the lexical scope if it exists. if let Some(parent) = parent { - match self.resolve_query(query.clone_with_scope(parent)) { + let mut query_for_parent = query; + query_for_parent.scope = parent; + query_for_parent.directive.disallow_external(); + + match self.resolve_query(query_for_parent) { Ok(mut resolved) => { resolved.lexed(); self.try_merge(&mut binding, &resolved, query)?; @@ -244,10 +245,11 @@ impl<'db, 'a> NameResolver<'db, 'a> { } // 6. Look for the name in the builtin types. - // todo: Add db.builtin_scopes() and use it here. - // if let Some(builtin) = BuiltinName::lookup_for(query.name) { - // resolved_set.push_name(ResolvedName::new_builtin(builtin, - // builtin.domain())) }; + for &prim in PrimTy::all_types() { + if query.name == prim.name() { + binding.push(&NameRes::new_prim(prim)); + } + } self.finalize_query_result(query, binding) } @@ -267,7 +269,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// - The function is used for glob imports, so it's necessary to return /// monotonously increasing results. Also, we can't arbitrarily choose the /// possible resolution from multiple candidates to avoid hiding - /// ambiguity. That's why we can't use `NameBinding` and + /// ambiguity. That's also the reason why we can't use `NameBinding` and /// `NameBinding::merge` in this function. /// /// The below examples demonstrates the second point. 
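    // (The glob-collection code below is deliberately monotone. A condensed
    // sketch, with the map/set types reconstructed from how they are used:
    //
    //     let mut found_domains: FxHashMap<IdentId, u8> = FxHashMap::default();
    //     let mut found_kinds: FxHashSet<(IdentId, NameResKind)> = FxHashSet::default();
    //     // A resolution is pushed only if its domain bit and its
    //     // (name, kind) pair are both unseen; nothing is ever removed.
    //
    // Keeping the result set grow-only is what the import resolver's
    // fixed-point check on the number of resolutions relies on.)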
@@ -300,7 +302,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { ) -> FxHashMap> { let mut res_collection: FxHashMap> = FxHashMap::default(); let mut found_domains: FxHashMap = FxHashMap::default(); - let mut found_scopes: FxHashSet<(IdentId, ScopeId)> = FxHashSet::default(); + let mut found_kinds: FxHashSet<(IdentId, NameResKind)> = FxHashSet::default(); for edge in target.edges(self.db.as_hir_db()) { let scope = match edge.kind.propagate_glob(directive) { @@ -311,7 +313,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { }; let name = scope.name(self.db.as_hir_db()).unwrap(); - if !found_scopes.insert((name, scope)) { + if !found_kinds.insert((name, scope.into())) { continue; } let res = NameRes::new_scope( @@ -331,10 +333,10 @@ impl<'db, 'a> NameResolver<'db, 'a> { continue; } - let seen_domain = found_domains.get(&name).copied().unwrap_or_default(); + let found_domain = found_domains.get(&name).copied().unwrap_or_default(); for res in import.binding.iter() { - if (seen_domain & res.domain as u8 != 0) - || !found_scopes.insert((name, res.scope)) + if (found_domain & res.domain as u8 != 0) + || !found_kinds.insert((name, res.kind)) { continue; } @@ -363,7 +365,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { .unwrap_or_default(); if (seen_domain & res.domain as u8 != 0) - || !found_scopes.insert((name, res.scope)) + || !found_kinds.insert((name, res.kind)) { continue; } @@ -427,38 +429,39 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// resolved. fn resolve_segment( &mut self, - pred: NameRes, - segment: Partial, - seg_idx: usize, - is_last: bool, + _pred: NameRes, + _segment: Partial, + _seg_idx: usize, + _is_last: bool, ) -> Result, PathResolutionError> { - let Partial::Present(seg) = segment else { - return Err(PathResolutionError::invalid(seg_idx)); - }; - - let scope = pred.scope; - let query = NameQuery::new(seg, scope); - let resolved_set = match self.resolve_query(query) { - Ok(resolved) => resolved, - Err(NameResolutionError::NotFound) if pred.is_type(self.db) => { - // If the parent scope of the current segment is a type and the segment is not - // found, then it should be resolved in the trait solving phase. - return Ok(Either::Left(ResolvedPath::partial(pred, seg_idx))); - } - Err(e) => { - return Err(PathResolutionError::new(e, seg_idx)); - } - }; - - if is_last { - Ok(Either::Left(ResolvedPath::Full(resolved_set))) - } else if resolved_set.len() > 1 { - // Case a. is already handled above. - // Handles case b. here. - return Err(PathResolutionError::not_found(seg_idx)); - } else { - Ok(Either::Right(resolved_set.into_iter().next().unwrap())) - } + todo!() + // let Partial::Present(seg) = segment else { + // return Err(PathResolutionError::invalid(seg_idx)); + // }; + + // let scope = pred.scope; + // let query = NameQuery::new(seg, scope); + // let resolved_set = match self.resolve_query(query) { + // Ok(resolved) => resolved, + // Err(NameResolutionError::NotFound) if pred.is_type(self.db) => { + // // If the parent scope of the current segment is a type and + // the segment is not // found, then it should be + // resolved in the trait solving phase. return + // Ok(Either::Left(ResolvedPath::partial(pred, seg_idx))); } + // Err(e) => { + // return Err(PathResolutionError::new(e, seg_idx)); + // } + // }; + + // if is_last { + // Ok(Either::Left(ResolvedPath::Full(resolved_set))) + // } else if resolved_set.len() > 1 { + // // Case a. is already handled above. + // // Handles case b. here. 
+ // return Err(PathResolutionError::not_found(seg_idx)); + // } else { + // Ok(Either::Right(resolved_set.into_iter().next().unwrap())) + // } } fn try_merge( @@ -526,14 +529,6 @@ impl NameQuery { pub fn name(&self) -> IdentId { self.name } - - fn clone_with_scope(self, scope: ScopeId) -> Self { - Self { - name: self.name, - scope, - directive: self.directive, - } - } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct QueryDirective { @@ -665,22 +660,25 @@ impl NameBinding { } } - /// Push the `res` into the set. If name conflict happens, the old - /// resolution will be returned, otherwise `None` will be returned. + /// Push the `res` into the set. fn push(&mut self, res: &NameRes) -> Option { let domain = res.domain; match self.resolutions.entry(domain) { Entry::Occupied(mut e) => { let old_derivation = e.get().derivation.clone(); - if old_derivation < res.derivation { - e.insert(res.clone()); - None - } else if res.derivation < old_derivation { - None - } else { - let old = e.get().clone(); - e.insert(res.clone()); - Some(old) + match res.derivation.cmp(&old_derivation) { + cmp::Ordering::Less => None, + cmp::Ordering::Equal => { + if e.get().kind == res.kind { + None + } else { + Some(res.clone()) + } + } + cmp::Ordering::Greater => { + e.insert(res.clone()); + None + } } } @@ -723,20 +721,26 @@ impl From for NameBinding { #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct NameRes { - pub scope: ScopeId, + pub kind: NameResKind, pub domain: NameDomain, pub derivation: NameDerivation, } impl NameRes { pub fn is_type(&self, db: &dyn HirAnalysisDb) -> bool { - self.scope.is_type(db.as_hir_db()) + match self.kind { + NameResKind::Prim(_) => true, + NameResKind::Scope(scope) => scope.is_type(db.as_hir_db()), + } } pub fn is_visible(&self, db: &dyn HirAnalysisDb, from: ScopeId) -> bool { let scope_or_use = match self.derivation { - NameDerivation::Def | NameDerivation::Builtin | NameDerivation::External => { - Either::Left(self.scope) + NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { + match self.kind { + NameResKind::Scope(scope) => Either::Left(scope), + NameResKind::Prim(_) => return true, + } } NameDerivation::NamedImported(use_) | NameDerivation::GlobImported(use_) => { Either::Right(use_) @@ -763,8 +767,8 @@ impl NameRes { pub(super) fn derived_from(&self, db: &dyn HirAnalysisDb) -> Option { match self.derivation { - NameDerivation::Def | NameDerivation::Builtin | NameDerivation::External => { - self.scope.name_span(db.as_hir_db()) + NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { + self.kind.name_span(db) } NameDerivation::NamedImported(use_) => use_.imported_name_span(db.as_hir_db()), NameDerivation::GlobImported(use_) => use_.glob_span(db.as_hir_db()), @@ -784,11 +788,41 @@ impl NameRes { fn new_scope(scope: ScopeId, domain: NameDomain, derivation: NameDerivation) -> Self { Self { - scope, + kind: scope.into(), derivation, domain, } } + + fn new_prim(prim: PrimTy) -> Self { + Self { + kind: prim.into(), + derivation: NameDerivation::Prim, + domain: NameDomain::Item, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, derive_more::From)] +pub enum NameResKind { + Scope(ScopeId), + Prim(PrimTy), +} + +impl NameResKind { + pub fn name_span(self, db: &dyn HirAnalysisDb) -> Option { + match self { + NameResKind::Scope(scope) => scope.name_span(db.as_hir_db()), + NameResKind::Prim(_) => None, + } + } + + pub fn name(self, db: &dyn HirAnalysisDb) -> Option { + match self { + NameResKind::Scope(scope) => 
scope.name(db.as_hir_db()), + NameResKind::Prim(prim) => prim.name().into(), + } + } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -798,7 +832,7 @@ pub enum NameDerivation { GlobImported(Use), Lex(Box), External, - Builtin, + Prim, } impl NameDerivation { @@ -835,11 +869,17 @@ impl PartialOrd for NameDerivation { (NameDerivation::External, _) => Some(cmp::Ordering::Greater), (_, NameDerivation::External) => Some(cmp::Ordering::Less), - (NameDerivation::Builtin, NameDerivation::Builtin) => Some(cmp::Ordering::Equal), + (NameDerivation::Prim, NameDerivation::Prim) => Some(cmp::Ordering::Equal), } } } +impl Ord for NameDerivation { + fn cmp(&self, other: &Self) -> cmp::Ordering { + self.partial_cmp(other).unwrap() + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum NameResolutionError { /// The name is not found. @@ -873,84 +913,6 @@ impl fmt::Display for NameResolutionError { impl std::error::Error for NameResolutionError {} -// #[derive(Clone, Debug, Copy, PartialEq, Eq, Hash)] -// pub enum BuiltinName { -// Bool, -// U8, -// U16, -// U32, -// U64, -// U128, -// U256, -// I8, -// I16, -// I32, -// I64, -// I128, -// I256, -// } -// -// impl BuiltinName { -// /// Returns the builtin name if the `name` is a builtin name. -// pub fn lookup_for(name: IdentId) -> Option { -// match name { -// kw::BOOL => Self::Bool, -// kw::U8 => Self::U8, -// kw::U16 => Self::U16, -// kw::U32 => Self::U32, -// kw::U64 => Self::U64, -// kw::U128 => Self::U128, -// kw::U256 => Self::U256, -// kw::I8 => Self::I8, -// kw::I16 => Self::I16, -// kw::I32 => Self::I32, -// kw::I64 => Self::I64, -// kw::I128 => Self::I128, -// kw::I256 => Self::I256, -// _ => return None, -// } -// .into() -// } -// -// pub fn domain(self) -> NameDomain { -// // Currently all builtin belong to the item domain. -// match self { -// Self::Bool -// | Self::U8 -// | Self::U16 -// | Self::U32 -// | Self::U64 -// | Self::U128 -// | Self::U256 -// | Self::I8 -// | Self::I16 -// | Self::I32 -// | Self::I64 -// | Self::I128 -// | Self::I256 => NameDomain::Item, -// } -// } -// -// pub fn is_type(self) -> bool { -// // Currently all builtin names are types. 
-// match self { -// Self::Bool -// | Self::U8 -// | Self::U16 -// | Self::U32 -// | Self::U64 -// | Self::U128 -// | Self::U256 -// | Self::I8 -// | Self::I16 -// | Self::I32 -// | Self::I64 -// | Self::I128 -// | Self::I256 => true, -// } -// } -// } - #[derive(Default)] struct ResolvedQueryCacheStore { cache: FxHashMap>, diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index af5e440164..f93f0eca42 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -6,6 +6,7 @@ pub mod item; pub mod params; pub mod pat; pub mod path; +pub mod prim_ty; pub mod scope_graph; pub mod stmt; pub mod types; diff --git a/crates/hir/src/hir_def/prim_ty.rs b/crates/hir/src/hir_def/prim_ty.rs new file mode 100644 index 0000000000..392c762588 --- /dev/null +++ b/crates/hir/src/hir_def/prim_ty.rs @@ -0,0 +1,82 @@ +use super::{kw, IdentId}; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum PrimTy { + Bool, + Int(IntTy), + Uint(UintTy), +} + +impl PrimTy { + pub fn name(self) -> IdentId { + match self { + PrimTy::Bool => kw::BOOL, + PrimTy::Int(ty) => ty.name(), + PrimTy::Uint(ty) => ty.name(), + } + } + + pub fn all_types() -> &'static [PrimTy] { + &[ + PrimTy::Bool, + PrimTy::Int(IntTy::I8), + PrimTy::Int(IntTy::I16), + PrimTy::Int(IntTy::I32), + PrimTy::Int(IntTy::I64), + PrimTy::Int(IntTy::I128), + PrimTy::Int(IntTy::I256), + PrimTy::Uint(UintTy::U8), + PrimTy::Uint(UintTy::U16), + PrimTy::Uint(UintTy::U32), + PrimTy::Uint(UintTy::U64), + PrimTy::Uint(UintTy::U128), + PrimTy::Uint(UintTy::U256), + ] + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum IntTy { + I8, + I16, + I32, + I64, + I128, + I256, +} + +impl IntTy { + pub fn name(self) -> IdentId { + match self { + IntTy::I8 => kw::I8, + IntTy::I16 => kw::I16, + IntTy::I32 => kw::I32, + IntTy::I64 => kw::I64, + IntTy::I128 => kw::I128, + IntTy::I256 => kw::I256, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum UintTy { + U8, + U16, + U32, + U64, + U128, + U256, +} + +impl UintTy { + pub fn name(self) -> IdentId { + match self { + UintTy::U8 => kw::U8, + UintTy::U16 => kw::U16, + UintTy::U32 => kw::U32, + UintTy::U64 => kw::U64, + UintTy::U128 => kw::U128, + UintTy::U256 => kw::U256, + } + } +} From 0c5d073c22f2f8f37de7bd95fc5abc7c509f573b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 20 May 2023 23:28:24 +0200 Subject: [PATCH 165/678] Remove unnecessary duplication of `IngotId` and clean up db cast --- crates/common2/src/lib.rs | 6 +- crates/hir-analysis/src/lib.rs | 4 +- .../src/name_resolution/import_resolver.rs | 1 + .../src/name_resolution/visibility_checker.rs | 2 +- crates/hir/src/hir_def/module_tree.rs | 30 ++++++---- crates/hir/src/hir_def/scope_graph.rs | 15 +++-- crates/hir/src/lib.rs | 31 +++++------ crates/hir/src/lower/mod.rs | 10 ++-- crates/hir/src/lower/scope_builder.rs | 55 +++++++++++-------- crates/hir/src/span/expr.rs | 1 + crates/hir/src/span/item.rs | 1 + crates/hir/src/span/stmt.rs | 2 +- 12 files changed, 91 insertions(+), 67 deletions(-) diff --git a/crates/common2/src/lib.rs b/crates/common2/src/lib.rs index 203c0ad41f..c2694c6967 100644 --- a/crates/common2/src/lib.rs +++ b/crates/common2/src/lib.rs @@ -6,5 +6,9 @@ pub use input::{InputFile, InputIngot}; #[salsa::jar(db = InputDb)] pub struct Jar(InputIngot, InputFile); -pub trait InputDb: salsa::DbWithJar {} +pub trait InputDb: salsa::DbWithJar { + fn as_input_db(&self) -> &dyn InputDb { + >::as_jar_db::<'_>(self) + } +} impl InputDb for DB where DB: 
?Sized + salsa::DbWithJar {} diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 5ed46f1cd6..424af1508e 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -7,8 +7,8 @@ pub struct Jar( ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { - fn as_hir_db(&self) -> &dyn HirDb { - >::as_jar_db::<'_>(self) + fn as_hir_analysis_db(&self) -> &dyn HirAnalysisDb { + >::as_jar_db::<'_>(self) } } impl HirAnalysisDb for DB where DB: ?Sized + salsa::DbWithJar + HirDb {} diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 961b9e8821..11ca618dc0 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -824,6 +824,7 @@ impl IntermediateUse { } } +#[derive(Debug, Clone, PartialEq, Eq)] enum IUseResolution { /// The all segments are resolved. Full(NameBinding), diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs index 6aad8fa021..7b2e7fb825 100644 --- a/crates/hir-analysis/src/name_resolution/visibility_checker.rs +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -16,7 +16,7 @@ pub fn is_scope_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, target_scope return true; } - let Some(def_scope) = (if matches!(ref_scope.kind(db.as_hir_db()), ScopeKind::Field(_)) { + let Some(def_scope) = (if matches!(target_scope.kind(db.as_hir_db()), ScopeKind::Field(_) | ScopeKind::Variant(_)) { // We treat fields as if they are defined in the parent of the parent scope so // that field can be accessible from the scope where the parent is defined. target_scope.parent(db.as_hir_db()).and_then(|scope| scope.parent(db.as_hir_db())) diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index e28be7a98d..2a436fe650 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -6,7 +6,7 @@ use cranelift_entity::{entity_impl, PrimaryMap}; use crate::{lower::map_file_to_mod_impl, HirDb}; -use super::{IdentId, TopLevelMod}; +use super::{IdentId, IngotId, TopLevelMod}; /// This tree represents the structure of an ingot. /// Internal modules are not included in this tree, instead, they are included @@ -60,7 +60,7 @@ pub struct ModuleTree { pub(crate) module_tree: PrimaryMap, pub(crate) mod_map: BTreeMap, - pub(crate) ingot: InputIngot, + pub ingot: IngotId, } impl ModuleTree { @@ -113,7 +113,7 @@ impl ModuleTree { /// top level modules. This function only depends on an ingot structure and /// external ingot dependency, and not depends on file contents. 
#[salsa::tracked(return_ref)] -pub fn module_tree_impl(db: &dyn HirDb, ingot: InputIngot) -> ModuleTree { +pub(crate) fn module_tree_impl(db: &dyn HirDb, ingot: InputIngot) -> ModuleTree { ModuleTreeBuilder::new(db, ingot).build() } @@ -150,7 +150,8 @@ entity_impl!(ModuleTreeNodeId); struct ModuleTreeBuilder<'db> { db: &'db dyn HirDb, - ingot: InputIngot, + input_ingot: InputIngot, + ingot: IngotId, module_tree: PrimaryMap, mod_map: BTreeMap, path_map: BTreeMap<&'db Utf8Path, ModuleTreeNodeId>, @@ -160,7 +161,8 @@ impl<'db> ModuleTreeBuilder<'db> { fn new(db: &'db dyn HirDb, ingot: InputIngot) -> Self { Self { db, - ingot, + input_ingot: ingot, + ingot: IngotId::new(db, ingot), module_tree: PrimaryMap::default(), mod_map: BTreeMap::default(), path_map: BTreeMap::default(), @@ -171,7 +173,11 @@ impl<'db> ModuleTreeBuilder<'db> { self.set_modules(); self.build_tree(); - let root_mod = map_file_to_mod_impl(self.db, self.ingot.root_file(self.db.as_input_db())); + let root_mod = map_file_to_mod_impl( + self.db, + self.ingot, + self.input_ingot.root_file(self.db.as_input_db()), + ); let root = self.mod_map[&root_mod]; ModuleTree { root, @@ -182,8 +188,8 @@ impl<'db> ModuleTreeBuilder<'db> { } fn set_modules(&mut self) { - for &file in self.ingot.files(self.db.as_input_db()) { - let top_mod = map_file_to_mod_impl(self.db, file); + for &file in self.input_ingot.files(self.db.as_input_db()) { + let top_mod = map_file_to_mod_impl(self.db, self.ingot, file); let module_id = self.module_tree.push(ModuleTreeNode::new(top_mod)); self.path_map @@ -193,18 +199,18 @@ impl<'db> ModuleTreeBuilder<'db> { } fn build_tree(&mut self) { - let root = self.ingot.root_file(self.db.as_input_db()); + let root = self.input_ingot.root_file(self.db.as_input_db()); - for &child in self.ingot.files(self.db.as_input_db()) { + for &child in self.input_ingot.files(self.db.as_input_db()) { // Ignore the root file because it has no parent. if child == root { continue; } let root_path = root.path(self.db.as_input_db()); - let root_mod = map_file_to_mod_impl(self.db, root); + let root_mod = map_file_to_mod_impl(self.db, self.ingot, root); let child_path = child.path(self.db.as_input_db()); - let child_mod = map_file_to_mod_impl(self.db, child); + let child_mod = map_file_to_mod_impl(self.db, self.ingot, child); // If the file is in the same directory as the root file, the file is a direct // child of the root. 
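            // For orientation, a layout along these lines (file names are
            // illustrative; only the same-directory rule is spelled out in
            // this hunk, nested files are assumed to follow the analogous
            // directory-based rule):
            //
            //     root.fe        -> root node of the module tree
            //     foo.fe         -> direct child of the root
            //     foo/bar.fe     -> child of `foo`
            //
            // Only top-level modules (one per file) appear in this tree.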
diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 3db546e9d1..ba4fbcf3c9 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -192,11 +192,16 @@ impl ScopeId { } pub fn parent(self, db: &dyn HirDb) -> Option { - self.data(db) - .edges - .iter() - .find(|e| matches!(e.kind, EdgeKind::Lex(_) | EdgeKind::Super(_))) - .map(|e| e.dest) + let mut super_dest = None; + for edge in self.edges(db) { + if let EdgeKind::Lex(_) = edge.kind { + return Some(edge.dest); + } + if let EdgeKind::Super(_) = edge.kind { + super_dest = Some(edge.dest); + } + } + super_dest } pub fn lex_parent(self, db: &dyn HirDb) -> Option { diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index e1fa8a56e3..50ff240d10 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -84,16 +84,8 @@ pub trait HirDb: salsa::DbWithJar + InputDb { IdentId::prefill(self) } - fn as_input_db(&self) -> &dyn InputDb { - >::as_jar_db::<'_>(self) - } -} -impl HirDb for DB -where - DB: ?Sized + salsa::DbWithJar + InputDb, -{ - fn as_input_db(&self) -> &dyn InputDb { - >::as_jar_db::<'_>(self) + fn as_hir_db(&self) -> &dyn HirDb { + >::as_jar_db::<'_>(self) } } @@ -103,8 +95,11 @@ where /// implementations relying on `LowerHirDb` are prohibited in all /// Analysis phases. pub trait LowerHirDb: HirDb { - fn as_hir_db(&self) -> &dyn HirDb { - >::as_jar_db::<'_>(self) + fn as_lower_hir_db(&self) -> &dyn LowerHirDb + where + Self: Sized, + { + self } } @@ -120,8 +115,11 @@ pub trait LowerHirDb: HirDb { /// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). /// See also `[LazySpan]`[`crate::span::LazySpan`] for more details. pub trait SpannedHirDb: HirDb { - fn as_hir_db(&self) -> &dyn HirDb { - >::as_jar_db::<'_>(self) + fn as_spanned_hir_db(&self) -> &dyn SpannedHirDb + where + Self: Sized, + { + self } } @@ -156,6 +154,7 @@ mod test_db { db } } + impl HirDb for TestDb {} impl SpannedHirDb for TestDb {} impl LowerHirDb for TestDb {} impl salsa::Database for TestDb { @@ -172,10 +171,6 @@ mod test_db { } impl TestDb { - pub fn as_hir_db(&self) -> &dyn HirDb { - >::as_jar_db::<'_>(self) - } - pub fn parse_source(&mut self, text: &str) -> &ScopeGraph { let file = self.standalone_file(text); let top_mod = map_file_to_mod(self, file); diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index c303bb6e10..18300715e7 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -37,9 +37,10 @@ mod use_tree; /// Maps the given file to a top-level module. /// This function just maps the file to a top-level module, and doesn't perform /// any parsing or lowering. -/// To perform the actual lowering, use `module_item_tree` function. +/// To perform the actual lowering, use [`scope_graph`] instead. pub fn map_file_to_mod(db: &dyn LowerHirDb, file: InputFile) -> TopLevelMod { - map_file_to_mod_impl(db.as_hir_db(), file) + let ingot = module_tree_impl(db.as_hir_db(), file.ingot(db.as_input_db())).ingot; + map_file_to_mod_impl(db.as_hir_db(), ingot, file) } /// Returns the item tree of the given top-level module. 
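// (A rough sketch of the lowering entry points around this change — `db` is
// assumed to implement `LowerHirDb` and `file` to be an `InputFile` that
// belongs to an ingot:
//
//     let top_mod = map_file_to_mod(db, file);
//     let tree = module_tree(db, file.ingot(db.as_input_db()));
//
// Both delegate to salsa-tracked `*_impl` queries.)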
@@ -71,12 +72,11 @@ pub fn module_tree(db: &dyn LowerHirDb, ingot: InputIngot) -> &ModuleTree { } #[salsa::tracked] -pub(crate) fn map_file_to_mod_impl(db: &dyn HirDb, file: InputFile) -> TopLevelMod { +pub(crate) fn map_file_to_mod_impl(db: &dyn HirDb, ingot: IngotId, file: InputFile) -> TopLevelMod { let path = file.path(db.as_input_db()); let name = path.file_stem().unwrap(); let mod_name = IdentId::new(db, name.to_string()); - let ingot = file.ingot(db.as_input_db()); - TopLevelMod::new(db, mod_name, IngotId::new(db, ingot), file) + TopLevelMod::new(db, mod_name, ingot, file) } #[salsa::tracked(return_ref)] diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index e656ffa6a7..9df8fa3e3d 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -58,8 +58,7 @@ impl<'db> ScopeGraphBuilder<'db> { use ItemKind::*; let item_scope = self.scope_stack.pop().unwrap(); - self.graph.scopes[item_scope].kind = ScopeKind::Item(item); - self.graph.item_map.insert(item, item_scope); + self.initialize_item_scope(item_scope, item); if let ItemKind::TopMod(top_mod) = item { debug_assert!(self.scope_stack.is_empty()); @@ -77,8 +76,8 @@ impl<'db> ScopeGraphBuilder<'db> { } if let Some(parent) = top_mod.parent(self.db) { - let parent_edge = EdgeKind::super_(); - self.add_global_edge(item_scope, parent, parent_edge); + let edge = EdgeKind::super_(); + self.add_global_edge(item_scope, parent, edge); } self.module_stack.pop().unwrap(); @@ -144,7 +143,8 @@ impl<'db> ScopeGraphBuilder<'db> { Enum(inner) => { self.add_lex_edge(item_scope, parent_scope); - self.add_variant_scope(item_scope, inner.variants(self.db)); + let vis = inner.vis(self.db); + self.add_variant_scope(item_scope, vis, inner.variants(self.db)); self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); inner .name(self.db) @@ -215,54 +215,65 @@ impl<'db> ScopeGraphBuilder<'db> { self.add_local_edge(parent_scope, item_scope, parent_to_child_edge); } - fn add_field_scope(&mut self, current_scope: LocalScopeId, fields: RecordFieldListId) { + fn initialize_item_scope(&mut self, scope: LocalScopeId, item: ItemKind) { + self.graph.scopes[scope].kind = ScopeKind::Item(item); + self.graph.scopes[scope].vis = item.vis(self.db); + self.graph.item_map.insert(item, scope); + } + + fn add_field_scope(&mut self, parent_scope: LocalScopeId, fields: RecordFieldListId) { for (i, field) in fields.data(self.db).iter().enumerate() { let scope = LocalScope::new( ScopeKind::Field(i), self.parent_module_id(), - Some(current_scope), + Some(parent_scope), field.vis, ); let field_scope = self.graph.scopes.push(scope); - self.add_lex_edge(field_scope, current_scope); + self.add_lex_edge(field_scope, parent_scope); let kind = field .name .to_opt() .map(EdgeKind::field) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, field_scope, kind) + self.add_local_edge(parent_scope, field_scope, kind) } } - fn add_variant_scope(&mut self, current_scope: LocalScopeId, variants: EnumVariantListId) { + fn add_variant_scope( + &mut self, + parent_scope: LocalScopeId, + parent_vis: Visibility, + variants: EnumVariantListId, + ) { for (i, field) in variants.data(self.db).iter().enumerate() { let scope = LocalScope::new( ScopeKind::Variant(i), self.parent_module_id(), - Some(current_scope), - Visibility::Public, + Some(parent_scope), + parent_vis, ); let variant_scope = self.graph.scopes.push(scope); - self.add_lex_edge(variant_scope, current_scope); + 
self.add_lex_edge(variant_scope, parent_scope); let kind = field .name .to_opt() .map(EdgeKind::variant) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, variant_scope, kind) + self.add_local_edge(parent_scope, variant_scope, kind) } } - fn add_func_param_scope(&mut self, current_scope: LocalScopeId, params: FnParamListId) { + fn add_func_param_scope(&mut self, parent_scope: LocalScopeId, params: FnParamListId) { for (i, param) in params.data(self.db).iter().enumerate() { let scope = LocalScope::new( ScopeKind::FnParam(i), self.parent_module_id(), - Some(current_scope), + Some(parent_scope), Visibility::Private, ); let generic_param_scope = self.graph.scopes.push(scope); - self.add_lex_edge(generic_param_scope, current_scope); + self.add_lex_edge(generic_param_scope, parent_scope); let kind = param .name .to_opt() @@ -271,26 +282,26 @@ impl<'db> ScopeGraphBuilder<'db> { FnParamName::Underscore => EdgeKind::anon(), }) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, generic_param_scope, kind) + self.add_local_edge(parent_scope, generic_param_scope, kind) } } - fn add_generic_param_scope(&mut self, current_scope: LocalScopeId, params: GenericParamListId) { + fn add_generic_param_scope(&mut self, parent_scope: LocalScopeId, params: GenericParamListId) { for (i, param) in params.data(self.db).iter().enumerate() { let scope = LocalScope::new( ScopeKind::GenericParam(i), self.parent_module_id(), - Some(current_scope), + Some(parent_scope), Visibility::Private, ); let generic_param_scope = self.graph.scopes.push(scope); - self.add_lex_edge(generic_param_scope, current_scope); + self.add_lex_edge(generic_param_scope, parent_scope); let kind = param .name() .to_opt() .map(EdgeKind::generic_param) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(current_scope, generic_param_scope, kind) + self.add_local_edge(parent_scope, generic_param_scope, kind) } } diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index 94cf9d7d19..68387f189c 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -181,6 +181,7 @@ mod tests { use crate::{ hir_def::{Body, Expr, Stmt}, test_db::TestDb, + HirDb, }; #[test] diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 856ba41174..d830665118 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -286,6 +286,7 @@ mod tests { use crate::{ hir_def::{Enum, Func, Mod, Struct, TypeAlias, Use}, test_db::TestDb, + HirDb, }; #[test] diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index dbf23c32db..e36bff1c8a 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -48,7 +48,7 @@ impl ChainInitiator for StmtRoot { #[cfg(test)] mod tests { - use crate::{hir_def::Body, test_db::TestDb}; + use crate::{hir_def::Body, test_db::TestDb, HirDb}; #[test] fn aug_assign() { From 96d793a30efdfd65433c722cc73b9357e64e6212 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 21 May 2023 10:35:51 +0200 Subject: [PATCH 166/678] Fix a bug in scope graph where `super` points to `self` module --- crates/hir/src/lower/scope_builder.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 9df8fa3e3d..2aa36ed376 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -87,6 +87,8 @@ impl<'db> ScopeGraphBuilder<'db> { let parent_scope = *self.scope_stack.last().unwrap(); let 
parent_to_child_edge = match item { Mod(inner) => { + self.module_stack.pop().unwrap(); + self.add_local_edge( item_scope, *self.module_stack.last().unwrap(), @@ -99,7 +101,6 @@ impl<'db> ScopeGraphBuilder<'db> { ); self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); - self.module_stack.pop().unwrap(); inner .name(self.db) .to_opt() From 10a7e8237b30be797f5d9979cf382d17c53fb2c7 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 23 May 2023 00:00:59 +0200 Subject: [PATCH 167/678] Add tests for import resolution --- Cargo.lock | 4 + crates/hir-analysis/Cargo.toml | 6 + crates/hir-analysis/build.rs | 4 + .../src/name_resolution/import_resolver.rs | 19 ++- .../src/name_resolution/name_resolver.rs | 8 +- .../test_files/imports/glob_chain.fe | 12 ++ .../test_files/imports/glob_chain.snap | 18 +++ .../test_files/imports/glob_mutual_dep.fe | 13 ++ .../test_files/imports/glob_mutual_dep.snap | 24 +++ .../test_files/imports/glob_shadow.fe | 12 ++ .../test_files/imports/glob_shadow.snap | 18 +++ .../test_files/imports/multiple_domains.fe | 6 + .../test_files/imports/multiple_domains.snap | 12 ++ .../test_files/imports/use_depends_glob.fe | 9 ++ .../test_files/imports/use_depends_glob.snap | 18 +++ crates/hir-analysis/tests/import.rs | 71 +++++++++ crates/hir-analysis/tests/test_db.rs | 148 ++++++++++++++++++ crates/hir/src/hir_def/scope_graph.rs | 9 ++ 18 files changed, 404 insertions(+), 7 deletions(-) create mode 100644 crates/hir-analysis/build.rs create mode 100644 crates/hir-analysis/test_files/imports/glob_chain.fe create mode 100644 crates/hir-analysis/test_files/imports/glob_chain.snap create mode 100644 crates/hir-analysis/test_files/imports/glob_mutual_dep.fe create mode 100644 crates/hir-analysis/test_files/imports/glob_mutual_dep.snap create mode 100644 crates/hir-analysis/test_files/imports/glob_shadow.fe create mode 100644 crates/hir-analysis/test_files/imports/glob_shadow.snap create mode 100644 crates/hir-analysis/test_files/imports/multiple_domains.fe create mode 100644 crates/hir-analysis/test_files/imports/multiple_domains.snap create mode 100644 crates/hir-analysis/test_files/imports/use_depends_glob.fe create mode 100644 crates/hir-analysis/test_files/imports/use_depends_glob.snap create mode 100644 crates/hir-analysis/tests/import.rs create mode 100644 crates/hir-analysis/tests/test_db.rs diff --git a/Cargo.lock b/Cargo.lock index 3fc191b50e..feaedd847e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -923,11 +923,15 @@ dependencies = [ name = "fe-hir-analysis" version = "0.20.0-alpha" dependencies = [ + "codespan-reporting", "derive_more", + "dir-test", "either", "fe-common2", + "fe-compiler-test-utils", "fe-hir", "fe-macros", + "itertools", "rustc-hash", "salsa-2022", "smallvec", diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index 170cecd66f..b13801c9fa 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -13,7 +13,13 @@ smallvec = "1.10" rustc-hash = "1.1.0" either = "1.8" derive_more = "0.99" +itertools = "0.10" hir = { path = "../hir", package = "fe-hir" } common = { path = "../common2", package = "fe-common2" } macros = { path = "../macros", package = "fe-macros" } +fe-compiler-test-utils = { path = "../test-utils" } + +[dev-dependencies] +codespan-reporting = "0.11" +dir-test = "0.1" diff --git a/crates/hir-analysis/build.rs b/crates/hir-analysis/build.rs new file mode 100644 index 0000000000..8e048f9218 --- /dev/null +++ b/crates/hir-analysis/build.rs @@ -0,0 +1,4 @@ +fn main() { + 
#[cfg(test)] + println!("cargo:rerun-if-changed=./test_files"); +} diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 11ca618dc0..87b679d598 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -8,6 +8,7 @@ use hir::{ hir_def::{scope_graph::ScopeId, IdentId, IngotId, Use}, span::DynLazySpan, }; +use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use crate::{name_resolution::visibility_checker::is_use_visible, HirAnalysisDb}; @@ -64,7 +65,8 @@ impl<'db> ImportResolver<'db> { self.initialize_i_uses(); let mut changed = true; - let mut unresolved_scope: VecDeque<_> = self.intermediate_uses.keys().copied().collect(); + let mut unresolved_scope: VecDeque<_> = + self.intermediate_uses.keys().copied().dedup().collect(); while changed { changed = false; let n_unresolved_scope = unresolved_scope.len(); @@ -87,7 +89,7 @@ impl<'db> ImportResolver<'db> { match self.resolve_i_use(i_use) { (Some(updated_i_use), resolved) => { - changed = changed || resolved; + changed |= resolved; self.intermediate_uses .get_mut(&scope) .unwrap() @@ -95,12 +97,12 @@ impl<'db> ImportResolver<'db> { } (None, resolved) => { - changed = changed || resolved; + changed |= resolved; } } } - if self.scope_state(scope).is_closed() { + if !self.scope_state(scope).is_closed() { unresolved_scope.push_back(scope); } } @@ -537,7 +539,7 @@ impl<'db> ImportResolver<'db> { directive.disallow_lex().disallow_external(); } - if self.does_named_import_exist_for(seg_name, current_scope, i_use.is_first_segment()) { + if self.contains_unresolved_named_use(seg_name, current_scope, i_use.is_first_segment()) { directive.disallow_glob().disallow_external(); } @@ -554,7 +556,12 @@ impl<'db> ImportResolver<'db> { /// Returns `true` if there is an unresolved named import for the given name /// in the given scope or its lexical parents(if `allow_lex` is `true`). 
- fn does_named_import_exist_for(&self, name: IdentId, scope: ScopeId, allow_lex: bool) -> bool { + fn contains_unresolved_named_use( + &self, + name: IdentId, + scope: ScopeId, + allow_lex: bool, + ) -> bool { let mut current_scope = Some(scope); while let Some(scope) = current_scope { diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index d8e24d0a4d..d4ecb40e56 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -342,7 +342,6 @@ impl<'db, 'a> NameResolver<'db, 'a> { } *found_domains_after_named.entry(name).or_default() |= res.domain as u8; - res_collection.entry(name).or_default().push(res.clone()); } } @@ -765,6 +764,13 @@ impl NameRes { } } + pub fn pretty_path(&self, db: &dyn HirAnalysisDb) -> Option { + match self.kind { + NameResKind::Scope(scope) => scope.pretty_path(db.as_hir_db()), + NameResKind::Prim(prim) => prim.name().data(db.as_hir_db()).into(), + } + } + pub(super) fn derived_from(&self, db: &dyn HirAnalysisDb) -> Option { match self.derivation { NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { diff --git a/crates/hir-analysis/test_files/imports/glob_chain.fe b/crates/hir-analysis/test_files/imports/glob_chain.fe new file mode 100644 index 0000000000..ae84f95bfe --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_chain.fe @@ -0,0 +1,12 @@ +use foo::* + +mod foo { + pub use MyEnum::* + + pub struct Variant {} + + pub enum MyEnum { + Variant + Variant2 + } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/glob_chain.snap b/crates/hir-analysis/test_files/imports/glob_chain.snap new file mode 100644 index 0000000000..b13fe3c74f --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_chain.snap @@ -0,0 +1,18 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/glob_chain.fe +--- +note: + ┌─ test_file.fe:1:1 + │ +1 │ use foo::* + │ ^^^^^^^^^^ test_file::foo::MyEnum | test_file::foo::MyEnum::Variant | test_file::foo::MyEnum::Variant2 | test_file::foo::Variant + +note: + ┌─ test_file.fe:4:5 + │ +4 │ pub use MyEnum::* + │ ^^^^^^^^^^^^^^^^^ test_file::foo::MyEnum::Variant | test_file::foo::MyEnum::Variant2 + + diff --git a/crates/hir-analysis/test_files/imports/glob_mutual_dep.fe b/crates/hir-analysis/test_files/imports/glob_mutual_dep.fe new file mode 100644 index 0000000000..6909683f98 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_mutual_dep.fe @@ -0,0 +1,13 @@ +use foo::* + +pub mod foo { + pub use super::bar::* + + pub struct Foo {} +} + +pub mod bar { + pub use super::foo::* + + pub struct Bar {} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap b/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap new file mode 100644 index 0000000000..e351d68f38 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap @@ -0,0 +1,24 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/glob_mutual_dep.fe +--- +note: + ┌─ test_file.fe:1:1 + │ +1 │ use foo::* + │ ^^^^^^^^^^ test_file::bar::Bar | test_file::foo::Foo + +note: + ┌─ test_file.fe:4:5 + │ +4 │ pub use super::bar::* + │ ^^^^^^^^^^^^^^^^^^^^^ test_file::bar::Bar | test_file::foo::Foo + +note: + ┌─ test_file.fe:10:5 + │ +10 │ pub use super::foo::* + │ 
^^^^^^^^^^^^^^^^^^^^^ test_file::bar::Bar | test_file::foo::Foo + + diff --git a/crates/hir-analysis/test_files/imports/glob_shadow.fe b/crates/hir-analysis/test_files/imports/glob_shadow.fe new file mode 100644 index 0000000000..f0bc26c979 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_shadow.fe @@ -0,0 +1,12 @@ +use foo::* + +mod foo { + pub use MyEnum::* + + pub const Variant: i32 = 0 + + pub enum MyEnum { + Variant + Variant2 + } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/glob_shadow.snap b/crates/hir-analysis/test_files/imports/glob_shadow.snap new file mode 100644 index 0000000000..588425ab8a --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_shadow.snap @@ -0,0 +1,18 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/glob_shadow.fe +--- +note: + ┌─ test_file.fe:1:1 + │ +1 │ use foo::* + │ ^^^^^^^^^^ test_file::foo::MyEnum | test_file::foo::MyEnum::Variant2 | test_file::foo::Variant + +note: + ┌─ test_file.fe:4:5 + │ +4 │ pub use MyEnum::* + │ ^^^^^^^^^^^^^^^^^ test_file::foo::MyEnum::Variant | test_file::foo::MyEnum::Variant2 + + diff --git a/crates/hir-analysis/test_files/imports/multiple_domains.fe b/crates/hir-analysis/test_files/imports/multiple_domains.fe new file mode 100644 index 0000000000..903dcd068e --- /dev/null +++ b/crates/hir-analysis/test_files/imports/multiple_domains.fe @@ -0,0 +1,6 @@ +use foo::S + +mod foo { + pub struct S {} + pub fn S() {} +} diff --git a/crates/hir-analysis/test_files/imports/multiple_domains.snap b/crates/hir-analysis/test_files/imports/multiple_domains.snap new file mode 100644 index 0000000000..d4ced34a33 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/multiple_domains.snap @@ -0,0 +1,12 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/multiple_domains.fe +--- +note: + ┌─ test_file.fe:1:1 + │ +1 │ use foo::S + │ ^^^^^^^^^^ test_file::foo::S | test_file::foo::S + + diff --git a/crates/hir-analysis/test_files/imports/use_depends_glob.fe b/crates/hir-analysis/test_files/imports/use_depends_glob.fe new file mode 100644 index 0000000000..219fefc84f --- /dev/null +++ b/crates/hir-analysis/test_files/imports/use_depends_glob.fe @@ -0,0 +1,9 @@ +use bar::Bar +use foo::* + +mod foo { + pub mod bar { + pub struct Bar {} + } + +} diff --git a/crates/hir-analysis/test_files/imports/use_depends_glob.snap b/crates/hir-analysis/test_files/imports/use_depends_glob.snap new file mode 100644 index 0000000000..5c16f75413 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/use_depends_glob.snap @@ -0,0 +1,18 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/use_depends_glob.fe +--- +note: + ┌─ test_file.fe:1:1 + │ +1 │ use bar::Bar + │ ^^^^^^^^^^^^ test_file::foo::bar::Bar + +note: + ┌─ test_file.fe:2:1 + │ +2 │ use foo::* + │ ^^^^^^^^^^ test_file::foo::bar + + diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs new file mode 100644 index 0000000000..04877f3fbb --- /dev/null +++ b/crates/hir-analysis/tests/import.rs @@ -0,0 +1,71 @@ +mod test_db; +use test_db::{HirAnalysisTestDb, HirPropertyFormatter}; + +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use fe_compiler_test_utils::snap_test; +use fe_hir_analysis::name_resolution::{ + import_resolver::ResolvedImports, name_resolver::NameDerivation, 
resolve_imports_with_diag, +}; +use hir::hir_def::Use; +use rustc_hash::FxHashMap; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/imports", + glob: "*.fe" +)] +fn test_standalone(fixture: Fixture<&str>) { + let mut db = HirAnalysisTestDb::default(); + let path = Path::new(fixture.path()); + let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); + let (top_mod, mut prop_formatter) = db.new_stand_alone(file_name, fixture.content()); + + let (resolved_imports, diags) = resolve_imports_with_diag(&db, top_mod.ingot(&db)); + if !diags.is_empty() { + panic!("Failed to resolve imports: {:?}", diags); + } + + let res = format_imports(&db, &mut prop_formatter, resolved_imports); + snap_test!(res, fixture.path()); +} + +fn format_imports( + db: &HirAnalysisTestDb, + prop_formatter: &mut HirPropertyFormatter, + imports: &ResolvedImports, +) -> String { + let mut use_res_map: FxHashMap> = FxHashMap::default(); + + for name_resolved in imports.named_resolved.values().flat_map(|r| r.values()) { + for res in name_resolved.binding.iter() { + match res.derivation { + NameDerivation::NamedImported(use_) => use_res_map + .entry(use_) + .or_default() + .push(res.pretty_path(db).unwrap()), + _ => unreachable!(), + } + } + } + + for (_, glob_set) in imports.glob_resolved.iter() { + dbg!(glob_set.iter().count()); + for (&use_, res_set) in glob_set.iter() { + for res in res_set.values().flatten() { + use_res_map + .entry(use_) + .or_default() + .push(res.pretty_path(db).unwrap()) + } + } + } + for (use_, mut values) in use_res_map.into_iter() { + let use_span = use_.lazy_span().into(); + values.sort_unstable(); + let imported_names = values.join(" | "); + prop_formatter.set_properties(use_.top_mod(db), use_span, imported_names) + } + + prop_formatter.format_all_properties(db) +} diff --git a/crates/hir-analysis/tests/test_db.rs b/crates/hir-analysis/tests/test_db.rs new file mode 100644 index 0000000000..bf5a4117a7 --- /dev/null +++ b/crates/hir-analysis/tests/test_db.rs @@ -0,0 +1,148 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use codespan_reporting::{ + diagnostic::{Diagnostic, Label}, + files::SimpleFiles, + term::{ + self, + termcolor::{BufferWriter, ColorChoice}, + }, +}; +use common::{ + diagnostics::Span, + input::{IngotKind, Version}, + InputFile, InputIngot, +}; +use hir::{ + hir_def::TopLevelMod, + lower, + span::{DynLazySpan, LazySpan}, + HirDb, LowerHirDb, SpannedHirDb, +}; +use rustc_hash::FxHashMap; + +type CodeSpanFileId = usize; + +#[salsa::db(common::Jar, hir::Jar, fe_hir_analysis::Jar)] +pub struct HirAnalysisTestDb { + storage: salsa::Storage, +} + +impl HirAnalysisTestDb { + pub fn new_stand_alone( + &mut self, + file_name: &str, + text: &str, + ) -> (TopLevelMod, HirPropertyFormatter) { + let kind = IngotKind::StandAlone; + let version = Version::new(0, 0, 1); + let ingot = InputIngot::new(self, file_name, kind, version, BTreeSet::default()); + let root = InputFile::new(self, ingot, "test_file.fe".into(), text.to_string()); + ingot.set_root_file(self, root); + ingot.set_files(self, [root].into()); + + let mut prop_formatter = HirPropertyFormatter::default(); + let top_mod = self.register_file(&mut prop_formatter, root); + (top_mod, prop_formatter) + } + + fn register_file( + &self, + prop_formatter: &mut HirPropertyFormatter, + input_file: InputFile, + ) -> TopLevelMod { + let top_mod = lower::map_file_to_mod(self, input_file); + let path = input_file.path(self); + let text = input_file.text(self); + prop_formatter.register_top_mod(path.as_str(), text, 
top_mod); + top_mod + } +} + +impl Default for HirAnalysisTestDb { + fn default() -> Self { + let db = Self { + storage: Default::default(), + }; + db.prefill(); + db + } +} + +pub struct HirPropertyFormatter { + properties: BTreeMap>, + top_mod_to_file: FxHashMap, + code_span_files: SimpleFiles, +} + +impl HirPropertyFormatter { + pub fn set_properties(&mut self, top_mod: TopLevelMod, span: DynLazySpan, prop: String) { + self.properties + .entry(top_mod) + .or_default() + .push((prop, span)); + } + + pub fn format_all_properties(&mut self, db: &dyn SpannedHirDb) -> String { + let writer = BufferWriter::stderr(ColorChoice::Never); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for top_mod in self.top_mod_to_file.keys() { + if !self.properties.contains_key(top_mod) { + continue; + } + + let diags = self.properties[top_mod] + .iter() + .map(|(prop, span)| { + let (span, diag) = self.property_to_diag(db, *top_mod, prop, span.clone()); + ((span.file, span.range.start()), diag) + }) + .collect::>(); + + for diag in diags.values() { + term::emit(&mut buffer, &config, &self.code_span_files, diag).unwrap(); + } + } + + std::str::from_utf8(buffer.as_slice()).unwrap().to_string() + } + + fn property_to_diag( + &self, + db: &dyn SpannedHirDb, + top_mod: TopLevelMod, + prop: &str, + span: DynLazySpan, + ) -> (Span, Diagnostic) { + let file_id = self.top_mod_to_file[&top_mod]; + let span = span.resolve(db).unwrap(); + let diag = Diagnostic::note() + .with_labels(vec![Label::primary(file_id, span.range).with_message(prop)]); + (span, diag) + } + + fn register_top_mod(&mut self, path: &str, text: &str, top_mod: TopLevelMod) { + let file_id = self.code_span_files.add(path.to_string(), text.to_string()); + self.top_mod_to_file.insert(top_mod, file_id); + } +} + +impl Default for HirPropertyFormatter { + fn default() -> Self { + Self { + properties: Default::default(), + top_mod_to_file: Default::default(), + code_span_files: SimpleFiles::new(), + } + } +} + +impl HirDb for HirAnalysisTestDb {} +impl SpannedHirDb for HirAnalysisTestDb {} +impl LowerHirDb for HirAnalysisTestDb {} + +impl salsa::Database for HirAnalysisTestDb { + fn salsa_event(&self, _: salsa::Event) {} +} diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index ba4fbcf3c9..d2da77cbf7 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -233,6 +233,15 @@ impl ScopeId { let s_graph = self.top_mod.scope_graph(db); self.local_id.name_span(s_graph) } + + pub fn pretty_path(self, db: &dyn HirDb) -> Option { + if let Some(parent) = self.parent(db) { + let parent_path = parent.pretty_path(db)?; + Some(format!("{}::{}", parent_path, self.name(db)?.data(db))) + } else { + self.name(db).map(|name| name.data(db)) + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] From 1d624eca0db8eac66cb3ea6f50a5fae84c006e9f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 23 May 2023 17:21:15 +0200 Subject: [PATCH 168/678] Add `Spanned` type --- crates/hir/src/hir_def/item.rs | 29 +++++++++++++++++++++++---- crates/hir/src/hir_def/mod.rs | 23 ++++++++++++++++++++- crates/hir/src/hir_def/scope_graph.rs | 25 +++++++++++++++++------ crates/hir/src/lower/scope_builder.rs | 6 +++--- 4 files changed, 69 insertions(+), 14 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 3d40e64880..fe94e9de2b 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ 
-204,14 +204,29 @@ impl TopLevelMod { lower::scope_graph_impl(db, self) } - pub fn parent(self, db: &dyn HirDb) -> Option { + /// Returns the child top level modules of `self`. + pub fn child_top_mods(self, db: &dyn HirDb) -> impl Iterator + '_ { let module_tree = self.ingot(db).module_tree(db); - module_tree.parent(self) + module_tree.children(self) + } + + /// Returns the top level children of this module. + /// If you need all the children, use [`children_nested`] instead. + pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { + let s_graph = self.scope_graph(db); + let scope = s_graph.scope_from_item(self.into()); + s_graph.child_items(scope) + } + + /// Returns all the children of this module, including nested items. + pub fn children_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { + let s_graph = self.scope_graph(db); + s_graph.items_dfs() } - pub fn children(self, db: &dyn HirDb) -> impl Iterator + '_ { + pub fn parent(self, db: &dyn HirDb) -> Option { let module_tree = self.ingot(db).module_tree(db); - module_tree.children(self) + module_tree.parent(self) } pub fn vis(self, _db: &dyn HirDb) -> Visibility { @@ -239,6 +254,12 @@ impl Mod { pub fn lazy_span(self) -> LazyModSpan { LazyModSpan::new(self) } + + pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = s_graph.scope_from_item(self.into()); + s_graph.child_items(scope) + } } #[salsa::tracked] diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index f93f0eca42..48fb0b4c09 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -14,6 +14,8 @@ pub mod use_tree; pub(crate) mod module_tree; +use std::ops::Deref; + pub use attr::*; pub use body::*; use common::{input::IngotKind, InputIngot}; @@ -30,7 +32,26 @@ pub use use_tree::*; use num_bigint::BigUint; -use crate::{external_ingots_impl, HirDb}; +use crate::{external_ingots_impl, span::LazySpan, HirDb}; + +pub struct Spanned +where + S: LazySpan, +{ + pub value: T, + pub span: S, +} + +impl Deref for Spanned +where + S: LazySpan, +{ + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.value + } +} #[salsa::tracked] pub struct IngotId { diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index d2da77cbf7..6cb01b7fba 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -1,3 +1,5 @@ +use std::collections::BTreeSet; + use cranelift_entity::{entity_impl, PrimaryMap}; use rustc_hash::{FxHashMap, FxHashSet}; @@ -10,7 +12,7 @@ pub struct ScopeGraph { pub top_mod: TopLevelMod, pub scopes: PrimaryMap, pub item_map: FxHashMap, - pub unresolved_uses: Vec, + pub unresolved_uses: BTreeSet, } impl ScopeGraph { @@ -22,6 +24,17 @@ impl ScopeGraph { } } + /// Returns the direct child items of the scope. 
+ pub fn child_items(&self, scope: LocalScopeId) -> impl Iterator + '_ { + self.edges(scope).iter().filter_map(|edge| match edge.kind { + EdgeKind::Lex(_) | EdgeKind::Super(_) | EdgeKind::Ingot(_) | EdgeKind::SelfTy(_) => { + None + } + + _ => self.item_from_scope(edge.dest.to_local()), + }) + } + pub fn edges(&self, scope: LocalScopeId) -> &[ScopeEdge] { &self.scopes[scope].edges } @@ -30,7 +43,7 @@ impl ScopeGraph { &self.scopes[scope] } - pub fn scope_item(&self, scope: LocalScopeId) -> Option { + pub fn item_from_scope(&self, scope: LocalScopeId) -> Option { if let ScopeKind::Item(item) = self.scope_data(scope).kind { Some(item) } else { @@ -38,7 +51,7 @@ impl ScopeGraph { } } - pub fn item_scope(&self, item: ItemKind) -> LocalScopeId { + pub fn scope_from_item(&self, item: ItemKind) -> LocalScopeId { self.item_map[&item] } } @@ -55,14 +68,14 @@ impl<'a> std::iter::Iterator for ScopeGraphItemIterDfs<'a> { fn next(&mut self) -> Option { let item = self.stack.pop()?; self.visited.insert(item); - let scope_id = self.graph.item_scope(item); + let scope_id = self.graph.scope_from_item(item); for edge in self.graph.edges(scope_id) { let ScopeId { top_mod, local_id } = edge.dest; if top_mod != self.graph.top_mod { continue; } - if let Some(item) = self.graph.scope_item(local_id) { + if let Some(item) = self.graph.item_from_scope(local_id) { if !self.visited.contains(&item) { self.stack.push(item); } @@ -127,7 +140,7 @@ impl ScopeId { pub fn from_item(db: &dyn HirDb, item: ItemKind) -> Self { let top_mod = item.top_mod(db); let scope_graph = top_mod.scope_graph(db); - Self::new(top_mod, scope_graph.item_scope(item)) + Self::new(top_mod, scope_graph.scope_from_item(item)) } pub fn root(top_mod: TopLevelMod) -> Self { diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 2aa36ed376..a2d32b3999 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -38,7 +38,7 @@ impl<'db> ScopeGraphBuilder<'db> { pub fn build(self) -> ScopeGraph { debug_assert!(matches!( - self.graph.scope_item(LocalScopeId::root()), + self.graph.item_from_scope(LocalScopeId::root()), Some(ItemKind::TopMod(_)) )); @@ -69,7 +69,7 @@ impl<'db> ScopeGraphBuilder<'db> { top_mod.ingot(self.db).root_mod(self.db), EdgeKind::ingot(), ); - for child in top_mod.children(self.db) { + for child in top_mod.child_top_mods(self.db) { let child_name = child.name(self.db); let edge = EdgeKind::mod_(child_name); self.add_global_edge(item_scope, child, edge) @@ -199,7 +199,7 @@ impl<'db> ScopeGraphBuilder<'db> { } Use(use_) => { - self.graph.unresolved_uses.push(use_); + self.graph.unresolved_uses.insert(use_); self.add_lex_edge(item_scope, parent_scope); EdgeKind::anon() From 5e89674e9b14355afd2154cecd67144b88dd51dc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 23 May 2023 20:51:17 +0200 Subject: [PATCH 169/678] Change the structure of `ScopeGraph` to let salsa work maximally --- .../src/name_resolution/import_resolver.rs | 8 +- .../src/name_resolution/name_resolver.rs | 32 +- .../src/name_resolution/visibility_checker.rs | 9 +- crates/hir/src/hir_def/item.rs | 23 +- crates/hir/src/hir_def/scope_graph.rs | 460 ++++++++---------- crates/hir/src/lib.rs | 17 +- crates/hir/src/lower/scope_builder.rs | 334 ++++++++----- crates/hir/src/span/item.rs | 3 +- 8 files changed, 449 insertions(+), 437 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 
87b679d598..2eba2df207 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -738,7 +738,7 @@ struct IntermediateUse { impl IntermediateUse { fn new(db: &dyn HirAnalysisDb, use_: Use) -> Self { - let scope = ScopeId::from_item(db.as_hir_db(), use_.into()) + let scope = ScopeId::from_item(use_.into()) .lex_parent(db.as_hir_db()) .unwrap(); Self { @@ -930,7 +930,7 @@ impl Importer for IntermediateResolvedImports { db: &'a dyn HirAnalysisDb, scope: ScopeId, ) -> Option<&'a NamedImportSet> { - if scope.top_mod().ingot(db.as_hir_db()) != self.ingot { + if scope.top_mod(db.as_hir_db()).ingot(db.as_hir_db()) != self.ingot { resolved_imports_for_scope(db, scope) .named_resolved .get(&scope) @@ -944,7 +944,7 @@ impl Importer for IntermediateResolvedImports { db: &'a dyn HirAnalysisDb, scope: ScopeId, ) -> Option<&'a GlobImportSet> { - if scope.top_mod().ingot(db.as_hir_db()) != self.ingot { + if scope.top_mod(db.as_hir_db()).ingot(db.as_hir_db()) != self.ingot { resolved_imports_for_scope(db, scope) .glob_resolved .get(&scope) @@ -958,7 +958,7 @@ impl Importer for IntermediateResolvedImports { db: &'a dyn HirAnalysisDb, scope: ScopeId, ) -> &'a [NameBinding] { - if scope.top_mod().ingot(db.as_hir_db()) != self.ingot { + if scope.top_mod(db.as_hir_db()).ingot(db.as_hir_db()) != self.ingot { &resolved_imports_for_scope(db, scope).unnamed_resolved } else { &self.resolved_imports.unnamed_resolved diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index d4ecb40e56..80adabe60a 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -10,8 +10,7 @@ use hir::{ prim_ty::PrimTy, scope_graph::{ AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, ScopeId, - ScopeKind, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, - VariantEdge, + SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, VariantEdge, }, IdentId, ItemKind, Partial, PathId, Use, }, @@ -119,11 +118,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { } // Set pred segment to the current scope. 
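
The `pred` value the comment above refers to is the resolution of the previous path segment: resolution starts from the current scope and looks each segment up in whatever the segment before it resolved to. A much-simplified sketch of that walk follows; the `HashMap` of edges and the string names stand in for the real scope graph, `NameRes`, domains, and query caching.

use std::collections::HashMap;

// A toy scope graph: an edge maps (scope, segment name) to the scope it resolves to.
type Scope = &'static str;

fn resolve_path(
    mut pred: Scope,
    segments: &[&'static str],
    edges: &HashMap<(Scope, &'static str), Scope>,
) -> Option<Scope> {
    // `pred` starts at the current scope; each segment is looked up in the
    // scope that the previous segment resolved to.
    for seg in segments {
        pred = *edges.get(&(pred, *seg))?;
    }
    Some(pred)
}

fn main() {
    let mut edges = HashMap::new();
    edges.insert(("ingot", "foo"), "foo");
    edges.insert(("foo", "Bar"), "foo::Bar");
    assert_eq!(resolve_path("ingot", &["foo", "Bar"], &edges), Some("foo::Bar"));
    assert_eq!(resolve_path("ingot", &["baz"], &edges), None);
}
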
- let mut pred = NameRes::new_scope( - scope, - NameDomain::from_scope(self.db, scope), - NameDerivation::Def, - ); + let mut pred = + NameRes::new_scope(scope, NameDomain::from_scope(scope), NameDerivation::Def); let seg_len = segments.len(); for (i, seg) in segments[0..seg_len - 1].iter().enumerate() { @@ -165,7 +161,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { if found_scopes.insert(edge.dest) { let res = NameRes::new_scope( edge.dest, - NameDomain::from_scope(self.db, edge.dest), + NameDomain::from_scope(edge.dest), NameDerivation::Def, ); if binding.push(&res).is_some() { @@ -229,7 +225,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { if query.directive.is_allowed_domain(NameDomain::Item as u8) { query .scope - .top_mod() + .top_mod(self.db.as_hir_db()) .ingot(self.db.as_hir_db()) .external_ingots(self.db.as_hir_db()) .iter() @@ -316,11 +312,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { if !found_kinds.insert((name, scope.into())) { continue; } - let res = NameRes::new_scope( - scope, - NameDomain::from_scope(self.db, scope), - NameDerivation::Def, - ); + let res = NameRes::new_scope(scope, NameDomain::from_scope(scope), NameDerivation::Def); *found_domains.entry(name).or_default() |= res.domain as u8; res_collection.entry(name).or_default().push(res); @@ -966,14 +958,14 @@ pub enum NameDomain { } impl NameDomain { - fn from_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> Self { - match scope.data(db.as_hir_db()).kind { - ScopeKind::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeKind::FnParam(_) => { + fn from_scope(scope: ScopeId) -> Self { + match scope { + ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeId::FnParam(..) => { Self::Value } - ScopeKind::Item(_) | ScopeKind::GenericParam(_) => Self::Item, - ScopeKind::Field(_) => Self::Field, - ScopeKind::Variant(_) => Self::Value, + ScopeId::Item(_) | ScopeId::GenericParam(..) => Self::Item, + ScopeId::Field(..) => Self::Field, + ScopeId::Variant(..) => Self::Value, } } } diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs index 7b2e7fb825..5e5d5ad66a 100644 --- a/crates/hir-analysis/src/name_resolution/visibility_checker.rs +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -1,7 +1,4 @@ -use hir::hir_def::{ - scope_graph::{ScopeId, ScopeKind}, - Use, -}; +use hir::hir_def::{scope_graph::ScopeId, Use}; use crate::HirAnalysisDb; @@ -16,7 +13,7 @@ pub fn is_scope_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, target_scope return true; } - let Some(def_scope) = (if matches!(target_scope.kind(db.as_hir_db()), ScopeKind::Field(_) | ScopeKind::Variant(_)) { + let Some(def_scope) = (if matches!(target_scope, ScopeId::Field(..) | ScopeId::Variant(..)) { // We treat fields as if they are defined in the parent of the parent scope so // that field can be accessible from the scope where the parent is defined. target_scope.parent(db.as_hir_db()).and_then(|scope| scope.parent(db.as_hir_db())) @@ -32,7 +29,7 @@ pub fn is_scope_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, target_scope /// Return `true` if the given `use_` is visible from the `ref_scope`. 
pub(super) fn is_use_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, use_: Use) -> bool { - let use_scope = ScopeId::from_item(db.as_hir_db(), use_.into()); + let use_scope = ScopeId::from_item(use_.into()); if use_scope.data(db.as_hir_db()).vis.is_pub() { return true; diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index fe94e9de2b..dcb0e74164 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -22,8 +22,9 @@ use crate::{ }; use super::{ - scope_graph::ScopeGraph, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, IngotId, - Partial, TypeId, UseAlias, WhereClauseId, + scope_graph::{ScopeGraph, ScopeId}, + AttrListId, Body, FnParamListId, GenericParamListId, IdentId, IngotId, Partial, TypeId, + UseAlias, WhereClauseId, }; #[derive( @@ -68,6 +69,18 @@ pub enum GenericParamOwner { } impl GenericParamOwner { + pub fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { + match self { + GenericParamOwner::Func(func) => func.top_mod(db), + GenericParamOwner::Struct(struct_) => struct_.top_mod(db), + GenericParamOwner::Enum(enum_) => enum_.top_mod(db), + GenericParamOwner::TypeAlias(type_alias) => type_alias.top_mod(db), + GenericParamOwner::Impl(impl_) => impl_.top_mod(db), + GenericParamOwner::Trait(trait_) => trait_.top_mod(db), + GenericParamOwner::ImplTrait(impl_trait) => impl_trait.top_mod(db), + } + } + pub fn params(&self, db: &dyn HirDb) -> GenericParamListId { match self { GenericParamOwner::Func(func) => func.generic_params(db), @@ -214,14 +227,14 @@ impl TopLevelMod { /// If you need all the children, use [`children_nested`] instead. pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { let s_graph = self.scope_graph(db); - let scope = s_graph.scope_from_item(self.into()); + let scope = ScopeId::from_item(self.into()); s_graph.child_items(scope) } /// Returns all the children of this module, including nested items. 
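
The `items_dfs` iterator added in this patch walks the scope graph with an explicit stack and a visited set, skipping edges whose destination lies outside the current top-level module. A self-contained sketch of that traversal over a toy adjacency list follows; the real iterator also maps scopes back to `ItemKind`.

use std::collections::HashSet;

// A toy adjacency list stands in for `ScopeGraph`.
fn items_dfs(edges: &[Vec<usize>], root: usize) -> Vec<usize> {
    let mut visited = HashSet::new();
    let mut stack = vec![root];
    let mut order = Vec::new();
    while let Some(item) = stack.pop() {
        if !visited.insert(item) {
            continue; // already yielded through another edge
        }
        order.push(item);
        for &dest in &edges[item] {
            if !visited.contains(&dest) {
                stack.push(dest);
            }
        }
    }
    order
}

fn main() {
    // 0 is the top-level module; 1 and 2 are its items, and 3 is nested under 2.
    let edges = vec![vec![1, 2], vec![], vec![3], vec![]];
    assert_eq!(items_dfs(&edges, 0), vec![0, 2, 3, 1]);
}
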
pub fn children_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { let s_graph = self.scope_graph(db); - s_graph.items_dfs() + s_graph.items_dfs(db) } pub fn parent(self, db: &dyn HirDb) -> Option { @@ -257,7 +270,7 @@ impl Mod { pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { let s_graph = self.top_mod(db).scope_graph(db); - let scope = s_graph.scope_from_item(self.into()); + let scope = ScopeId::from_item(self.into()); s_graph.child_items(scope) } } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 6cb01b7fba..3bd35de596 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -1,23 +1,22 @@ use std::collections::BTreeSet; -use cranelift_entity::{entity_impl, PrimaryMap}; use rustc_hash::{FxHashMap, FxHashSet}; use crate::{hir_def::GenericParamOwner, span::DynLazySpan, HirDb}; -use super::{Enum, Func, IdentId, IngotId, ItemKind, Struct, TopLevelMod, Use, Visibility}; +use super::{Enum, Func, IdentId, IngotId, ItemKind, TopLevelMod, Use, Visibility}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct ScopeGraph { pub top_mod: TopLevelMod, - pub scopes: PrimaryMap, - pub item_map: FxHashMap, - pub unresolved_uses: BTreeSet, + pub scopes: FxHashMap, + pub unresolved_uses: FxHashSet, } impl ScopeGraph { - pub fn items_dfs(&self) -> impl Iterator + '_ { + pub fn items_dfs<'a>(&'a self, db: &'a dyn HirDb) -> impl Iterator + 'a { ScopeGraphItemIterDfs { + db, graph: self, visited: Default::default(), stack: vec![self.top_mod.into()], @@ -25,151 +24,84 @@ impl ScopeGraph { } /// Returns the direct child items of the scope. - pub fn child_items(&self, scope: LocalScopeId) -> impl Iterator + '_ { - self.edges(scope).iter().filter_map(|edge| match edge.kind { + pub fn child_items(&self, scope: ScopeId) -> impl Iterator + '_ { + self.edges(scope).filter_map(|edge| match edge.kind { EdgeKind::Lex(_) | EdgeKind::Super(_) | EdgeKind::Ingot(_) | EdgeKind::SelfTy(_) => { None } - _ => self.item_from_scope(edge.dest.to_local()), + _ => edge.dest.to_item(), }) } - pub fn edges(&self, scope: LocalScopeId) -> &[ScopeEdge] { - &self.scopes[scope].edges + pub fn edges(&self, scope: ScopeId) -> impl Iterator + '_ { + self.scopes[&scope].edges.iter() } - pub fn scope_data(&self, scope: LocalScopeId) -> &LocalScope { + pub fn scope_data(&self, scope: &ScopeId) -> &Scope { &self.scopes[scope] } - - pub fn item_from_scope(&self, scope: LocalScopeId) -> Option { - if let ScopeKind::Item(item) = self.scope_data(scope).kind { - Some(item) - } else { - None - } - } - - pub fn scope_from_item(&self, item: ItemKind) -> LocalScopeId { - self.item_map[&item] - } -} - -struct ScopeGraphItemIterDfs<'a> { - graph: &'a ScopeGraph, - visited: FxHashSet, - stack: Vec, } -impl<'a> std::iter::Iterator for ScopeGraphItemIterDfs<'a> { - type Item = ItemKind; - - fn next(&mut self) -> Option { - let item = self.stack.pop()?; - self.visited.insert(item); - let scope_id = self.graph.scope_from_item(item); - - for edge in self.graph.edges(scope_id) { - let ScopeId { top_mod, local_id } = edge.dest; - if top_mod != self.graph.top_mod { - continue; - } - if let Some(item) = self.graph.item_from_scope(local_id) { - if !self.visited.contains(&item) { - self.stack.push(item); - } - } - } - Some(item) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct LocalScope { - pub kind: ScopeKind, - pub edges: Vec, - pub parent_module: Option, - pub parent_scope: Option, - pub vis: Visibility, +#[derive(Debug, Clone, Copy, 
PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ScopeId { + Item(ItemKind), + GenericParam(ItemKind, usize), + FnParam(ItemKind, usize), + Field(ItemKind, usize), + Variant(ItemKind, usize), } - -impl LocalScope { - pub fn new( - kind: ScopeKind, - parent_module: Option, - parent_scope: Option, - vis: Visibility, - ) -> Self { - Self { - kind, - edges: vec![], - parent_module, - parent_scope, - vis, +impl ScopeId { + pub fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { + match self { + ScopeId::Item(item) => item.top_mod(db), + ScopeId::GenericParam(item, _) => item.top_mod(db), + ScopeId::FnParam(item, _) => item.top_mod(db), + ScopeId::Field(item, _) => item.top_mod(db), + ScopeId::Variant(item, _) => item.top_mod(db), } } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ScopeKind { - Item(ItemKind), - GenericParam(usize), - FnParam(usize), - Field(usize), - Variant(usize), -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ScopeEdge { - pub dest: ScopeId, - pub kind: EdgeKind, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct ScopeId { - top_mod: TopLevelMod, - local_id: LocalScopeId, -} -impl ScopeId { - pub fn new(top_mod: TopLevelMod, local_id: LocalScopeId) -> Self { - Self { top_mod, local_id } + pub fn from_item(item: ItemKind) -> Self { + Self::Item(item) } - pub fn from_item(db: &dyn HirDb, item: ItemKind) -> Self { - let top_mod = item.top_mod(db); - let scope_graph = top_mod.scope_graph(db); - Self::new(top_mod, scope_graph.scope_from_item(item)) + pub fn to_item(self) -> Option { + match self { + ScopeId::Item(item) => Some(item), + _ => None, + } } pub fn root(top_mod: TopLevelMod) -> Self { - Self::new(top_mod, LocalScopeId::root()) + Self::Item(top_mod.into()) } /// Returns the scope graph containing this scope. pub fn scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { - self.top_mod.scope_graph(db) - } - - /// Returns the local id of the scope graph. - pub fn to_local(self) -> LocalScopeId { - self.local_id + self.top_mod(db).scope_graph(db) } - pub fn edges(self, db: &dyn HirDb) -> &[ScopeEdge] { - self.scope_graph(db).edges(self.local_id) + pub fn edges(self, db: &dyn HirDb) -> impl Iterator { + self.scope_graph(db).edges(self) } /// Returns `true` if `scope` is reachable from `self` by following only /// lexical edges. - pub fn is_lex_child(self, db: &dyn HirDb, parent: ScopeId) -> bool { - if self.top_mod != parent.top_mod { + pub fn is_lex_child(self, db: &dyn HirDb, scope: &ScopeId) -> bool { + if self.top_mod(db) != scope.top_mod(db) { return false; } - let scope_graph = self.scope_graph(db); - self.local_id.is_lex_child(scope_graph, parent.local_id) + match self.lex_parent(db) { + Some(lex_parent) => { + if &lex_parent == scope { + return true; + } + lex_parent.is_lex_child(db, scope) + } + None => false, + } } /// Returns true if `self` is a transitive reflexive child of `of`. @@ -186,22 +118,13 @@ impl ScopeId { false } - /// Returns the `TopLevelMod` containing the scope . - pub fn top_mod(self) -> TopLevelMod { - self.top_mod - } - /// Return the `IngotId` containing the scope. 
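
The reworked `ScopeId` above is a self-describing key: a non-item scope is addressed by its owning item plus an index instead of a slot in a per-file arena, which is presumably what lets salsa cache per-scope queries across edits, as the commit message suggests. A simplified model of that encoding follows; the `ItemKind` here is a toy stand-in, and the real enum also has `GenericParam`, `FnParam`, and `Variant` arms.

// Toy stand-ins for the real `ItemKind` and `ScopeId`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum ItemKind {
    Struct(u32),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum ScopeId {
    Item(ItemKind),
    Field(ItemKind, usize),
}

fn main() {
    let my_struct = ItemKind::Struct(0);
    // The second field of `my_struct` is addressed without consulting any arena.
    let field_scope = ScopeId::Field(my_struct, 1);
    assert_ne!(field_scope, ScopeId::Item(my_struct));

    // The owning item is always recoverable from the id itself.
    let ScopeId::Field(owner, idx) = field_scope else { unreachable!() };
    assert_eq!(owner, my_struct);
    assert_eq!(idx, 1);
}
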
pub fn ingot(self, db: &dyn HirDb) -> IngotId { - self.top_mod.ingot(db) - } - - pub fn data(self, db: &dyn HirDb) -> &LocalScope { - self.top_mod.scope_graph(db).scope_data(self.local_id) + self.top_mod(db).ingot(db) } - pub fn kind(self, db: &dyn HirDb) -> ScopeKind { - self.data(db).kind + pub fn data(self, db: &dyn HirDb) -> &Scope { + self.top_mod(db).scope_graph(db).scope_data(&self) } pub fn parent(self, db: &dyn HirDb) -> Option { @@ -226,25 +149,90 @@ impl ScopeId { } pub fn parent_module(self, db: &dyn HirDb) -> Option { - self.data(db).parent_module + let parent_item = self.parent_item(db)?; + match parent_item { + ItemKind::Mod(_) | ItemKind::TopMod(_) => Some(Self::Item(parent_item)), + _ => { + let parent_id = Self::from_item(parent_item); + parent_id.parent_module(db) + } + } } pub fn is_type(self, db: &dyn HirDb) -> bool { - match self.data(db).kind { - ScopeKind::Item(item) => item.is_type(), - ScopeKind::GenericParam(_) => true, + match self.data(db).id { + ScopeId::Item(item) => item.is_type(), + ScopeId::GenericParam(..) => true, _ => false, } } pub fn name(self, db: &dyn HirDb) -> Option { - let s_graph = self.top_mod.scope_graph(db); - self.local_id.name(db, s_graph) + match self.data(db).id { + ScopeId::Item(item) => item.name(db), + + ScopeId::Variant(parent, idx) => { + let enum_: Enum = parent.try_into().unwrap(); + enum_.variants(db).data(db)[idx].name.to_opt() + } + + ScopeId::Field(parent, idx) => match parent { + ItemKind::Struct(s) => s.fields(db).data(db)[idx].name.to_opt(), + ItemKind::Contract(c) => c.fields(db).data(db)[idx].name.to_opt(), + _ => unreachable!(), + }, + + ScopeId::FnParam(parent, idx) => { + let func: Func = parent.try_into().unwrap(); + func.params(db).to_opt()?.data(db)[idx].name() + } + + ScopeId::GenericParam(parent, idx) => { + let parent = GenericParamOwner::from_item_opt(parent).unwrap(); + + let params = &parent.params(db).data(db)[idx]; + params.name().to_opt() + } + } + } + + pub fn parent_item(self, db: &dyn HirDb) -> Option { + let data = self.data(db); + match data.id { + ScopeId::Item(item) => Some(item), + _ => { + let parent = data.parent_scope?; + parent.parent_item(db) + } + } } pub fn name_span(self, db: &dyn HirDb) -> Option { - let s_graph = self.top_mod.scope_graph(db); - self.local_id.name_span(s_graph) + match self.data(db).id { + ScopeId::Item(item) => item.name_span(), + + ScopeId::Variant(parent, idx) => { + let enum_: Enum = parent.try_into().unwrap(); + Some(enum_.lazy_span().variants().variant(idx).name().into()) + } + + ScopeId::Field(parent, idx) => match parent { + ItemKind::Struct(s) => Some(s.lazy_span().fields().field(idx).name().into()), + ItemKind::Contract(c) => Some(c.lazy_span().fields().field(idx).name().into()), + _ => unreachable!(), + }, + + ScopeId::FnParam(parent, idx) => { + let func: Func = parent.try_into().unwrap(); + Some(func.lazy_span().params().param(idx).name().into()) + } + + ScopeId::GenericParam(parent, idx) => { + let parent = GenericParamOwner::from_item_opt(parent).unwrap(); + + Some(parent.params_span().param(idx).into()) + } + } } pub fn pretty_path(self, db: &dyn HirDb) -> Option { @@ -257,7 +245,62 @@ impl ScopeId { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +struct ScopeGraphItemIterDfs<'a> { + db: &'a dyn HirDb, + graph: &'a ScopeGraph, + visited: FxHashSet, + stack: Vec, +} + +impl<'a> std::iter::Iterator for ScopeGraphItemIterDfs<'a> { + type Item = ItemKind; + + fn next(&mut self) -> Option { + let item = self.stack.pop()?; + 
self.visited.insert(item); + let scope_id = ScopeId::from_item(item); + + for edge in self.graph.edges(scope_id) { + let top_mod = edge.dest.top_mod(self.db); + if top_mod != self.graph.top_mod { + continue; + } + if let Some(item) = edge.dest.to_item() { + if !self.visited.contains(&item) { + self.stack.push(item); + } + } + } + Some(item) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Scope { + pub id: ScopeId, + pub edges: BTreeSet, + pub parent_scope: Option, + pub vis: Visibility, +} + +impl Scope { + pub fn new(kind: ScopeId, parent_scope: Option, vis: Visibility) -> Self { + Self { + id: kind, + edges: Default::default(), + parent_scope, + vis, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ScopeEdge { + pub dest: ScopeId, + pub kind: EdgeKind, +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub enum EdgeKind { Lex(LexEdge), Mod(ModEdge), @@ -328,149 +371,45 @@ impl EdgeKind { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct LexEdge(); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct ModEdge(pub IdentId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct TypeEdge(pub IdentId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct TraitEdge(pub IdentId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct ValueEdge(pub IdentId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct GenericParamEdge(pub IdentId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct FieldEdge(pub IdentId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct VariantEdge(pub IdentId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SuperEdge(); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct IngotEdge(); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct SelfTyEdge(); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub struct SelfEdge(); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct AnonEdge(); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct LocalScopeId(u32); -entity_impl!(LocalScopeId); - -impl LocalScopeId { - pub fn to_global(self, top_mod: TopLevelMod) -> ScopeId { - ScopeId::new(top_mod, self) - } - - /// Returns `true` 
if `scope` is reachable from `self` by following only - /// lexical edges. - pub fn is_lex_child(self, s_graph: &ScopeGraph, scope: LocalScopeId) -> bool { - let data = self.data(s_graph); - match data.parent_scope { - Some(parent) => { - if parent == scope { - return true; - } - parent.is_lex_child(s_graph, scope) - } - None => false, - } - } - - pub fn data(self, s_graph: &ScopeGraph) -> &LocalScope { - &s_graph.scopes[self] - } - - pub fn name(self, db: &dyn HirDb, s_graph: &ScopeGraph) -> Option { - match self.data(s_graph).kind { - ScopeKind::Item(item) => item.name(db), - - ScopeKind::Variant(idx) => { - let parent: Enum = self.parent_item(s_graph).unwrap().try_into().unwrap(); - parent.variants(db).data(db)[idx].name.to_opt() - } - - ScopeKind::Field(idx) => { - let parent: Struct = self.parent_item(s_graph).unwrap().try_into().unwrap(); - parent.fields(db).data(db)[idx].name.to_opt() - } - - ScopeKind::FnParam(idx) => { - let parent: Func = self.parent_item(s_graph).unwrap().try_into().unwrap(); - parent.params(db).to_opt()?.data(db)[idx].name() - } - - ScopeKind::GenericParam(idx) => { - let parent = - GenericParamOwner::from_item_opt(self.parent_item(s_graph).unwrap()).unwrap(); - - let params = &parent.params(db).data(db)[idx]; - params.name().to_opt() - } - } - } - - pub fn name_span(self, s_graph: &ScopeGraph) -> Option { - match self.data(s_graph).kind { - ScopeKind::Item(item) => item.name_span(), - - ScopeKind::Variant(idx) => { - let parent: Enum = self.parent_item(s_graph).unwrap().try_into().unwrap(); - Some(parent.lazy_span().variants().variant(idx).name().into()) - } - - ScopeKind::Field(idx) => { - let parent: Struct = self.parent_item(s_graph).unwrap().try_into().unwrap(); - Some(parent.lazy_span().fields().field(idx).name().into()) - } - - ScopeKind::FnParam(idx) => { - let parent: Func = self.parent_item(s_graph).unwrap().try_into().unwrap(); - Some(parent.lazy_span().params().param(idx).name().into()) - } - - ScopeKind::GenericParam(idx) => { - let parent = - GenericParamOwner::from_item_opt(self.parent_item(s_graph).unwrap()).unwrap(); - - Some(parent.params_span().param(idx).into()) - } - } - } - - pub fn parent(self, s_graph: &ScopeGraph) -> Option { - self.data(s_graph).parent_scope - } - - pub fn parent_item(self, s_graph: &ScopeGraph) -> Option { - match self.data(s_graph).kind { - ScopeKind::Item(item) => Some(item), - _ => { - let parent = self.parent(s_graph)?; - parent.parent_item(s_graph) - } - } - } - - pub(crate) fn root() -> Self { - LocalScopeId(0) - } -} - #[cfg(test)] mod tests { @@ -495,15 +434,16 @@ mod tests { } "#; - let scope_graph = db.parse_source(text); - assert_eq!(scope_graph.items_dfs().count(), 8); + let file = db.standalone_file(text); + let scope_graph = db.parse_source(file); + assert_eq!(scope_graph.items_dfs(&db).count(), 8); - for (i, item) in scope_graph.items_dfs().enumerate() { + for (i, item) in scope_graph.items_dfs(&db).enumerate() { match i { 0 => assert!(matches!(item, ItemKind::TopMod(_))), - 1 => assert!(matches!(item, ItemKind::Mod(_))), - 2 => assert!(matches!(item, ItemKind::Struct(_))), - 3 => assert!(matches!(item, ItemKind::Enum(_))), + 1 => assert!(matches!(item, ItemKind::Enum(_))), + 2 => assert!(matches!(item, ItemKind::Mod(_))), + 3 => assert!(matches!(item, ItemKind::Struct(_))), 4 => assert!(matches!(item, ItemKind::Mod(_))), 5 => assert!(matches!(item, ItemKind::Func(_))), 6 => assert!(matches!(item, ItemKind::Func(_))), diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 
50ff240d10..65da08cba9 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -171,8 +171,7 @@ mod test_db { } impl TestDb { - pub fn parse_source(&mut self, text: &str) -> &ScopeGraph { - let file = self.standalone_file(text); + pub fn parse_source(&self, file: InputFile) -> &ScopeGraph { let top_mod = map_file_to_mod(self, file); scope_graph(self, top_mod) } @@ -183,16 +182,20 @@ mod test_db { where ItemKind: TryInto, { - let tree = self.parse_source(text); - tree.items_dfs().find_map(|it| it.try_into().ok()).unwrap() + let file = self.standalone_file(text); + let tree = self.parse_source(file); + tree.items_dfs(self) + .find_map(|it| it.try_into().ok()) + .unwrap() } pub fn expect_items(&mut self, text: &str) -> Vec where ItemKind: TryInto, { - let tree = self.parse_source(text); - tree.items_dfs() + let file = self.standalone_file(text); + let tree = self.parse_source(file); + tree.items_dfs(self) .filter_map(|it| it.try_into().ok()) .collect() } @@ -204,7 +207,7 @@ mod test_db { &text[range.start().into()..range.end().into()] } - fn standalone_file(&mut self, text: &str) -> InputFile { + pub fn standalone_file(&mut self, text: &str) -> InputFile { let path = "hir_test"; let kind = IngotKind::StandAlone; let version = Version::new(0, 0, 1); diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index a2d32b3999..f11c953d27 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -1,10 +1,11 @@ +use cranelift_entity::{entity_impl, PrimaryMap}; +use rustc_hash::{FxHashMap, FxHashSet}; + use crate::{ hir_def::{ - scope_graph::{ - EdgeKind, LocalScope, LocalScopeId, ScopeEdge, ScopeGraph, ScopeId, ScopeKind, - }, + scope_graph::{EdgeKind, Scope, ScopeEdge, ScopeGraph, ScopeId}, EnumVariantListId, FnParamListId, FnParamName, GenericParamListId, ItemKind, - RecordFieldListId, TopLevelMod, Visibility, + RecordFieldListId, TopLevelMod, Use, Visibility, }, HirDb, }; @@ -12,9 +13,9 @@ use crate::{ pub struct ScopeGraphBuilder<'db> { pub(super) db: &'db dyn HirDb, pub(super) top_mod: TopLevelMod, - graph: ScopeGraph, - scope_stack: Vec, - module_stack: Vec, + graph: IntermediateScopeGraph, + scope_stack: Vec, + module_stack: Vec, } impl<'db> ScopeGraphBuilder<'db> { @@ -22,12 +23,7 @@ impl<'db> ScopeGraphBuilder<'db> { let mut builder = Self { db, top_mod, - graph: ScopeGraph { - top_mod, - scopes: Default::default(), - item_map: Default::default(), - unresolved_uses: Default::default(), - }, + graph: IntermediateScopeGraph::default(), scope_stack: Default::default(), module_stack: Default::default(), }; @@ -37,17 +33,13 @@ impl<'db> ScopeGraphBuilder<'db> { } pub fn build(self) -> ScopeGraph { - debug_assert!(matches!( - self.graph.item_from_scope(LocalScopeId::root()), - Some(ItemKind::TopMod(_)) - )); - - self.graph + self.graph.build(self.top_mod) } pub fn enter_scope(&mut self, is_mod: bool) { // Create dummy scope, the scope kind is initialized in `leave_scope`. 
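
The comment above describes a two-phase construction: entering a scope pushes a placeholder node, and the node is only filled in once the item it belongs to is known, when the scope is left. A self-contained sketch of that enter/leave pattern follows, with toy types in place of `IntermediateScopeGraph` and `NodeId`.

// The node is pushed as a blank placeholder on enter and filled in on leave,
// because the item a scope belongs to is only known after its children are lowered.
#[derive(Debug, Default)]
struct Node {
    item: Option<&'static str>,
    children: Vec<usize>,
}

#[derive(Default)]
struct Builder {
    nodes: Vec<Node>,
    stack: Vec<usize>,
}

impl Builder {
    fn enter_scope(&mut self) {
        let id = self.nodes.len();
        self.nodes.push(Node::default()); // dummy; initialized in `leave_scope`
        self.stack.push(id);
    }

    fn leave_scope(&mut self, item: &'static str) {
        let id = self.stack.pop().unwrap();
        self.nodes[id].item = Some(item);
        if let Some(&parent) = self.stack.last() {
            self.nodes[parent].children.push(id);
        }
    }
}

fn main() {
    let mut b = Builder::default();
    b.enter_scope(); // module scope
    b.enter_scope(); // a struct nested in the module
    b.leave_scope("MyStruct");
    b.leave_scope("my_mod");
    assert_eq!(b.nodes[0].item, Some("my_mod"));
    assert_eq!(b.nodes[0].children, vec![1]);
}
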
- let id = self.graph.scopes.push(self.dummy_scope()); + let (dummy_scope_id, dummy_scope) = self.dummy_scope(); + let id = self.graph.push(dummy_scope_id, dummy_scope); self.scope_stack.push(id); if is_mod { self.module_stack.push(id); @@ -57,49 +49,51 @@ impl<'db> ScopeGraphBuilder<'db> { pub fn leave_scope(&mut self, item: ItemKind) { use ItemKind::*; - let item_scope = self.scope_stack.pop().unwrap(); - self.initialize_item_scope(item_scope, item); + let item_node = self.scope_stack.pop().unwrap(); + self.initialize_item_scope(item_node, item); if let ItemKind::TopMod(top_mod) = item { debug_assert!(self.scope_stack.is_empty()); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); + self.graph.add_edge(item_node, item_node, EdgeKind::self_()); - self.add_global_edge( - item_scope, - top_mod.ingot(self.db).root_mod(self.db), + self.graph.add_external_edge( + item_node, + ScopeId::Item(top_mod.ingot(self.db).root_mod(self.db).into()), EdgeKind::ingot(), ); for child in top_mod.child_top_mods(self.db) { let child_name = child.name(self.db); let edge = EdgeKind::mod_(child_name); - self.add_global_edge(item_scope, child, edge) + self.graph + .add_external_edge(item_node, ScopeId::Item(child.into()), edge) } if let Some(parent) = top_mod.parent(self.db) { let edge = EdgeKind::super_(); - self.add_global_edge(item_scope, parent, edge); + self.graph + .add_external_edge(item_node, ScopeId::Item(parent.into()), edge); } self.module_stack.pop().unwrap(); return; } - let parent_scope = *self.scope_stack.last().unwrap(); + let parent_node = *self.scope_stack.last().unwrap(); let parent_to_child_edge = match item { Mod(inner) => { self.module_stack.pop().unwrap(); - self.add_local_edge( - item_scope, + self.graph.add_edge( + item_node, *self.module_stack.last().unwrap(), EdgeKind::super_(), ); - self.add_global_edge( - item_scope, - self.top_mod.ingot(self.db).root_mod(self.db), + self.graph.add_external_edge( + item_node, + ScopeId::Item(self.top_mod.ingot(self.db).root_mod(self.db).into()), EdgeKind::ingot(), ); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_()); + self.graph.add_edge(item_node, item_node, EdgeKind::self_()); inner .name(self.db) @@ -109,10 +103,14 @@ impl<'db> ScopeGraphBuilder<'db> { } Func(inner) => { - self.add_lex_edge(item_scope, parent_scope); - self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); if let Some(params) = inner.params(self.db).to_opt() { - self.add_func_param_scope(item_scope, params); + self.add_func_param_scope(item_node, inner.into(), params); } inner .name(self.db) @@ -122,9 +120,13 @@ impl<'db> ScopeGraphBuilder<'db> { } Struct(inner) => { - self.add_lex_edge(item_scope, parent_scope); - self.add_field_scope(item_scope, inner.fields(self.db)); - self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + self.graph.add_lex_edge(item_node, parent_node); + self.add_field_scope(item_node, inner.into(), inner.fields(self.db)); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); inner .name(self.db) .to_opt() @@ -133,8 +135,8 @@ impl<'db> ScopeGraphBuilder<'db> { } Contract(inner) => { - self.add_lex_edge(item_scope, parent_scope); - self.add_field_scope(item_scope, inner.fields(self.db)); + self.graph.add_lex_edge(item_node, parent_node); + self.add_field_scope(item_node, inner.into(), 
inner.fields(self.db)); inner .name(self.db) .to_opt() @@ -143,10 +145,13 @@ impl<'db> ScopeGraphBuilder<'db> { } Enum(inner) => { - self.add_lex_edge(item_scope, parent_scope); - let vis = inner.vis(self.db); - self.add_variant_scope(item_scope, vis, inner.variants(self.db)); - self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + self.graph.add_lex_edge(item_node, parent_node); + self.add_variant_scope(item_node, inner.into(), inner.variants(self.db)); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); inner .name(self.db) .to_opt() @@ -155,8 +160,12 @@ impl<'db> ScopeGraphBuilder<'db> { } TypeAlias(inner) => { - self.add_lex_edge(item_scope, parent_scope); - self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); inner .name(self.db) .to_opt() @@ -165,16 +174,26 @@ impl<'db> ScopeGraphBuilder<'db> { } Impl(inner) => { - self.add_lex_edge(item_scope, parent_scope); - self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); EdgeKind::anon() } Trait(inner) => { - self.add_lex_edge(item_scope, parent_scope); - self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); inner .name(self.db) .to_opt() @@ -183,14 +202,19 @@ impl<'db> ScopeGraphBuilder<'db> { } ImplTrait(inner) => { - self.add_lex_edge(item_scope, parent_scope); - self.add_generic_param_scope(item_scope, inner.generic_params(self.db)); - self.add_local_edge(item_scope, item_scope, EdgeKind::self_ty()); + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); EdgeKind::anon() } Const(inner) => { - self.add_lex_edge(item_scope, parent_scope); + self.graph.add_lex_edge(item_node, parent_node); inner .name(self.db) .to_opt() @@ -201,80 +225,87 @@ impl<'db> ScopeGraphBuilder<'db> { Use(use_) => { self.graph.unresolved_uses.insert(use_); - self.add_lex_edge(item_scope, parent_scope); + self.graph.add_lex_edge(item_node, parent_node); EdgeKind::anon() } Body(_) => { - self.add_lex_edge(item_scope, parent_scope); + self.graph.add_lex_edge(item_node, parent_node); EdgeKind::anon() } _ => unreachable!(), }; - self.add_local_edge(parent_scope, item_scope, parent_to_child_edge); + self.graph + .add_edge(parent_node, item_node, parent_to_child_edge); } - fn initialize_item_scope(&mut self, scope: LocalScopeId, item: ItemKind) { - self.graph.scopes[scope].kind = ScopeKind::Item(item); - self.graph.scopes[scope].vis = item.vis(self.db); - self.graph.item_map.insert(item, scope); + fn initialize_item_scope(&mut self, node: NodeId, item: ItemKind) { + self.graph.initialize_item_scope(self.db, node, item) } - fn add_field_scope(&mut self, parent_scope: 
LocalScopeId, fields: RecordFieldListId) { + fn add_field_scope( + &mut self, + parent_node: NodeId, + parent_item: ItemKind, + fields: RecordFieldListId, + ) { + let parent_scope = ScopeId::Item(parent_item); + for (i, field) in fields.data(self.db).iter().enumerate() { - let scope = LocalScope::new( - ScopeKind::Field(i), - self.parent_module_id(), - Some(parent_scope), - field.vis, - ); - let field_scope = self.graph.scopes.push(scope); - self.add_lex_edge(field_scope, parent_scope); + let scope_id = ScopeId::Field(parent_item, i); + let scope_data = Scope::new(scope_id, Some(parent_scope), field.vis); + + let field_node = self.graph.push(scope_id, scope_data); + self.graph.add_lex_edge(field_node, parent_node); let kind = field .name .to_opt() .map(EdgeKind::field) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(parent_scope, field_scope, kind) + self.graph.add_edge(parent_node, field_node, kind) } } fn add_variant_scope( &mut self, - parent_scope: LocalScopeId, - parent_vis: Visibility, + parent_node: NodeId, + parent_item: ItemKind, variants: EnumVariantListId, ) { + let parent_scope = ScopeId::Item(parent_item); + let parent_vis = parent_item.vis(self.db); + for (i, field) in variants.data(self.db).iter().enumerate() { - let scope = LocalScope::new( - ScopeKind::Variant(i), - self.parent_module_id(), - Some(parent_scope), - parent_vis, - ); - let variant_scope = self.graph.scopes.push(scope); - self.add_lex_edge(variant_scope, parent_scope); + let scope_id = ScopeId::Variant(parent_item, i); + let scope_data = Scope::new(scope_id, Some(parent_scope), parent_vis); + + let variant_node = self.graph.push(scope_id, scope_data); + self.graph.add_lex_edge(variant_node, parent_node); let kind = field .name .to_opt() .map(EdgeKind::variant) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(parent_scope, variant_scope, kind) + self.graph.add_edge(parent_node, variant_node, kind) } } - fn add_func_param_scope(&mut self, parent_scope: LocalScopeId, params: FnParamListId) { + fn add_func_param_scope( + &mut self, + parent_node: NodeId, + parent_item: ItemKind, + params: FnParamListId, + ) { + let parent_scope = ScopeId::Item(parent_item); + for (i, param) in params.data(self.db).iter().enumerate() { - let scope = LocalScope::new( - ScopeKind::FnParam(i), - self.parent_module_id(), - Some(parent_scope), - Visibility::Private, - ); - let generic_param_scope = self.graph.scopes.push(scope); - self.add_lex_edge(generic_param_scope, parent_scope); + let scope_id = ScopeId::FnParam(parent_item, i); + let scope = Scope::new(scope_id, Some(parent_scope), Visibility::Private); + let func_param_node = self.graph.push(scope_id, scope); + + self.graph.add_lex_edge(func_param_node, parent_node); let kind = param .name .to_opt() @@ -283,64 +314,99 @@ impl<'db> ScopeGraphBuilder<'db> { FnParamName::Underscore => EdgeKind::anon(), }) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(parent_scope, generic_param_scope, kind) + self.graph.add_edge(parent_node, func_param_node, kind) } } - fn add_generic_param_scope(&mut self, parent_scope: LocalScopeId, params: GenericParamListId) { + fn add_generic_param_scope( + &mut self, + parent_node: NodeId, + parent_item: ItemKind, + params: GenericParamListId, + ) { + let parent_scope = ScopeId::Item(parent_item); + for (i, param) in params.data(self.db).iter().enumerate() { - let scope = LocalScope::new( - ScopeKind::GenericParam(i), - self.parent_module_id(), - Some(parent_scope), - Visibility::Private, - ); - let generic_param_scope = 
self.graph.scopes.push(scope); - self.add_lex_edge(generic_param_scope, parent_scope); + let scope_id = ScopeId::GenericParam(parent_item, i); + let scope = Scope::new(scope_id, Some(parent_scope), Visibility::Private); + + let generic_param_node = self.graph.push(scope_id, scope); + self.graph.add_lex_edge(generic_param_node, parent_node); let kind = param .name() .to_opt() .map(EdgeKind::generic_param) .unwrap_or_else(EdgeKind::anon); - self.add_local_edge(parent_scope, generic_param_scope, kind) + self.graph.add_edge(parent_node, generic_param_node, kind) } } - fn dummy_scope(&self) -> LocalScope { - LocalScope::new( - ScopeKind::Item(self.top_mod.into()), - self.parent_module_id(), - None, - Visibility::Public, - ) + fn dummy_scope(&self) -> (ScopeId, Scope) { + let scope_id = ScopeId::Item(self.top_mod.into()); + (scope_id, Scope::new(scope_id, None, Visibility::Public)) } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +struct NodeId(u32); +entity_impl!(NodeId); + +#[derive(Default)] +struct IntermediateScopeGraph { + nodes: PrimaryMap, + edges: FxHashMap>, + unresolved_uses: FxHashSet, +} - fn parent_module_id(&self) -> Option { - if let Some(id) = self.module_stack.last() { - Some(ScopeId::new(self.top_mod, *id)) - } else { - self.top_mod - .parent(self.db) - .map(|top_mod| ScopeId::new(top_mod, LocalScopeId::root())) +impl IntermediateScopeGraph { + fn build(mut self, top_mod: TopLevelMod) -> ScopeGraph { + for (from_node, edges) in self.edges { + for (dest_node, kind) in edges { + let dest = self.nodes[dest_node].0; + let edge = ScopeEdge { dest, kind }; + self.nodes[from_node].1.edges.insert(edge); + } + } + + let scopes = self + .nodes + .into_iter() + .map(|(_, (id, data))| (id, data)) + .collect(); + + ScopeGraph { + top_mod, + scopes, + unresolved_uses: self.unresolved_uses, } } - fn add_lex_edge(&mut self, child: LocalScopeId, parent: LocalScopeId) { - self.add_local_edge(child, parent, EdgeKind::lex()); - self.graph.scopes[child].parent_scope = Some(parent); + fn push(&mut self, scope_id: ScopeId, scope_data: Scope) -> NodeId { + self.nodes.push((scope_id, scope_data)) + } + + fn initialize_item_scope(&mut self, db: &dyn HirDb, node: NodeId, item: ItemKind) { + let scope_id = ScopeId::Item(item); + + let scope_data = &mut self.nodes[node]; + scope_data.0 = scope_id; + scope_data.1.id = scope_id; + scope_data.1.vis = item.vis(db); + } + + fn add_lex_edge(&mut self, child: NodeId, parent: NodeId) { + self.edges + .entry(child) + .or_default() + .push((parent, EdgeKind::lex())); } - fn add_local_edge(&mut self, source: LocalScopeId, dest: LocalScopeId, kind: EdgeKind) { - self.graph.scopes[source].edges.push(ScopeEdge { - dest: ScopeId::new(self.top_mod, dest), - kind, - }); + fn add_edge(&mut self, from: NodeId, dest: NodeId, kind: EdgeKind) { + self.edges.entry(from).or_default().push((dest, kind)); } - fn add_global_edge(&mut self, source: LocalScopeId, dest: TopLevelMod, kind: EdgeKind) { - self.graph.scopes[source].edges.push(ScopeEdge { - dest: ScopeId::new(dest, LocalScopeId::root()), - kind, - }); + /// Add an edge to the graph that is not part of the current file. 
+ fn add_external_edge(&mut self, from: NodeId, dest: ScopeId, kind: EdgeKind) { + self.nodes[from].1.edges.insert(ScopeEdge { dest, kind }); } } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index d830665118..5c2cf52511 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -303,7 +303,8 @@ mod tests { } "#; - let item_tree = db.parse_source(text); + let file = db.standalone_file(text); + let item_tree = db.parse_source(file); let top_mod = item_tree.top_mod; assert_eq!(text, db.text_at(top_mod, &top_mod.lazy_span())); } From 2983e4c22167262782152870c29c14e20ff473da Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 24 May 2023 20:09:11 +0200 Subject: [PATCH 170/678] Add `SpanDowncast` trait --- .../src/name_resolution/name_resolver.rs | 4 ++-- crates/hir/src/hir_def/mod.rs | 23 +------------------ crates/hir/src/span/mod.rs | 7 +++++- crates/hir/src/span/transition.rs | 6 +++++ 4 files changed, 15 insertions(+), 25 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 80adabe60a..1d2e29666a 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -103,8 +103,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// use foo::FOO /// /// mod foo { - /// const FOO: i32 = 1 - /// struct FOO {} + /// pub const FOO: i32 = 1 + /// pub struct FOO {} /// } /// ``` pub fn resolve_path( diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 48fb0b4c09..f93f0eca42 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -14,8 +14,6 @@ pub mod use_tree; pub(crate) mod module_tree; -use std::ops::Deref; - pub use attr::*; pub use body::*; use common::{input::IngotKind, InputIngot}; @@ -32,26 +30,7 @@ pub use use_tree::*; use num_bigint::BigUint; -use crate::{external_ingots_impl, span::LazySpan, HirDb}; - -pub struct Spanned -where - S: LazySpan, -{ - pub value: T, - pub span: S, -} - -impl Deref for Spanned -where - S: LazySpan, -{ - type Target = T; - - fn deref(&self) -> &Self::Target { - &self.value - } -} +use crate::{external_ingots_impl, HirDb}; #[salsa::tracked] pub struct IngotId { diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 152a123989..9de77213c0 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -31,7 +31,7 @@ mod transition; /// LazySpan` usage because it doesn't implement `Clone` and `Eq` which leads to /// a lot of difficulties in salsa integration #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct DynLazySpan(Option); +pub struct DynLazySpan(pub(super) Option); impl DynLazySpan { pub fn invalid_span() -> Self { Self(None) @@ -46,6 +46,11 @@ impl LazySpan for DynLazySpan { } } } +pub(crate) trait SpanDowncast { + fn downcast(dyn_span: DynLazySpan) -> Option + where + Self: Sized; +} /// The trait provides a way to extract [`Span`](common::diagnostics::Span) from /// types which don't have a span information directly, but can be resolved into diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 561715fb24..9b0a193853 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -345,6 +345,12 @@ macro_rules! 
define_lazy_span_node { Self(val.0.into()) } } + + impl crate::span::SpanDowncast for $name { + fn downcast(val: crate::span::DynLazySpan) -> Option { + val.0.map(|inner| Self(inner)) + } + } }; } From e52c51b715fcc9d5d1e73589b3c4e5f192ef4d62 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 25 May 2023 00:47:57 +0200 Subject: [PATCH 171/678] Impl `IntoIterator` for `DynaLazySpan` --- crates/hir/src/span/mod.rs | 28 ++++++++++++++++++++++++++++ crates/hir/src/span/transition.rs | 4 ++-- 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 9de77213c0..327d5371bc 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -46,6 +46,34 @@ impl LazySpan for DynLazySpan { } } } +impl FromIterator for DynLazySpan { + fn from_iter(iter: T) -> Self + where + T: IntoIterator, + { + let mut iter = iter.into_iter(); + + let Some(first) = iter.next().map(|first| first.0).flatten() else { + return Self::invalid_span(); + }; + let chain_root = first.root; + let mut chain = first.chain; + + for item in iter { + let Some(mut item) = item.0 else { + return Self::invalid_span(); + }; + + chain.append(&mut item.chain); + } + + Self(Some(SpanTransitionChain { + root: chain_root, + chain, + })) + } +} + pub(crate) trait SpanDowncast { fn downcast(dyn_span: DynLazySpan) -> Option where diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 9b0a193853..f46e370bc9 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -43,8 +43,8 @@ pub(crate) enum LazyArg { #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub(crate) struct SpanTransitionChain { - root: ChainRoot, - chain: Vec, + pub(super) root: ChainRoot, + pub(super) chain: Vec, } #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, derive_more::From)] From 46be683bb3c11b277e72ec9e19784f4f500fcbcd Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 25 May 2023 15:09:06 +0200 Subject: [PATCH 172/678] Implement `SpanCtxt` for `Visitor` implementation --- crates/hir/src/hir_def/body.rs | 8 +++- crates/hir/src/hir_def/item.rs | 14 ++++-- crates/hir/src/lib.rs | 1 + crates/hir/src/span/item.rs | 14 ++++-- crates/hir/src/span/mod.rs | 27 ----------- crates/hir/src/span/transition.rs | 50 +++++++++++++------- crates/hir/src/visitor.rs | 76 +++++++++++++++++++++++++++++++ 7 files changed, 138 insertions(+), 52 deletions(-) create mode 100644 crates/hir/src/visitor.rs diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index be3ff48e00..178a32a490 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -9,7 +9,7 @@ use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap}; use parser::ast::{self, prelude::*}; use rustc_hash::FxHashMap; -use crate::span::HirOrigin; +use crate::span::{item::LazyBodySpan, HirOrigin}; use super::{Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TopLevelMod, TrackedItemId}; @@ -32,6 +32,12 @@ pub struct Body { pub(crate) origin: HirOrigin, } +impl Body { + pub fn lazy_span(self) -> LazyBodySpan { + LazyBodySpan::new(self) + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TrackedBodyId { ItemBody(Box), diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index dcb0e74164..9eb7f20d3e 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -12,8 +12,8 @@ use crate::{ span::{ item::{ LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFuncSpan, LazyImplSpan, 
- LazyImplTraitSpan, LazyModSpan, LazyStructSpan, LazyTopLevelModSpan, LazyTraitSpan, - LazyTypeAliasSpan, LazyUseSpan, + LazyImplTraitSpan, LazyItemSpan, LazyModSpan, LazyStructSpan, LazyTopModSpan, + LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, }, params::LazyGenericParamListSpan, DynLazySpan, HirOrigin, @@ -57,6 +57,12 @@ pub enum ItemKind { Body(Body), } +impl ItemKind { + pub fn lazy_span(self) -> LazyItemSpan { + LazyItemSpan::new(self) + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] pub enum GenericParamOwner { Func(Func), @@ -209,8 +215,8 @@ pub struct TopLevelMod { pub(crate) file: InputFile, } impl TopLevelMod { - pub fn lazy_span(self) -> LazyTopLevelModSpan { - LazyTopLevelModSpan::new(self) + pub fn lazy_span(self) -> LazyTopModSpan { + LazyTopModSpan::new(self) } pub fn scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 65da08cba9..faff330af9 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -11,6 +11,7 @@ pub mod diagnostics; pub mod hir_def; pub mod lower; pub mod span; +pub mod visitor; #[salsa::jar(db = HirDb)] pub struct Jar( diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 5c2cf52511..371523fda3 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -2,8 +2,8 @@ use parser::ast::{self, prelude::AstNode}; use crate::{ hir_def::{ - Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, - TypeAlias, Use, + Body, Const, Contract, Enum, Func, Impl, ImplTrait, ItemKind, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, }, span::{ transition::{LazyArg, LazyTransitionFn, ResolvedOrigin, ResolvedOriginKind}, @@ -16,11 +16,19 @@ use super::{ attr::LazyAttrListSpan, define_lazy_span_node, params::{LazyFnParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, + transition::SpanTransitionChain, types::{LazyPathTypeSpan, LazyTypeSpan}, use_tree::LazyUseAliasSpan, }; -define_lazy_span_node!(LazyTopLevelModSpan, ast::Root, new(TopLevelMod),); +define_lazy_span_node!(LazyTopModSpan, ast::Root, new(TopLevelMod),); + +define_lazy_span_node!(LazyItemSpan); +impl LazyItemSpan { + pub fn new(item: ItemKind) -> Self { + Self(SpanTransitionChain::new(item)) + } +} define_lazy_span_node!( LazyModSpan, diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 327d5371bc..662b3f8dc2 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -46,33 +46,6 @@ impl LazySpan for DynLazySpan { } } } -impl FromIterator for DynLazySpan { - fn from_iter(iter: T) -> Self - where - T: IntoIterator, - { - let mut iter = iter.into_iter(); - - let Some(first) = iter.next().map(|first| first.0).flatten() else { - return Self::invalid_span(); - }; - let chain_root = first.root; - let mut chain = first.chain; - - for item in iter { - let Some(mut item) = item.0 else { - return Self::invalid_span(); - }; - - chain.append(&mut item.chain); - } - - Self(Some(SpanTransitionChain { - root: chain_root, - chain, - })) - } -} pub(crate) trait SpanDowncast { fn downcast(dyn_span: DynLazySpan) -> Option diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index f46e370bc9..963ae5439c 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -8,8 +8,8 @@ use parser::{ use crate::{ hir_def::{ - Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, - TypeAlias, Use, + Body, Const, 
Contract, Enum, Func, Impl, ImplTrait, ItemKind, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, }, lower::top_mod_ast, SpannedHirDb, @@ -47,8 +47,24 @@ pub(crate) struct SpanTransitionChain { pub(super) chain: Vec, } +impl SpanTransitionChain { + pub(super) fn new(root: impl Into) -> Self { + Self { + root: root.into(), + chain: Vec::new(), + } + } + + pub(super) fn push_transition(&self, transition: LazyTransitionFn) -> Self { + let mut new_state = self.clone(); + new_state.chain.push(transition); + new_state + } +} + #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, derive_more::From)] pub(crate) enum ChainRoot { + ItemKind(ItemKind), TopMod(TopLevelMod), Mod(Mod), Func(Func), @@ -144,6 +160,21 @@ pub(crate) enum ResolvedOriginKind { impl ChainInitiator for ChainRoot { fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { match self { + Self::ItemKind(kind) => match kind { + ItemKind::TopMod(top_mod) => top_mod.init(db), + ItemKind::Mod(mod_) => mod_.init(db), + ItemKind::Func(func) => func.init(db), + ItemKind::Struct(struct_) => struct_.init(db), + ItemKind::Contract(contract) => contract.init(db), + ItemKind::Enum(enum_) => enum_.init(db), + ItemKind::TypeAlias(type_alias) => type_alias.init(db), + ItemKind::Impl(impl_) => impl_.init(db), + ItemKind::Trait(trait_) => trait_.init(db), + ItemKind::ImplTrait(impl_trait) => impl_trait.init(db), + ItemKind::Const(const_) => const_.init(db), + ItemKind::Use(use_) => use_.init(db), + ItemKind::Body(body) => body.init(db), + }, Self::TopMod(top_mod) => top_mod.init(db), Self::Mod(mod_) => mod_.init(db), Self::Func(func) => func.init(db), @@ -164,21 +195,6 @@ impl ChainInitiator for ChainRoot { } } -impl SpanTransitionChain { - pub(super) fn new(root: impl Into) -> Self { - Self { - root: root.into(), - chain: Vec::new(), - } - } - - pub(super) fn push_transition(&self, transition: LazyTransitionFn) -> Self { - let mut new_state = self.clone(); - new_state.chain.push(transition); - new_state - } -} - impl LazySpan for SpanTransitionChain { fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { let mut resolved = self.root.init(db); diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs new file mode 100644 index 0000000000..7f4001b5ee --- /dev/null +++ b/crates/hir/src/visitor.rs @@ -0,0 +1,76 @@ +#![allow(unused)] +use std::{marker::PhantomData, mem}; + +use crate::{ + span::{ + item::{ + LazyBodySpan, LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFuncSpan, + LazyImplSpan, LazyImplTraitSpan, LazyItemSpan, LazyModSpan, LazyStructSpan, + LazyTopModSpan, LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, + }, + DynLazySpan, LazySpan, LazySpanAtom, SpanDowncast, + }, + HirDb, +}; + +pub struct SpanCtxt +where + T: LazySpan, +{ + span: DynLazySpan, + _t: PhantomData, +} + +impl SpanCtxt +where + T: LazySpan, +{ + fn cast(self) -> SpanCtxt { + SpanCtxt { + span: self.span, + _t: PhantomData, + } + } + + fn pop_span(&mut self) { + self.span.0.pop_transition(); + } + + pub fn current_span(&self) -> Option + where + T: SpanDowncast, + { + let dyn_span: DynLazySpan = self.span.clone(); + T::downcast(dyn_span) + } +} + +macro_rules! define_ctxt_ctor { + ($(($hir_ty:ty, $span_ty:ty, $ctor_name:ident),)*) => { + $(impl SpanCtxt<$span_ty> { + pub fn $ctor_name(item: $hir_ty) -> Self { + Self { + span: item.lazy_span().into(), + _t: PhantomData, + } + } + })* + }; +} + +define_ctxt_ctor! 
{ + (ItemKind, LazyItemSpan, with_item), + (TopLevelMod, LazyTopModSpan, with_top_mod), + (Mod, LazyModSpan, with_mod), + (Func, LazyFuncSpan, with_func), + (Struct, LazyStructSpan, with_struct), + (Contract, LazyContractSpan, with_contract), + (Enum, LazyEnumSpan, with_enum), + (TypeAlias, LazyTypeAliasSpan, with_type_alias), + (Impl, LazyImplSpan, with_impl), + (Trait, LazyTraitSpan, with_trait), + (ImplTrait, LazyImplTraitSpan, with_impl_trait), + (Const, LazyConstSpan, with_const), + (Use, LazyUseSpan, with_use), + (Body, LazyBodySpan, with_body), +} From 3b918815bc8b3dfc858bc691652fb1f61c17f5db Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 25 May 2023 16:25:31 +0200 Subject: [PATCH 173/678] Add efficient implementation to lazy span methods which doesn't clone `self` --- Cargo.lock | 7 ++ crates/hir/Cargo.toml | 1 + crates/hir/src/span/attr.rs | 7 +- crates/hir/src/span/item.rs | 14 +++- crates/hir/src/span/mod.rs | 2 +- crates/hir/src/span/transition.rs | 115 +++++++++++++++++++----------- crates/hir/src/span/use_tree.rs | 14 +++- crates/hir/src/visitor.rs | 6 +- 8 files changed, 117 insertions(+), 49 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index feaedd847e..b8e4baa9c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -914,6 +914,7 @@ dependencies = [ "fe-parser2", "num-bigint", "num-traits", + "paste", "rustc-hash", "salsa-2022", "smallvec", @@ -1660,6 +1661,12 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "paste" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" + [[package]] name = "petgraph" version = "0.6.3" diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 4bed6f466f..e714c6129b 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -18,6 +18,7 @@ num-traits = "0.2.15" camino = "1.1.4" rustc-hash = "1.1.0" smallvec = "1.10.0" +paste = "1.0" common = { path = "../common2", package = "fe-common2" } parser = { path = "../parser2", package = "fe-parser2" } diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index 6e78feeb4b..f0edb8adb2 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -13,6 +13,10 @@ define_lazy_span_node!( ); impl LazyAttrListSpan { pub fn normal_attr(&self, idx: usize) -> LazyNormalAttrSpan { + self.clone().normal_attr_moved(idx) + } + + pub fn normal_attr_moved(mut self, idx: usize) -> LazyNormalAttrSpan { fn f(origin: ResolvedOrigin, arg: crate::span::transition::LazyArg) -> ResolvedOrigin { let idx = match arg { crate::span::transition::LazyArg::Idx(idx) => idx, @@ -30,7 +34,8 @@ impl LazyAttrListSpan { arg: crate::span::transition::LazyArg::Idx(idx), }; - LazyNormalAttrSpan(self.0.push_transition(lazy_transition)) + self.0.push(lazy_transition); + LazyNormalAttrSpan(self.0) } } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 371523fda3..dab7346194 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -187,6 +187,10 @@ define_lazy_span_node!( impl LazyUseSpan { pub fn path(&self) -> LazyUsePathSpan { + self.clone().path_moved() + } + + pub fn path_moved(mut self) -> LazyUsePathSpan { fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { origin .map(|node| { @@ -209,10 +213,15 @@ impl LazyUseSpan { arg: LazyArg::None, }; - LazyUsePathSpan(self.0.push_transition(lazy_transition)) + self.0.push(lazy_transition); + LazyUsePathSpan(self.0) } pub fn alias(&self) -> LazyUseAliasSpan { + 
self.clone().alias_moved() + } + + pub fn alias_moved(mut self) -> LazyUseAliasSpan { fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { origin .map(|node| { @@ -235,7 +244,8 @@ impl LazyUseSpan { arg: LazyArg::None, }; - LazyUseAliasSpan(self.0.push_transition(lazy_transition)) + self.0.push(lazy_transition); + LazyUseAliasSpan(self.0) } } diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 662b3f8dc2..0f3e121e48 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -47,7 +47,7 @@ impl LazySpan for DynLazySpan { } } -pub(crate) trait SpanDowncast { +pub trait SpanDowncast { fn downcast(dyn_span: DynLazySpan) -> Option where Self: Sized; diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 963ae5439c..09496166f1 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -55,10 +55,12 @@ impl SpanTransitionChain { } } - pub(super) fn push_transition(&self, transition: LazyTransitionFn) -> Self { - let mut new_state = self.clone(); - new_state.chain.push(transition); - new_state + pub(super) fn push(&mut self, transition: LazyTransitionFn) { + self.chain.push(transition); + } + + pub(crate) fn pop_transition(&mut self) { + self.chain.pop(); } } @@ -289,62 +291,91 @@ macro_rules! define_lazy_span_node { $($( pub fn $name_token(&self) -> crate::span::LazySpanAtom { - use parser::ast::prelude::*; - fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { - origin.map(|node| <$sk_node as AstNode>::cast(node) - .and_then(|n| n.$getter_token()) - .map(|n| n.into())) + let cloned = self.clone(); + paste::paste! { + cloned.[<$name_token _moved>]() } + } + + paste::paste! { + pub fn [<$name_token _moved>](mut self) -> crate::span::LazySpanAtom { + use parser::ast::prelude::*; + fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + origin.map(|node| <$sk_node as AstNode>::cast(node) + .and_then(|n| n.$getter_token()) + .map(|n| n.into())) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::None, + }; - let lazy_transition = crate::span::transition::LazyTransitionFn { - f, - arg: crate::span::transition::LazyArg::None, - }; - crate::span::LazySpanAtom( - self.0.push_transition(lazy_transition) - ) + self.0.push(lazy_transition); + crate::span::LazySpanAtom(self.0) + } } )*)? $($( pub fn $name_node(&self) -> $result { - use parser::ast::prelude::*; - - fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { - origin.map(|node| <$sk_node as AstNode>::cast(node) - .and_then(|n| n.$getter_node()) - .map(|n| n.syntax().clone().into())) + let cloned = self.clone(); + paste::paste! { + cloned.[<$name_node _moved>]() } + } + + paste::paste! 
{ + pub fn [<$name_node _moved>](mut self) -> $result { + use parser::ast::prelude::*; - let lazy_transition = crate::span::transition::LazyTransitionFn { - f, - arg: crate::span::transition::LazyArg::None, - }; - $result(self.0.push_transition(lazy_transition)) + fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + origin.map(|node| <$sk_node as AstNode>::cast(node) + .and_then(|n| n.$getter_node()) + .map(|n| n.syntax().clone().into())) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::None, + }; + self.0.push(lazy_transition); + $result(self.0) + } } )*)? $($( pub fn $name_iter(&self, idx: usize) -> $result_iter { - use parser::ast::prelude::*; - fn f(origin: crate::span::transition::ResolvedOrigin, arg: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { - let idx = match arg { - crate::span::transition::LazyArg::Idx(idx) => idx, - _ => unreachable!(), - }; - - origin.map(|node| <$sk_node as AstNode>::cast(node) - .and_then(|f| f.into_iter().nth(idx)) - .map(|n| n.syntax().clone().into())) + let cloned = self.clone(); + paste::paste! { + cloned.[<$name_iter _moved>](idx) } + } - let lazy_transition = crate::span::transition::LazyTransitionFn { - f, - arg: crate::span::transition::LazyArg::Idx(idx), - }; + paste::paste! { + pub fn [<$name_iter _moved>](mut self, idx: usize) -> $result_iter { + use parser::ast::prelude::*; + fn f(origin: crate::span::transition::ResolvedOrigin, arg: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + let idx = match arg { + crate::span::transition::LazyArg::Idx(idx) => idx, + _ => unreachable!(), + }; + + origin.map(|node| <$sk_node as AstNode>::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into())) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::Idx(idx), + }; - $result_iter(self.0.push_transition(lazy_transition)) + self.0.push(lazy_transition); + $result_iter(self.0) + } } )*)? })?)? 
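Note: the macro arms above all reduce to the same shape: the borrowing accessor
clones the lazy span and delegates to a consuming `_moved` variant that appends
the transition in place, so callers that already own the span can extend it
without copying the transition chain at every step. A minimal standalone sketch
of that shape (`Chain` and `segment` are illustrative names only, not this
crate's API):

    #[derive(Clone)]
    struct Chain(Vec<usize>);

    impl Chain {
        /// Borrowing form: clones the whole chain, then delegates.
        fn segment(&self, idx: usize) -> Chain {
            self.clone().segment_moved(idx)
        }

        /// Consuming form: extends the chain without cloning it.
        fn segment_moved(mut self, idx: usize) -> Chain {
            self.0.push(idx);
            self
        }
    }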
diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index a4ba2975ec..93861d4ef8 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -10,6 +10,10 @@ use super::{define_lazy_span_node, LazySpanAtom}; define_lazy_span_node!(LazyUsePathSpan); impl LazyUsePathSpan { pub fn segment(&self, idx: usize) -> LazyUsePathSegmentSpan { + self.clone().segment_moved(idx) + } + + pub fn segment_moved(mut self, idx: usize) -> LazyUsePathSegmentSpan { fn f(origin: ResolvedOrigin, arg: LazyArg) -> ResolvedOrigin { let LazyArg::Idx(idx) = arg else { unreachable!() @@ -36,7 +40,8 @@ impl LazyUsePathSpan { arg: LazyArg::Idx(idx), }; - LazyUsePathSegmentSpan(self.0.push_transition(lazy_transition)) + self.0.push(lazy_transition); + LazyUsePathSegmentSpan(self.0) } } @@ -46,6 +51,10 @@ define_lazy_span_node!(LazyUseAliasSpan, ast::UseAlias,); impl LazyUseAliasSpan { pub fn name(&self) -> LazySpanAtom { + self.clone().name_moved() + } + + pub fn name_moved(mut self) -> LazySpanAtom { fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { origin .map(|node| { @@ -66,7 +75,8 @@ impl LazyUseAliasSpan { f, arg: LazyArg::None, }; + self.0.push(lazy_transition); - LazySpanAtom(self.0.push_transition(lazy_transition)) + LazySpanAtom(self.0) } } diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 7f4001b5ee..7328a42bfc 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -2,6 +2,10 @@ use std::{marker::PhantomData, mem}; use crate::{ + hir_def::{ + Body, Const, Contract, Enum, Func, Impl, ImplTrait, ItemKind, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, + }, span::{ item::{ LazyBodySpan, LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFuncSpan, @@ -33,7 +37,7 @@ where } fn pop_span(&mut self) { - self.span.0.pop_transition(); + self.span.0.as_mut().map(|chain| chain.pop_transition()); } pub fn current_span(&self) -> Option From 3f98eb23f4963c01d63c89eef2f90163997b8a7a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 26 May 2023 18:37:01 +0200 Subject: [PATCH 174/678] Rename `Fn` to `Func` in the compiler internal representation to avoid name conflict with `Fn` trait in Rust --- Cargo.lock | 15 ++++++ .../src/name_resolution/name_resolver.rs | 2 +- crates/hir/src/hir_def/attr.rs | 2 +- crates/hir/src/hir_def/item.rs | 8 ++-- crates/hir/src/hir_def/params.rs | 24 +++++----- crates/hir/src/hir_def/scope_graph.rs | 8 ++-- crates/hir/src/lib.rs | 2 +- crates/hir/src/lower/item.rs | 8 ++-- crates/hir/src/lower/params.rs | 36 ++++++++------- crates/hir/src/lower/scope_builder.rs | 10 ++-- crates/hir/src/span/item.rs | 6 +-- crates/hir/src/span/mod.rs | 4 +- crates/hir/src/span/params.rs | 10 ++-- crates/parser2/src/ast/item.rs | 36 +++++++-------- crates/parser2/src/ast/param.rs | 46 +++++++++---------- crates/parser2/src/parser/func.rs | 26 +++++------ crates/parser2/src/parser/item.rs | 18 ++++---- crates/parser2/src/parser/param.rs | 6 +-- crates/parser2/src/syntax_kind.rs | 4 +- .../error_recovery/items/extern_.snap | 8 ++-- .../test_files/error_recovery/items/func.snap | 8 ++-- .../test_files/syntax_node/items/extern.snap | 12 ++--- .../test_files/syntax_node/items/func.snap | 20 ++++---- .../test_files/syntax_node/items/impl.snap | 8 ++-- .../syntax_node/items/impl_trait.snap | 12 ++--- .../test_files/syntax_node/items/mod.snap | 4 +- .../test_files/syntax_node/items/trait.snap | 20 ++++---- 27 files changed, 191 insertions(+), 172 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
b8e4baa9c4..6d5ffe4917 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -902,6 +902,21 @@ dependencies = [ "vfs", ] +[[package]] +name = "fe-driver2" +version = "0.20.0-alpha" +dependencies = [ + "derive_more", + "either", + "fe-common2", + "fe-hir", + "fe-hir-analysis", + "itertools", + "rustc-hash", + "salsa-2022", + "smallvec", +] + [[package]] name = "fe-hir" version = "0.22.0" diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 1d2e29666a..76094b384e 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -960,7 +960,7 @@ pub enum NameDomain { impl NameDomain { fn from_scope(scope: ScopeId) -> Self { match scope { - ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeId::FnParam(..) => { + ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeId::FuncParam(..) => { Self::Value } ScopeId::Item(_) | ScopeId::GenericParam(..) => Self::Item, diff --git a/crates/hir/src/hir_def/attr.rs b/crates/hir/src/hir_def/attr.rs index ff71ec9752..2e5275561f 100644 --- a/crates/hir/src/hir_def/attr.rs +++ b/crates/hir/src/hir_def/attr.rs @@ -3,7 +3,7 @@ use super::{IdentId, Partial, StringId}; #[salsa::interned] pub struct AttrListId { #[return_ref] - attrs: Vec, + pub data: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 9eb7f20d3e..4bb4dea74e 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -23,7 +23,7 @@ use crate::{ use super::{ scope_graph::{ScopeGraph, ScopeId}, - AttrListId, Body, FnParamListId, GenericParamListId, IdentId, IngotId, Partial, TypeId, + AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, TypeId, UseAlias, WhereClauseId, }; @@ -290,7 +290,7 @@ pub struct Func { pub attributes: AttrListId, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub params: Partial, + pub params: Partial, pub ret_ty: Option, pub modifier: ItemModifier, pub body: Option, @@ -298,7 +298,7 @@ pub struct Func { pub top_mod: TopLevelMod, #[return_ref] - pub(crate) origin: HirOrigin, + pub(crate) origin: HirOrigin, } impl Func { pub fn lazy_span(self) -> LazyFuncSpan { @@ -617,7 +617,7 @@ impl Visibility { pub enum TrackedItemId { TopLevelMod(IdentId), Mod(Partial), - Fn(Partial), + Func(Partial), Struct(Partial), Contract(Partial), Enum(Partial), diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 69cc7a49b0..6d5254f6ee 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -15,15 +15,15 @@ pub struct GenericParamListId { } #[salsa::interned] -pub struct FnParamListId { +pub struct FuncParamListId { #[return_ref] - pub data: Vec, + pub data: Vec, } #[salsa::interned] pub struct WhereClauseId { #[return_ref] - pub predicates: Vec, + pub data: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] @@ -69,17 +69,17 @@ pub struct ConstGenericArg { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct FnParam { +pub struct FuncParam { pub is_mut: bool, - pub label: Option, - pub name: Partial, + pub label: Option, + pub name: Partial, pub ty: Partial, } -impl FnParam { +impl FuncParam { pub fn name(&self) -> Option { match self.name.to_opt()? 
{ - FnParamName::Ident(name) => Some(name), + FuncParamName::Ident(name) => Some(name), _ => None, } } @@ -92,22 +92,22 @@ pub struct WherePredicate { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum FnParamLabel { +pub enum FuncParamLabel { Ident(IdentId), Underscore, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum FnParamName { +pub enum FuncParamName { /// `self` parameter. Ident(IdentId), Underscore, } -impl FnParamName { +impl FuncParamName { pub fn as_name(&self) -> Option { match self { - FnParamName::Ident(name) => Some(*name), + FuncParamName::Ident(name) => Some(*name), _ => None, } } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 3bd35de596..b44eb23bf7 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -47,7 +47,7 @@ impl ScopeGraph { pub enum ScopeId { Item(ItemKind), GenericParam(ItemKind, usize), - FnParam(ItemKind, usize), + FuncParam(ItemKind, usize), Field(ItemKind, usize), Variant(ItemKind, usize), } @@ -56,7 +56,7 @@ impl ScopeId { match self { ScopeId::Item(item) => item.top_mod(db), ScopeId::GenericParam(item, _) => item.top_mod(db), - ScopeId::FnParam(item, _) => item.top_mod(db), + ScopeId::FuncParam(item, _) => item.top_mod(db), ScopeId::Field(item, _) => item.top_mod(db), ScopeId::Variant(item, _) => item.top_mod(db), } @@ -182,7 +182,7 @@ impl ScopeId { _ => unreachable!(), }, - ScopeId::FnParam(parent, idx) => { + ScopeId::FuncParam(parent, idx) => { let func: Func = parent.try_into().unwrap(); func.params(db).to_opt()?.data(db)[idx].name() } @@ -222,7 +222,7 @@ impl ScopeId { _ => unreachable!(), }, - ScopeId::FnParam(parent, idx) => { + ScopeId::FuncParam(parent, idx) => { let func: Func = parent.try_into().unwrap(); Some(func.lazy_span().params().param(idx).name().into()) } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index faff330af9..ed29f7e101 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -35,7 +35,7 @@ pub struct Jar( hir_def::IntegerId, hir_def::StringId, hir_def::PathId, - hir_def::FnParamListId, + hir_def::FuncParamListId, hir_def::AttrListId, hir_def::WhereClauseId, hir_def::GenericArgListId, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index e228ed82bf..0330872bc9 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -2,7 +2,7 @@ use parser::ast::{self, prelude::*}; use crate::{ hir_def::{ - item::*, AttrListId, Body, FnParamListId, GenericParamListId, IdentId, TraitRef, TypeId, + item::*, AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, TraitRef, TypeId, WhereClauseId, }, span::HirOrigin, @@ -87,20 +87,20 @@ impl Func { pub(super) fn lower_ast( ctxt: &mut FileLowerCtxt<'_>, parent_id: TrackedItemId, - ast: ast::Fn, + ast: ast::Func, is_extern: bool, ) -> Self { ctxt.enter_scope(false); let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Fn(name).join(parent_id); + let id = TrackedItemId::Func(name).join(parent_id); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); let params = ast .params() - .map(|params| FnParamListId::lower_ast(ctxt, params)) + .map(|params| FuncParamListId::lower_ast(ctxt, params)) .into(); let ret_ty = ast.ret_ty().map(|ty| TypeId::lower_ast(ctxt, ty)); let modifier = 
ItemModifier::lower_ast(ast.modifier()); diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index b6ff20b3b8..e99ace950a 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -40,11 +40,11 @@ impl GenericParamListId { } } -impl FnParamListId { - pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamList) -> Self { +impl FuncParamListId { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FuncParamList) -> Self { let params = ast .into_iter() - .map(|param| FnParam::lower_ast(ctxt, param)) + .map(|param| FuncParam::lower_ast(ctxt, param)) .collect(); Self::new(ctxt.db(), params) } @@ -137,13 +137,13 @@ impl GenericParam { } } -impl FnParam { - fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParam) -> Self { +impl FuncParam { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FuncParam) -> Self { let is_mut = ast.mut_token().is_some(); - let label = ast.label().map(|ast| FnParamLabel::lower_ast(ctxt, ast)); + let label = ast.label().map(|ast| FuncParamLabel::lower_ast(ctxt, ast)); let name = ast .name() - .map(|ast| FnParamName::lower_ast(ctxt, ast)) + .map(|ast| FuncParamName::lower_ast(ctxt, ast)) .into(); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); @@ -182,21 +182,25 @@ impl TypeBound { } } -impl FnParamName { - fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamName) -> Self { +impl FuncParamName { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FuncParamName) -> Self { match ast { - ast::FnParamName::Ident(name) => FnParamName::Ident(IdentId::lower_token(ctxt, name)), - ast::FnParamName::SelfParam(_) => FnParamName::Ident(kw::SELF), - ast::FnParamName::Underscore(_) => FnParamName::Underscore, + ast::FuncParamName::Ident(name) => { + FuncParamName::Ident(IdentId::lower_token(ctxt, name)) + } + ast::FuncParamName::SelfParam(_) => FuncParamName::Ident(kw::SELF), + ast::FuncParamName::Underscore(_) => FuncParamName::Underscore, } } } -impl FnParamLabel { - fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FnParamLabel) -> Self { +impl FuncParamLabel { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FuncParamLabel) -> Self { match ast { - ast::FnParamLabel::Ident(name) => FnParamLabel::Ident(IdentId::lower_token(ctxt, name)), - ast::FnParamLabel::Underscore(_) => FnParamLabel::Underscore, + ast::FuncParamLabel::Ident(name) => { + FuncParamLabel::Ident(IdentId::lower_token(ctxt, name)) + } + ast::FuncParamLabel::Underscore(_) => FuncParamLabel::Underscore, } } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index f11c953d27..09f4200fd1 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -4,7 +4,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ hir_def::{ scope_graph::{EdgeKind, Scope, ScopeEdge, ScopeGraph, ScopeId}, - EnumVariantListId, FnParamListId, FnParamName, GenericParamListId, ItemKind, + EnumVariantListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, RecordFieldListId, TopLevelMod, Use, Visibility, }, HirDb, @@ -296,12 +296,12 @@ impl<'db> ScopeGraphBuilder<'db> { &mut self, parent_node: NodeId, parent_item: ItemKind, - params: FnParamListId, + params: FuncParamListId, ) { let parent_scope = ScopeId::Item(parent_item); for (i, param) in params.data(self.db).iter().enumerate() { - let scope_id = ScopeId::FnParam(parent_item, i); + let scope_id = ScopeId::FuncParam(parent_item, i); let scope = Scope::new(scope_id, Some(parent_scope), 
Visibility::Private); let func_param_node = self.graph.push(scope_id, scope); @@ -310,8 +310,8 @@ impl<'db> ScopeGraphBuilder<'db> { .name .to_opt() .map(|name| match name { - FnParamName::Ident(ident) => EdgeKind::value(ident), - FnParamName::Underscore => EdgeKind::anon(), + FuncParamName::Ident(ident) => EdgeKind::value(ident), + FuncParamName::Underscore => EdgeKind::anon(), }) .unwrap_or_else(EdgeKind::anon); self.graph.add_edge(parent_node, func_param_node, kind) diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index dab7346194..407caa90fb 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -15,7 +15,7 @@ use crate::{ use super::{ attr::LazyAttrListSpan, define_lazy_span_node, - params::{LazyFnParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, + params::{LazyFuncParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, transition::SpanTransitionChain, types::{LazyPathTypeSpan, LazyTypeSpan}, use_tree::LazyUseAliasSpan, @@ -47,7 +47,7 @@ define_lazy_span_node!( define_lazy_span_node!( LazyFuncSpan, - ast::Fn, + ast::Func, new(Func), @token { (name, name), @@ -57,7 +57,7 @@ define_lazy_span_node!( (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), - (params, params, LazyFnParamListSpan), + (params, params, LazyFuncParamListSpan), (ret_ty, ret_ty, LazyTypeSpan), } ); diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 0f3e121e48..c79cd9a135 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -33,7 +33,7 @@ mod transition; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct DynLazySpan(pub(super) Option); impl DynLazySpan { - pub fn invalid_span() -> Self { + pub fn invalid() -> Self { Self(None) } } @@ -68,7 +68,7 @@ pub fn mod_ast(db: &dyn SpannedHirDb, item: Mod) -> &HirOrigin { item.origin(db.as_hir_db()) } -pub fn func_ast(db: &dyn SpannedHirDb, item: Func) -> &HirOrigin { +pub fn func_ast(db: &dyn SpannedHirDb, item: Func) -> &HirOrigin { item.origin(db.as_hir_db()) } diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 560328435c..5c99a35166 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -5,10 +5,10 @@ use crate::span::{path::LazyPathSpan, LazySpanAtom}; use super::{define_lazy_span_node, types::LazyTypeSpan}; define_lazy_span_node!( - LazyFnParamListSpan, - ast::FnParamList, + LazyFuncParamListSpan, + ast::FuncParamList, @idx { - (param, LazyFnParamSpan), + (param, LazyFuncParamSpan), } ); @@ -40,8 +40,8 @@ define_lazy_span_node!( ); define_lazy_span_node!( - LazyFnParamSpan, - ast::FnParam, + LazyFuncParamSpan, + ast::FuncParam, @token { (mut_kw, mut_token), } diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index ea521573c7..283633a385 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -26,7 +26,7 @@ ast_node! { /// Use `[Item::kind]` to get the specific type of item. 
pub struct Item, SK::Mod - | SK::Fn + | SK::Func | SK::Struct | SK::Contract | SK::Enum @@ -42,7 +42,7 @@ impl Item { pub fn kind(&self) -> ItemKind { match self.syntax().kind() { SK::Mod => ItemKind::Mod(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Fn => ItemKind::Fn(AstNode::cast(self.syntax().clone()).unwrap()), + SK::Func => ItemKind::Fn(AstNode::cast(self.syntax().clone()).unwrap()), SK::Struct => ItemKind::Struct(AstNode::cast(self.syntax().clone()).unwrap()), SK::Contract => ItemKind::Contract(AstNode::cast(self.syntax().clone()).unwrap()), SK::Enum => ItemKind::Enum(AstNode::cast(self.syntax().clone()).unwrap()), @@ -78,21 +78,21 @@ impl Mod { ast_node! { /// `pub fn foo(_ x: T, from u: U) -> T where T: Trait2 { ... }` - pub struct Fn, - SK::Fn, -} -impl super::GenericParamsOwner for Fn {} -impl super::WhereClauseOwner for Fn {} -impl super::AttrListOwner for Fn {} -impl super::ItemModifierOwner for Fn {} -impl Fn { + pub struct Func, + SK::Func, +} +impl super::GenericParamsOwner for Func {} +impl super::WhereClauseOwner for Func {} +impl super::AttrListOwner for Func {} +impl super::ItemModifierOwner for Func {} +impl Func { /// Returns the name of the function. pub fn name(&self) -> Option { support::token(self.syntax(), SK::Ident) } /// Returns the function's parameter list. - pub fn params(&self) -> Option { + pub fn params(&self) -> Option { support::child(self.syntax()) } @@ -379,25 +379,25 @@ impl EnumVariantDef { ast_node! { pub struct TraitItemList, SK::TraitItemList, - IntoIterator, + IntoIterator, } ast_node! { pub struct ImplItemList, SK::ImplItemList, - IntoIterator, + IntoIterator, } ast_node! { pub struct ImplTraitItemList, SK::ImplTraitItemList, - IntoIterator, + IntoIterator, } ast_node! { pub struct ExternItemList, SK::ExternItemList, - IntoIterator, + IntoIterator, } ast_node! { @@ -425,7 +425,7 @@ pub trait ItemModifierOwner: AstNode { #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum ItemKind { Mod(Mod), - Fn(Fn), + Fn(Func), Struct(Struct), Contract(Contract), Enum(Enum), @@ -480,7 +480,7 @@ mod tests { match i { 0 => { assert!(matches!(item.kind(), ItemKind::Fn(_))); - let func: Fn = item.kind().try_into().unwrap(); + let func: Func = item.kind().try_into().unwrap(); assert_eq!(func.name().unwrap().text(), "bar"); } 1 => { @@ -504,7 +504,7 @@ mod tests { #evm pub unsafe fn foo(_ x: T, from u: U) -> (T, U) where T: Trait2 { return } "#; - let func: Fn = parse_item(source); + let func: Func = parse_item(source); assert_eq!(func.name().unwrap().text(), "foo"); assert_eq!(func.attr_list().unwrap().iter().count(), 2); diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 8b9c6a08c5..0ae5021eb7 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -6,19 +6,19 @@ use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; ast_node! { /// A list of parameters. /// `(self, a: u256, b: u256)` - pub struct FnParamList, - SK::FnParamList, - IntoIterator, + pub struct FuncParamList, + SK::FuncParamList, + IntoIterator, } ast_node! { /// A single parameter. /// `self` /// `label a: u256` - pub struct FnParam, + pub struct FuncParam, SK::FnParam, } -impl FnParam { +impl FuncParam { /// Returns the `mut` keyword if the parameter is mutable. pub fn mut_token(&self) -> Option { support::token(self.syntax(), SK::MutKw) @@ -26,21 +26,21 @@ impl FnParam { /// Returns the `label` if the parameter is labeled. /// `label` in `label a: u256`. 
- pub fn label(&self) -> Option { + pub fn label(&self) -> Option { self.syntax() .children_with_tokens() .find_map(|child| match child { - rowan::NodeOrToken::Token(token) => FnParamLabel::from_token(token), + rowan::NodeOrToken::Token(token) => FuncParamLabel::from_token(token), _ => None, }) } /// Returns the name of the parameter. /// `a` in `label a: u256`. - pub fn name(&self) -> Option { + pub fn name(&self) -> Option { let mut param_names = self.syntax().children_with_tokens().filter_map(|child| { if let rowan::NodeOrToken::Token(token) = child { - FnParamName::from_token(token) + FuncParamName::from_token(token) } else { None } @@ -316,31 +316,31 @@ pub trait WhereClauseOwner: AstNode { } } -pub enum FnParamLabel { +pub enum FuncParamLabel { /// `label` in `label a: u256` Ident(SyntaxToken), /// `_` in `_ a: u256`. Underscore(SyntaxToken), } -impl FnParamLabel { +impl FuncParamLabel { pub fn syntax(&self) -> SyntaxToken { match self { - FnParamLabel::Ident(token) => token, - FnParamLabel::Underscore(token) => token, + FuncParamLabel::Ident(token) => token, + FuncParamLabel::Underscore(token) => token, } .clone() } fn from_token(token: SyntaxToken) -> Option { match token.kind() { - SK::Ident => Some(FnParamLabel::Ident(token)), - SK::Underscore => Some(FnParamLabel::Underscore(token)), + SK::Ident => Some(FuncParamLabel::Ident(token)), + SK::Underscore => Some(FuncParamLabel::Underscore(token)), _ => None, } } } -pub enum FnParamName { +pub enum FuncParamName { /// `a` in `label a: u256` Ident(SyntaxToken), /// `self` parameter. @@ -348,21 +348,21 @@ pub enum FnParamName { /// `_` parameter. Underscore(SyntaxToken), } -impl FnParamName { +impl FuncParamName { pub fn syntax(&self) -> SyntaxToken { match self { - FnParamName::Ident(token) => token, - FnParamName::SelfParam(token) => token, - FnParamName::Underscore(token) => token, + FuncParamName::Ident(token) => token, + FuncParamName::SelfParam(token) => token, + FuncParamName::Underscore(token) => token, } .clone() } fn from_token(token: SyntaxToken) -> Option { match token.kind() { - SK::Ident => Some(FnParamName::Ident(token)), - SK::SelfKw => Some(FnParamName::SelfParam(token)), - SK::Underscore => Some(FnParamName::Underscore(token)), + SK::Ident => Some(FuncParamName::Ident(token)), + SK::SelfKw => Some(FuncParamName::SelfParam(token)), + SK::Underscore => Some(FuncParamName::Underscore(token)), _ => None, } } diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index fc03fed711..3ddda04f4b 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -3,40 +3,40 @@ use crate::SyntaxKind; use super::{ define_scope, expr_atom::BlockExprScope, - param::{parse_generic_params_opt, parse_where_clause_opt, FnParamListScope}, + param::{parse_generic_params_opt, parse_where_clause_opt, FuncParamListScope}, token_stream::TokenStream, type_::parse_type, Parser, }; define_scope! 
{ - pub(crate) FnScope { - fn_def_scope: FnDefScope + pub(crate) FuncScope { + fn_def_scope: FuncDefScope }, - Fn, + Func, Inheritance } #[derive(Clone, Copy, Debug)] -pub(crate) enum FnDefScope { +pub(crate) enum FuncDefScope { Normal, TraitDef, Extern, } -impl Default for FnDefScope { +impl Default for FuncDefScope { fn default() -> Self { Self::Normal } } -impl super::Parse for FnScope { +impl super::Parse for FuncScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::FnKw); match self.fn_def_scope { - FnDefScope::Normal => parse_normal_fn_def_impl(parser), - FnDefScope::TraitDef => parse_trait_fn_def_impl(parser), - FnDefScope::Extern => parse_extern_fn_def_impl(parser), + FuncDefScope::Normal => parse_normal_fn_def_impl(parser), + FuncDefScope::TraitDef => parse_trait_fn_def_impl(parser), + FuncDefScope::Extern => parse_extern_fn_def_impl(parser), } } } @@ -61,7 +61,7 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { parser.with_next_expected_tokens( |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnParamListScope::default(), None); + parser.parse(FuncParamListScope::default(), None); } else { parser.error_and_recover("expected `(` for the function arguments", None); } @@ -106,7 +106,7 @@ fn parse_trait_fn_def_impl(parser: &mut Parser) { parser.with_recovery_tokens( |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnParamListScope::default(), None); + parser.parse(FuncParamListScope::default(), None); } else { parser.error_and_recover("expected `(` for the function arguments", None); } @@ -144,7 +144,7 @@ fn parse_extern_fn_def_impl(parser: &mut Parser) { parser.with_recovery_tokens( |parser| { if parser.current_kind() == Some(SyntaxKind::LParen) { - parser.parse(FnParamListScope::default(), None); + parser.parse(FuncParamListScope::default(), None); } else { parser.error_and_recover("expected `(` for the function arguments", None); } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index f6c94690a6..9ad3a0ee3b 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -1,11 +1,11 @@ use std::{cell::Cell, rc::Rc}; -use crate::{parser::func::FnScope, SyntaxKind}; +use crate::{parser::func::FuncScope, SyntaxKind}; use super::{ attr, define_scope, expr::parse_expr, - func::FnDefScope, + func::FuncDefScope, param::{parse_generic_params_opt, parse_where_clause_opt}, struct_::RecordFieldDefListScope, token_stream::{LexicalToken, TokenStream}, @@ -83,7 +83,7 @@ impl super::Parse for ItemListScope { parser.parse(ModScope::default(), checkpoint); } Some(FnKw) => { - parser.parse(FnScope::default(), checkpoint); + parser.parse(FuncScope::default(), checkpoint); } Some(StructKw) => { parser.parse(super::struct_::StructScope::default(), checkpoint); @@ -320,7 +320,7 @@ impl super::Parse for TraitScope { define_scope! { TraitItemListScope, TraitItemList, Override(RBrace, Newline, FnKw) } impl super::Parse for TraitItemListScope { fn parse(&mut self, parser: &mut Parser) { - parse_fn_item_block(parser, false, FnDefScope::TraitDef) + parse_fn_item_block(parser, false, FuncDefScope::TraitDef) } } @@ -382,14 +382,14 @@ impl super::Parse for ImplScope { define_scope! 
{ ImplTraitItemListScope, ImplTraitItemList, Override(RBrace, FnKw) } impl super::Parse for ImplTraitItemListScope { fn parse(&mut self, parser: &mut Parser) { - parse_fn_item_block(parser, false, FnDefScope::Normal) + parse_fn_item_block(parser, false, FuncDefScope::Normal) } } define_scope! { ImplItemListScope, ImplItemList, Override(RBrace, FnKw) } impl super::Parse for ImplItemListScope { fn parse(&mut self, parser: &mut Parser) { - parse_fn_item_block(parser, true, FnDefScope::Normal) + parse_fn_item_block(parser, true, FuncDefScope::Normal) } } @@ -450,7 +450,7 @@ impl super::Parse for ExternScope { define_scope! { ExternItemListScope, ExternItemList, Override(RBrace, FnKw) } impl super::Parse for ExternItemListScope { fn parse(&mut self, parser: &mut Parser) { - parse_fn_item_block(parser, true, FnDefScope::Extern); + parse_fn_item_block(parser, true, FuncDefScope::Extern); } } @@ -495,7 +495,7 @@ impl super::Parse for TypeAliasScope { fn parse_fn_item_block( parser: &mut Parser, allow_modifier: bool, - fn_def_scope: FnDefScope, + fn_def_scope: FuncDefScope, ) { parser.bump_expected(SyntaxKind::LBrace); loop { @@ -520,7 +520,7 @@ fn parse_fn_item_block( match parser.current_kind() { Some(SyntaxKind::FnKw) => { - parser.parse(FnScope::new(fn_def_scope), checkpoint); + parser.parse(FuncScope::new(fn_def_scope), checkpoint); } _ => { parser.error_and_recover("only `fn` is allowed in the block", checkpoint); diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 6472c9bd21..61b291185f 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -11,11 +11,11 @@ use super::{ }; define_scope! { - pub(crate) FnParamListScope, - FnParamList, + pub(crate) FuncParamListScope, + FuncParamList, Override(RParen, Comma) } -impl super::Parse for FnParamListScope { +impl super::Parse for FuncParamListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LParen); if parser.bump_if(SyntaxKind::RParen) { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 098a2f39fd..a517dd71ec 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -341,7 +341,7 @@ pub enum SyntaxKind { /// `mod s { .. }` Mod, /// `fn foo(x: i32) -> i32 { .. }` - Fn, + Func, /// `struct Foo { .. }` Struct, /// `contract Foo { .. 
}` @@ -433,7 +433,7 @@ pub enum SyntaxKind { GenericParamList, /// `(x: i32, _ y: mut i32)` - FnParamList, + FuncParamList, /// `_ x: mut i32` FnParam, diff --git a/crates/parser2/test_files/error_recovery/items/extern_.snap b/crates/parser2/test_files/error_recovery/items/extern_.snap index 373eb022c6..23459991e9 100644 --- a/crates/parser2/test_files/error_recovery/items/extern_.snap +++ b/crates/parser2/test_files/error_recovery/items/extern_.snap @@ -12,7 +12,7 @@ Root@0..90 LBrace@7..8 "{" Newline@8..9 "\n" WhiteSpace@9..13 " " - Fn@13..41 + Func@13..41 ItemModifier@13..23 PubKw@13..16 "pub" WhiteSpace@16..17 " " @@ -21,7 +21,7 @@ Root@0..90 FnKw@24..26 "fn" WhiteSpace@26..27 " " Ident@27..30 "Foo" - FnParamList@30..41 + FuncParamList@30..41 LParen@30..31 "(" FnParam@31..40 Ident@31..32 "x" @@ -46,7 +46,7 @@ Root@0..90 LBrace@62..63 "{" Newline@63..65 "\n\n" WhiteSpace@65..69 " " - Fn@69..88 + Func@69..88 ItemModifier@69..79 PubKw@69..72 "pub" WhiteSpace@72..73 " " @@ -55,7 +55,7 @@ Root@0..90 FnKw@80..82 "fn" WhiteSpace@82..83 " " Ident@83..86 "foo" - FnParamList@86..88 + FuncParamList@86..88 LParen@86..87 "(" RParen@87..88 ")" Newline@88..89 "\n" diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index 7bebf11c78..69afea3379 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -5,7 +5,7 @@ input_file: crates/parser2/test_files/error_recovery/items/func.fe --- Root@0..133 ItemList@0..133 - Fn@0..78 + Func@0..78 FnKw@0..2 "fn" WhiteSpace@2..3 " " Ident@3..6 "foo" @@ -23,7 +23,7 @@ Root@0..133 Gt@15..16 ">" Error@16..17 Gt@16..17 ">" - FnParamList@17..47 + FuncParamList@17..47 LParen@17..18 "(" FnParam@18..24 Ident@18..19 "x" @@ -93,7 +93,7 @@ Root@0..133 Newline@75..77 "\n\n" RBrace@77..78 "}" Newline@78..80 "\n\n" - Fn@80..132 + Func@80..132 FnKw@80..82 "fn" WhiteSpace@82..83 " " Ident@83..86 "foo" @@ -110,7 +110,7 @@ Root@0..133 Gt@97..98 ">" Error@98..98 Error@98..98 - FnParamList@98..106 + FuncParamList@98..106 LParen@98..99 "(" FnParam@99..105 Ident@99..100 "x" diff --git a/crates/parser2/test_files/syntax_node/items/extern.snap b/crates/parser2/test_files/syntax_node/items/extern.snap index 08b5f3a644..fe27a22685 100644 --- a/crates/parser2/test_files/syntax_node/items/extern.snap +++ b/crates/parser2/test_files/syntax_node/items/extern.snap @@ -20,7 +20,7 @@ Root@0..146 LBrace@20..21 "{" Newline@21..22 "\n" WhiteSpace@22..26 " " - Fn@26..76 + Func@26..76 ItemModifier@26..36 PubKw@26..29 "pub" WhiteSpace@29..30 " " @@ -29,7 +29,7 @@ Root@0..146 FnKw@37..39 "fn" WhiteSpace@39..40 " " Ident@40..45 "write" - FnParamList@45..68 + FuncParamList@45..68 LParen@45..46 "(" FnParam@46..55 Ident@46..49 "loc" @@ -61,7 +61,7 @@ Root@0..146 Ident@72..76 "bool" Newline@76..77 "\n" WhiteSpace@77..81 " " - Fn@81..131 + Func@81..131 ItemModifier@81..91 PubKw@81..84 "pub" WhiteSpace@84..85 " " @@ -70,7 +70,7 @@ Root@0..146 FnKw@92..94 "fn" WhiteSpace@94..95 " " Ident@95..99 "read" - FnParamList@99..122 + FuncParamList@99..122 LParen@99..100 "(" FnParam@100..109 Ident@100..103 "loc" @@ -102,11 +102,11 @@ Root@0..146 Ident@126..131 "usize" Newline@131..132 "\n" WhiteSpace@132..136 " " - Fn@136..144 + Func@136..144 FnKw@136..138 "fn" WhiteSpace@138..139 " " Ident@139..142 "foo" - FnParamList@142..144 + FuncParamList@142..144 LParen@142..143 "(" RParen@143..144 ")" Newline@144..145 "\n" diff --git 
a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index dca917002e..ab98a0d767 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -5,14 +5,14 @@ input_file: crates/parser2/test_files/syntax_node/items/func.fe --- Root@0..361 ItemList@0..361 - Fn@0..30 + Func@0..30 ItemModifier@0..3 PubKw@0..3 "pub" WhiteSpace@3..4 " " FnKw@4..6 "fn" WhiteSpace@6..7 " " Ident@7..10 "foo" - FnParamList@10..12 + FuncParamList@10..12 LParen@10..11 "(" RParen@11..12 ")" WhiteSpace@12..13 " " @@ -36,11 +36,11 @@ Root@0..361 Newline@28..29 "\n" RBrace@29..30 "}" Newline@30..32 "\n\n" - Fn@32..80 + Func@32..80 FnKw@32..34 "fn" WhiteSpace@34..35 " " Ident@35..38 "bar" - FnParamList@38..63 + FuncParamList@38..63 LParen@38..39 "(" FnParam@39..47 Ident@39..42 "bar" @@ -82,11 +82,11 @@ Root@0..361 Newline@78..79 "\n" RBrace@79..80 "}" Newline@80..82 "\n\n" - Fn@82..178 + Func@82..178 FnKw@82..84 "fn" WhiteSpace@84..85 " " Ident@85..88 "baz" - FnParamList@88..161 + FuncParamList@88..161 LParen@88..89 "(" FnParam@89..109 Ident@89..93 "from" @@ -156,7 +156,7 @@ Root@0..361 Newline@176..177 "\n" RBrace@177..178 "}" Newline@178..180 "\n\n" - Fn@180..306 + Func@180..306 FnKw@180..182 "fn" WhiteSpace@182..183 " " Ident@183..192 "generics1" @@ -176,7 +176,7 @@ Root@0..361 TypeGenericParam@203..204 Ident@203..204 "U" Gt@204..205 ">" - FnParamList@205..225 + FuncParamList@205..225 LParen@205..206 "(" FnParam@206..210 Ident@206..207 "t" @@ -275,7 +275,7 @@ Root@0..361 Newline@304..305 "\n" RBrace@305..306 "}" Newline@306..308 "\n\n" - Fn@308..361 + Func@308..361 FnKw@308..310 "fn" WhiteSpace@310..311 " " Ident@311..315 "decl" @@ -288,7 +288,7 @@ Root@0..361 TypeGenericParam@319..320 Ident@319..320 "U" Gt@320..321 ">" - FnParamList@321..340 + FuncParamList@321..340 LParen@321..322 "(" FnParam@322..339 Ident@322..323 "t" diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 5c8744d28d..d2485f2d2a 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -40,14 +40,14 @@ Root@0..272 LBrace@25..26 "{" Newline@26..27 "\n" WhiteSpace@27..31 " " - Fn@31..135 + Func@31..135 ItemModifier@31..34 PubKw@31..34 "pub" WhiteSpace@34..35 " " FnKw@35..37 "fn" WhiteSpace@37..38 " " Ident@38..41 "add" - FnParamList@41..58 + FuncParamList@41..58 LParen@41..42 "(" FnParam@42..46 SelfKw@42..46 "self" @@ -161,7 +161,7 @@ Root@0..272 LBrace@175..176 "{" Newline@176..177 "\n" WhiteSpace@177..181 " " - Fn@181..270 + Func@181..270 FnKw@181..183 "fn" WhiteSpace@183..184 " " Ident@184..187 "add" @@ -185,7 +185,7 @@ Root@0..272 Ident@195..196 "T" Gt@196..197 ">" Gt@197..198 ">" - FnParamList@198..212 + FuncParamList@198..212 LParen@198..199 "(" FnParam@199..203 SelfKw@199..203 "self" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index a482a10541..a80d601968 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -45,11 +45,11 @@ Root@0..335 LBrace@26..27 "{" Newline@27..28 "\n" WhiteSpace@28..32 " " - Fn@32..65 + Func@32..65 FnKw@32..34 "fn" WhiteSpace@34..35 " " Ident@35..38 "foo" - FnParamList@38..40 + FuncParamList@38..40 LParen@38..39 "(" RParen@39..40 ")" WhiteSpace@40..41 " " @@ -151,7 +151,7 @@ 
Root@0..335 LBrace@130..131 "{" Newline@131..132 "\n" WhiteSpace@132..136 " " - Fn@136..203 + Func@136..203 FnKw@136..138 "fn" WhiteSpace@138..139 " " Ident@139..142 "foo" @@ -175,7 +175,7 @@ Root@0..335 Ident@157..158 "U" Gt@158..159 ">" Gt@159..160 ">" - FnParamList@160..166 + FuncParamList@160..166 LParen@160..161 "(" FnParam@161..165 Ident@161..162 "t" @@ -294,7 +294,7 @@ Root@0..335 LBrace@260..261 "{" Newline@261..262 "\n" WhiteSpace@262..266 " " - Fn@266..333 + Func@266..333 FnKw@266..268 "fn" WhiteSpace@268..269 " " Ident@269..272 "foo" @@ -318,7 +318,7 @@ Root@0..335 Ident@287..288 "U" Gt@288..289 ">" Gt@289..290 ">" - FnParamList@290..296 + FuncParamList@290..296 LParen@290..291 "(" FnParam@291..295 Ident@291..292 "t" diff --git a/crates/parser2/test_files/syntax_node/items/mod.snap b/crates/parser2/test_files/syntax_node/items/mod.snap index 4d3981a5fb..15f6d25ddc 100644 --- a/crates/parser2/test_files/syntax_node/items/mod.snap +++ b/crates/parser2/test_files/syntax_node/items/mod.snap @@ -17,11 +17,11 @@ Root@0..146 LBrace@12..13 "{" Newline@13..14 "\n" WhiteSpace@14..18 " " - Fn@18..78 + Func@18..78 FnKw@18..20 "fn" WhiteSpace@20..21 " " Ident@21..28 "foo_foo" - FnParamList@28..53 + FuncParamList@28..53 LParen@28..29 "(" FnParam@29..37 Ident@29..32 "bar" diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index 6433173821..cc4f36d9c1 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -26,7 +26,7 @@ Root@0..592 LBrace@31..32 "{" Newline@32..33 "\n" WhiteSpace@33..37 " " - Fn@37..79 + Func@37..79 FnKw@37..39 "fn" WhiteSpace@39..40 " " Ident@40..43 "foo" @@ -54,7 +54,7 @@ Root@0..592 PathSegment@63..66 Ident@63..66 "i32" Gt@66..67 ">" - FnParamList@67..79 + FuncParamList@67..79 LParen@67..68 "(" FnParam@68..72 Ident@68..69 "t" @@ -77,7 +77,7 @@ Root@0..592 RParen@78..79 ")" Newline@79..81 "\n\n" WhiteSpace@81..85 " " - Fn@85..180 + Func@85..180 FnKw@85..87 "fn" WhiteSpace@87..88 " " Ident@88..102 "default_method" @@ -100,7 +100,7 @@ Root@0..592 PathSegment@112..115 Ident@112..115 "Sub" Gt@115..116 ">" - FnParamList@116..132 + FuncParamList@116..132 LParen@116..117 "(" FnParam@117..123 Ident@117..120 "lhs" @@ -196,11 +196,11 @@ Root@0..592 LBrace@209..210 "{" Newline@210..211 "\n" WhiteSpace@211..215 " " - Fn@215..270 + Func@215..270 FnKw@215..217 "fn" WhiteSpace@217..218 " " Ident@218..221 "add" - FnParamList@221..237 + FuncParamList@221..237 LParen@221..222 "(" FnParam@222..226 SelfKw@222..226 "self" @@ -253,7 +253,7 @@ Root@0..592 LBrace@290..291 "{" Newline@291..292 "\n" WhiteSpace@292..296 " " - Fn@296..353 + Func@296..353 FnKw@296..298 "fn" WhiteSpace@298..299 " " Ident@299..304 "parse" @@ -269,7 +269,7 @@ Root@0..592 PathSegment@308..319 Ident@308..319 "TokenStream" Gt@319..320 ">" - FnParamList@320..353 + FuncParamList@320..353 LParen@320..321 "(" FnParam@321..329 MutKw@321..324 "mut" @@ -349,7 +349,7 @@ Root@0..592 LBrace@409..410 "{" Newline@410..411 "\n" WhiteSpace@411..415 " " - Fn@415..590 + Func@415..590 ItemModifier@415..418 PubKw@415..418 "pub" WhiteSpace@418..419 " " @@ -368,7 +368,7 @@ Root@0..592 PathSegment@431..436 Ident@431..436 "Parse" Gt@436..437 ">" - FnParamList@437..493 + FuncParamList@437..493 LParen@437..438 "(" FnParam@438..446 MutKw@438..441 "mut" From 89922fa6346183573ad6b37f021ab85aa2360399 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 26 May 2023 18:37:19 +0200 Subject: [PATCH 
175/678] Implement `Visitor` --- .../src/name_resolution/import_resolver.rs | 2 +- .../src/name_resolution/name_resolver.rs | 2 +- crates/hir/src/hir_def/expr.rs | 12 +- crates/hir/src/hir_def/item.rs | 36 +- crates/hir/src/hir_def/mod.rs | 6 +- crates/hir/src/hir_def/params.rs | 2 +- crates/hir/src/hir_def/pat.rs | 8 +- crates/hir/src/hir_def/path.rs | 2 +- crates/hir/src/hir_def/stmt.rs | 10 +- crates/hir/src/hir_def/types.rs | 2 +- crates/hir/src/hir_def/use_tree.rs | 8 +- crates/hir/src/lib.rs | 4 +- crates/hir/src/lower/expr.rs | 4 +- crates/hir/src/lower/item.rs | 24 +- crates/hir/src/lower/scope_builder.rs | 8 +- crates/hir/src/span/attr.rs | 48 +- crates/hir/src/span/expr.rs | 32 +- crates/hir/src/span/item.rs | 38 +- crates/hir/src/span/mod.rs | 8 +- crates/hir/src/span/params.rs | 25 +- crates/hir/src/span/pat.rs | 22 +- crates/hir/src/span/path.rs | 7 +- crates/hir/src/span/stmt.rs | 8 +- crates/hir/src/span/transition.rs | 2 +- crates/hir/src/span/types.rs | 10 +- crates/hir/src/span/use_tree.rs | 5 + crates/hir/src/visitor.rs | 1926 ++++++++++++++++- crates/parser2/src/ast/expr.rs | 4 +- crates/parser2/src/ast/item.rs | 10 +- 29 files changed, 2104 insertions(+), 171 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 2eba2df207..35acb74d39 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -796,7 +796,7 @@ impl IntermediateUse { .use_ .path(db.as_hir_db()) .to_opt()? - .segments(db.as_hir_db()); + .data(db.as_hir_db()); let seg_idx = self.unresolved_from; let segment = segments[seg_idx].to_opt()?; diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 76094b384e..6725b920a9 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -112,7 +112,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { path: PathId, scope: ScopeId, ) -> Result { - let segments = path.segments(self.db.as_hir_db()); + let segments = path.data(self.db.as_hir_db()); if segments.is_empty() { return Err(PathResolutionError::invalid(0)); } diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index 0abc647779..57fc99f2cf 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -1,6 +1,6 @@ use cranelift_entity::entity_impl; -use crate::span::expr::LazyExprSpan; +use crate::{span::expr::LazyExprSpan, HirDb}; use super::{Body, GenericArgListId, IdentId, IntegerId, LitKind, Partial, PatId, PathId, StmtId}; @@ -22,7 +22,7 @@ pub enum Expr { Path(Partial), /// The record construction expression. /// The fist `PathId` is the record type, the second is the record fields. - RecordInit(Partial, Vec), + RecordInit(Partial, Vec), Field(ExprId, Partial), Tuple(Vec), /// The first `ExprId` is the indexed expression, the second is the index. 
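[Illustration, not part of the patch] The hunks above reflect a convention applied throughout this change: interned HIR wrappers expose their payload through a uniform `data` accessor (`PathId::segments` becomes `PathId::data`, `UsePathId::segments` becomes `UsePathId::data`, and `ExprId`/`PatId`/`StmtId` gain `data(db, body)` helpers further down). A minimal sketch of a caller under that convention; the `hir::` import paths and the helper name are assumptions for the example, and `Partial::is_present` is the accessor added in `hir_def/mod.rs` below:

use hir::{hir_def::PathId, HirDb};

// Hypothetical helper: count the segments of a path that were parsed
// successfully, skipping `Partial::Absent` placeholders.
fn present_segment_count(db: &dyn HirDb, path: PathId) -> usize {
    // `data(db)` is the renamed accessor (previously `segments(db)`); it
    // returns the interned `Vec<Partial<PathSegment>>` by reference.
    path.data(db).iter().filter(|seg| seg.is_present()).count()
}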
@@ -48,7 +48,11 @@ entity_impl!(ExprId); impl ExprId { pub fn lazy_span(self, body: Body) -> LazyExprSpan { - LazyExprSpan::new(self, body) + LazyExprSpan::new(body, self) + } + + pub fn data(self, db: &dyn HirDb, body: Body) -> &Partial { + &body.exprs(db)[self] } } @@ -144,7 +148,7 @@ pub struct CallArg { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct RecordField { +pub struct Field { pub label: Option, pub expr: ExprId, } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 4bb4dea74e..585deda404 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -320,7 +320,7 @@ pub struct Struct { pub vis: Visibility, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub fields: RecordFieldListId, + pub fields: FieldDefListId, pub top_mod: TopLevelMod, #[return_ref] @@ -340,7 +340,7 @@ pub struct Contract { pub name: Partial, pub attributes: AttrListId, pub vis: Visibility, - pub fields: RecordFieldListId, + pub fields: FieldDefListId, pub top_mod: TopLevelMod, #[return_ref] @@ -362,7 +362,7 @@ pub struct Enum { pub vis: Visibility, pub generic_params: GenericParamListId, pub where_clause: WhereClauseId, - pub variants: EnumVariantListId, + pub variants: VariantDefListId, pub top_mod: TopLevelMod, #[return_ref] @@ -414,6 +414,12 @@ impl Impl { pub fn lazy_span(self) -> LazyImplSpan { LazyImplSpan::new(self) } + + pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } } #[salsa::tracked] @@ -436,6 +442,12 @@ impl Trait { pub fn lazy_span(self) -> LazyTraitSpan { LazyTraitSpan::new(self) } + + pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } } #[salsa::tracked] @@ -457,6 +469,12 @@ impl ImplTrait { pub fn lazy_span(self) -> LazyImplTraitSpan { LazyImplTraitSpan::new(self) } + + pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } } #[salsa::tracked] @@ -567,26 +585,26 @@ impl ItemModifier { } #[salsa::interned] -pub struct RecordFieldListId { +pub struct FieldDefListId { #[return_ref] - pub data: Vec, + pub data: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct RecordField { +pub struct FieldDef { pub name: Partial, pub ty: Partial, pub vis: Visibility, } #[salsa::interned] -pub struct EnumVariantListId { +pub struct VariantDefListId { #[return_ref] - pub data: Vec, + pub data: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct EnumVariant { +pub struct VariantDef { pub name: Partial, pub ty: Option, } diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index f93f0eca42..9cd42e3f0c 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -67,7 +67,7 @@ pub struct StringId { pub data: String, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] pub enum LitKind { Int(IntegerId), String(StringId), @@ -105,6 +105,10 @@ impl Partial { Self::Absent => None, } } + + pub fn is_present(&self) -> bool { + matches!(self, Self::Present(_)) + } } impl Default for Partial { diff --git a/crates/hir/src/hir_def/params.rs 
b/crates/hir/src/hir_def/params.rs index 6d5254f6ee..96ab9b2eb7 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -5,7 +5,7 @@ use super::{Body, IdentId, Partial, PathId}; #[salsa::interned] pub struct GenericArgListId { #[return_ref] - pub args: Vec, + pub data: Vec, } #[salsa::interned] diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs index 3b55fbd3bf..5839948406 100644 --- a/crates/hir/src/hir_def/pat.rs +++ b/crates/hir/src/hir_def/pat.rs @@ -1,6 +1,6 @@ use cranelift_entity::entity_impl; -use crate::span::pat::LazyPatSpan; +use crate::{span::pat::LazyPatSpan, HirDb}; use super::{Body, IdentId, LitKind, Partial, PathId}; @@ -22,7 +22,11 @@ entity_impl!(PatId); impl PatId { pub fn lazy_span(self, body: Body) -> LazyPatSpan { - LazyPatSpan::new(self, body) + LazyPatSpan::new(body, self) + } + + pub fn data(self, db: &dyn HirDb, body: Body) -> &Partial { + &body.pats(db)[self] } } diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index 418f51a4f0..f0bf88b309 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -4,5 +4,5 @@ use super::IdentId; #[salsa::interned] pub struct PathId { - pub segments: Vec>, + pub data: Vec>, } diff --git a/crates/hir/src/hir_def/stmt.rs b/crates/hir/src/hir_def/stmt.rs index 707d2bdf3e..648dca03f0 100644 --- a/crates/hir/src/hir_def/stmt.rs +++ b/crates/hir/src/hir_def/stmt.rs @@ -1,8 +1,8 @@ use cranelift_entity::entity_impl; -use crate::span::stmt::LazyStmtSpan; +use crate::{span::stmt::LazyStmtSpan, HirDb}; -use super::{Body, ExprId, PatId, TypeId}; +use super::{Body, ExprId, Partial, PatId, TypeId}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Stmt { @@ -36,6 +36,10 @@ entity_impl!(StmtId); impl StmtId { pub fn lazy_span(self, body: Body) -> LazyStmtSpan { - LazyStmtSpan::new(self, body) + LazyStmtSpan::new(body, self) + } + + pub fn data(self, db: &dyn HirDb, body: Body) -> &Partial { + &body.stmts(db)[self] } } diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index c4d59cbf3a..468adaefc5 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -2,7 +2,7 @@ use super::{Body, GenericArgListId, Partial, PathId}; #[salsa::interned] pub struct TypeId { - kind: TypeKind, + pub data: TypeKind, } #[derive(Clone, PartialEq, Eq, Hash, Debug)] diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index 59a9290a86..18e2e0453e 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -5,26 +5,26 @@ use super::IdentId; #[salsa::interned] pub struct UsePathId { #[return_ref] - pub segments: Vec>, + pub data: Vec>, } impl UsePathId { pub fn is_glob(&self, db: &dyn HirDb) -> bool { - self.segments(db) + self.data(db) .last() .and_then(|seg| seg.to_opt()) .map_or(false, |seg| seg.is_glob()) } pub fn last_ident(&self, db: &dyn HirDb) -> Option { - self.segments(db) + self.data(db) .last() .and_then(|seg| seg.to_opt()) .and_then(|seg| seg.ident()) } pub fn segment_len(&self, db: &dyn HirDb) -> usize { - self.segments(db).len() + self.data(db).len() } } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ed29f7e101..7fd1c5865a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -40,8 +40,8 @@ pub struct Jar( hir_def::WhereClauseId, hir_def::GenericArgListId, hir_def::GenericParamListId, - hir_def::RecordFieldListId, - hir_def::EnumVariantListId, + hir_def::FieldDefListId, + 
hir_def::VariantDefListId, hir_def::ImplItemListId, hir_def::TypeId, hir_def::UsePathId, diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index 61add0b2cc..b9f7008dde 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -85,7 +85,7 @@ impl Expr { .map(|fields| { fields .into_iter() - .map(|field| RecordField::lower_ast(ctxt, field)) + .map(|field| Field::lower_ast(ctxt, field)) .collect() }) .unwrap_or_default(); @@ -257,7 +257,7 @@ impl CallArg { } } -impl RecordField { +impl Field { fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::RecordField) -> Self { let label = ast .label() diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 0330872bc9..c30e313c39 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -147,7 +147,7 @@ impl Struct { let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); - let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); + let fields = FieldDefListId::lower_ast_opt(ctxt, ast.fields()); let origin = HirOrigin::raw(&ast); let struct_ = Self::new( @@ -179,7 +179,7 @@ impl Contract { let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); - let fields = RecordFieldListId::lower_ast_opt(ctxt, ast.fields()); + let fields = FieldDefListId::lower_ast_opt(ctxt, ast.fields()); let origin = HirOrigin::raw(&ast); let contract = Self::new( @@ -211,7 +211,7 @@ impl Enum { let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); - let variants = EnumVariantListId::lower_ast_opt(ctxt, ast.variants()); + let variants = VariantDefListId::lower_ast_opt(ctxt, ast.variants()); let origin = HirOrigin::raw(&ast); let enum_ = Self::new( @@ -414,11 +414,11 @@ impl ItemModifier { } } -impl RecordFieldListId { +impl FieldDefListId { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::RecordFieldDefList) -> Self { let fields = ast .into_iter() - .map(|field| RecordField::lower_ast(ctxt, field)) + .map(|field| FieldDef::lower_ast(ctxt, field)) .collect(); Self::new(ctxt.db(), fields) } @@ -429,7 +429,7 @@ impl RecordFieldListId { } } -impl RecordField { +impl FieldDef { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::RecordFieldDef) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); @@ -443,23 +443,23 @@ impl RecordField { } } -impl EnumVariantListId { - fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::EnumVariantDefList) -> Self { +impl VariantDefListId { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::VariantDefList) -> Self { let variants = ast .into_iter() - .map(|variant| EnumVariant::lower_ast(ctxt, variant)) + .map(|variant| VariantDef::lower_ast(ctxt, variant)) .collect(); Self::new(ctxt.db(), variants) } - fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { + fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Self { ast.map(|ast| Self::lower_ast(ctxt, ast)) .unwrap_or(Self::new(ctxt.db(), Vec::new())) } } -impl EnumVariant { - fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::EnumVariantDef) -> Self { +impl VariantDef { + fn 
lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::VariantDef) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let ty = ast.ty().map(|ty| TypeId::lower_ast(ctxt, ty)); diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 09f4200fd1..156b4f505d 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -4,8 +4,8 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ hir_def::{ scope_graph::{EdgeKind, Scope, ScopeEdge, ScopeGraph, ScopeId}, - EnumVariantListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, - RecordFieldListId, TopLevelMod, Use, Visibility, + FieldDefListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, TopLevelMod, + Use, VariantDefListId, Visibility, }, HirDb, }; @@ -249,7 +249,7 @@ impl<'db> ScopeGraphBuilder<'db> { &mut self, parent_node: NodeId, parent_item: ItemKind, - fields: RecordFieldListId, + fields: FieldDefListId, ) { let parent_scope = ScopeId::Item(parent_item); @@ -272,7 +272,7 @@ impl<'db> ScopeGraphBuilder<'db> { &mut self, parent_node: NodeId, parent_item: ItemKind, - variants: EnumVariantListId, + variants: VariantDefListId, ) { let parent_scope = ScopeId::Item(parent_item); let parent_vis = parent_item.vis(self.db); diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs index f0edb8adb2..3fe344b493 100644 --- a/crates/hir/src/span/attr.rs +++ b/crates/hir/src/span/attr.rs @@ -1,6 +1,4 @@ -use parser::ast::{self, prelude::*}; - -use crate::span::transition::ResolvedOrigin; +use parser::ast; use super::define_lazy_span_node; @@ -11,35 +9,27 @@ define_lazy_span_node!( (attr, LazyAttrSpan), } ); -impl LazyAttrListSpan { - pub fn normal_attr(&self, idx: usize) -> LazyNormalAttrSpan { - self.clone().normal_attr_moved(idx) +impl LazyAttrListSpan {} + +define_lazy_span_node!(LazyAttrSpan); +impl LazyAttrSpan { + pub fn into_normal_attr(&self) -> LazyNormalAttrSpan { + self.clone().into_normal_attr_moved() } - pub fn normal_attr_moved(mut self, idx: usize) -> LazyNormalAttrSpan { - fn f(origin: ResolvedOrigin, arg: crate::span::transition::LazyArg) -> ResolvedOrigin { - let idx = match arg { - crate::span::transition::LazyArg::Idx(idx) => idx, - _ => unreachable!(), - }; - origin.map(|node| { - ast::AttrList::cast(node) - .and_then(|f| f.normal_attrs().nth(idx)) - .map(|n| n.syntax().clone().into()) - }) - } + pub fn into_normal_attr_moved(self) -> LazyNormalAttrSpan { + LazyNormalAttrSpan(self.0) + } - let lazy_transition = crate::span::transition::LazyTransitionFn { - f, - arg: crate::span::transition::LazyArg::Idx(idx), - }; + pub fn into_doc_comment_attr(&self) -> LazyDocCommentAttrSpan { + self.clone().into_doc_comment_attr_moved() + } - self.0.push(lazy_transition); - LazyNormalAttrSpan(self.0) + pub fn into_doc_comment_attr_moved(self) -> LazyDocCommentAttrSpan { + LazyDocCommentAttrSpan(self.0) } } -define_lazy_span_node!(LazyAttrSpan); define_lazy_span_node!( LazyNormalAttrSpan, ast::NormalAttr, @@ -51,6 +41,14 @@ define_lazy_span_node!( } ); +define_lazy_span_node!( + LazyDocCommentAttrSpan, + ast::DocCommentAttr, + @token { + (doc, doc), + } +); + define_lazy_span_node!( LazyAttrArgListSpan, ast::AttrArgList, diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index 68387f189c..0dd76249ff 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::{ hir_def::{Body, ExprId}, - span::{params::LazyGenericArgListSpan, 
path::LazyPathSpan, LazySpanAtom}, + span::{params::LazyGenericArgListSpan, path::LazyPathSpan, LazyLitSpan, LazySpanAtom}, SpannedHirDb, }; @@ -13,11 +13,15 @@ use super::{ define_lazy_span_node!(LazyExprSpan, ast::Expr,); impl LazyExprSpan { - pub fn new(expr: ExprId, body: Body) -> Self { + pub fn new(body: Body, expr: ExprId) -> Self { let root = ExprRoot { expr, body }; Self(SpanTransitionChain::new(root)) } + pub fn into_lit_expr(self) -> LazyLitExprSpan { + LazyLitExprSpan(self.0) + } + pub fn into_bin_expr(self) -> LazyBinExprSpan { LazyBinExprSpan(self.0) } @@ -51,6 +55,14 @@ impl LazyExprSpan { } } +define_lazy_span_node! { + LazyLitExprSpan, + ast::LitExpr, + @node { + (lit, lit, LazyLitSpan), + } +} + define_lazy_span_node!( LazyBinExprSpan, ast::BinExpr, @@ -101,7 +113,7 @@ define_lazy_span_node!( ast::RecordInitExpr, @node { (path, path, LazyPathSpan), - (fields, fields, LazyRecordFieldListSpan), + (fields, fields, LazyFieldListSpan), } ); @@ -138,15 +150,15 @@ define_lazy_span_node!( ); define_lazy_span_node!( - LazyRecordFieldListSpan, - ast::RecordFieldList, + LazyFieldListSpan, + ast::FieldList, @idx { - (field, LazyRecordFieldSpan), + (field, LazyFieldSpan), } ); define_lazy_span_node!( - LazyRecordFieldSpan, + LazyFieldSpan, ast::RecordField, @token { (label, label), @@ -157,14 +169,16 @@ define_lazy_span_node!( LazyMatchArmListSpan, ast::MatchArmList, @idx { - (arm, LazySpanAtom), + (arm, LazyMatchArmSpan), } ); +define_lazy_span_node!(LazyMatchArmSpan); + #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub(crate) struct ExprRoot { expr: ExprId, - body: Body, + pub(crate) body: Body, } impl ChainInitiator for ExprRoot { diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 407caa90fb..7f7b9cfdf6 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -17,7 +17,7 @@ use super::{ define_lazy_span_node, params::{LazyFuncParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, transition::SpanTransitionChain, - types::{LazyPathTypeSpan, LazyTypeSpan}, + types::{LazyPathTypeSpan, LazyTySpan}, use_tree::LazyUseAliasSpan, }; @@ -58,7 +58,7 @@ define_lazy_span_node!( (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), (params, params, LazyFuncParamListSpan), - (ret_ty, ret_ty, LazyTypeSpan), + (ret_ty, ret_ty, LazyTySpan), } ); @@ -74,7 +74,7 @@ define_lazy_span_node!( (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), - (fields, fields, LazyRecordFieldDefListSpan), + (fields, fields, LazyFieldDefListSpan), } ); @@ -88,7 +88,7 @@ define_lazy_span_node!( @node { (attributes, attr_list, LazyAttrListSpan), (modifier, modifier, LazyItemModifierSpan), - (fields, fields, LazyRecordFieldDefListSpan), + (fields, fields, LazyFieldDefListSpan), } ); @@ -104,7 +104,7 @@ define_lazy_span_node!( (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), - (variants, variants, LazyEnumVariantListSpan), + (variants, variants, LazyVariantDefListSpan), } ); @@ -120,7 +120,7 @@ define_lazy_span_node!( (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), } ); @@ -132,7 +132,7 @@ define_lazy_span_node!( (attributes, attr_list, LazyAttrListSpan), 
(generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), - (target_ty, ty, LazyTypeSpan), + (target_ty, ty, LazyTySpan), } ); define_lazy_span_node!( @@ -159,7 +159,7 @@ define_lazy_span_node!( (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (trait_ref, trait_ref, LazyPathTypeSpan), - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), } ); @@ -172,7 +172,7 @@ define_lazy_span_node!( } @node { (attributes, attr_list, LazyAttrListSpan), - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), } ); @@ -252,41 +252,41 @@ impl LazyUseSpan { define_lazy_span_node!(LazyBodySpan, ast::Expr, new(Body),); define_lazy_span_node!( - LazyRecordFieldDefListSpan, + LazyFieldDefListSpan, ast::RecordFieldDefList, @idx { - (field, LazyRecordFieldDefSpan), + (field, LazyFieldDefSpan), } ); define_lazy_span_node!( - LazyRecordFieldDefSpan, + LazyFieldDefSpan, ast::RecordFieldDef, @token { (pub_span, pub_kw), (name, name), } @node { - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), } ); define_lazy_span_node!( - LazyEnumVariantListSpan, - ast::EnumVariantDefList, + LazyVariantDefListSpan, + ast::VariantDefList, @idx { - (variant, LazyEnumVariantSpan), + (variant, LazyVariantDefSpan), } ); define_lazy_span_node!( - LazyEnumVariantSpan, - ast::EnumVariantDef, + LazyVariantDefSpan, + ast::VariantDef, @token { (name, name), } @node { - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), } ); diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index c79cd9a135..7cccb88e7e 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -24,7 +24,7 @@ pub mod stmt; pub mod types; pub mod use_tree; -mod transition; +pub(crate) mod transition; /// This struct represents a dynamic lazy span, which can be converted from all /// types that implement [`LazySpan`] in this module. 
We want to avoid `dyn @@ -234,3 +234,9 @@ use transition::define_lazy_span_node; use self::transition::SpanTransitionChain; define_lazy_span_node!(LazySpanAtom); +impl LazySpanAtom { + pub(super) fn into_lit_span(self) -> LazyLitSpan { + LazyLitSpan(self.0) + } +} +define_lazy_span_node!(LazyLitSpan); diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 5c99a35166..b7b43f0da0 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::span::{path::LazyPathSpan, LazySpanAtom}; -use super::{define_lazy_span_node, types::LazyTypeSpan}; +use super::{define_lazy_span_node, types::LazyTySpan}; define_lazy_span_node!( LazyFuncParamListSpan, @@ -24,7 +24,7 @@ define_lazy_span_node!( LazyGenericArgListSpan, ast::GenericArgList, @idx { - (param, LazyGenericArgParamSpan), + (arg, LazyGenericArgSpan), } ); @@ -48,7 +48,7 @@ define_lazy_span_node!( @node { (label, label, LazySpanAtom), (name, name, LazySpanAtom), - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), } ); @@ -82,17 +82,30 @@ define_lazy_span_node!( (name, name), } @node { - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), } ); -define_lazy_span_node!(LazyGenericArgParamSpan); +define_lazy_span_node!(LazyGenericArgSpan); +impl LazyGenericArgSpan { + pub fn into_type_arg(self) -> LazyTypeGenericArgSpan { + LazyTypeGenericArgSpan(self.0) + } +} + +define_lazy_span_node!( + LazyTypeGenericArgSpan, + ast::TypeGenericArg, + @node { + (ty, ty, LazyTySpan), + } +); define_lazy_span_node!( LazyWherePredicateSpan, ast::WherePredicate, @node { - (ty, ty, LazyTypeSpan), + (ty, ty, LazyTySpan), (bounds, bounds, LazyTypeBoundListSpan), } ); diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs index 02a6774398..a5052d996d 100644 --- a/crates/hir/src/span/pat.rs +++ b/crates/hir/src/span/pat.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::{ hir_def::{Body, PatId}, - span::path::LazyPathSpan, + span::{path::LazyPathSpan, LazyLitSpan}, SpannedHirDb, }; @@ -13,7 +13,7 @@ use super::{ define_lazy_span_node!(LazyPatSpan, ast::Pat,); impl LazyPatSpan { - pub fn new(pat: PatId, body: Body) -> Self { + pub fn new(body: Body, pat: PatId) -> Self { let root = PatRoot { pat, body }; Self(SpanTransitionChain::new(root)) } @@ -22,6 +22,10 @@ impl LazyPatSpan { LazyPathPatSpan(self.0) } + pub fn into_lit_pat(self) -> LazyLitPatSpan { + LazyLitPatSpan(self.0) + } + pub fn into_path_tuple_pat(self) -> LazyPathPatSpan { LazyPathPatSpan(self.0) } @@ -31,6 +35,14 @@ impl LazyPatSpan { } } +define_lazy_span_node!( + LazyLitPatSpan, + ast::LitPat, + @node { + (lit, lit, LazyLitSpan), + } +); + define_lazy_span_node!( LazyPathPatSpan, ast::PathPat, @@ -52,7 +64,7 @@ define_lazy_span_node!( ast::RecordPat, @node { (path, path, LazyPathSpan), - (field, fields, LazyRecordPatFieldListSpan), + (fields, fields, LazyRecordPatFieldListSpan), } ); @@ -60,7 +72,7 @@ define_lazy_span_node!( LazyRecordPatFieldListSpan, ast::RecordPatFieldList, @idx { - (field, LazyRecordPatSpan), + (field, LazyRecordPatFieldSpan), } ); @@ -75,7 +87,7 @@ define_lazy_span_node!( #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub(crate) struct PatRoot { pat: PatId, - body: Body, + pub(crate) body: Body, } impl ChainInitiator for PatRoot { diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs index b3053222b5..ea9d134948 100644 --- a/crates/hir/src/span/path.rs +++ b/crates/hir/src/span/path.rs @@ -1,6 +1,6 @@ use parser::ast; -use super::define_lazy_span_node; +use 
super::{define_lazy_span_node, LazySpanAtom}; define_lazy_span_node!( LazyPathSpan, @@ -11,3 +11,8 @@ define_lazy_span_node!( ); define_lazy_span_node!(LazyPathSegmentSpan); +impl LazyPathSegmentSpan { + pub fn into_atom(self) -> LazySpanAtom { + LazySpanAtom(self.0) + } +} diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index e36bff1c8a..9c20c3cc3a 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::{ hir_def::{Body, StmtId}, - span::types::LazyTypeSpan, + span::types::LazyTySpan, SpannedHirDb, }; @@ -13,7 +13,7 @@ use super::{ define_lazy_span_node!(LazyStmtSpan, ast::Stmt,); impl LazyStmtSpan { - pub fn new(stmt: StmtId, body: Body) -> Self { + pub fn new(body: Body, stmt: StmtId) -> Self { let root = StmtRoot { stmt, body }; Self(SpanTransitionChain::new(root)) } @@ -27,14 +27,14 @@ define_lazy_span_node!( LazyLetStmtSpan, ast::LetStmt, @node { - (ty, type_annotation, LazyTypeSpan), + (ty, type_annotation, LazyTySpan), } ); #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub(crate) struct StmtRoot { stmt: StmtId, - body: Body, + pub(crate) body: Body, } impl ChainInitiator for StmtRoot { diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 09496166f1..97667b3029 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -43,7 +43,7 @@ pub(crate) enum LazyArg { #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub(crate) struct SpanTransitionChain { - pub(super) root: ChainRoot, + pub(crate) root: ChainRoot, pub(super) chain: Vec, } diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs index 3697619e0b..721212a5e3 100644 --- a/crates/hir/src/span/types.rs +++ b/crates/hir/src/span/types.rs @@ -4,8 +4,8 @@ use crate::span::{item::LazyBodySpan, params::LazyGenericArgListSpan, path::Lazy use super::define_lazy_span_node; -define_lazy_span_node!(LazyTypeSpan); -impl LazyTypeSpan { +define_lazy_span_node!(LazyTySpan); +impl LazyTySpan { /// Convert this [`LazyTypeSpan`] into a [`LazyPathTypeSpan`]. 
/// /// If the type that is pointed to by this is not a path type, the result @@ -46,7 +46,7 @@ define_lazy_span_node!( (star, star), } @node { - (ty, inner, LazyTypeSpan), + (ty, inner, LazyTySpan), } ); @@ -68,7 +68,7 @@ define_lazy_span_node!( (r_paren, r_paren), } @idx { - (elem_ty, LazyTypeSpan), + (elem_ty, LazyTySpan), } ); @@ -80,7 +80,7 @@ define_lazy_span_node!( (r_bracket, r_bracket), } @node { - (elem, elem_ty, LazyTypeSpan), + (elem, elem_ty, LazyTySpan), (len, len, LazyBodySpan), } ); diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs index 93861d4ef8..40e21a6543 100644 --- a/crates/hir/src/span/use_tree.rs +++ b/crates/hir/src/span/use_tree.rs @@ -46,6 +46,11 @@ impl LazyUsePathSpan { } define_lazy_span_node!(LazyUsePathSegmentSpan); +impl LazyUsePathSegmentSpan { + pub fn into_atom(self) -> LazySpanAtom { + LazySpanAtom(self.0) + } +} define_lazy_span_node!(LazyUseAliasSpan, ast::UseAlias,); diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 7328a42bfc..b396d40a77 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1,60 +1,1819 @@ -#![allow(unused)] use std::{marker::PhantomData, mem}; use crate::{ hir_def::{ - Body, Const, Contract, Enum, Func, Impl, ImplTrait, ItemKind, Mod, Struct, TopLevelMod, - Trait, TypeAlias, Use, + attr, Body, CallArg, Const, Contract, Enum, Expr, ExprId, Field, FieldDef, FieldDefListId, + FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, FuncParamName, GenericArg, + GenericArgListId, GenericParam, GenericParamListId, IdentId, Impl, ImplTrait, ItemKind, + LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, StmtId, Struct, TopLevelMod, + Trait, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, UsePathId, UsePathSegment, + VariantDef, VariantDefListId, WhereClauseId, WherePredicate, }, span::{ + attr::{LazyAttrListSpan, LazyAttrSpan}, + expr::{ + LazyCallArgListSpan, LazyCallArgSpan, LazyExprSpan, LazyFieldListSpan, LazyFieldSpan, + LazyMatchArmSpan, + }, item::{ - LazyBodySpan, LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFuncSpan, - LazyImplSpan, LazyImplTraitSpan, LazyItemSpan, LazyModSpan, LazyStructSpan, - LazyTopModSpan, LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, + LazyBodySpan, LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFieldDefListSpan, + LazyFieldDefSpan, LazyFuncSpan, LazyImplSpan, LazyImplTraitSpan, LazyItemSpan, + LazyModSpan, LazyStructSpan, LazyTopModSpan, LazyTraitSpan, LazyTypeAliasSpan, + LazyUseSpan, LazyVariantDefListSpan, LazyVariantDefSpan, + }, + params::{ + LazyFuncParamListSpan, LazyFuncParamSpan, LazyGenericArgListSpan, LazyGenericArgSpan, + LazyGenericParamListSpan, LazyGenericParamSpan, LazyTypeBoundListSpan, + LazyTypeBoundSpan, LazyWhereClauseSpan, LazyWherePredicateSpan, + }, + pat::LazyPatSpan, + path::LazyPathSpan, + stmt::LazyStmtSpan, + transition::ChainRoot, + types::LazyTySpan, + use_tree::LazyUsePathSpan, + DynLazySpan, LazyLitSpan, LazySpan, LazySpanAtom, SpanDowncast, + }, + HirDb, +}; + +/// A visitor for traversing the HIR. 
+pub trait Visitor { + fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'_, LazyItemSpan>, item: ItemKind) { + walk_item(self, ctxt, item) + } + + fn visit_top_mod(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTopModSpan>, top_mod: TopLevelMod) { + walk_top_mod(self, ctxt, top_mod) + } + + fn visit_mod(&mut self, ctxt: &mut VisitorCtxt<'_, LazyModSpan>, module: Mod) { + walk_mod(self, ctxt, module) + } + + fn visit_func(&mut self, ctxt: &mut VisitorCtxt<'_, LazyFuncSpan>, func: Func) { + walk_func(self, ctxt, func) + } + + fn visit_struct(&mut self, ctxt: &mut VisitorCtxt<'_, LazyStructSpan>, struct_: Struct) { + walk_struct(self, ctxt, struct_) + } + + fn visit_contract(&mut self, ctxt: &mut VisitorCtxt<'_, LazyContractSpan>, contract: Contract) { + walk_contract(self, ctxt, contract) + } + + fn visit_enum(&mut self, ctxt: &mut VisitorCtxt<'_, LazyEnumSpan>, enum_: Enum) { + walk_enum(self, ctxt, enum_) + } + + fn visit_type_alias( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyTypeAliasSpan>, + alias: TypeAlias, + ) { + walk_type_alias(self, ctxt, alias) + } + + fn visit_impl(&mut self, ctxt: &mut VisitorCtxt<'_, LazyImplSpan>, impl_: Impl) { + walk_impl(self, ctxt, impl_) + } + + fn visit_trait(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTraitSpan>, trait_: Trait) { + walk_trait(self, ctxt, trait_) + } + + fn visit_impl_trait( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyImplTraitSpan>, + impl_trait: ImplTrait, + ) { + walk_impl_trait(self, ctxt, impl_trait) + } + + fn visit_const(&mut self, ctxt: &mut VisitorCtxt<'_, LazyConstSpan>, constant: Const) { + walk_const(self, ctxt, constant) + } + + fn visit_use(&mut self, ctxt: &mut VisitorCtxt<'_, LazyUseSpan>, use_: Use) { + walk_use(self, ctxt, use_) + } + + fn visit_body(&mut self, ctxt: &mut VisitorCtxt<'_, LazyBodySpan>, body: Body) { + walk_body(self, ctxt, body) + } + + fn visit_attribute_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyAttrListSpan>, + attrs: AttrListId, + ) { + walk_attributes(self, ctxt, attrs); + } + + fn visit_attribute(&mut self, ctxt: &mut VisitorCtxt<'_, LazyAttrSpan>, attr: &Attr) { + walk_attribute(self, ctxt, attr); + } + + fn visit_generic_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyGenericParamListSpan>, + params: GenericParamListId, + ) { + walk_generic_param_list(self, ctxt, params); + } + + fn visit_generic_param( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyGenericParamSpan>, + param: &GenericParam, + ) { + walk_generic_param(self, ctxt, param); + } + + fn visit_generic_arg_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyGenericArgListSpan>, + args: GenericArgListId, + ) { + walk_generic_arg_list(self, ctxt, args); + } + + fn visit_generic_arg( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyGenericArgSpan>, + arg: &GenericArg, + ) { + walk_generic_arg(self, ctxt, arg); + } + + fn visit_call_arg_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyCallArgListSpan>, + args: &[CallArg], + ) { + walk_call_arg_list(self, ctxt, args); + } + + fn visit_call_arg(&mut self, ctxt: &mut VisitorCtxt<'_, LazyCallArgSpan>, arg: CallArg) { + walk_call_arg(self, ctxt, arg); + } + + fn visit_type_bound_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyTypeBoundListSpan>, + bounds: &[TypeBound], + ) { + walk_type_bound_list(self, ctxt, bounds); + } + + fn visit_type_bound( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyTypeBoundSpan>, + bound: &TypeBound, + ) { + walk_type_bound(self, ctxt, bound); + } + + fn visit_where_clause( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyWhereClauseSpan>, + 
where_clause: WhereClauseId, + ) { + walk_where_clause(self, ctxt, where_clause); + } + + fn visit_where_predicate( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyWherePredicateSpan>, + where_predicate: &WherePredicate, + ) { + walk_where_predicate(self, ctxt, where_predicate); + } + + fn visit_func_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyFuncParamListSpan>, + params: FuncParamListId, + ) { + walk_func_param_list(self, ctxt, params); + } + + fn visit_func_param( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyFuncParamSpan>, + param: &FuncParam, + ) { + walk_func_param(self, ctxt, param); + } + + fn visit_field_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyFieldListSpan>, + fields: &[Field], + ) { + walk_field_list(self, ctxt, fields); + } + + fn visit_field(&mut self, ctxt: &mut VisitorCtxt<'_, LazyFieldSpan>, field: Field) { + walk_field(self, ctxt, field); + } + + fn visit_field_def_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyFieldDefListSpan>, + fields: FieldDefListId, + ) { + walk_field_def_list(self, ctxt, fields); + } + + fn visit_field_def(&mut self, ctxt: &mut VisitorCtxt<'_, LazyFieldDefSpan>, field: &FieldDef) { + walk_field_def(self, ctxt, field); + } + + fn visit_variant_def_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefListSpan>, + variants: VariantDefListId, + ) { + walk_variant_def_list(self, ctxt, variants); + } + + fn visit_variant_def( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefSpan>, + variant: &VariantDef, + ) { + walk_variant_def(self, ctxt, variant) + } + + fn visit_stmt(&mut self, ctxt: &mut VisitorCtxt<'_, LazyStmtSpan>, stmt: &Stmt) { + walk_stmt(self, ctxt, stmt) + } + + fn visit_expr(&mut self, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) { + walk_expr(self, ctxt, expr) + } + + fn visit_arm(&mut self, ctxt: &mut VisitorCtxt<'_, LazyMatchArmSpan>, arm: &MatchArm) { + walk_arm(self, ctxt, arm) + } + + fn visit_pat(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) { + walk_pat(self, ctxt, pat) + } + + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { + walk_path(self, ctxt, path) + } + + fn visit_use_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyUsePathSpan>, use_path: UsePathId) { + walk_use_path(self, ctxt, use_path) + } + + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, ty: TypeId) { + walk_ty(self, ctxt, ty) + } + + #[allow(unused_variables)] + fn visit_lit(&mut self, ctxt: &mut VisitorCtxt<'_, LazyLitSpan>, lit: LitKind) {} + + #[allow(unused_variables)] + fn visit_ident(&mut self, ctxt: &mut VisitorCtxt<'_, LazySpanAtom>, ident: IdentId) {} +} + +pub fn walk_item(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyItemSpan>, item: ItemKind) +where + V: Visitor + ?Sized, +{ + match item { + ItemKind::TopMod(top_mod) => { + let mut new_ctxt = VisitorCtxt::with_top_mod(ctxt.db, top_mod); + visitor.visit_top_mod(&mut new_ctxt, top_mod); + } + ItemKind::Mod(mod_) => { + let mut new_ctxt = VisitorCtxt::with_mod(ctxt.db, mod_); + visitor.visit_mod(&mut new_ctxt, mod_) + } + ItemKind::Func(func) => { + let mut new_ctxt = VisitorCtxt::with_func(ctxt.db, func); + visitor.visit_func(&mut new_ctxt, func) + } + ItemKind::Struct(struct_) => { + let mut new_ctxt = VisitorCtxt::with_struct(ctxt.db, struct_); + visitor.visit_struct(&mut new_ctxt, struct_) + } + ItemKind::Contract(contract) => { + let mut new_ctxt = VisitorCtxt::with_contract(ctxt.db, contract); + visitor.visit_contract(&mut new_ctxt, contract) + } + ItemKind::Enum(enum_) => { + let 
mut new_ctxt = VisitorCtxt::with_enum(ctxt.db, enum_); + visitor.visit_enum(&mut new_ctxt, enum_) + } + ItemKind::TypeAlias(alias) => { + let mut new_ctxt = VisitorCtxt::with_type_alias(ctxt.db, alias); + visitor.visit_type_alias(&mut new_ctxt, alias) + } + ItemKind::Impl(impl_) => { + let mut new_ctxt = VisitorCtxt::with_impl(ctxt.db, impl_); + visitor.visit_impl(&mut new_ctxt, impl_) + } + ItemKind::Trait(trait_) => { + let mut new_ctxt = VisitorCtxt::with_trait(ctxt.db, trait_); + visitor.visit_trait(&mut new_ctxt, trait_) + } + ItemKind::ImplTrait(impl_trait) => { + let mut new_ctxt = VisitorCtxt::with_impl_trait(ctxt.db, impl_trait); + visitor.visit_impl_trait(&mut new_ctxt, impl_trait) + } + ItemKind::Const(const_) => { + let mut new_ctxt = VisitorCtxt::with_const(ctxt.db, const_); + visitor.visit_const(&mut new_ctxt, const_) + } + ItemKind::Use(use_) => { + let mut new_ctxt = VisitorCtxt::with_use(ctxt.db, use_); + visitor.visit_use(&mut new_ctxt, use_) + } + ItemKind::Body(body) => { + let mut new_ctxt = VisitorCtxt::with_body(ctxt.db, body); + visitor.visit_body(&mut new_ctxt, body) + } + }; +} + +pub fn walk_top_mod( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyTopModSpan>, + top_mod: TopLevelMod, +) where + V: Visitor + ?Sized, +{ + for child in top_mod.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, child), child); + } +} + +pub fn walk_mod(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyModSpan>, mod_: Mod) +where + V: Visitor + ?Sized, +{ + if let Some(name) = mod_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + }; + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = mod_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + for child in mod_.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, child), child); + } +} + +pub fn walk_func(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyFuncSpan>, func: Func) +where + V: Visitor + ?Sized, +{ + if let Some(name) = func.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + }; + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = func.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = func.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = func.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + if let Some(id) = func.params(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.params_moved(), + |ctxt| { + visitor.visit_func_param_list(ctxt, id); + }, + ) + } + + if let Some(ty) = func.ret_ty(ctxt.db) { + ctxt.with_new_ctxt( + |span| span.ret_ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + if let Some(body) = func.body(ctxt.db) { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } +} + +pub fn walk_struct(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyStructSpan>, struct_: Struct) +where + V: Visitor + ?Sized, +{ + if let Some(id) = struct_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + 
|ctxt| { + let id = struct_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = struct_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = struct_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + let id = struct_.fields(ctxt.db); + visitor.visit_field_def_list(ctxt, id); + }, + ); +} + +pub fn walk_contract( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyContractSpan>, + contract: Contract, +) where + V: Visitor + ?Sized, +{ + if let Some(id) = contract.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = contract.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + let id = contract.fields(ctxt.db); + visitor.visit_field_def_list(ctxt, id); + }, + ); +} + +pub fn walk_enum(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyEnumSpan>, enum_: Enum) +where + V: Visitor + ?Sized, +{ + if let Some(id) = enum_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = enum_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = enum_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = enum_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.variants_moved(), + |ctxt| { + let id = enum_.variants(ctxt.db); + visitor.visit_variant_def_list(ctxt, id); + }, + ); +} + +pub fn walk_type_alias( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyTypeAliasSpan>, + alias: TypeAlias, +) where + V: Visitor + ?Sized, +{ + if let Some(id) = alias.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.alias_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = alias.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = alias.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = alias.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + if let Some(ty) = alias.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } +} + +pub fn walk_impl(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyImplSpan>, impl_: Impl) +where + V: Visitor + ?Sized, +{ + if let Some(ty) = impl_.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.target_ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = impl_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); 
+ + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = impl_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = impl_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + for item in impl_.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, item), item); + } +} + +pub fn walk_trait(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyTraitSpan>, trait_: Trait) +where + V: Visitor + ?Sized, +{ + if let Some(name) = trait_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = trait_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = trait_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = trait_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + for item in trait_.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, item), item); + } +} + +pub fn walk_impl_trait( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyImplTraitSpan>, + impl_trait: ImplTrait, +) where + V: Visitor + ?Sized, +{ + if let Some(trait_ref) = impl_trait.trait_ref(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.trait_ref_moved(), + |ctxt| { + if let Some(path) = trait_ref.path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + }; + + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| { + visitor.visit_generic_arg_list(ctxt, trait_ref.generic_args); + }, + ); + }, + ) + } + + if let Some(ty) = impl_trait.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = impl_trait.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = impl_trait.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = impl_trait.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); +} + +pub fn walk_const(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyConstSpan>, const_: Const) +where + V: Visitor + ?Sized, +{ + if let Some(name) = const_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + if let Some(body) = const_.body(ctxt.db).to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } +} + +pub fn walk_use(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyUseSpan>, use_: Use) +where + V: Visitor + ?Sized, +{ + if let Some(use_path) = use_.path(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_use_path(ctxt, use_path); + }, + ) + } + + if let Some(Partial::Present(UseAlias::Ident(ident))) = use_.alias(ctxt.db) { + ctxt.with_new_ctxt( + |span| span.alias_moved().name_moved(), + |ctxt| { + 
visitor.visit_ident(ctxt, ident); + }, + ) + } +} + +pub fn walk_body(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyBodySpan>, body: Body) +where + V: Visitor + ?Sized, +{ + for stmt_id in body.stmts(ctxt.db).keys() { + visit_node_in_body!(visitor, ctxt, &stmt_id, stmt); + } +} + +pub fn walk_stmt(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyStmtSpan>, stmt: &Stmt) +where + V: Visitor + ?Sized, +{ + match stmt { + Stmt::Let(pat_id, ty, expr_id) => { + visit_node_in_body!(visitor, ctxt, pat_id, pat); + + if let Some(ty) = ty { + ctxt.with_new_ctxt( + |span| span.into_let_stmt().ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, *ty); + }, + ) + }; + + if let Some(expr_id) = expr_id { + visit_node_in_body!(visitor, ctxt, expr_id, expr); + } + } + + Stmt::Assign(pat_id, expr_id) => { + visit_node_in_body!(visitor, ctxt, pat_id, pat); + visit_node_in_body!(visitor, ctxt, expr_id, expr); + } + + Stmt::For(pat_id, cond_id, for_body_id) => { + visit_node_in_body!(visitor, ctxt, pat_id, pat); + visit_node_in_body!(visitor, ctxt, cond_id, expr); + visit_node_in_body!(visitor, ctxt, for_body_id, expr); + } + + Stmt::While(cond_id, while_body_id) => { + visit_node_in_body!(visitor, ctxt, cond_id, expr); + visit_node_in_body!(visitor, ctxt, while_body_id, expr); + } + + Stmt::Return(Some(expr_id)) | Stmt::Expr(expr_id) => { + visit_node_in_body!(visitor, ctxt, expr_id, expr); + } + + Stmt::Return(None) | Stmt::Continue | Stmt::Break => {} + } +} + +pub fn walk_expr(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) +where + V: Visitor + ?Sized, +{ + match expr { + Expr::Lit(lit) => ctxt.with_new_ctxt( + |span| span.into_lit_expr().lit_moved(), + |ctxt| { + visitor.visit_lit(ctxt, *lit); + }, + ), + + Expr::Block(stmts) => { + for stmt_id in stmts { + visit_node_in_body!(visitor, ctxt, stmt_id, stmt); + } + } + + Expr::Bin(lhs_id, rhs_id, _) => { + visit_node_in_body!(visitor, ctxt, lhs_id, expr); + visit_node_in_body!(visitor, ctxt, rhs_id, expr); + } + + Expr::Un(expr_id, _) => { + visit_node_in_body!(visitor, ctxt, expr_id, expr); + } + + Expr::Call(callee_id, generic_args, call_args) => { + visit_node_in_body!(visitor, ctxt, callee_id, expr); + ctxt.with_new_ctxt( + |span| span.into_call_expr(), + |ctxt| { + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| visitor.visit_generic_arg_list(ctxt, *generic_args), + ); + + ctxt.with_new_ctxt( + |span| span.args_moved(), + |ctxt| { + visitor.visit_call_arg_list(ctxt, call_args); + }, + ); + }, + ); + } + + Expr::MethodCall(receiver_id, method_name, generic_args, call_args) => { + visit_node_in_body!(visitor, ctxt, receiver_id, expr); + + ctxt.with_new_ctxt( + |span| span.into_method_call_expr(), + |ctxt| { + if let Some(method_name) = method_name.to_opt() { + ctxt.with_new_ctxt( + |span| span.method_name_moved(), + |ctxt| visitor.visit_ident(ctxt, method_name), + ); + } + + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| visitor.visit_generic_arg_list(ctxt, *generic_args), + ); + + ctxt.with_new_ctxt( + |span| span.args_moved(), + |ctxt| { + visitor.visit_call_arg_list(ctxt, call_args); + }, + ); + }, + ); + } + + Expr::Path(path) => { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_path_expr().path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ); + } + } + + Expr::RecordInit(path, fields) => { + ctxt.with_new_ctxt( + |span| span.into_record_init_expr(), + |ctxt| { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + 
|ctxt| { + visitor.visit_path(ctxt, path); + }, + ); + } + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + visitor.visit_field_list(ctxt, fields); + }, + ); + }, + ); + } + + Expr::Field(receiver_id, field_name) => { + visit_node_in_body!(visitor, ctxt, receiver_id, expr); + + match field_name { + Partial::Present(FieldIndex::Ident(ident)) => { + ctxt.with_new_ctxt( + |span| span.into_field_expr().accessor_moved(), + |ctxt| visitor.visit_ident(ctxt, *ident), + ); + } + + Partial::Present(FieldIndex::Index(index)) => { + ctxt.with_new_ctxt( + |span| span.into_field_expr().accessor_moved().into_lit_span(), + |ctxt| visitor.visit_lit(ctxt, (*index).into()), + ); + } + + Partial::Absent => {} + } + } + + Expr::Tuple(elems) => { + for elem_id in elems { + visit_node_in_body!(visitor, ctxt, elem_id, expr); + } + } + + Expr::Index(lhs_id, rhs_id) => { + visit_node_in_body!(visitor, ctxt, lhs_id, expr); + visit_node_in_body!(visitor, ctxt, rhs_id, expr); + } + + Expr::Array(elems) => { + for elem_id in elems { + visit_node_in_body!(visitor, ctxt, elem_id, expr); + } + } + + Expr::ArrayRep(val, rep) => { + visit_node_in_body!(visitor, ctxt, val, expr); + if let Some(body) = rep.to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } + } + + Expr::If(cond, then, else_) => { + visit_node_in_body!(visitor, ctxt, cond, expr); + visit_node_in_body!(visitor, ctxt, then, expr); + if let Some(else_) = else_ { + visit_node_in_body!(visitor, ctxt, else_, expr); + } + } + + Expr::Match(scrutinee, arms) => { + visit_node_in_body!(visitor, ctxt, scrutinee, expr); + + if let Partial::Present(arms) = arms { + ctxt.with_new_ctxt( + |span| span.into_match_expr().arms_moved(), + |ctxt| { + for (i, arm) in arms.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.arm_moved(i), + |ctxt| { + visitor.visit_arm(ctxt, arm); + }, + ); + } + }, + ); + } + } + } +} + +pub fn walk_arm(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyMatchArmSpan>, arm: &MatchArm) +where + V: Visitor + ?Sized, +{ + visit_node_in_body!(visitor, ctxt, &arm.pat, pat); + visit_node_in_body!(visitor, ctxt, &arm.body, expr); +} + +pub fn walk_pat(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) +where + V: Visitor + ?Sized, +{ + match pat { + Pat::Lit(lit) => { + if let Some(lit) = lit.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_lit_pat().lit_moved(), + |ctxt| { + visitor.visit_lit(ctxt, lit); + }, + ) + }; + } + + Pat::Tuple(elems) => { + for elem in elems { + visit_node_in_body!(visitor, ctxt, elem, pat); + } + } + + Pat::Path(path) => { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_path_pat().path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + }; + } + + Pat::PathTuple(path, elems) => { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_path_pat().path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + }; + + for elem in elems { + visit_node_in_body!(visitor, ctxt, elem, pat); + } + } + + Pat::Record(path, fields) => ctxt.with_new_ctxt( + |span| span.into_record_pat(), + |ctxt| { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ); + } + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + for (i, field) in fields.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.field_moved(i), + |ctxt| { + if let Some(label) = field.label.to_opt() { + ctxt.with_new_ctxt( + |span| 
span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, label); + }, + ); + } + + visit_node_in_body!(visitor, ctxt, &field.pat, pat); + }, + ); + } + }, + ); + }, + ), + + Pat::Or(lhs, rhs) => { + visit_node_in_body!(visitor, ctxt, lhs, pat); + visit_node_in_body!(visitor, ctxt, rhs, pat); + } + + Pat::WildCard | Pat::Rest => {} + } +} + +pub fn walk_attributes( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyAttrListSpan>, + attr: AttrListId, +) where + V: Visitor + ?Sized, +{ + for (idx, attr) in attr.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.attr_moved(idx), + |ctxt| { + visitor.visit_attribute(ctxt, attr); + }, + ) + } +} + +pub fn walk_attribute(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyAttrSpan>, attr: &Attr) +where + V: Visitor + ?Sized, +{ + match attr { + Attr::Normal(normal_attr) => { + ctxt.with_new_ctxt( + |span| span.into_normal_attr(), + |ctxt| { + if let Some(ident) = normal_attr.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, ident); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.args_moved(), + |ctxt| { + for (i, arg) in normal_attr.args.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.arg_moved(i), + |ctxt| { + if let Some(key) = arg.key.to_opt() { + ctxt.with_new_ctxt( + |span| span.key_moved(), + |ctxt| { + visitor.visit_ident(ctxt, key); + }, + ); + } + if let Some(value) = arg.value.to_opt() { + ctxt.with_new_ctxt( + |span| span.value_moved(), + |ctxt| { + visitor.visit_ident(ctxt, value); + }, + ); + } + }, + ); + } + }, + ); + }, + ); + } + + Attr::DocComment(doc_comment) => ctxt.with_new_ctxt( + |span| span.into_doc_comment_attr().doc_moved().into_lit_span(), + |ctxt| { + visitor.visit_lit(ctxt, doc_comment.text.into()); + }, + ), + } +} + +pub fn walk_generic_param_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyGenericParamListSpan>, + params: GenericParamListId, +) where + V: Visitor + ?Sized, +{ + for (i, param) in params.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.param_moved(i), + |ctxt| { + visitor.visit_generic_param(ctxt, param); + }, + ) + } +} + +pub fn walk_generic_param( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyGenericParamSpan>, + param: &GenericParam, +) where + V: Visitor + ?Sized, +{ + match param { + GenericParam::Type(ty_param) => ctxt.with_new_ctxt( + |span| span.into_type_param(), + |ctxt| { + if let Some(name) = ty_param.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ); + } + + ctxt.with_new_ctxt( + |span| span.bounds_moved(), + |ctxt| { + visitor.visit_type_bound_list(ctxt, &ty_param.bounds); + }, + ); + }, + ), + + GenericParam::Const(const_param) => ctxt.with_new_ctxt( + |span| span.into_const_param(), + |ctxt| { + if let Some(name) = const_param.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ); + } + + if let Some(ty) = const_param.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ); + } + }, + ), + } +} + +pub fn walk_generic_arg_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyGenericArgListSpan>, + args: GenericArgListId, +) where + V: Visitor + ?Sized, +{ + for (i, arg) in args.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.arg_moved(i), + |ctxt| { + visitor.visit_generic_arg(ctxt, arg); + }, + ) + } +} + +pub fn walk_generic_arg( + visitor: &mut 
V, + ctxt: &mut VisitorCtxt<'_, LazyGenericArgSpan>, + arg: &GenericArg, +) where + V: Visitor + ?Sized, +{ + match arg { + GenericArg::Type(type_arg) => { + if let Some(ty) = type_arg.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_type_arg().ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + } + + GenericArg::Const(const_arg) => { + if let Some(body) = const_arg.body.to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } + } + } +} + +pub fn walk_call_arg_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyCallArgListSpan>, + args: &[CallArg], +) where + V: Visitor + ?Sized, +{ + for (idx, arg) in args.iter().copied().enumerate() { + ctxt.with_new_ctxt( + |span| span.arg_moved(idx), + |ctxt| { + visitor.visit_call_arg(ctxt, arg); + }, + ) + } +} + +pub fn walk_call_arg(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyCallArgSpan>, arg: CallArg) +where + V: Visitor + ?Sized, +{ + if let Some(label) = arg.label { + ctxt.with_new_ctxt( + |span| span.label_moved(), + |ctxt| visitor.visit_ident(ctxt, label), + ); + } + + visit_node_in_body!(visitor, ctxt, &arg.expr, expr); +} + +pub fn walk_func_param_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyFuncParamListSpan>, + params: FuncParamListId, +) where + V: Visitor + ?Sized, +{ + for (idx, param) in params.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.param_moved(idx), + |ctxt| { + visitor.visit_func_param(ctxt, param); + }, + ) + } +} + +pub fn walk_func_param( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyFuncParamSpan>, + param: &FuncParam, +) where + V: Visitor + ?Sized, +{ + if let Some(FuncParamLabel::Ident(ident)) = param.label { + ctxt.with_new_ctxt( + |span| span.label_moved(), + |ctxt| visitor.visit_ident(ctxt, ident), + ); + } + + if let Some(FuncParamName::Ident(ident)) = param.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| visitor.visit_ident(ctxt, ident), + ); + } + + if let Some(ty) = param.ty.to_opt() { + ctxt.with_new_ctxt(|span| span.ty_moved(), |ctxt| visitor.visit_ty(ctxt, ty)); + } +} + +pub fn walk_field_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyFieldListSpan>, + fields: &[Field], +) where + V: Visitor + ?Sized, +{ + for (idx, field) in fields.iter().copied().enumerate() { + ctxt.with_new_ctxt( + |span| span.field_moved(idx), + |ctxt| { + visitor.visit_field(ctxt, field); + }, + ) + } +} + +pub fn walk_field(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyFieldSpan>, field: Field) +where + V: Visitor + ?Sized, +{ + if let Some(name) = field.label { + ctxt.with_new_ctxt( + |span| span.label_moved(), + |ctxt| visitor.visit_ident(ctxt, name), + ); + } + + visit_node_in_body!(visitor, ctxt, &field.expr, expr); +} + +pub fn walk_field_def_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyFieldDefListSpan>, + fields: FieldDefListId, +) where + V: Visitor + ?Sized, +{ + for (idx, field) in fields.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.field_moved(idx), + |ctxt| { + visitor.visit_field_def(ctxt, field); + }, + ) + } +} + +pub fn walk_field_def( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyFieldDefSpan>, + field: &FieldDef, +) where + V: Visitor + ?Sized, +{ + if let Some(name) = field.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + if let Some(ty) = field.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + 
}, + ) + } +} + +pub fn walk_variant_def_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefListSpan>, + variants: VariantDefListId, +) where + V: Visitor + ?Sized, +{ + for (idx, variant) in variants.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.variant_moved(idx), + |ctxt| { + visitor.visit_variant_def(ctxt, variant); + }, + ) + } +} + +pub fn walk_variant_def( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefSpan>, + variant: &VariantDef, +) where + V: Visitor + ?Sized, +{ + if let Some(name) = variant.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + if let Some(ty) = variant.ty { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } +} + +pub fn walk_path(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) +where + V: Visitor + ?Sized, +{ + for (idx, segment) in path.data(ctxt.db).iter().enumerate() { + if let Some(ident) = segment.to_opt() { + ctxt.with_new_ctxt( + |span| span.segment_moved(idx).into_atom(), + |ctxt| { + visitor.visit_ident(ctxt, ident); + }, + ) + } + } +} + +pub fn walk_use_path( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyUsePathSpan>, + path: UsePathId, +) where + V: Visitor + ?Sized, +{ + for (i, segment) in path.data(ctxt.db).iter().enumerate() { + if let Some(UsePathSegment::Ident(ident)) = segment.to_opt() { + ctxt.with_new_ctxt( + |span| span.segment_moved(i).into_atom(), + |ctxt| { + visitor.visit_ident(ctxt, ident); + }, + ) + } + } +} + +pub fn walk_ty(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, ty: TypeId) +where + V: Visitor + ?Sized, +{ + match ty.data(ctxt.db) { + TypeKind::Ptr(ty) => { + if let Some(ty) = ty.to_opt() { + ctxt.with_new_ctxt( + |ctxt| ctxt.into_ptr_type().ty(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + } + + TypeKind::Path(path, generic_args) => ctxt.with_new_ctxt( + |span| span.into_path_type(), + |ctxt| { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| visitor.visit_path(ctxt, path), + ); + } + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| { + visitor.visit_generic_arg_list(ctxt, generic_args); + }, + ); + }, + ), + + TypeKind::Tuple(elems) => ctxt.with_new_ctxt( + |span| span.into_tuple_type(), + |ctxt| { + for (i, elem) in elems.iter().enumerate() { + let Some(elem) = elem.to_opt() else { + continue; + }; + ctxt.with_new_ctxt( + |span| span.elem_ty_moved(i), + |ctxt| { + visitor.visit_ty(ctxt, elem); + }, + ) + } + }, + ), + + TypeKind::Array(elem, body) => ctxt.with_new_ctxt( + |span| span.into_array_type(), + |ctxt| { + if let Some(elem) = elem.to_opt() { + ctxt.with_new_ctxt( + |span| span.elem_moved(), + |ctxt| { + visitor.visit_ty(ctxt, elem); + }, + ) + } + if let Some(body) = body.to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } + }, + ), + + TypeKind::SelfType => {} + } +} + +pub fn walk_type_bound_list( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyTypeBoundListSpan>, + bounds: &[TypeBound], +) where + V: Visitor + ?Sized, +{ + for (idx, bound) in bounds.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.bound_moved(idx), + |ctxt| { + visitor.visit_type_bound(ctxt, bound); + }, + ) + } +} + +pub fn walk_type_bound( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyTypeBoundSpan>, + bound: &TypeBound, +) where + V: Visitor + ?Sized, +{ + if let Some(path) = 
bound.path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + } + + if let Some(args) = bound.generic_args { + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| { + visitor.visit_generic_arg_list(ctxt, args); + }, + ) + } +} + +pub fn walk_where_clause( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyWhereClauseSpan>, + predicates: WhereClauseId, +) where + V: Visitor + ?Sized, +{ + for (idx, predicate) in predicates.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.predicate_moved(idx), + |ctxt| { + visitor.visit_where_predicate(ctxt, predicate); + }, + ) + } +} + +pub fn walk_where_predicate( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyWherePredicateSpan>, + predicate: &WherePredicate, +) where + V: Visitor + ?Sized, +{ + if let Some(ty) = predicate.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.bounds_moved(), + |ctxt| { + visitor.visit_type_bound_list(ctxt, &predicate.bounds); }, - DynLazySpan, LazySpan, LazySpanAtom, SpanDowncast, - }, - HirDb, -}; + ) +} + +use attr::{Attr, AttrListId}; -pub struct SpanCtxt +pub struct VisitorCtxt<'db, T> where T: LazySpan, { + db: &'db dyn HirDb, span: DynLazySpan, _t: PhantomData, } -impl SpanCtxt +impl<'db, T> VisitorCtxt<'db, T> where T: LazySpan, { - fn cast(self) -> SpanCtxt { - SpanCtxt { - span: self.span, - _t: PhantomData, - } + pub fn current_span(&self) -> Option + where + T: SpanDowncast, + { + let dyn_span: DynLazySpan = self.span.clone(); + T::downcast(dyn_span) } - fn pop_span(&mut self) { - self.span.0.as_mut().map(|chain| chain.pop_transition()); + fn with_new_ctxt(&mut self, f1: F1, f2: F2) + where + T: SpanDowncast, + F1: FnOnce(T) -> U, + F2: FnOnce(&mut VisitorCtxt), + U: LazySpan + SpanDowncast + Into, + { + let mut new_ctxt = self.transition(f1); + f2(&mut new_ctxt); + *self = new_ctxt.pop(); } - pub fn current_span(&self) -> Option + fn transition(&mut self, f: F) -> VisitorCtxt<'db, U> where T: SpanDowncast, + F: FnOnce(T) -> U, + U: LazySpan + SpanDowncast + Into, { - let dyn_span: DynLazySpan = self.span.clone(); - T::downcast(dyn_span) + let dyn_span = mem::replace(&mut self.span, DynLazySpan::invalid()); + let span = T::downcast(dyn_span).unwrap(); + let u = f(span); + + Self { + db: self.db, + span: u.into(), + _t: PhantomData, + } + .cast() + } + + fn pop(mut self) -> VisitorCtxt<'db, U> + where + U: LazySpan, + { + self.span.0.as_mut().unwrap().pop_transition(); + + Self { + db: self.db, + span: self.span, + _t: PhantomData, + } + .cast() + } + + fn cast(self) -> VisitorCtxt<'db, U> { + VisitorCtxt { + db: self.db, + span: self.span, + _t: PhantomData, + } + } + + fn body(&self) -> Body { + match self.span.0.as_ref().unwrap().root { + ChainRoot::Body(body) => body, + ChainRoot::Expr(expr) => expr.body, + ChainRoot::Stmt(stmt) => stmt.body, + ChainRoot::Pat(pat) => pat.body, + _ => panic!(), + } } } macro_rules! 
define_ctxt_ctor { - ($(($hir_ty:ty, $span_ty:ty, $ctor_name:ident),)*) => { - $(impl SpanCtxt<$span_ty> { - pub fn $ctor_name(item: $hir_ty) -> Self { + ($(( + $span_ty:ty, + $ctor:ident($($ctor_name:ident: $ctor_ty:ty),*)),)*) => { + $(impl<'db> VisitorCtxt<'db, $span_ty> { + pub fn $ctor(db: &'db dyn HirDb, $($ctor_name: $ctor_ty,)*) -> Self { Self { - span: item.lazy_span().into(), + db, + span: <$span_ty>::new($($ctor_name),*).into(), _t: PhantomData, } } @@ -63,18 +1822,105 @@ macro_rules! define_ctxt_ctor { } define_ctxt_ctor! { - (ItemKind, LazyItemSpan, with_item), - (TopLevelMod, LazyTopModSpan, with_top_mod), - (Mod, LazyModSpan, with_mod), - (Func, LazyFuncSpan, with_func), - (Struct, LazyStructSpan, with_struct), - (Contract, LazyContractSpan, with_contract), - (Enum, LazyEnumSpan, with_enum), - (TypeAlias, LazyTypeAliasSpan, with_type_alias), - (Impl, LazyImplSpan, with_impl), - (Trait, LazyTraitSpan, with_trait), - (ImplTrait, LazyImplTraitSpan, with_impl_trait), - (Const, LazyConstSpan, with_const), - (Use, LazyUseSpan, with_use), - (Body, LazyBodySpan, with_body), + (LazyItemSpan, with_item(item: ItemKind)), + (LazyTopModSpan, with_top_mod(top_mod: TopLevelMod)), + (LazyModSpan, with_mod(mod_: Mod)), + (LazyFuncSpan, with_func(func: Func)), + (LazyStructSpan, with_struct(struct_: Struct)), + (LazyContractSpan, with_contract(contract: Contract)), + (LazyEnumSpan, with_enum(enum_: Enum)), + (LazyTypeAliasSpan, with_type_alias(type_alias: TypeAlias)), + (LazyImplSpan, with_impl(impl_: Impl)), + (LazyTraitSpan, with_trait(trait_: Trait)), + (LazyImplTraitSpan, with_impl_trait(impl_trait: ImplTrait)), + (LazyConstSpan, with_const(const_: Const)), + (LazyUseSpan, with_use(use_: Use)), + (LazyBodySpan, with_body(body: Body)), + (LazyExprSpan, with_expr(body: Body, expr: ExprId)), + (LazyStmtSpan, with_stmt(body: Body, stmt: StmtId)), + (LazyPatSpan, with_pat(body: Body, pat: PatId)), + +} + +macro_rules! visit_node_in_body { + ($visitor:expr, $ctxt:expr, $id:expr, $inner:ident) => { + if let Partial::Present(data) = $id.data($ctxt.db, $ctxt.body()) { + paste::paste! 
{ + $visitor.[](&mut VisitorCtxt::[]($ctxt.db, $ctxt.body(), *$id), data); + + } + } + } +} +use visit_node_in_body; + +#[cfg(test)] +mod tests { + + use crate::test_db::TestDb; + + use super::*; + struct MyVisitor { + generic_param_list: Option, + attributes: Vec, + lit_ints: Vec, + } + + impl Visitor for MyVisitor { + fn visit_attribute(&mut self, ctxt: &mut VisitorCtxt, _attrs: &Attr) { + self.attributes.push(ctxt.current_span().unwrap()); + } + + fn visit_generic_param_list( + &mut self, + ctxt: &mut VisitorCtxt, + _params: GenericParamListId, + ) { + self.generic_param_list = Some(ctxt.current_span().unwrap()); + } + + fn visit_lit(&mut self, ctxt: &mut VisitorCtxt, lit: LitKind) { + if let LitKind::Int(_) = lit { + self.lit_ints.push(ctxt.current_span().unwrap()); + } + } + } + + #[test] + fn visitor() { + let mut db = TestDb::default(); + let text = r#" + #[attr1] + #[attr2] + fn foo() { + 1 + "foo" + 42 + }"#; + + let func = db.expect_item::(text); + let top_mod = func.top_mod(&db); + + let mut visitor = MyVisitor { + generic_param_list: None, + attributes: Vec::new(), + lit_ints: Vec::new(), + }; + + let mut ctxt = VisitorCtxt::with_func(&db, func); + visitor.visit_func(&mut ctxt, func); + + assert_eq!( + "", + db.text_at(top_mod, &visitor.generic_param_list.unwrap()) + ); + + assert_eq!(visitor.attributes.len(), 2); + assert_eq!("#[attr1]", db.text_at(top_mod, &visitor.attributes[0])); + assert_eq!("#[attr2]", db.text_at(top_mod, &visitor.attributes[1])); + + assert_eq!(visitor.lit_ints.len(), 2); + assert_eq!("1", db.text_at(top_mod, &visitor.lit_ints[0])); + assert_eq!("42", db.text_at(top_mod, &visitor.lit_ints[1])); + } } diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index aeac65be3f..05711f35d6 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -176,7 +176,7 @@ impl RecordInitExpr { } /// Returns the fields of the record init expression. - pub fn fields(&self) -> Option { + pub fn fields(&self) -> Option { support::child(self.syntax()) } } @@ -351,7 +351,7 @@ pub enum ExprKind { ast_node! { /// `{ label1: expr1, expr2 }` - pub struct RecordFieldList, + pub struct FieldList, SK::RecordFieldList, IntoIterator } diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 283633a385..3ff5d7e528 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -160,7 +160,7 @@ impl Enum { } /// Returns the enum's variant def list. - pub fn variants(&self) -> Option { + pub fn variants(&self) -> Option { support::child(self.syntax()) } } @@ -351,17 +351,17 @@ impl RecordFieldDef { } ast_node! { - pub struct EnumVariantDefList, + pub struct VariantDefList, SK::VariantDefList, - IntoIterator + IntoIterator } ast_node! { /// `Foo(i32, u32)` - pub struct EnumVariantDef, + pub struct VariantDef, SK::VariantDef, } -impl EnumVariantDef { +impl VariantDef { /// Returns the name of the variant. 
/// `Foo` in `Foo(i32, u32)` pub fn name(&self) -> Option { From b140aa84481605fe6fa5c78df4bd89f534e067b0 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 5 Jun 2023 01:49:31 -0500 Subject: [PATCH 176/678] language server: diagnostics publishing --- crates/language-server/Cargo.toml | 6 ++ .../editors/vscode/src/extension.ts | 2 +- crates/language-server/src/db.rs | 1 + .../src/handlers/notifications.rs | 52 ++++++++++++ .../language-server/src/handlers/request.rs | 48 +++++++++++ crates/language-server/src/main.rs | 6 +- crates/language-server/src/server.rs | 10 ++- crates/language-server/src/state.rs | 85 +++++++------------ crates/language-server/src/util.rs | 65 ++++++++++++++ 9 files changed, 214 insertions(+), 61 deletions(-) create mode 100644 crates/language-server/src/db.rs create mode 100644 crates/language-server/src/handlers/notifications.rs create mode 100644 crates/language-server/src/util.rs diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index d6a8df34da..367750f31a 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -10,6 +10,8 @@ description = "An LSP language server for Fe lang" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +fe-analyzer = {path = "../analyzer", version = "^0.22.0"} +fe-common = {path = "../common", version = "^0.22.0"} anyhow = "1.0.71" clap = "4.2.7" crossbeam-channel = "0.5.8" @@ -17,3 +19,7 @@ lsp-server = "0.7.0" lsp-types = "0.94.0" serde = "1.0.162" serde_json = "1.0.96" +# salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } +salsa = "0.16.1" +indexmap = "1.6.2" + diff --git a/crates/language-server/editors/vscode/src/extension.ts b/crates/language-server/editors/vscode/src/extension.ts index f4ac081950..0dd641855c 100644 --- a/crates/language-server/editors/vscode/src/extension.ts +++ b/crates/language-server/editors/vscode/src/extension.ts @@ -18,7 +18,7 @@ let client: LanguageClient; export async function activate( context: vscode.ExtensionContext ): Promise { - // todo: bundle binary with extension + // todo: bundle binary with extension (also: make this configurable?) 
const serverPath = join(__dirname, '..', '..', '..', '..', '..', 'target', 'debug', 'fe-language-server'); const serverExecutable: Executable = { diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs new file mode 100644 index 0000000000..934a11a9cf --- /dev/null +++ b/crates/language-server/src/db.rs @@ -0,0 +1 @@ +// to-do: implement a salsa database for the language server \ No newline at end of file diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs new file mode 100644 index 0000000000..d7c83e8ddb --- /dev/null +++ b/crates/language-server/src/handlers/notifications.rs @@ -0,0 +1,52 @@ +use anyhow::Result; +use fe_analyzer::{namespace::items::ModuleId, TestDb}; +use serde::Deserialize; + +use crate::{state::ServerState, util::diag_to_lsp}; + +fn string_diagnostics(path: &str, src: &str) -> Vec { + let mut db = TestDb::default(); + let module = ModuleId::new_standalone(&mut db, path, src); + + module.diagnostics(&db) +} + +// pub(crate) fn handle_document_did_change(state: &mut ServerState, req: lsp_server::Request) -> Result<(), Error> { +// let params = lsp_types::DidChangeTextDocumentParams::deserialize(req.params)?; +// let text = params.text_document.text; + +// } +pub(crate) fn handle_document_did_open( + state: &mut ServerState, + note: lsp_server::Notification, +) -> Result<(), anyhow::Error> { + let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; + let text = params.text_document.text; + + let diags = string_diagnostics( + params.text_document.uri.to_file_path().unwrap().to_str().unwrap(), + text.as_str(), + ); + + state.log_info(format!("diagnostics: {:?}", diags))?; + + // send diagnostics using `state.send_response` for each diagnostic + + let diagnostics = diags.into_iter().flat_map(|diag| { + diag_to_lsp(diag, text.as_str()).iter().map(|x| x.clone()).collect::>() + }); + + let result = lsp_types::PublishDiagnosticsParams { + uri: params.text_document.uri.clone(), + diagnostics: diagnostics.collect(), + version: None, + }; + let response = lsp_server::Message::Notification(lsp_server::Notification { + method: String::from("textDocument/publishDiagnostics"), + params: serde_json::to_value(result).unwrap(), + }); + + state.sender.send(response)?; + + Ok(()) +} diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index e69de29bb2..6374dd791c 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -0,0 +1,48 @@ +use std::io::BufRead; + +use lsp_server::Response; +use serde::Deserialize; + +use crate::state::ServerState; + +pub(crate) fn handle_hover( + state: &mut ServerState, + req: lsp_server::Request, +) -> Result<(), anyhow::Error> { + // TODO: get more relevant information for the hover + let params = lsp_types::HoverParams::deserialize(req.params)?; + let file = std::fs::File::open( + ¶ms + .text_document_position_params + .text_document + .uri + .path(), + )?; + let reader = std::io::BufReader::new(file); + let line = reader + .lines() + .nth(params.text_document_position_params.position.line as usize) + .unwrap() + .unwrap(); + let result = lsp_types::Hover { + contents: lsp_types::HoverContents::Markup(lsp_types::MarkupContent::from( + lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: format!( + "### Hovering over:\n```{}```\n\n{}", + &line, + serde_json::to_string_pretty(¶ms).unwrap() + ), + }, + )), + 
range: None, + }; + let response_message = Response { + id: req.id, + result: Some(serde_json::to_value(result)?), + error: None, + }; + + state.send_response(response_message)?; + Ok(()) +} diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 3e5fa0d79c..af37ada9c6 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,8 +1,10 @@ mod server; mod state; +mod db; +mod util; mod handlers { - // pub(crate) mod notification; + pub(crate) mod notifications; pub(crate) mod request; } @@ -10,6 +12,4 @@ use server::run_server; fn main() { let _ = run_server(); - // log "hello world" to the console - println!("Hello, world!"); } diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 5777f0f88d..76068732cd 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -6,6 +6,10 @@ use lsp_types::{ServerCapabilities, HoverProviderCapability}; fn server_capabilities() -> ServerCapabilities { ServerCapabilities { hover_provider: Some(HoverProviderCapability::Simple(true)), + // full sync mode for now + text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Kind( + lsp_types::TextDocumentSyncKind::FULL, + )), ..Default::default() } } @@ -13,7 +17,7 @@ fn server_capabilities() -> ServerCapabilities { pub fn run_server() -> Result<()> { let (connection, io_threads) = Connection::stdio(); - let (request_id, initialize_params) = connection.initialize_start()?; + let (request_id, _initialize_params) = connection.initialize_start()?; // todo: actually use initialization params let capabilities = server_capabilities(); @@ -51,8 +55,8 @@ pub fn run_server() -> Result<()> { }) )?; - let result = ServerState::new(connection.sender).run(connection.receiver); + let result = ServerState::new(connection.sender).run(connection.receiver)?; io_threads.join().unwrap(); - result + Ok(result) } diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 826964bd87..64ae0d177f 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,18 +1,20 @@ -use std::io::BufRead; - +use fe_analyzer::db::TestDb; use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; -use lsp_server::{Message, Response}; +use lsp_server::{Message}; use lsp_types::{notification::Notification, request::Request}; -use serde::Deserialize; +// use super::db::LanguageServerDb; + +use crate::handlers::{request::handle_hover, notifications::handle_document_did_open}; pub struct ServerState { - sender: Sender, + pub sender: Sender, + pub analyzer_db: TestDb, } impl ServerState { pub fn new(sender: Sender) -> Self { - ServerState { sender } + ServerState { sender, analyzer_db: TestDb::default() } } pub fn run(&mut self, receiver: Receiver) -> Result<()> { @@ -23,12 +25,7 @@ impl ServerState { } } - self.handle_message(msg)?; - - // debugging spam - // if (std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_secs() % 1) == 0 { - // self.log_info(String::from("hi"))?; - // } + let _ = self.handle_message(msg); } Ok(()) } @@ -40,56 +37,36 @@ impl ServerState { } fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { - // log the message with `self.log_info` - self.log_info(format!("MESSAGE: {:?}", msg))?; - if let lsp_server::Message::Request(req) = msg { - // log the request to the console - - // handle hover request - if req.method == lsp_types::request::HoverRequest::METHOD { - // log 
the hover request to the console - let params = lsp_types::HoverParams::deserialize(req.params)?; - - // open the file and read the line at the given position - let file = std::fs::File::open( - ¶ms - .text_document_position_params - .text_document - .uri - .path(), - )?; - let reader = std::io::BufReader::new(file); - let line = reader - .lines() - .nth(params.text_document_position_params.position.line as usize) - .unwrap() - .unwrap(); + self.log_info(format!("REQUEST: {:?}", req))?; - let result = lsp_types::Hover { - contents: lsp_types::HoverContents::Markup( - lsp_types::MarkupContent::from(lsp_types::MarkupContent { - kind: lsp_types::MarkupKind::Markdown, - value: format!("### Hovering over:\n```{}```\n\n{}", &line, serde_json::to_string_pretty(¶ms).unwrap()), - }), - ), - range: None, - }; - - let response_message = lsp_server::Message::Response(Response { - id: req.id, - result: Some(serde_json::to_value(result)?), - error: None, - }); - - self.sender.send(response_message)?; + match req.method.as_str() { + // TODO: implement actually useful hover handler + // lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, + _ => {} + } + + + } else if let lsp_server::Message::Notification(note) = msg { + // log the notification to the console + self.log_info(format!("NOTIFICATION: {:?}", note))?; + + match note.method.as_str() { + lsp_types::notification::DidOpenTextDocument::METHOD => handle_document_did_open(self, note)?, + lsp_types::notification::DidChangeTextDocument::METHOD => handle_document_did_open(self, note)?, + _ => {} } } Ok(()) } - fn log_info(&mut self, message: String) -> Result<()> { + pub(crate) fn send_response(&mut self, response: lsp_server::Response) -> Result<()> { + self.sender.send(lsp_server::Message::Response(response))?; + Ok(()) + } + + pub(crate) fn log_info(&mut self, message: String) -> Result<()> { self.sender.send(lsp_server::Message::Notification( lsp_server::Notification { method: String::from("window/logMessage"), diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs new file mode 100644 index 0000000000..4e6cc6903c --- /dev/null +++ b/crates/language-server/src/util.rs @@ -0,0 +1,65 @@ +use fe_common::diagnostics::{Severity, Diagnostic}; +use fe_common::Span; +use lsp_types::Position; + +// TODO: these could potentially be moved into the common crate +// for more idiomatic use in the analyzer and the language server + +pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { + // we need to get line and character offsets from the text, + // first we get the line offsets + let line_offsets: Vec = text + .lines() + .scan(0, |state, line| { + let offset = *state; + *state += line.len() + 1; + Some(offset) + }) + .collect(); + + // now we get the line and character offsets + let start_line = line_offsets + .binary_search(&span.start) + .unwrap_or_else(|x| x - 1); + + let end_line = line_offsets + .binary_search(&span.end) + .unwrap_or_else(|x| x - 1); + + let start_character = span.start - line_offsets[start_line]; + let end_character = span.end - line_offsets[end_line]; + + lsp_types::Range { + start: Position::new(start_line as u32, start_character as u32), + end: Position::new(end_line as u32, end_character as u32), + } +} +pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { + match severity { + Severity::Bug => lsp_types::DiagnosticSeverity::ERROR, + Severity::Error => lsp_types::DiagnosticSeverity::ERROR, + Severity::Warning => 
lsp_types::DiagnosticSeverity::WARNING, + Severity::Note => lsp_types::DiagnosticSeverity::HINT, + Severity::Help => lsp_types::DiagnosticSeverity::INFORMATION, + } +} + +pub(crate) fn diag_to_lsp(diag: Diagnostic, text: &str) -> Vec<lsp_types::Diagnostic> { + diag.labels + .into_iter() + .map(|label| { + let range = span_to_range(label.span, text); + lsp_types::Diagnostic { + range, + severity: Some(severity_to_lsp(diag.severity)), + code: None, + source: None, + message: diag.message.clone(), + related_information: None, + tags: None, + code_description: None, + data: None + } + }) + .collect() + } \ No newline at end of file From 4728d0230246eb3650064d77f639e433a79b2fea Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 7 Jun 2023 13:00:40 -0700 Subject: [PATCH 177/678] Improve ambiguous error message --- Cargo.lock | 15 ---- .../src/name_resolution/diagnostics.rs | 20 +++-- .../src/name_resolution/import_resolver.rs | 55 ++++++------ .../src/name_resolution/name_resolver.rs | 90 +++++++++---------- crates/hir-analysis/tests/import.rs | 2 +- 5 files changed, 88 insertions(+), 94 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d5ffe4917..b8e4baa9c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -902,21 +902,6 @@ dependencies = [ "vfs", ] -[[package]] -name = "fe-driver2" -version = "0.20.0-alpha" -dependencies = [ - "derive_more", - "either", - "fe-common2", - "fe-hir", - "fe-hir-analysis", - "itertools", - "rustc-hash", - "salsa-2022", - "smallvec", -] - [[package]] name = "fe-hir" version = "0.22.0" diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index b0786f13e7..47860ea8b1 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -40,8 +40,8 @@ impl ImportError { Self::new(span, ImportErrorKind::Invisible(name, name_span)) } - pub fn ambiguous(span: DynLazySpan, ident: IdentId) -> Self { - Self::new(span, ImportErrorKind::Ambiguous(ident)) + pub fn ambiguous(span: DynLazySpan, ident: IdentId, candidates: Vec<DynLazySpan>) -> Self { + Self::new(span, ImportErrorKind::Ambiguous(ident, candidates)) + } } @@ -76,7 +76,7 @@ pub enum ImportErrorKind { Invisible(IdentId, Option<DynLazySpan>), /// The import path segment is ambiguous. - Ambiguous(IdentId), + Ambiguous(IdentId, Vec<DynLazySpan>), } impl ImportErrorKind { @@ -85,7 +85,7 @@ impl ImportErrorKind { ImportErrorKind::Conflict(_) => 0, ImportErrorKind::NotFound(_) => 1, ImportErrorKind::Invisible(..) => 2, - ImportErrorKind::Ambiguous(_) => 3, + ImportErrorKind::Ambiguous(..)
=> 3, } } @@ -100,7 +100,7 @@ impl ImportErrorKind { ImportErrorKind::Invisible(name, _) => { format!("{} is not visible", name.data(db),) } - ImportErrorKind::Ambiguous(name) => format!("{} is ambiguous", name.data(db)), + ImportErrorKind::Ambiguous(name, _) => format!("{} is ambiguous", name.data(db)), } } @@ -112,7 +112,7 @@ impl ImportErrorKind { conflict_with.resolve(db), )], - ImportErrorKind::NotFound(_) | ImportErrorKind::Ambiguous(_) => vec![], + ImportErrorKind::NotFound(_) => vec![], ImportErrorKind::Invisible(_, span) => span .as_ref() @@ -124,6 +124,14 @@ impl ImportErrorKind { )] }) .unwrap_or_default(), + + ImportErrorKind::Ambiguous(_, candidates) => candidates + .iter() + .enumerate() + .map(|(i, span)| { + SubDiagnostic::new(Severity::Note, format!("candidate #{i}"), span.resolve(db)) + }) + .collect(), } } } diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 35acb74d39..50eaea2fed 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -464,7 +464,7 @@ impl<'db> ImportResolver<'db> { // the use might be resolved to an external ingot. This means there is an // ambiguity between the external ingot and the name // imported by the glob import. - self.register_error(&i_use, NameResolutionError::Ambiguous); + self.register_error(&i_use, NameResolutionError::Ambiguous(vec![])); } } @@ -491,10 +491,7 @@ impl<'db> ImportResolver<'db> { fn register_error(&mut self, i_use: &IntermediateUse, err: NameResolutionError) { match err { - // We treat `Conflict` as the same as `NotFound`. - // NOTE: The conflict error is happen in the `resolve_query` method, this means that the - // name conflict happens in the scope that is being imported. - NameResolutionError::NotFound | NameResolutionError::Conflict => { + NameResolutionError::NotFound => { self.accumulated_errors.push(ImportError::not_found( i_use.current_segment_span(), i_use.current_segment_ident(self.db).unwrap(), @@ -506,10 +503,14 @@ impl<'db> ImportResolver<'db> { // parsing phase. 
} - NameResolutionError::Ambiguous => { + NameResolutionError::Ambiguous(cands) => { self.accumulated_errors.push(ImportError::ambiguous( i_use.current_segment_span(), i_use.current_segment_ident(self.db).unwrap(), + cands + .into_iter() + .filter_map(|name| name.kind.name_span(self.db)) + .collect(), )); } @@ -682,13 +683,7 @@ impl Importer for DefaultImporter { } } -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct ImportedBinding { - pub binding: NameBinding, - pub use_: Use, -} - -pub type NamedImportSet = FxHashMap; +pub type NamedImportSet = FxHashMap; #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct GlobImportSet { @@ -884,22 +879,32 @@ impl IntermediateResolvedImports { .or_default(); match imported_set.entry(imported_name) { - Entry::Occupied(mut e) => match e.get_mut().binding.merge(bind.iter()) { - Some(already_found) => { - return Err(ImportError::conflict( - i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), - already_found.derived_from(db).unwrap(), - )) + Entry::Occupied(mut e) => match e.get_mut().merge(bind.iter()) { + Ok(()) => Ok(()), + Err(NameResolutionError::Ambiguous(cands)) => { + for cand in cands { + match cand.derivation { + NameDerivation::NamedImported(use_) => { + if i_use.use_ == use_ { + continue; + } + + return Err(ImportError::conflict( + i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), + cand.derived_from(db).unwrap(), + )); + } + _ => unreachable!(), + } + } + unreachable!() } - None => Ok(()), + + Err(_) => unreachable!(), }, Entry::Vacant(e) => { - let import_bind = ImportedBinding { - binding: bind, - use_: i_use.use_, - }; - e.insert(import_bind); + e.insert(bind); Ok(()) } } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 6725b920a9..a42a7b341d 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -164,9 +164,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { NameDomain::from_scope(edge.dest), NameDerivation::Def, ); - if binding.push(&res).is_some() { - return Err(NameResolutionError::Ambiguous); - } + self.try_push(&mut binding, &res, query)?; } } @@ -185,7 +183,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { .named_imports(self.db, query.scope) .and_then(|imports| imports.get(&query.name)) { - self.try_merge(&mut binding, &imported.binding, query)?; + self.try_merge(&mut binding, imported, query)?; } // 3. Look for the name in the glob imports. @@ -231,19 +229,25 @@ impl<'db, 'a> NameResolver<'db, 'a> { .iter() .for_each(|(name, root_mod)| { if *name == query.name { - binding.push(&NameRes::new_scope( - ScopeId::root(*root_mod), - NameDomain::Item, - NameDerivation::External, - )); + // We don't care about the result of `push` because we assume ingots are + // guaranteed to be unique. + binding + .push(&NameRes::new_scope( + ScopeId::root(*root_mod), + NameDomain::Item, + NameDerivation::External, + )) + .unwrap(); } }); } // 6. Look for the name in the builtin types. for &prim in PrimTy::all_types() { + // We don't care about the result of `push` because we assume builtin types are + // guaranteed to be unique. 
if query.name == prim.name() { - binding.push(&NameRes::new_prim(prim)); + binding.push(&NameRes::new_prim(prim)).unwrap(); } } @@ -321,12 +325,14 @@ impl<'db, 'a> NameResolver<'db, 'a> { let mut found_domains_after_named = found_domains.clone(); if let Some(named_imports) = self.importer.named_imports(self.db, target) { for (&name, import) in named_imports { - if !is_use_visible(self.db, ref_scope, import.use_) { - continue; - } - let found_domain = found_domains.get(&name).copied().unwrap_or_default(); - for res in import.binding.iter() { + for res in import.iter().filter(|res| { + if let NameDerivation::NamedImported(use_) = res.derivation { + is_use_visible(self.db, ref_scope, use_) + } else { + false + } + }) { if (found_domain & res.domain as u8 != 0) || !found_kinds.insert((name, res.kind)) { @@ -461,16 +467,12 @@ impl<'db, 'a> NameResolver<'db, 'a> { from: &NameBinding, query: NameQuery, ) -> Result<(), NameResolutionError> { - if target + target .merge(from.filter_by_domain(query.directive.domain)) - .is_none() - { - Ok(()) - } else { - let err = NameResolutionError::Ambiguous; - self.cache_store.cache_result(query, Err(err.clone())); - Err(err) - } + .map_err(|err| { + self.cache_store.cache_result(query, Err(err.clone())); + err + }) } fn try_push( @@ -479,13 +481,10 @@ impl<'db, 'a> NameResolver<'db, 'a> { res: &NameRes, query: NameQuery, ) -> Result<(), NameResolutionError> { - if target.push(res).is_none() { - Ok(()) - } else { - let err = NameResolutionError::Ambiguous; + target.push(res).map_err(|err| { self.cache_store.cache_result(query, Err(err.clone())); - Err(err) - } + err + }) } } @@ -635,14 +634,12 @@ impl NameBinding { pub(super) fn merge<'a>( &mut self, resolutions: impl Iterator, - ) -> Option { + ) -> Result<(), NameResolutionError> { for res in resolutions { - if let Some(conflict) = self.push(res) { - return Some(conflict); - } + self.push(res)?; } - None + Ok(()) } pub(super) fn set_derivation(&mut self, derivation: NameDerivation) { @@ -652,30 +649,33 @@ impl NameBinding { } /// Push the `res` into the set. - fn push(&mut self, res: &NameRes) -> Option { + fn push(&mut self, res: &NameRes) -> Result<(), NameResolutionError> { let domain = res.domain; match self.resolutions.entry(domain) { Entry::Occupied(mut e) => { let old_derivation = e.get().derivation.clone(); match res.derivation.cmp(&old_derivation) { - cmp::Ordering::Less => None, + cmp::Ordering::Less => Ok(()), cmp::Ordering::Equal => { if e.get().kind == res.kind { - None + Ok(()) } else { - Some(res.clone()) + Err(NameResolutionError::Ambiguous(vec![ + e.get().clone(), + res.clone(), + ])) } } cmp::Ordering::Greater => { e.insert(res.clone()); - None + Ok(()) } } } Entry::Vacant(e) => { e.insert(res.clone()); - None + Ok(()) } } } @@ -883,9 +883,6 @@ pub enum NameResolutionError { /// The name is not found. NotFound, - /// Multiple candidates are found. - Conflict, - /// The name is invalid in parsing. Basically, no need to report it because /// the error is already emitted from parsing phase. Invalid, @@ -894,17 +891,16 @@ pub enum NameResolutionError { Invisible, /// The name is found, but it's ambiguous. 
- Ambiguous, + Ambiguous(Vec), } impl fmt::Display for NameResolutionError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { NameResolutionError::NotFound => write!(f, "name not found"), - NameResolutionError::Conflict => write!(f, "multiple candidates found"), NameResolutionError::Invalid => write!(f, "invalid name"), NameResolutionError::Invisible => write!(f, "name is not visible"), - NameResolutionError::Ambiguous => write!(f, "name is ambiguous"), + NameResolutionError::Ambiguous(_) => write!(f, "name is ambiguous"), } } } diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs index 04877f3fbb..42c11a04d2 100644 --- a/crates/hir-analysis/tests/import.rs +++ b/crates/hir-analysis/tests/import.rs @@ -38,7 +38,7 @@ fn format_imports( let mut use_res_map: FxHashMap> = FxHashMap::default(); for name_resolved in imports.named_resolved.values().flat_map(|r| r.values()) { - for res in name_resolved.binding.iter() { + for res in name_resolved.iter() { match res.derivation { NameDerivation::NamedImported(use_) => use_res_map .entry(use_) From 5f97a565a545a35a3fe48bb595b24d00bf4db9b7 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 7 Jun 2023 18:32:28 -0700 Subject: [PATCH 178/678] Implement `resolve_segments` method --- .../src/name_resolution/name_resolver.rs | 150 ++++++------------ 1 file changed, 51 insertions(+), 99 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index a42a7b341d..878e75ddca 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -12,7 +12,7 @@ use hir::{ AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, ScopeId, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, VariantEdge, }, - IdentId, ItemKind, Partial, PathId, Use, + IdentId, ItemKind, Partial, Use, }, span::DynLazySpan, }; @@ -86,14 +86,10 @@ impl PathResolutionError { fn new(kind: NameResolutionError, failed_at: usize) -> Self { Self { kind, failed_at } } - - fn invalid(failed_at: usize) -> Self { - Self::new(NameResolutionError::Invalid, failed_at) - } } impl<'db, 'a> NameResolver<'db, 'a> { - /// Resolve the path to a set of possible resolutions. + /// Resolve the path segments to a set of possible resolutions. /// A path can be resolved to multiple resolutions because we have multiple /// name domains. /// @@ -107,35 +103,53 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// pub struct FOO {} /// } /// ``` - pub fn resolve_path( + pub fn resolve_segments( &mut self, - path: PathId, - scope: ScopeId, + segments: Vec>, + mut scope: ScopeId, + directive: QueryDirective, ) -> Result { - let segments = path.data(self.db.as_hir_db()); if segments.is_empty() { - return Err(PathResolutionError::invalid(0)); + return Err(PathResolutionError::new(NameResolutionError::Invalid, 0)); } - // Set pred segment to the current scope. 
- let mut pred = - NameRes::new_scope(scope, NameDomain::from_scope(scope), NameDerivation::Def); + let last_seg_idx = segments.len() - 1; + for (i, seg) in segments[0..last_seg_idx].iter().enumerate() { + let Partial::Present(ident) = seg else { + return Err(PathResolutionError::new(NameResolutionError::Invalid, i)); + }; + let query = NameQuery::new(*ident, scope); + scope = match self.resolve_query(query) { + Ok(resolved) => { + let res = resolved.name_by_domain(NameDomain::Item).unwrap(); + if res.is_type(self.db) { + return Ok(ResolvedPath::Partial { + resolved: res.clone(), + unresolved_from: i + 1, + }); + } else if let Some(scope) = res.scope() { + scope + } else { + return Err(PathResolutionError::new( + NameResolutionError::NotFound, + i + 1, + )); + } + } - let seg_len = segments.len(); - for (i, seg) in segments[0..seg_len - 1].iter().enumerate() { - pred = match self.resolve_segment(pred, *seg, i, false)? { - Either::Left(resolved) => { - return Ok(resolved); + Err(err) => { + return Err(PathResolutionError::new(err, i)); } - Either::Right(resolved) => resolved, - } + }; } - match self.resolve_segment(pred, *segments.last().unwrap(), seg_len - 1, true)? { - Either::Left(resolved) => Ok(resolved), - Either::Right(_) => { - unreachable!() - } + let Partial::Present(ident) = segments[last_seg_idx] else { + return Err(PathResolutionError::new(NameResolutionError::Invalid, last_seg_idx)); + }; + let query = NameQuery::with_directive(ident, scope, directive); + match self.resolve_query(query) { + Ok(resolved) => Ok(ResolvedPath::Full(resolved)), + Err(err) => Err(PathResolutionError::new(err, last_seg_idx)), } } @@ -390,77 +404,6 @@ impl<'db, 'a> NameResolver<'db, 'a> { result } - /// Resolve the `segment`. `pred` is the resolution for the previous - /// segment, and `is_last` indicates the segment is the last segment of the - /// path. - /// - /// If the method returns `Right`, it means the path resolution is work in - /// progress and we need to continue look for the next segment. If the - /// method returns `Left`, that means the resolution for the entire path - /// is done. - /// - /// Even if the `is_last` is `false` the method may return `Left`, this will - /// happen if both 1. and 2. are satisfied: - /// 1. The `pred` is a type. - /// 2. The lookup for the `segment` results in `NotFound`. - /// This indicates we need further resolution for the `segment` in the later - /// trait solving phase. - /// In case the `is_last` is `true`, the function is guaranteed to return - /// `Ok(Left)` or `Error`. - /// - /// - /// We can return an error immediately in case the `is_last` is `false` and - /// multiple resolutions for the `segment` are found. - /// The reasoning is - /// 1. Our language allows only `Item` domain to have associated items. - /// 2. By 1., the middle segments should be resolved to the `Item` - /// domain. Otherwise, the following segment can't be resolved. - /// 3. By 2., if we obtain multiple resolutions from a middle segment, this - /// can be divided into two cases: - /// a. Name conflict occurs. We can immediately return `Conflict` error - /// in this case. - /// b. All resolutions belong to different domains. This - /// means that at least one of the resolutions belongs to non-`Item` - /// domain. This case can be regarded as `NotFound` error because the - /// following segment of the non-`Item` domain resolution can't be - /// resolved. 
- fn resolve_segment( - &mut self, - _pred: NameRes, - _segment: Partial, - _seg_idx: usize, - _is_last: bool, - ) -> Result, PathResolutionError> { - todo!() - // let Partial::Present(seg) = segment else { - // return Err(PathResolutionError::invalid(seg_idx)); - // }; - - // let scope = pred.scope; - // let query = NameQuery::new(seg, scope); - // let resolved_set = match self.resolve_query(query) { - // Ok(resolved) => resolved, - // Err(NameResolutionError::NotFound) if pred.is_type(self.db) => { - // // If the parent scope of the current segment is a type and - // the segment is not // found, then it should be - // resolved in the trait solving phase. return - // Ok(Either::Left(ResolvedPath::partial(pred, seg_idx))); } - // Err(e) => { - // return Err(PathResolutionError::new(e, seg_idx)); - // } - // }; - - // if is_last { - // Ok(Either::Left(ResolvedPath::Full(resolved_set))) - // } else if resolved_set.len() > 1 { - // // Case a. is already handled above. - // // Handles case b. here. - // return Err(PathResolutionError::not_found(seg_idx)); - // } else { - // Ok(Either::Right(resolved_set.into_iter().next().unwrap())) - // } - } - fn try_merge( &mut self, target: &mut NameBinding, @@ -725,6 +668,15 @@ impl NameRes { } } + /// Returns the scope of the name resolution if the name is not a builtin + /// type. + pub fn scope(&self) -> Option { + match self.kind { + NameResKind::Scope(scope) => Some(scope), + NameResKind::Prim(_) => None, + } + } + pub fn is_visible(&self, db: &dyn HirAnalysisDb, from: ScopeId) -> bool { let scope_or_use = match self.derivation { NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { @@ -907,8 +859,8 @@ impl fmt::Display for NameResolutionError { impl std::error::Error for NameResolutionError {} -#[derive(Default)] -struct ResolvedQueryCacheStore { +#[derive(Default, Debug, PartialEq, Eq)] +pub(crate) struct ResolvedQueryCacheStore { cache: FxHashMap>, no_cache: bool, } From ea75630818342eda72eb8fc0bcfacb313f9067e2 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 7 Jun 2023 19:02:50 -0700 Subject: [PATCH 179/678] Avoid unnecessary query resolution if the query is already fully resolved --- .../src/name_resolution/name_resolver.rs | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 878e75ddca..a997694e32 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -167,6 +167,16 @@ impl<'db, 'a> NameResolver<'db, 'a> { // This ordering means that greater one shadows lower ones in the same domain. let mut parent = None; + macro_rules! return_if_filled { + ($self:expr, $binding:expr, $directive:expr) => { + if binding.is_filled($directive) { + let resolved = binding.clone(); + $self.cache_store.cache_result(query, Ok(resolved.clone())); + return Ok(resolved); + } + }; + } + // 1. Look for the name in the current scope. let mut found_scopes = FxHashSet::default(); for edge in query.scope.edges(self.db.as_hir_db()) { @@ -190,6 +200,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { PropagationResult::UnPropagated => {} } } + return_if_filled!(self, binding, query.directive); // 2. Look for the name in the named imports of the current scope. 
if let Some(imported) = self @@ -199,6 +210,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { { self.try_merge(&mut binding, imported, query)?; } + return_if_filled!(self, binding, query.directive); // 3. Look for the name in the glob imports. if query.directive.allow_glob { @@ -208,6 +220,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { } } } + return_if_filled!(self, binding, query.directive); // 4. Look for the name in the lexical scope if it exists. if let Some(parent) = parent { @@ -228,6 +241,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { } } } + return_if_filled!(self, binding, query.directive); if !query.directive.allow_external { return self.finalize_query_result(query, binding); @@ -255,6 +269,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { } }); } + return_if_filled!(self, binding, query.directive); // 6. Look for the name in the builtin types. for &prim in PrimTy::all_types() { @@ -634,6 +649,17 @@ impl NameBinding { res.derivation.lexed() } } + + fn is_filled(&self, directive: QueryDirective) -> bool { + for domain in NameDomain::all_domains() { + if directive.is_allowed_domain(*domain as u8) && !self.resolutions.contains_key(domain) + { + return false; + } + } + + true + } } impl IntoIterator for NameBinding { @@ -916,6 +942,10 @@ impl NameDomain { ScopeId::Variant(..) => Self::Value, } } + + fn all_domains() -> &'static [Self; 3] { + &[Self::Item, Self::Value, Self::Field] + } } trait QueryPropagator { From e40853f0f720b27c4a564f176e4ea53ec7df9f82 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 7 Jun 2023 20:42:45 -0700 Subject: [PATCH 180/678] Add name conflict check --- crates/hir-analysis/src/lib.rs | 4 +- .../src/name_resolution/diagnostics.rs | 8 +- .../src/name_resolution/import_resolver.rs | 16 +- .../hir-analysis/src/name_resolution/mod.rs | 211 +++++++++++++++++- .../src/name_resolution/name_resolver.rs | 2 +- crates/hir/src/hir_def/item.rs | 126 +++++------ crates/hir/src/hir_def/scope_graph.rs | 17 +- crates/hir/src/visitor.rs | 8 +- 8 files changed, 300 insertions(+), 92 deletions(-) diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 424af1508e..47084cd828 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -2,8 +2,10 @@ use hir::{span::DynLazySpan, HirDb}; #[salsa::jar(db = HirAnalysisDb)] pub struct Jar( + /// Functions for import/name resolutions. 
+ name_resolution::resolve_path_early, name_resolution::resolve_imports, - name_resolution::diagnostics::ImportErrorAccumulator, + name_resolution::diagnostics::NameResolutionDiagAccumulator, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 47860ea8b1..736253fdf5 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -13,15 +13,15 @@ use crate::HirAnalysisDb; use super::name_resolver::NameRes; #[salsa::accumulator] -pub struct ImportErrorAccumulator(ImportError); +pub struct NameResolutionDiagAccumulator(NameResolutionDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct ImportError { +pub struct NameResolutionDiag { span: DynLazySpan, kind: ImportErrorKind, } -impl ImportError { +impl NameResolutionDiag { pub fn new(span: DynLazySpan, kind: ImportErrorKind) -> Self { Self { span, kind } } @@ -45,7 +45,7 @@ impl ImportError { } } -impl DiagnosticVoucher for ImportError { +impl DiagnosticVoucher for NameResolutionDiag { fn error_code(&self) -> GlobalErrorCode { GlobalErrorCode::new(AnalysisPass::ImportResolution, self.kind.local_code()) } diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 50eaea2fed..8a0c4c2643 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -14,14 +14,14 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::{name_resolution::visibility_checker::is_use_visible, HirAnalysisDb}; use super::{ - diagnostics::ImportError, + diagnostics::NameResolutionDiag, name_resolver::{ NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, NameResKind, NameResolutionError, NameResolver, QueryDirective, }, }; -pub struct ImportResolver<'db> { +pub(crate) struct ImportResolver<'db> { db: &'db dyn HirAnalysisDb, /// The ingot that is being resolved. @@ -34,7 +34,7 @@ pub struct ImportResolver<'db> { intermediate_uses: FxHashMap>, /// The errors that have been accumulated during the import resolution. - accumulated_errors: Vec, + accumulated_errors: Vec, /// The number of imported resolutions. 
/// This is used to judge if a import resolution doesn't change in each @@ -61,7 +61,7 @@ impl<'db> ImportResolver<'db> { } } - pub(crate) fn resolve_imports(mut self) -> (ResolvedImports, Vec) { + pub(crate) fn resolve_imports(mut self) -> (ResolvedImports, Vec) { self.initialize_i_uses(); let mut changed = true; @@ -492,7 +492,7 @@ impl<'db> ImportResolver<'db> { fn register_error(&mut self, i_use: &IntermediateUse, err: NameResolutionError) { match err { NameResolutionError::NotFound => { - self.accumulated_errors.push(ImportError::not_found( + self.accumulated_errors.push(NameResolutionDiag::not_found( i_use.current_segment_span(), i_use.current_segment_ident(self.db).unwrap(), )); @@ -504,7 +504,7 @@ impl<'db> ImportResolver<'db> { } NameResolutionError::Ambiguous(cands) => { - self.accumulated_errors.push(ImportError::ambiguous( + self.accumulated_errors.push(NameResolutionDiag::ambiguous( i_use.current_segment_span(), i_use.current_segment_ident(self.db).unwrap(), cands @@ -860,7 +860,7 @@ impl IntermediateResolvedImports { db: &dyn HirAnalysisDb, i_use: &IntermediateUse, mut bind: NameBinding, - ) -> Result<(), ImportError> { + ) -> Result<(), NameResolutionDiag> { let scope = i_use.original_scope; bind.set_derivation(NameDerivation::NamedImported(i_use.use_)); @@ -889,7 +889,7 @@ impl IntermediateResolvedImports { continue; } - return Err(ImportError::conflict( + return Err(NameResolutionDiag::conflict( i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), cand.derived_from(db).unwrap(), )); diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index dfda5dea45..798ace0266 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -1,8 +1,27 @@ -use hir::hir_def::IngotId; +use hir::{ + hir_def::{ + scope_graph::ScopeId, FieldDefListId, FuncParamListId, GenericParamListId, IngotId, + ItemKind, TopLevelMod, VariantDefListId, + }, + span::{ + item::{LazyFieldDefListSpan, LazyItemSpan, LazyVariantDefListSpan}, + params::LazyFuncParamListSpan, + }, + visitor::{ + walk_field_def_list, walk_func_param_list, walk_generic_param_list, walk_item, + walk_variant_def_list, Visitor, VisitorCtxt, + }, +}; use crate::HirAnalysisDb; -use self::{diagnostics::ImportErrorAccumulator, import_resolver::ResolvedImports}; +use self::{ + diagnostics::NameResolutionDiagAccumulator, + import_resolver::{DefaultImporter, ResolvedImports}, + name_resolver::{ + NameDomain, NameQuery, NameResolutionError, QueryDirective, ResolvedQueryCacheStore, + }, +}; pub mod diagnostics; pub mod import_resolver; @@ -12,9 +31,9 @@ pub mod visibility_checker; #[salsa::tracked(return_ref)] pub fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> ResolvedImports { let resolver = import_resolver::ImportResolver::new(db, ingot); - let (imports, import_error) = resolver.resolve_imports(); - for error in import_error { - ImportErrorAccumulator::push(db, error); + let (imports, diags) = resolver.resolve_imports(); + for diag in diags { + NameResolutionDiagAccumulator::push(db, diag); } imports @@ -23,8 +42,186 @@ pub fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> ResolvedImport pub fn resolve_imports_with_diag( db: &dyn HirAnalysisDb, ingot: IngotId, -) -> (&ResolvedImports, Vec) { +) -> (&ResolvedImports, Vec) { let imports = resolve_imports(db, ingot); - let diagnostics = resolve_imports::accumulated::(db, ingot); + let diagnostics = resolve_imports::accumulated::(db, ingot); (imports, 
diagnostics) } + +/// Performs early path resolution in the given module and checks the conflict +/// of the definitions. +#[salsa::tracked(return_ref)] +pub(crate) fn resolve_path_early( + db: &dyn HirAnalysisDb, + top_mod: TopLevelMod, +) -> ResolvedQueryCacheStore { + let importer = DefaultImporter; + ModuleNameResolver::new(db, &importer).resolve_all(top_mod); + todo!() +} + +struct ModuleNameResolver<'db, 'a> { + db: &'db dyn HirAnalysisDb, + resolver: name_resolver::NameResolver<'db, 'a>, + diags: Vec, + item_stack: Vec, +} + +impl<'db, 'a> ModuleNameResolver<'db, 'a> { + fn new(db: &'db dyn HirAnalysisDb, importer: &'a DefaultImporter) -> Self { + let resolver = name_resolver::NameResolver::new(db, importer); + Self { + db: db.as_hir_analysis_db(), + resolver, + diags: Vec::new(), + item_stack: Vec::new(), + } + } + + fn resolve_all(&mut self, top_mod: TopLevelMod) { + let mut ctxt = VisitorCtxt::with_item(self.db.as_hir_db(), top_mod.into()); + self.visit_item(&mut ctxt, top_mod.into()); + } + + fn check_item_conflict(&mut self, item: ItemKind) { + let scope = ScopeId::from_item(item); + let Some(query) = self.make_query_for_conflict_check(scope) else { + return; + }; + + self.check_conflict(scope, query); + } + + fn check_field_conflict(&mut self, fields: FieldDefListId) { + let parent_item = *self.item_stack.last().unwrap(); + for i in 0..fields.data(self.db.as_hir_db()).len() { + let scope = ScopeId::Field(parent_item, i); + let Some(query) = self.make_query_for_conflict_check(scope) else { + continue; + }; + + self.check_conflict(scope, query); + } + } + + fn check_variant_conflict(&mut self, variants: VariantDefListId) { + let parent_item = *self.item_stack.last().unwrap(); + for i in 0..variants.data(self.db.as_hir_db()).len() { + let scope = ScopeId::Variant(parent_item, i); + let Some(query) = self.make_query_for_conflict_check(scope) else { + continue; + }; + + self.check_conflict(scope, query); + } + } + + fn check_func_param_conflict(&mut self, params: FuncParamListId) { + let parent_item = *self.item_stack.last().unwrap(); + for i in 0..params.data(self.db.as_hir_db()).len() { + let scope = ScopeId::FuncParam(parent_item, i); + let Some(query) = self.make_query_for_conflict_check(scope) else { + continue; + }; + + self.check_conflict(scope, query); + } + } + + fn check_generic_param_conflict(&mut self, params: GenericParamListId) { + let parent_item = *self.item_stack.last().unwrap(); + for i in 0..params.data(self.db.as_hir_db()).len() { + let scope = ScopeId::GenericParam(parent_item, i); + let Some(query) = self.make_query_for_conflict_check(scope) else { + continue; + }; + + self.check_conflict(scope, query); + } + } + + fn make_query_for_conflict_check(&self, scope: ScopeId) -> Option { + let name = scope.name(self.db.as_hir_db())?; + let mut directive = QueryDirective::new(); + directive + .set_domain(NameDomain::from_scope(scope)) + .disallow_lex() + .disallow_glob() + .disallow_external(); + + let parent_scope = scope.parent(self.db.as_hir_db())?; + Some(NameQuery::with_directive(name, parent_scope, directive)) + } + + fn check_conflict(&mut self, scope: ScopeId, query: NameQuery) { + match self.resolver.resolve_query(query) { + Ok(_) => {} + Err(NameResolutionError::Ambiguous(cands)) => { + let conflicted_span = cands + .into_iter() + .find_map(|res| { + let conflicted_scope = res.scope()?; + if conflicted_scope == scope { + None + } else { + conflicted_scope.name_span(self.db.as_hir_db()) + } + }) + .unwrap(); + + let diag = 
diagnostics::NameResolutionDiag::conflict( + scope.name_span(self.db.as_hir_db()).unwrap(), + conflicted_span, + ); + self.diags.push(diag); + } + Err(_) => unreachable!(), + }; + } +} + +impl<'db, 'a> Visitor for ModuleNameResolver<'db, 'a> { + fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'_, LazyItemSpan>, item: ItemKind) { + self.check_item_conflict(item); + + self.item_stack.push(item); + walk_item(self, ctxt, item); + self.item_stack.pop(); + } + + fn visit_field_def_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyFieldDefListSpan>, + field: FieldDefListId, + ) { + self.check_field_conflict(field); + walk_field_def_list(self, ctxt, field); + } + + fn visit_variant_def_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefListSpan>, + variant: VariantDefListId, + ) { + self.check_variant_conflict(variant); + walk_variant_def_list(self, ctxt, variant); + } + + fn visit_generic_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, hir::span::params::LazyGenericParamListSpan>, + params: GenericParamListId, + ) { + self.check_generic_param_conflict(params); + walk_generic_param_list(self, ctxt, params); + } + + fn visit_func_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyFuncParamListSpan>, + params: FuncParamListId, + ) { + self.check_func_param_conflict(params); + walk_func_param_list(self, ctxt, params) + } +} diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index a997694e32..b4a84672c3 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -932,7 +932,7 @@ pub enum NameDomain { } impl NameDomain { - fn from_scope(scope: ScopeId) -> Self { + pub(super) fn from_scope(scope: ScopeId) -> Self { match scope { ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeId::FuncParam(..) 
=> { Self::Value diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 585deda404..0c398f5054 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -61,71 +61,7 @@ impl ItemKind { pub fn lazy_span(self) -> LazyItemSpan { LazyItemSpan::new(self) } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] -pub enum GenericParamOwner { - Func(Func), - Struct(Struct), - Enum(Enum), - TypeAlias(TypeAlias), - Impl(Impl), - Trait(Trait), - ImplTrait(ImplTrait), -} - -impl GenericParamOwner { - pub fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { - match self { - GenericParamOwner::Func(func) => func.top_mod(db), - GenericParamOwner::Struct(struct_) => struct_.top_mod(db), - GenericParamOwner::Enum(enum_) => enum_.top_mod(db), - GenericParamOwner::TypeAlias(type_alias) => type_alias.top_mod(db), - GenericParamOwner::Impl(impl_) => impl_.top_mod(db), - GenericParamOwner::Trait(trait_) => trait_.top_mod(db), - GenericParamOwner::ImplTrait(impl_trait) => impl_trait.top_mod(db), - } - } - - pub fn params(&self, db: &dyn HirDb) -> GenericParamListId { - match self { - GenericParamOwner::Func(func) => func.generic_params(db), - GenericParamOwner::Struct(struct_) => struct_.generic_params(db), - GenericParamOwner::Enum(enum_) => enum_.generic_params(db), - GenericParamOwner::TypeAlias(type_alias) => type_alias.generic_params(db), - GenericParamOwner::Impl(impl_) => impl_.generic_params(db), - GenericParamOwner::Trait(trait_) => trait_.generic_params(db), - GenericParamOwner::ImplTrait(impl_trait) => impl_trait.generic_params(db), - } - } - - pub fn params_span(&self) -> LazyGenericParamListSpan { - match self { - GenericParamOwner::Func(func) => func.lazy_span().generic_params(), - GenericParamOwner::Struct(struct_) => struct_.lazy_span().generic_params(), - GenericParamOwner::Enum(enum_) => enum_.lazy_span().generic_params(), - GenericParamOwner::TypeAlias(type_alias) => type_alias.lazy_span().generic_params(), - GenericParamOwner::Impl(impl_) => impl_.lazy_span().generic_params(), - GenericParamOwner::Trait(trait_) => trait_.lazy_span().generic_params(), - GenericParamOwner::ImplTrait(impl_trait) => impl_trait.lazy_span().generic_params(), - } - } - - pub fn from_item_opt(item: ItemKind) -> Option { - match item { - ItemKind::Func(func) => Some(GenericParamOwner::Func(func)), - ItemKind::Struct(struct_) => Some(GenericParamOwner::Struct(struct_)), - ItemKind::Enum(enum_) => Some(GenericParamOwner::Enum(enum_)), - ItemKind::TypeAlias(type_alias) => Some(GenericParamOwner::TypeAlias(type_alias)), - ItemKind::Impl(impl_) => Some(GenericParamOwner::Impl(impl_)), - ItemKind::Trait(trait_) => Some(GenericParamOwner::Trait(trait_)), - ItemKind::ImplTrait(impl_trait) => Some(GenericParamOwner::ImplTrait(impl_trait)), - _ => None, - } - } -} -impl ItemKind { pub fn name(self, db: &dyn HirDb) -> Option { use ItemKind::*; match self { @@ -205,6 +141,68 @@ impl ItemKind { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] +pub enum GenericParamOwner { + Func(Func), + Struct(Struct), + Enum(Enum), + TypeAlias(TypeAlias), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), +} + +impl GenericParamOwner { + pub fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { + match self { + GenericParamOwner::Func(func) => func.top_mod(db), + GenericParamOwner::Struct(struct_) => struct_.top_mod(db), + GenericParamOwner::Enum(enum_) => enum_.top_mod(db), + 
GenericParamOwner::TypeAlias(type_alias) => type_alias.top_mod(db), + GenericParamOwner::Impl(impl_) => impl_.top_mod(db), + GenericParamOwner::Trait(trait_) => trait_.top_mod(db), + GenericParamOwner::ImplTrait(impl_trait) => impl_trait.top_mod(db), + } + } + + pub fn params(&self, db: &dyn HirDb) -> GenericParamListId { + match self { + GenericParamOwner::Func(func) => func.generic_params(db), + GenericParamOwner::Struct(struct_) => struct_.generic_params(db), + GenericParamOwner::Enum(enum_) => enum_.generic_params(db), + GenericParamOwner::TypeAlias(type_alias) => type_alias.generic_params(db), + GenericParamOwner::Impl(impl_) => impl_.generic_params(db), + GenericParamOwner::Trait(trait_) => trait_.generic_params(db), + GenericParamOwner::ImplTrait(impl_trait) => impl_trait.generic_params(db), + } + } + + pub fn params_span(&self) -> LazyGenericParamListSpan { + match self { + GenericParamOwner::Func(func) => func.lazy_span().generic_params(), + GenericParamOwner::Struct(struct_) => struct_.lazy_span().generic_params(), + GenericParamOwner::Enum(enum_) => enum_.lazy_span().generic_params(), + GenericParamOwner::TypeAlias(type_alias) => type_alias.lazy_span().generic_params(), + GenericParamOwner::Impl(impl_) => impl_.lazy_span().generic_params(), + GenericParamOwner::Trait(trait_) => trait_.lazy_span().generic_params(), + GenericParamOwner::ImplTrait(impl_trait) => impl_trait.lazy_span().generic_params(), + } + } + + pub fn from_item_opt(item: ItemKind) -> Option { + match item { + ItemKind::Func(func) => Some(GenericParamOwner::Func(func)), + ItemKind::Struct(struct_) => Some(GenericParamOwner::Struct(struct_)), + ItemKind::Enum(enum_) => Some(GenericParamOwner::Enum(enum_)), + ItemKind::TypeAlias(type_alias) => Some(GenericParamOwner::TypeAlias(type_alias)), + ItemKind::Impl(impl_) => Some(GenericParamOwner::Impl(impl_)), + ItemKind::Trait(trait_) => Some(GenericParamOwner::Trait(trait_)), + ItemKind::ImplTrait(impl_trait) => Some(GenericParamOwner::ImplTrait(impl_trait)), + _ => None, + } + } +} + #[salsa::tracked] pub struct TopLevelMod { // No #[id] here, because `TopLevelMod` is always unique to a `InputFile` that is an argument diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index b44eb23bf7..7b230ebac8 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -4,7 +4,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::{hir_def::GenericParamOwner, span::DynLazySpan, HirDb}; -use super::{Enum, Func, IdentId, IngotId, ItemKind, TopLevelMod, Use, Visibility}; +use super::{Enum, Func, FuncParamLabel, IdentId, IngotId, ItemKind, TopLevelMod, Use, Visibility}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct ScopeGraph { @@ -184,7 +184,12 @@ impl ScopeId { ScopeId::FuncParam(parent, idx) => { let func: Func = parent.try_into().unwrap(); - func.params(db).to_opt()?.data(db)[idx].name() + let param = &func.params(db).to_opt()?.data(db)[idx]; + if let Some(FuncParamLabel::Ident(ident)) = param.label { + Some(ident) + } else { + param.name() + } } ScopeId::GenericParam(parent, idx) => { @@ -224,7 +229,13 @@ impl ScopeId { ScopeId::FuncParam(parent, idx) => { let func: Func = parent.try_into().unwrap(); - Some(func.lazy_span().params().param(idx).name().into()) + let param = &func.params(db).to_opt()?.data(db)[idx]; + let param_span = func.lazy_span().params().param(idx); + if let Some(FuncParamLabel::Ident(_)) = param.label { + Some(param_span.label().into()) + } else { + 
Some(param_span.name().into()) + } } ScopeId::GenericParam(parent, idx) => { diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index b396d40a77..7a9ddf4f2a 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1734,7 +1734,7 @@ impl<'db, T> VisitorCtxt<'db, T> where T: LazySpan, { - pub fn current_span(&self) -> Option + pub fn span(&self) -> Option where T: SpanDowncast, { @@ -1868,7 +1868,7 @@ mod tests { impl Visitor for MyVisitor { fn visit_attribute(&mut self, ctxt: &mut VisitorCtxt, _attrs: &Attr) { - self.attributes.push(ctxt.current_span().unwrap()); + self.attributes.push(ctxt.span().unwrap()); } fn visit_generic_param_list( @@ -1876,12 +1876,12 @@ mod tests { ctxt: &mut VisitorCtxt, _params: GenericParamListId, ) { - self.generic_param_list = Some(ctxt.current_span().unwrap()); + self.generic_param_list = Some(ctxt.span().unwrap()); } fn visit_lit(&mut self, ctxt: &mut VisitorCtxt, lit: LitKind) { if let LitKind::Int(_) = lit { - self.lit_ints.push(ctxt.current_span().unwrap()); + self.lit_ints.push(ctxt.span().unwrap()); } } } From 024ffa8cd1d79cdd0d7edfa738e6643760b1dedc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 8 Jun 2023 16:23:40 -0700 Subject: [PATCH 181/678] Improve cache efficiency --- .../src/name_resolution/import_resolver.rs | 132 +++--- .../hir-analysis/src/name_resolution/mod.rs | 23 +- .../src/name_resolution/name_resolver.rs | 444 ++++++------------ crates/hir-analysis/tests/import.rs | 1 - crates/hir/src/hir_def/scope_graph.rs | 2 +- 5 files changed, 235 insertions(+), 367 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 8a0c4c2643..2787f89e88 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -11,13 +11,16 @@ use hir::{ use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; -use crate::{name_resolution::visibility_checker::is_use_visible, HirAnalysisDb}; +use crate::{ + name_resolution::visibility_checker::{is_scope_visible, is_use_visible}, + HirAnalysisDb, +}; use super::{ diagnostics::NameResolutionDiag, name_resolver::{ NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, NameResKind, - NameResolutionError, NameResolver, QueryDirective, + NameResolutionError, NameResolutionResult, NameResolver, QueryDirective, }, }; @@ -233,15 +236,9 @@ impl<'db> ImportResolver<'db> { // Collect all bindings in the target scope. 
let mut resolver = NameResolver::new(self.db, &self.resolved_imports); - let mut directive = QueryDirective::default(); - directive - .add_domain(NameDomain::Value) - .disallow_lex() - .disallow_external(); let resolutions = resolver.collect_all_resolutions_for_glob( target_scope, original_scope, - directive, unresolved_named_imports, ); @@ -335,28 +332,47 @@ impl<'db> ImportResolver<'db> { }; let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); - let resolved = match resolver.resolve_query(query) { - Ok(resolved) => resolved, + let binding = resolver.resolve_query(query); + if i_use.is_base_resolved(self.db) { + if binding.is_empty() { + if self.is_decidable(i_use) { + self.register_error(i_use, NameResolutionError::NotFound); + return None; + } else { + return Some(IUseResolution::Unchanged(i_use.clone())); + } + } + if self.is_decidable(i_use) { + for err in binding.errors() { + self.register_error(i_use, err.clone()); + } + } + for res in binding.iter() { + if res.is_external(self.db, i_use) || res.is_derived_from_glob() { + self.suspicious_imports.insert(i_use.use_); + break; + } + } + return Some(IUseResolution::Full(binding)); + } + + let res = match binding.res_in_domain(NameDomain::Item) { + Ok(res) => res, Err(NameResolutionError::NotFound) if !self.is_decidable(i_use) => { return Some(IUseResolution::Unchanged(i_use.clone())) } - Err(err) => { - self.register_error(i_use, err); + self.register_error(i_use, err.clone()); return None; } }; - if i_use.is_base_resolved(self.db) { - return Some(IUseResolution::Full(resolved)); - } - - if resolved.contains_external(self.db, i_use) || resolved.contains_glob_imported() { + if res.is_external(self.db, i_use) || res.is_derived_from_glob() { self.suspicious_imports.insert(i_use.use_); } - let next_i_use = i_use.proceed(resolved); + let next_i_use = i_use.proceed(res.clone()); if next_i_use.is_base_resolved(self.db) { Some(IUseResolution::BasePath(next_i_use)) } else { @@ -383,7 +399,7 @@ impl<'db> ImportResolver<'db> { fn try_finalize_named_use(&mut self, i_use: IntermediateUse) -> bool { debug_assert!(i_use.is_base_resolved(self.db)); - let binding = match self.resolve_segment(&i_use) { + let mut binding = match self.resolve_segment(&i_use) { Some(IUseResolution::Full(binding)) => binding, Some(IUseResolution::Unchanged(_)) => { return false; @@ -395,9 +411,17 @@ impl<'db> ImportResolver<'db> { return true; } }; + binding.resolutions.retain(|_, res| { + let Ok(res) = res else { + return false; + }; + match res.scope() { + Some(scope) => is_scope_visible(self.db, i_use.original_scope, scope), + None => true, + } + }); - let filtered = binding.filter_by_visibility(self.db, i_use.original_scope); - let n_res = filtered.len(); + let n_res = binding.len(); let is_decidable = self.is_decidable(&i_use); if n_res == 0 && is_decidable { @@ -412,7 +436,7 @@ impl<'db> ImportResolver<'db> { self.num_imported_res.insert(i_use.use_, n_res); if let Err(err) = self .resolved_imports - .set_named_binds(self.db, &i_use, filtered) + .set_named_binds(self.db, &i_use, binding) { self.accumulated_errors.push(err); } @@ -524,7 +548,7 @@ impl<'db> ImportResolver<'db> { /// Makes a query for the current segment of the intermediate use to be /// resolved. 
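// Illustrative sketch only, not part of the patch: with this change
// `resolve_query` always returns a `NameBinding`, and callers pick out the
// result for the domain they care about via `res_in_domain`, as the import
// resolution code above now does. The helper below is hypothetical; it
// assumes the `use super::name_resolver::{..}` imports already present in
// this file and elides error reporting.
fn expect_item_res(
    resolver: &mut NameResolver<'_, '_>,
    name: IdentId,
    scope: ScopeId,
) -> Option<NameRes> {
    let binding = resolver.resolve_query(NameQuery::new(name, scope));
    match binding.res_in_domain(NameDomain::Item) {
        Ok(res) => Some(res.clone()),
        // `NotFound`, `Invalid`, `Invisible`, and `Ambiguous` now surface as
        // per-domain errors instead of failing the whole query.
        Err(_) => None,
    }
}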
- fn make_query(&self, i_use: &IntermediateUse) -> Result { + fn make_query(&self, i_use: &IntermediateUse) -> NameResolutionResult { let Some(seg_name) = i_use.current_segment_ident(self.db) else { return Err(NameResolutionError::Invalid); }; @@ -544,10 +568,6 @@ impl<'db> ImportResolver<'db> { directive.disallow_glob().disallow_external(); } - if i_use.is_base_resolved(self.db) { - directive.add_domain(NameDomain::Value); - } - Ok(NameQuery::with_directive( seg_name, current_scope, @@ -766,12 +786,10 @@ impl IntermediateUse { /// Proceed the resolution of the use path to the next segment. /// The binding must contain exactly one resolution. - fn proceed(&self, binding: NameBinding) -> Self { - debug_assert_eq!(binding.len(), 1); - let current_res = binding.into_iter().next(); + fn proceed(&self, next_res: NameRes) -> Self { Self { use_: self.use_, - current_res, + current_res: next_res.into(), original_scope: self.original_scope, unresolved_from: self.unresolved_from + 1, } @@ -879,29 +897,28 @@ impl IntermediateResolvedImports { .or_default(); match imported_set.entry(imported_name) { - Entry::Occupied(mut e) => match e.get_mut().merge(bind.iter()) { - Ok(()) => Ok(()), - Err(NameResolutionError::Ambiguous(cands)) => { + Entry::Occupied(mut e) => { + let bindings = e.get_mut(); + bindings.merge(bind.iter()); + for err in bindings.errors() { + let NameResolutionError::Ambiguous(cands) = err else { + continue; + }; for cand in cands { - match cand.derivation { - NameDerivation::NamedImported(use_) => { - if i_use.use_ == use_ { - continue; - } - - return Err(NameResolutionDiag::conflict( - i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), - cand.derived_from(db).unwrap(), - )); - } - _ => unreachable!(), + let NameDerivation::NamedImported(use_) = cand.derivation else { + continue; + }; + + if i_use.use_ != use_ { + return Err(NameResolutionDiag::conflict( + i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), + cand.derived_from(db).unwrap(), + )); } } - unreachable!() } - - Err(_) => unreachable!(), - }, + Ok(()) + } Entry::Vacant(e) => { e.insert(bind); @@ -976,23 +993,22 @@ fn resolved_imports_for_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> &Resolv super::resolve_imports(db, ingot) } -impl NameBinding { +impl NameRes { /// Returns true if the binding contains an resolution that is not in the /// same ingot as the current resolution of the `i_use`. - fn contains_external(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { + fn is_external(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { let Some(current_ingot) = i_use.current_scope().map(|scope| scope.ingot(db.as_hir_db())) else { return false; }; - self.resolutions.values().any(|r| match r.kind { + + match self.kind { NameResKind::Scope(scope) => scope.ingot(db.as_hir_db()) != current_ingot, NameResKind::Prim(_) => true, - }) + } } /// Returns true if the binding contains a glob import. 
- fn contains_glob_imported(&self) -> bool { - self.resolutions - .values() - .any(|r| matches!(r.derivation, NameDerivation::GlobImported(_))) + fn is_derived_from_glob(&self) -> bool { + matches!(self.derivation, NameDerivation::GlobImported(_)) } } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 798ace0266..3f14c28a48 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -89,7 +89,7 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { return; }; - self.check_conflict(scope, query); + self.check_conflict(scope, query, NameDomain::from_scope(scope)); } fn check_field_conflict(&mut self, fields: FieldDefListId) { @@ -100,7 +100,7 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { continue; }; - self.check_conflict(scope, query); + self.check_conflict(scope, query, NameDomain::from_scope(scope)); } } @@ -112,7 +112,7 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { continue; }; - self.check_conflict(scope, query); + self.check_conflict(scope, query, NameDomain::from_scope(scope)); } } @@ -124,7 +124,7 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { continue; }; - self.check_conflict(scope, query); + self.check_conflict(scope, query, NameDomain::from_scope(scope)); } } @@ -136,29 +136,26 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { continue; }; - self.check_conflict(scope, query); + self.check_conflict(scope, query, NameDomain::from_scope(scope)); } } fn make_query_for_conflict_check(&self, scope: ScopeId) -> Option { let name = scope.name(self.db.as_hir_db())?; let mut directive = QueryDirective::new(); - directive - .set_domain(NameDomain::from_scope(scope)) - .disallow_lex() - .disallow_glob() - .disallow_external(); + directive.disallow_lex().disallow_glob().disallow_external(); let parent_scope = scope.parent(self.db.as_hir_db())?; Some(NameQuery::with_directive(name, parent_scope, directive)) } - fn check_conflict(&mut self, scope: ScopeId, query: NameQuery) { - match self.resolver.resolve_query(query) { + fn check_conflict(&mut self, scope: ScopeId, query: NameQuery, domain: NameDomain) { + let binding = self.resolver.resolve_query(query); + match binding.res_in_domain(domain) { Ok(_) => {} Err(NameResolutionError::Ambiguous(cands)) => { let conflicted_span = cands - .into_iter() + .iter() .find_map(|res| { let conflicted_scope = res.scope()?; if conflicted_scope == scope { diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index b4a84672c3..552f979577 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -119,9 +119,9 @@ impl<'db, 'a> NameResolver<'db, 'a> { return Err(PathResolutionError::new(NameResolutionError::Invalid, i)); }; let query = NameQuery::new(*ident, scope); - scope = match self.resolve_query(query) { - Ok(resolved) => { - let res = resolved.name_by_domain(NameDomain::Item).unwrap(); + let binding = self.resolve_query(query); + scope = match binding.res_in_domain(NameDomain::Item) { + Ok(res) => { if res.is_type(self.db) { return Ok(ResolvedPath::Partial { resolved: res.clone(), @@ -136,9 +136,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { )); } } - Err(err) => { - return Err(PathResolutionError::new(err, i)); + return Err(PathResolutionError::new(err.clone(), i)); } }; } @@ -147,13 +146,11 @@ impl<'db, 'a> NameResolver<'db, 'a> { return Err(PathResolutionError::new(NameResolutionError::Invalid, 
last_seg_idx)); }; let query = NameQuery::with_directive(ident, scope, directive); - match self.resolve_query(query) { - Ok(resolved) => Ok(ResolvedPath::Full(resolved)), - Err(err) => Err(PathResolutionError::new(err, last_seg_idx)), - } + let resolved = self.resolve_query(query); + Ok(ResolvedPath::Full(resolved)) } - pub fn resolve_query(&mut self, query: NameQuery) -> Result { + pub fn resolve_query(&mut self, query: NameQuery) -> NameBinding { // If the query is already resolved, return the cached result. if let Some(resolved) = self.cache_store.get(query) { return resolved.clone(); @@ -167,20 +164,10 @@ impl<'db, 'a> NameResolver<'db, 'a> { // This ordering means that greater one shadows lower ones in the same domain. let mut parent = None; - macro_rules! return_if_filled { - ($self:expr, $binding:expr, $directive:expr) => { - if binding.is_filled($directive) { - let resolved = binding.clone(); - $self.cache_store.cache_result(query, Ok(resolved.clone())); - return Ok(resolved); - } - }; - } - // 1. Look for the name in the current scope. let mut found_scopes = FxHashSet::default(); for edge in query.scope.edges(self.db.as_hir_db()) { - match edge.kind.propagate(query) { + match edge.kind.propagate(&query) { PropagationResult::Terminated => { if found_scopes.insert(edge.dest) { let res = NameRes::new_scope( @@ -188,7 +175,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { NameDomain::from_scope(edge.dest), NameDerivation::Def, ); - self.try_push(&mut binding, &res, query)?; + binding.push(&res); } } @@ -200,7 +187,6 @@ impl<'db, 'a> NameResolver<'db, 'a> { PropagationResult::UnPropagated => {} } } - return_if_filled!(self, binding, query.directive); // 2. Look for the name in the named imports of the current scope. if let Some(imported) = self @@ -208,19 +194,17 @@ impl<'db, 'a> NameResolver<'db, 'a> { .named_imports(self.db, query.scope) .and_then(|imports| imports.get(&query.name)) { - self.try_merge(&mut binding, imported, query)?; + binding.merge(imported.iter()); } - return_if_filled!(self, binding, query.directive); // 3. Look for the name in the glob imports. if query.directive.allow_glob { if let Some(imported) = self.importer.glob_imports(self.db, query.scope) { for res in imported.name_res_for(query.name) { - self.try_push(&mut binding, res, query)?; + binding.push(res); } } } - return_if_filled!(self, binding, query.directive); // 4. Look for the name in the lexical scope if it exists. if let Some(parent) = parent { @@ -228,55 +212,40 @@ impl<'db, 'a> NameResolver<'db, 'a> { query_for_parent.scope = parent; query_for_parent.directive.disallow_external(); - match self.resolve_query(query_for_parent) { - Ok(mut resolved) => { - resolved.lexed(); - self.try_merge(&mut binding, &resolved, query)?; - } - - Err(NameResolutionError::NotFound) => {} - Err(err) => { - self.cache_store.cache_result(query, Err(err.clone())); - return Err(err); - } - } + let mut resolved = self.resolve_query(query_for_parent); + resolved.set_lexed_derivation(); + binding.merge(resolved.iter()); } - return_if_filled!(self, binding, query.directive); if !query.directive.allow_external { return self.finalize_query_result(query, binding); } // 5. Look for the name in the external ingots. 
- if query.directive.is_allowed_domain(NameDomain::Item as u8) { - query - .scope - .top_mod(self.db.as_hir_db()) - .ingot(self.db.as_hir_db()) - .external_ingots(self.db.as_hir_db()) - .iter() - .for_each(|(name, root_mod)| { - if *name == query.name { - // We don't care about the result of `push` because we assume ingots are - // guaranteed to be unique. - binding - .push(&NameRes::new_scope( - ScopeId::root(*root_mod), - NameDomain::Item, - NameDerivation::External, - )) - .unwrap(); - } - }); - } - return_if_filled!(self, binding, query.directive); + query + .scope + .top_mod(self.db.as_hir_db()) + .ingot(self.db.as_hir_db()) + .external_ingots(self.db.as_hir_db()) + .iter() + .for_each(|(name, root_mod)| { + if *name == query.name { + // We don't care about the result of `push` because we assume ingots are + // guaranteed to be unique. + binding.push(&NameRes::new_scope( + ScopeId::root(*root_mod), + NameDomain::Item, + NameDerivation::External, + )) + } + }); // 6. Look for the name in the builtin types. for &prim in PrimTy::all_types() { // We don't care about the result of `push` because we assume builtin types are // guaranteed to be unique. if query.name == prim.name() { - binding.push(&NameRes::new_prim(prim)).unwrap(); + binding.push(&NameRes::new_prim(prim)); } } @@ -326,7 +295,6 @@ impl<'db, 'a> NameResolver<'db, 'a> { &mut self, target: ScopeId, ref_scope: ScopeId, - directive: QueryDirective, unresolved_named_imports: FxHashSet, ) -> FxHashMap> { let mut res_collection: FxHashMap> = FxHashMap::default(); @@ -334,7 +302,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { let mut found_kinds: FxHashSet<(IdentId, NameResKind)> = FxHashSet::default(); for edge in target.edges(self.db.as_hir_db()) { - let scope = match edge.kind.propagate_glob(directive) { + let scope = match edge.kind.propagate_glob() { PropagationResult::Terminated => edge.dest, _ => { continue; @@ -405,44 +373,9 @@ impl<'db, 'a> NameResolver<'db, 'a> { } /// Finalize the query result and cache it to the cache store. - fn finalize_query_result( - &mut self, - query: NameQuery, - resolved_set: NameBinding, - ) -> Result { - let result = if resolved_set.is_empty() { - Err(NameResolutionError::NotFound) - } else { - Ok(resolved_set) - }; - self.cache_store.cache_result(query, result.clone()); - result - } - - fn try_merge( - &mut self, - target: &mut NameBinding, - from: &NameBinding, - query: NameQuery, - ) -> Result<(), NameResolutionError> { - target - .merge(from.filter_by_domain(query.directive.domain)) - .map_err(|err| { - self.cache_store.cache_result(query, Err(err.clone())); - err - }) - } - - fn try_push( - &mut self, - target: &mut NameBinding, - res: &NameRes, - query: NameQuery, - ) -> Result<(), NameResolutionError> { - target.push(res).map_err(|err| { - self.cache_store.cache_result(query, Err(err.clone())); - err - }) + fn finalize_query_result(&mut self, query: NameQuery, binding: NameBinding) -> NameBinding { + self.cache_store.cache_result(query, binding.clone()); + binding } } @@ -491,8 +424,6 @@ pub struct QueryDirective { /// If `allow_glob` is `true`, then the resolver uses the glob import to /// resolve the name. allow_glob: bool, - - domain: u8, } impl QueryDirective { @@ -504,23 +435,9 @@ impl QueryDirective { allow_lex: true, allow_external: true, allow_glob: true, - domain: NameDomain::Item as u8, } } - /// Set the `domain` to lookup, the allowed domain set that are already set - /// will be overwritten. 
- pub fn set_domain(&mut self, domain: NameDomain) -> &mut Self { - self.domain = domain as u8; - self - } - - /// Append the `domain` to the allowed domain set. - pub fn add_domain(&mut self, domain: NameDomain) -> &mut Self { - self.domain |= domain as u8; - self - } - /// Disallow lexical scope lookup. pub fn disallow_lex(&mut self) -> &mut Self { self.allow_lex = false; @@ -536,12 +453,6 @@ impl QueryDirective { self.allow_glob = false; self } - - /// Returns true if the `domain` is allowed to lookup in the current - /// setting. - pub(super) fn is_allowed_domain(&self, domain: u8) -> bool { - self.domain & domain != 0 - } } impl Default for QueryDirective { @@ -555,116 +466,109 @@ impl Default for QueryDirective { /// different name domains. #[derive(Clone, Debug, PartialEq, Eq, Default)] pub struct NameBinding { - pub(super) resolutions: FxHashMap, + pub(super) resolutions: FxHashMap>, } impl NameBinding { - /// Returns the number of resolutions. + /// Returns the number of resolutions in the binding. pub fn len(&self) -> usize { - self.resolutions.len() + self.iter().count() } pub fn is_empty(&self) -> bool { - self.resolutions.is_empty() + self.len() == 0 } pub fn iter(&self) -> impl Iterator { - self.resolutions.values() + self.resolutions + .values() + .filter_map(|res| res.as_ref().ok()) } - pub fn filter_by_visibility(&self, db: &dyn HirAnalysisDb, from: ScopeId) -> Self { - let mut resolutions = FxHashMap::default(); - for (domain, res) in &self.resolutions { - if res.is_visible(db, from) { - resolutions.insert(*domain, res.clone()); - } - } - Self { resolutions } + pub fn iter_mut(&mut self) -> impl Iterator { + self.resolutions + .values_mut() + .filter_map(|res| res.as_mut().ok()) + } + + pub fn errors(&self) -> impl Iterator { + self.resolutions + .values() + .filter_map(|res| res.as_ref().err()) } /// Returns the resolution of the given `domain`. - pub fn name_by_domain(&self, domain: NameDomain) -> Option<&NameRes> { - self.resolutions.get(&domain) + pub fn res_in_domain(&self, domain: NameDomain) -> &NameResolutionResult { + self.resolutions + .get(&domain) + .unwrap_or(&Err(NameResolutionError::NotFound)) } /// Merge the `resolutions` into the set. If name conflict happens, the old /// resolution will be returned, otherwise `None` will be returned. - pub(super) fn merge<'a>( - &mut self, - resolutions: impl Iterator, - ) -> Result<(), NameResolutionError> { + pub(super) fn merge<'a>(&mut self, resolutions: impl Iterator) { for res in resolutions { - self.push(res)?; + self.push(res); } - - Ok(()) } pub(super) fn set_derivation(&mut self, derivation: NameDerivation) { - for res in self.resolutions.values_mut() { + for res in self.iter_mut() { res.derivation = derivation.clone(); } } /// Push the `res` into the set. 
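// Illustrative sketch only, not part of the patch: `push` now resolves
// precedence per domain by comparing `NameDerivation`s, and a tie between two
// candidates of different kinds is recorded as
// `NameResolutionError::Ambiguous` rather than silently picking a winner.
// That is the case the definition-conflict check relies on when two items in
// one scope share a name and both candidates carry `NameDerivation::Def`.
// The helper below is hypothetical and assumes module-internal access to the
// private `push`.
fn demo_def_conflict(a: &NameRes, b: &NameRes) -> bool {
    // `a` and `b` are assumed to resolve the same name in the same domain,
    // both derived from `NameDerivation::Def`, but to different scopes.
    let mut binding = NameBinding::default();
    binding.push(a);
    binding.push(b);
    // The conflict stays in the binding as an `Ambiguous` error for that domain.
    binding
        .errors()
        .any(|err| matches!(err, NameResolutionError::Ambiguous(_)))
}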
- fn push(&mut self, res: &NameRes) -> Result<(), NameResolutionError> { + fn push(&mut self, res: &NameRes) { let domain = res.domain; match self.resolutions.entry(domain) { Entry::Occupied(mut e) => { - let old_derivation = e.get().derivation.clone(); + let old_res = match e.get() { + Ok(res) => res, + Err(NameResolutionError::NotFound) => { + e.insert(Ok(res.clone())).ok(); + return; + } + Err(_) => { + return; + } + }; + + let old_derivation = old_res.derivation.clone(); match res.derivation.cmp(&old_derivation) { - cmp::Ordering::Less => Ok(()), + cmp::Ordering::Less => {} cmp::Ordering::Equal => { - if e.get().kind == res.kind { - Ok(()) + if old_res.kind == res.kind { } else { - Err(NameResolutionError::Ambiguous(vec![ - e.get().clone(), + e.insert(Err(NameResolutionError::Ambiguous(vec![ + old_res.clone(), res.clone(), - ])) + ]))) + .ok(); } } cmp::Ordering::Greater => { - e.insert(res.clone()); - Ok(()) + e.insert(Ok(res.clone())).ok(); } } } Entry::Vacant(e) => { - e.insert(res.clone()); - Ok(()) + e.insert(Ok(res.clone())); } } } - fn filter_by_domain(&self, domain: u8) -> impl Iterator { - self.resolutions - .values() - .filter(move |res| ((res.domain as u8) & domain) != 0) - } - - fn lexed(&mut self) { - for res in self.resolutions.values_mut() { + fn set_lexed_derivation(&mut self) { + for res in self.iter_mut() { res.derivation.lexed() } } - - fn is_filled(&self, directive: QueryDirective) -> bool { - for domain in NameDomain::all_domains() { - if directive.is_allowed_domain(*domain as u8) && !self.resolutions.contains_key(domain) - { - return false; - } - } - - true - } } impl IntoIterator for NameBinding { - type Item = NameRes; - type IntoIter = IntoValues; + type Item = NameResolutionResult; + type IntoIter = IntoValues>; fn into_iter(self) -> Self::IntoIter { self.resolutions.into_values() @@ -672,9 +576,9 @@ impl IntoIterator for NameBinding { } impl From for NameBinding { - fn from(resolution: NameRes) -> Self { + fn from(res: NameRes) -> Self { let mut names = FxHashMap::default(); - names.insert(resolution.domain, resolution); + names.insert(res.domain, Ok(res)); Self { resolutions: names } } } @@ -872,6 +776,8 @@ pub enum NameResolutionError { Ambiguous(Vec), } +pub type NameResolutionResult = Result; + impl fmt::Display for NameResolutionError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { @@ -887,19 +793,19 @@ impl std::error::Error for NameResolutionError {} #[derive(Default, Debug, PartialEq, Eq)] pub(crate) struct ResolvedQueryCacheStore { - cache: FxHashMap>, + cache: FxHashMap, no_cache: bool, } impl ResolvedQueryCacheStore { - fn cache_result(&mut self, query: NameQuery, result: Result) { + fn cache_result(&mut self, query: NameQuery, result: NameBinding) { if self.no_cache { return; } self.cache.insert(query, result); } - fn get(&self, query: NameQuery) -> Option<&Result> { + fn get(&self, query: NameQuery) -> Option<&NameBinding> { self.cache.get(&query) } } @@ -942,33 +848,11 @@ impl NameDomain { ScopeId::Variant(..) 
=> Self::Value, } } - - fn all_domains() -> &'static [Self; 3] { - &[Self::Item, Self::Value, Self::Field] - } } trait QueryPropagator { - fn propagate(&self, query: NameQuery) -> PropagationResult { - if query.directive.is_allowed_domain(Self::ALLOWED_DOMAIN) { - self.propagate_impl(query) - } else { - PropagationResult::UnPropagated - } - } - - fn propagate_glob(&self, directive: QueryDirective) -> PropagationResult { - if directive.is_allowed_domain(Self::ALLOWED_DOMAIN) { - self.propagate_glob_impl() - } else { - PropagationResult::UnPropagated - } - } - - const ALLOWED_DOMAIN: u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult; - fn propagate_glob_impl(&self) -> PropagationResult; + fn propagate(self, query: &NameQuery) -> PropagationResult; + fn propagate_glob(self) -> PropagationResult; } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -979,9 +863,7 @@ enum PropagationResult { } impl QueryPropagator for LexEdge { - const ALLOWED_DOMAIN: u8 = ALL_DOMAINS; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if query.directive.allow_lex { PropagationResult::Continuation } else { @@ -989,15 +871,13 @@ impl QueryPropagator for LexEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for ModEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if self.0 == query.name { PropagationResult::Terminated } else { @@ -1005,15 +885,13 @@ impl QueryPropagator for ModEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::Terminated } } impl QueryPropagator for TypeEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if self.0 == query.name { PropagationResult::Terminated } else { @@ -1021,45 +899,41 @@ impl QueryPropagator for TypeEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::Terminated } } impl QueryPropagator for TraitEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if self.0 == query.name { PropagationResult::Terminated } else { PropagationResult::UnPropagated } } - fn propagate_glob_impl(&self) -> PropagationResult { + + fn propagate_glob(self) -> PropagationResult { PropagationResult::Terminated } } impl QueryPropagator for ValueEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Value as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if self.0 == query.name { PropagationResult::Terminated } else { PropagationResult::UnPropagated } } - fn propagate_glob_impl(&self) -> PropagationResult { + + fn propagate_glob(self) -> PropagationResult { PropagationResult::Terminated } } impl QueryPropagator for GenericParamEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if self.0 == 
query.name { PropagationResult::Terminated } else { @@ -1067,15 +941,13 @@ impl QueryPropagator for GenericParamEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for FieldEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Field as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if self.0 == query.name { PropagationResult::Terminated } else { @@ -1083,15 +955,13 @@ impl QueryPropagator for FieldEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for VariantEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if self.0 == query.name { PropagationResult::Terminated } else { @@ -1099,15 +969,13 @@ impl QueryPropagator for VariantEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::Terminated } } impl QueryPropagator for SuperEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if query.name.is_super() { PropagationResult::Terminated } else { @@ -1115,15 +983,13 @@ impl QueryPropagator for SuperEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for IngotEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if query.name.is_ingot() { PropagationResult::Terminated } else { @@ -1131,15 +997,13 @@ impl QueryPropagator for IngotEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for SelfTyEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if query.name.is_self_ty() { PropagationResult::Terminated } else { @@ -1147,15 +1011,13 @@ impl QueryPropagator for SelfTyEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for SelfEdge { - const ALLOWED_DOMAIN: u8 = NameDomain::Item as u8; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { if query.name.is_self() { PropagationResult::Terminated } else { @@ -1163,61 +1025,55 @@ impl QueryPropagator for SelfEdge { } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for AnonEdge { - const ALLOWED_DOMAIN: u8 = 0; - - fn propagate_impl(&self, _query: NameQuery) -> PropagationResult { + fn propagate(self, _query: &NameQuery) -> PropagationResult { PropagationResult::UnPropagated } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> 
PropagationResult { PropagationResult::UnPropagated } } impl QueryPropagator for EdgeKind { - const ALLOWED_DOMAIN: u8 = ALL_DOMAINS; - - fn propagate_impl(&self, query: NameQuery) -> PropagationResult { + fn propagate(self, query: &NameQuery) -> PropagationResult { match self { - EdgeKind::Lex(edge) => edge.propagate_impl(query), - EdgeKind::Mod(edge) => edge.propagate_impl(query), - EdgeKind::Type(edge) => edge.propagate_impl(query), - EdgeKind::Trait(edge) => edge.propagate_impl(query), - EdgeKind::GenericParam(edge) => edge.propagate_impl(query), - EdgeKind::Value(edge) => edge.propagate_impl(query), - EdgeKind::Field(edge) => edge.propagate_impl(query), - EdgeKind::Variant(edge) => edge.propagate_impl(query), - EdgeKind::Super(edge) => edge.propagate_impl(query), - EdgeKind::Ingot(edge) => edge.propagate_impl(query), - EdgeKind::Self_(edge) => edge.propagate_impl(query), - EdgeKind::SelfTy(edge) => edge.propagate_impl(query), - EdgeKind::Anon(edge) => edge.propagate_impl(query), + EdgeKind::Lex(edge) => edge.propagate(query), + EdgeKind::Mod(edge) => edge.propagate(query), + EdgeKind::Type(edge) => edge.propagate(query), + EdgeKind::Trait(edge) => edge.propagate(query), + EdgeKind::GenericParam(edge) => edge.propagate(query), + EdgeKind::Value(edge) => edge.propagate(query), + EdgeKind::Field(edge) => edge.propagate(query), + EdgeKind::Variant(edge) => edge.propagate(query), + EdgeKind::Super(edge) => edge.propagate(query), + EdgeKind::Ingot(edge) => edge.propagate(query), + EdgeKind::Self_(edge) => edge.propagate(query), + EdgeKind::SelfTy(edge) => edge.propagate(query), + EdgeKind::Anon(edge) => edge.propagate(query), } } - fn propagate_glob_impl(&self) -> PropagationResult { + fn propagate_glob(self) -> PropagationResult { match self { - EdgeKind::Lex(edge) => edge.propagate_glob_impl(), - EdgeKind::Mod(edge) => edge.propagate_glob_impl(), - EdgeKind::Type(edge) => edge.propagate_glob_impl(), - EdgeKind::Trait(edge) => edge.propagate_glob_impl(), - EdgeKind::GenericParam(edge) => edge.propagate_glob_impl(), - EdgeKind::Value(edge) => edge.propagate_glob_impl(), - EdgeKind::Field(edge) => edge.propagate_glob_impl(), - EdgeKind::Variant(edge) => edge.propagate_glob_impl(), - EdgeKind::Super(edge) => edge.propagate_glob_impl(), - EdgeKind::Ingot(edge) => edge.propagate_glob_impl(), - EdgeKind::Self_(edge) => edge.propagate_glob_impl(), - EdgeKind::SelfTy(edge) => edge.propagate_glob_impl(), - EdgeKind::Anon(edge) => edge.propagate_glob_impl(), + EdgeKind::Lex(edge) => edge.propagate_glob(), + EdgeKind::Mod(edge) => edge.propagate_glob(), + EdgeKind::Type(edge) => edge.propagate_glob(), + EdgeKind::Trait(edge) => edge.propagate_glob(), + EdgeKind::GenericParam(edge) => edge.propagate_glob(), + EdgeKind::Value(edge) => edge.propagate_glob(), + EdgeKind::Field(edge) => edge.propagate_glob(), + EdgeKind::Variant(edge) => edge.propagate_glob(), + EdgeKind::Super(edge) => edge.propagate_glob(), + EdgeKind::Ingot(edge) => edge.propagate_glob(), + EdgeKind::Self_(edge) => edge.propagate_glob(), + EdgeKind::SelfTy(edge) => edge.propagate_glob(), + EdgeKind::Anon(edge) => edge.propagate_glob(), } } } - -const ALL_DOMAINS: u8 = NameDomain::Item as u8 | NameDomain::Value as u8 | NameDomain::Field as u8; diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs index 42c11a04d2..1e2ca56d5d 100644 --- a/crates/hir-analysis/tests/import.rs +++ b/crates/hir-analysis/tests/import.rs @@ -50,7 +50,6 @@ fn format_imports( } for (_, glob_set) in imports.glob_resolved.iter() { 
- dbg!(glob_set.iter().count()); for (&use_, res_set) in glob_set.iter() { for res in res_set.values().flatten() { use_res_map diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 7b230ebac8..186adcf242 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -311,7 +311,7 @@ pub struct ScopeEdge { pub kind: EdgeKind, } -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub enum EdgeKind { Lex(LexEdge), Mod(ModEdge), From a7dcbef09ea84c2e155a003c194fbd6cc2fc8516 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 9 Jun 2023 11:00:21 -0700 Subject: [PATCH 182/678] Implement diagnostics conversion --- Cargo.lock | 12 ++++++++ crates/common2/src/diagnostics.rs | 23 ++++++++++++--- crates/driver2/Cargo.toml | 19 +++++++++++++ crates/driver2/src/diagnostics.rs | 47 +++++++++++++++++++++++++++++++ crates/driver2/src/lib.rs | 42 +++++++++++++++++++++++++++ 5 files changed, 139 insertions(+), 4 deletions(-) create mode 100644 crates/driver2/Cargo.toml create mode 100644 crates/driver2/src/diagnostics.rs create mode 100644 crates/driver2/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index b8e4baa9c4..ffde713f0f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -557,6 +557,18 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" +[[package]] +name = "driver2" +version = "0.20.0-alpha" +dependencies = [ + "codespan-reporting", + "fe-common2", + "fe-hir", + "fe-hir-analysis", + "fe-macros", + "salsa-2022", +] + [[package]] name = "ecdsa" version = "0.16.6" diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 3cbc974358..68c7597320 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -39,6 +39,10 @@ impl GlobalErrorCode { pub fn new(pass: AnalysisPass, local_code: u16) -> Self { Self { pass, local_code } } + + pub fn to_string(&self) -> String { + format!("{}-{:04}", self.pass.code(), self.local_code) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -99,17 +103,28 @@ pub enum Severity { Note, } -#[repr(u16)] #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum AnalysisPass { - Parse = 1, + Parse, - ImportResolution, NameResolution, TyCheck, - ExternalAnalysis(ExternalAnalysisKey) = u16::MAX, + ExternalAnalysis(ExternalAnalysisKey), +} + +impl AnalysisPass { + pub fn code(&self) -> u16 { + match self { + Self::Parse => 1, + Self::NameResolution => 2, + + Self::TyCheck => 3, + + Self::ExternalAnalysis(_) => std::u16::MAX, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/driver2/Cargo.toml b/crates/driver2/Cargo.toml new file mode 100644 index 0000000000..447b9a4532 --- /dev/null +++ b/crates/driver2/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "driver2" +version = "0.20.0-alpha" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides HIR semantic analysis for Fe lang" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } +codespan-reporting = "0.11" + +hir = { path = "../hir", package = "fe-hir" } +common = { path = "../common2", package = "fe-common2" } 
+macros = { path = "../macros", package = "fe-macros" } +hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs new file mode 100644 index 0000000000..70baa49f76 --- /dev/null +++ b/crates/driver2/src/diagnostics.rs @@ -0,0 +1,47 @@ +use codespan_reporting::diagnostic::{self as cs}; +use common::{ + diagnostics::{CompleteDiagnostic, Severity}, + InputFile, +}; + +use crate::DriverDb; + +pub trait ToCsrDiag { + fn to_csr_diag(&self, db: &dyn DriverDb) -> cs::Diagnostic; +} + +impl ToCsrDiag for CompleteDiagnostic { + fn to_csr_diag(&self, _db: &dyn DriverDb) -> cs::Diagnostic { + let severity = convert_severity(self.severity); + let code = Some(self.code.to_string()); + let message = self.message; + let span = self.span.expect("primary diagnostic must have a span"); + + let mut labels = vec![ + cs::Label::new(cs::LabelStyle::Primary, span.file, span.range).with_message(message), + ]; + labels.extend(self.sub_diagnostics.iter().filter_map(|sub_diag| { + let span = sub_diag.span?; + let range = sub_diag.range; + cs::Label::new(cs::LabelStyle::Secondary, span.file, span.range) + .with_message(sub_diag.message) + .into() + })); + + cs::Diagnostic { + severity, + code, + message, + labels, + notes: vec![], + } + } +} + +fn convert_severity(severity: Severity) -> cs::Severity { + match severity { + Severity::Error => cs::Severity::Error, + Severity::Warning => cs::Severity::Warning, + Severity::Note => cs::Severity::Note, + } +} diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs new file mode 100644 index 0000000000..d114123613 --- /dev/null +++ b/crates/driver2/src/lib.rs @@ -0,0 +1,42 @@ +mod diagnostics; + +use common::InputDb; +use hir::{HirDb, LowerHirDb, SpannedHirDb}; +use hir_analysis::HirAnalysisDb; + +#[salsa::jar(db = DriverDb)] +pub struct Jar(); + +pub trait DriverDb: + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +impl DriverDb for DB where + DB: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +#[salsa::db(common::Jar, hir::Jar, hir_analysis::Jar, Jar)] +pub(crate) struct DriverDataBase { + storage: salsa::Storage, +} + +impl HirDb for DriverDataBase {} +impl SpannedHirDb for DriverDataBase {} +impl LowerHirDb for DriverDataBase {} +impl salsa::Database for DriverDataBase { + fn salsa_event(&self, _: salsa::Event) { + // TODO: logger. 
+ } +} + +impl Default for DriverDataBase { + fn default() -> Self { + let db = Self { + storage: Default::default(), + }; + db.prefill(); + db + } +} From 3a3d71de4fce0451a2e001879452f8aa99aaed50 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 9 Jun 2023 12:30:50 -0700 Subject: [PATCH 183/678] Refactor `CompleteDiagnostics` --- crates/common2/src/diagnostics.rs | 18 ++- crates/driver2/src/diagnostics.rs | 48 ++++--- crates/driver2/src/lib.rs | 4 +- .../src/name_resolution/diagnostics.rs | 120 +++++++++++------- .../src/name_resolution/import_resolver.rs | 1 + .../hir-analysis/src/name_resolution/mod.rs | 62 ++++----- crates/hir/src/diagnostics.rs | 10 ++ crates/hir/src/lower/parse.rs | 13 +- 8 files changed, 166 insertions(+), 110 deletions(-) diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 68c7597320..aa1b61637c 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -6,8 +6,8 @@ use crate::InputFile; pub struct CompleteDiagnostic { pub severity: Severity, pub message: String, - pub span: Option, pub sub_diagnostics: Vec, + pub notes: Vec, pub error_code: GlobalErrorCode, } @@ -15,15 +15,15 @@ impl CompleteDiagnostic { pub fn new( severity: Severity, message: String, - span: Option, sub_diagnostics: Vec, + notes: Vec, error_code: GlobalErrorCode, ) -> Self { Self { severity, message, - span, sub_diagnostics, + notes, error_code, } } @@ -47,21 +47,27 @@ impl GlobalErrorCode { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct SubDiagnostic { - pub severity: Severity, + pub style: LabelStyle, pub message: String, pub span: Option, } impl SubDiagnostic { - pub fn new(severity: Severity, message: String, span: Option) -> Self { + pub fn new(style: LabelStyle, message: String, span: Option) -> Self { Self { - severity, + style, message, span, } } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum LabelStyle { + Primary, + Secondary, +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Span { pub file: InputFile, diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs index 70baa49f76..cabe0d134b 100644 --- a/crates/driver2/src/diagnostics.rs +++ b/crates/driver2/src/diagnostics.rs @@ -1,32 +1,44 @@ use codespan_reporting::diagnostic::{self as cs}; use common::{ - diagnostics::{CompleteDiagnostic, Severity}, + diagnostics::{LabelStyle, Severity}, InputFile, }; +use hir::diagnostics::DiagnosticVoucher; -use crate::DriverDb; +use crate::DriverDataBase; -pub trait ToCsrDiag { - fn to_csr_diag(&self, db: &dyn DriverDb) -> cs::Diagnostic; +pub trait IntoCsrDiag { + fn into_csr(self, db: &DriverDataBase) -> cs::Diagnostic; } -impl ToCsrDiag for CompleteDiagnostic { - fn to_csr_diag(&self, _db: &dyn DriverDb) -> cs::Diagnostic { - let severity = convert_severity(self.severity); - let code = Some(self.code.to_string()); - let message = self.message; - let span = self.span.expect("primary diagnostic must have a span"); +impl IntoCsrDiag for T +where + T: DiagnosticVoucher, +{ + fn into_csr(self, db: &DriverDataBase) -> cs::Diagnostic { + let complete = self.to_complete(db); - let mut labels = vec![ - cs::Label::new(cs::LabelStyle::Primary, span.file, span.range).with_message(message), - ]; - labels.extend(self.sub_diagnostics.iter().filter_map(|sub_diag| { - let span = sub_diag.span?; - let range = sub_diag.range; - cs::Label::new(cs::LabelStyle::Secondary, span.file, span.range) + let severity = convert_severity(complete.severity); + let code = 
Some(complete.error_code.to_string()); + let message = complete.message; + + let labels = complete + .sub_diagnostics + .into_iter() + .filter_map(|sub_diag| { + let span = sub_diag.span?; + match sub_diag.style { + LabelStyle::Primary => { + cs::Label::new(cs::LabelStyle::Primary, span.file, span.range) + } + LabelStyle::Secondary => { + cs::Label::new(cs::LabelStyle::Secondary, span.file, span.range) + } + } .with_message(sub_diag.message) .into() - })); + }) + .collect(); cs::Diagnostic { severity, diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index d114123613..fa42cef08d 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -13,12 +13,12 @@ pub trait DriverDb: } impl DriverDb for DB where - DB: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb + DB: Sized + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb { } #[salsa::db(common::Jar, hir::Jar, hir_analysis::Jar, Jar)] -pub(crate) struct DriverDataBase { +pub struct DriverDataBase { storage: salsa::Storage, } diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 736253fdf5..b15c6963dc 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -1,5 +1,5 @@ use common::diagnostics::{ - AnalysisPass, CompleteDiagnostic, GlobalErrorCode, Severity, SubDiagnostic, + AnalysisPass, CompleteDiagnostic, GlobalErrorCode, LabelStyle, Severity, SubDiagnostic, }; use hir::{ diagnostics::DiagnosticVoucher, @@ -26,8 +26,8 @@ impl NameResolutionDiag { Self { span, kind } } - pub fn conflict(span: DynLazySpan, conflict_with: DynLazySpan) -> Self { - Self::new(span, ImportErrorKind::Conflict(conflict_with)) + pub fn conflict(span: DynLazySpan, name: IdentId, conflict_with: DynLazySpan) -> Self { + Self::new(span, ImportErrorKind::Conflict(name, conflict_with)) } pub fn not_found(span: DynLazySpan, ident: IdentId) -> Self { @@ -47,28 +47,22 @@ impl NameResolutionDiag { impl DiagnosticVoucher for NameResolutionDiag { fn error_code(&self) -> GlobalErrorCode { - GlobalErrorCode::new(AnalysisPass::ImportResolution, self.kind.local_code()) + GlobalErrorCode::new(AnalysisPass::NameResolution, self.kind.local_code()) } fn to_complete(self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { - let span = self.span.resolve(db); + let error_code = self.error_code(); let message = self.kind.message(db.as_hir_db()); - let sub_diags = self.kind.sub_diagnostics(db); - - CompleteDiagnostic::new( - self.kind.severity(), - message, - span, - sub_diags, - self.error_code(), - ) + let sub_diags = self.kind.sub_diagnostics(db, self.span); + + CompleteDiagnostic::new(self.kind.severity(), message, sub_diags, vec![], error_code) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ImportErrorKind { /// The import conflicts with another import. - Conflict(DynLazySpan), + Conflict(IdentId, DynLazySpan), /// The import path segment is not found. NotFound(IdentId), @@ -82,7 +76,7 @@ pub enum ImportErrorKind { impl ImportErrorKind { fn local_code(&self) -> u16 { match self { - ImportErrorKind::Conflict(_) => 0, + ImportErrorKind::Conflict(..) => 0, ImportErrorKind::NotFound(_) => 1, ImportErrorKind::Invisible(..) => 2, ImportErrorKind::Ambiguous(..) 
=> 3, @@ -95,7 +89,9 @@ impl ImportErrorKind { fn message(&self, db: &dyn HirDb) -> String { match self { - ImportErrorKind::Conflict(_) => "import conflicts with another import".to_string(), + ImportErrorKind::Conflict(name, _) => { + format!("{} conflicts with other definitions", name.data(db)) + } ImportErrorKind::NotFound(name) => format!("{} is not found", name.data(db)), ImportErrorKind::Invisible(name, _) => { format!("{} is not visible", name.data(db),) @@ -104,34 +100,72 @@ impl ImportErrorKind { } } - fn sub_diagnostics(&self, db: &dyn hir::SpannedHirDb) -> Vec { + fn sub_diagnostics( + &self, + db: &dyn hir::SpannedHirDb, + prim_span: DynLazySpan, + ) -> Vec { match self { - ImportErrorKind::Conflict(conflict_with) => vec![SubDiagnostic::new( - Severity::Note, - "conflicts with this import".to_string(), - conflict_with.resolve(db), - )], - - ImportErrorKind::NotFound(_) => vec![], - - ImportErrorKind::Invisible(_, span) => span - .as_ref() - .map(|span| { - vec![SubDiagnostic::new( - Severity::Note, - "not visible because of this declaration".to_string(), + ImportErrorKind::Conflict(ident, conflict_with) => { + let ident = ident.data(db.as_hir_db()); + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "`{ident}` conflicts with another definition".to_string(), + prim_span.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "{ident} redefined here ".to_string(), + conflict_with.resolve(db), + ), + ] + } + + ImportErrorKind::NotFound(ident) => { + let ident = ident.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is not found"), + prim_span.resolve(db), + )] + } + + ImportErrorKind::Invisible(ident, span) => { + let ident = ident.data(db.as_hir_db()); + + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is not visible"), + prim_span.resolve(db), + )]; + if let Some(span) = span { + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("`{ident} is defined here"), span.resolve(db), - )] - }) - .unwrap_or_default(), - - ImportErrorKind::Ambiguous(_, candidates) => candidates - .iter() - .enumerate() - .map(|(i, span)| { - SubDiagnostic::new(Severity::Note, format!("candidate #{i}"), span.resolve(db)) - }) - .collect(), + )) + } + diags + } + + ImportErrorKind::Ambiguous(ident, candidates) => { + let ident = ident.data(db.as_hir_db()); + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is ambiguous"), + prim_span.resolve(db), + )]; + diags.extend(candidates.iter().enumerate().map(|(i, span)| { + SubDiagnostic::new( + LabelStyle::Secondary, + format!("candidate #{i}"), + span.resolve(db), + ) + })); + + diags + } } } } diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 2787f89e88..8c534a85c6 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -912,6 +912,7 @@ impl IntermediateResolvedImports { if i_use.use_ != use_ { return Err(NameResolutionDiag::conflict( i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), + imported_name, cand.derived_from(db).unwrap(), )); } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 3f14c28a48..66ed3db056 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -56,18 +56,18 @@ pub(crate) fn resolve_path_early( 
top_mod: TopLevelMod, ) -> ResolvedQueryCacheStore { let importer = DefaultImporter; - ModuleNameResolver::new(db, &importer).resolve_all(top_mod); + PathResolver::new(db, &importer).resolve_all(top_mod); todo!() } -struct ModuleNameResolver<'db, 'a> { +struct PathResolver<'db, 'a> { db: &'db dyn HirAnalysisDb, resolver: name_resolver::NameResolver<'db, 'a>, diags: Vec, item_stack: Vec, } -impl<'db, 'a> ModuleNameResolver<'db, 'a> { +impl<'db, 'a> PathResolver<'db, 'a> { fn new(db: &'db dyn HirAnalysisDb, importer: &'a DefaultImporter) -> Self { let resolver = name_resolver::NameResolver::new(db, importer); Self { @@ -85,22 +85,14 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { fn check_item_conflict(&mut self, item: ItemKind) { let scope = ScopeId::from_item(item); - let Some(query) = self.make_query_for_conflict_check(scope) else { - return; - }; - - self.check_conflict(scope, query, NameDomain::from_scope(scope)); + self.check_conflict(scope); } fn check_field_conflict(&mut self, fields: FieldDefListId) { let parent_item = *self.item_stack.last().unwrap(); for i in 0..fields.data(self.db.as_hir_db()).len() { let scope = ScopeId::Field(parent_item, i); - let Some(query) = self.make_query_for_conflict_check(scope) else { - continue; - }; - - self.check_conflict(scope, query, NameDomain::from_scope(scope)); + self.check_conflict(scope); } } @@ -108,11 +100,7 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { let parent_item = *self.item_stack.last().unwrap(); for i in 0..variants.data(self.db.as_hir_db()).len() { let scope = ScopeId::Variant(parent_item, i); - let Some(query) = self.make_query_for_conflict_check(scope) else { - continue; - }; - - self.check_conflict(scope, query, NameDomain::from_scope(scope)); + self.check_conflict(scope); } } @@ -120,11 +108,7 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { let parent_item = *self.item_stack.last().unwrap(); for i in 0..params.data(self.db.as_hir_db()).len() { let scope = ScopeId::FuncParam(parent_item, i); - let Some(query) = self.make_query_for_conflict_check(scope) else { - continue; - }; - - self.check_conflict(scope, query, NameDomain::from_scope(scope)); + self.check_conflict(scope); } } @@ -132,24 +116,16 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { let parent_item = *self.item_stack.last().unwrap(); for i in 0..params.data(self.db.as_hir_db()).len() { let scope = ScopeId::GenericParam(parent_item, i); - let Some(query) = self.make_query_for_conflict_check(scope) else { - continue; - }; - - self.check_conflict(scope, query, NameDomain::from_scope(scope)); + self.check_conflict(scope); } } - fn make_query_for_conflict_check(&self, scope: ScopeId) -> Option { - let name = scope.name(self.db.as_hir_db())?; - let mut directive = QueryDirective::new(); - directive.disallow_lex().disallow_glob().disallow_external(); - - let parent_scope = scope.parent(self.db.as_hir_db())?; - Some(NameQuery::with_directive(name, parent_scope, directive)) - } + fn check_conflict(&mut self, scope: ScopeId) { + let Some(query) = self.make_query_for_conflict_check(scope) else { + return; + }; - fn check_conflict(&mut self, scope: ScopeId, query: NameQuery, domain: NameDomain) { + let domain = NameDomain::from_scope(scope); let binding = self.resolver.resolve_query(query); match binding.res_in_domain(domain) { Ok(_) => {} @@ -168,6 +144,7 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { let diag = diagnostics::NameResolutionDiag::conflict( scope.name_span(self.db.as_hir_db()).unwrap(), + scope.name(self.db.as_hir_db()).unwrap(), conflicted_span, ); 
self.diags.push(diag); @@ -175,9 +152,18 @@ impl<'db, 'a> ModuleNameResolver<'db, 'a> { Err(_) => unreachable!(), }; } + + fn make_query_for_conflict_check(&self, scope: ScopeId) -> Option { + let name = scope.name(self.db.as_hir_db())?; + let mut directive = QueryDirective::new(); + directive.disallow_lex().disallow_glob().disallow_external(); + + let parent_scope = scope.parent(self.db.as_hir_db())?; + Some(NameQuery::with_directive(name, parent_scope, directive)) + } } -impl<'db, 'a> Visitor for ModuleNameResolver<'db, 'a> { +impl<'db, 'a> Visitor for PathResolver<'db, 'a> { fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'_, LazyItemSpan>, item: ItemKind) { self.check_item_conflict(item); diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 837daded19..ce16b46b84 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -25,3 +25,13 @@ pub trait DiagnosticVoucher: Send { /// Consumes voucher and makes a [`CompleteDiagnostic`]. fn to_complete(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; } + +impl DiagnosticVoucher for CompleteDiagnostic { + fn error_code(&self) -> GlobalErrorCode { + self.error_code.clone() + } + + fn to_complete(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { + self + } +} diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index b4ceec444f..bc8db2b517 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -1,5 +1,8 @@ use common::{ - diagnostics::{AnalysisPass, CompleteDiagnostic, GlobalErrorCode, Severity, Span, SpanKind}, + diagnostics::{ + AnalysisPass, CompleteDiagnostic, GlobalErrorCode, LabelStyle, Severity, Span, SpanKind, + SubDiagnostic, + }, InputFile, }; use parser::GreenNode; @@ -41,8 +44,12 @@ impl DiagnosticVoucher for ParserError { let span = Span::new(self.file, self.error.range, SpanKind::Original); CompleteDiagnostic::new( Severity::Error, - self.error.msg, - span.into(), + self.error.msg.clone(), + vec![SubDiagnostic::new( + LabelStyle::Primary, + self.error.msg, + Some(span), + )], vec![], error_code, ) From 642872fc610c46819b1e627c9abed0a5b7b5582f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 9 Jun 2023 18:40:27 -0700 Subject: [PATCH 184/678] Allow driver to provide span specific information --- Cargo.lock | 1 + crates/common2/src/diagnostics.rs | 8 +- crates/driver2/Cargo.toml | 1 + crates/driver2/src/diagnostics.rs | 85 ++++++++++++++++--- crates/driver2/src/lib.rs | 4 +- .../src/name_resolution/diagnostics.rs | 4 +- .../hir-analysis/src/name_resolution/mod.rs | 1 + 7 files changed, 87 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ffde713f0f..9a61ec3367 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -561,6 +561,7 @@ checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" name = "driver2" version = "0.20.0-alpha" dependencies = [ + "camino", "codespan-reporting", "fe-common2", "fe-hir", diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index aa1b61637c..be3ce3754e 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -1,3 +1,5 @@ +use std::fmt; + use parser::TextRange; use crate::InputFile; @@ -39,9 +41,11 @@ impl GlobalErrorCode { pub fn new(pass: AnalysisPass, local_code: u16) -> Self { Self { pass, local_code } } +} - pub fn to_string(&self) -> String { - format!("{}-{:04}", self.pass.code(), self.local_code) +impl fmt::Display for GlobalErrorCode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
std::fmt::Result { + write!(f, "{}-{:04}", self.pass.code(), self.local_code) } } diff --git a/crates/driver2/Cargo.toml b/crates/driver2/Cargo.toml index 447b9a4532..95b8c86d04 100644 --- a/crates/driver2/Cargo.toml +++ b/crates/driver2/Cargo.toml @@ -17,3 +17,4 @@ hir = { path = "../hir", package = "fe-hir" } common = { path = "../common2", package = "fe-common2" } macros = { path = "../macros", package = "fe-macros" } hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } +camino = "1.1.4" diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs index cabe0d134b..89ede6326b 100644 --- a/crates/driver2/src/diagnostics.rs +++ b/crates/driver2/src/diagnostics.rs @@ -1,21 +1,26 @@ -use codespan_reporting::diagnostic::{self as cs}; +use std::ops::Range; + +use camino::Utf8Path; +use codespan_reporting as cs; +use cs::{diagnostic as cs_diag, files as cs_files}; + use common::{ diagnostics::{LabelStyle, Severity}, InputFile, }; use hir::diagnostics::DiagnosticVoucher; -use crate::DriverDataBase; +use crate::{DriverDataBase, DriverDb}; pub trait IntoCsrDiag { - fn into_csr(self, db: &DriverDataBase) -> cs::Diagnostic; + fn into_csr(self, db: &DriverDataBase) -> cs_diag::Diagnostic; } impl IntoCsrDiag for T where T: DiagnosticVoucher, { - fn into_csr(self, db: &DriverDataBase) -> cs::Diagnostic { + fn into_csr(self, db: &DriverDataBase) -> cs_diag::Diagnostic { let complete = self.to_complete(db); let severity = convert_severity(complete.severity); @@ -29,10 +34,10 @@ where let span = sub_diag.span?; match sub_diag.style { LabelStyle::Primary => { - cs::Label::new(cs::LabelStyle::Primary, span.file, span.range) + cs_diag::Label::new(cs_diag::LabelStyle::Primary, span.file, span.range) } LabelStyle::Secondary => { - cs::Label::new(cs::LabelStyle::Secondary, span.file, span.range) + cs_diag::Label::new(cs_diag::LabelStyle::Secondary, span.file, span.range) } } .with_message(sub_diag.message) @@ -40,7 +45,7 @@ where }) .collect(); - cs::Diagnostic { + cs_diag::Diagnostic { severity, code, message, @@ -50,10 +55,68 @@ where } } -fn convert_severity(severity: Severity) -> cs::Severity { +fn convert_severity(severity: Severity) -> cs_diag::Severity { match severity { - Severity::Error => cs::Severity::Error, - Severity::Warning => cs::Severity::Warning, - Severity::Note => cs::Severity::Note, + Severity::Error => cs_diag::Severity::Error, + Severity::Warning => cs_diag::Severity::Warning, + Severity::Note => cs_diag::Severity::Note, + } +} + +#[salsa::tracked(return_ref)] +pub fn file_line_starts(db: &dyn DriverDb, file: InputFile) -> Vec { + cs::files::line_starts(file.text(db.as_input_db())).collect() +} + +impl<'a> cs_files::Files<'a> for DriverDataBase { + type FileId = InputFile; + type Name = &'a str; + type Source = &'a Utf8Path; + + fn name(&'a self, file_id: Self::FileId) -> Result { + Ok(file_id.text(self)) + } + + fn source(&'a self, file_id: Self::FileId) -> Result { + Ok(file_id.path(self).as_path()) + } + + fn line_index( + &'a self, + file_id: Self::FileId, + byte_index: usize, + ) -> Result { + let starts = file_line_starts(self, file_id); + Ok(starts + .binary_search(&byte_index) + .unwrap_or_else(|next_line| next_line - 1)) + } + + fn line_range( + &'a self, + file_id: Self::FileId, + line_index: usize, + ) -> Result, cs_files::Error> { + let line_starts = file_line_starts(self, file_id); + + let start = *line_starts + .get(line_index) + .ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })?; + + let 
end = if line_index == line_starts.len() - 1 { + file_id.text(self).len() + } else { + *line_starts + .get(line_index + 1) + .ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })? + }; + + Ok(Range { start, end }) } } diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index fa42cef08d..93452a6b78 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -1,11 +1,11 @@ -mod diagnostics; +pub mod diagnostics; use common::InputDb; use hir::{HirDb, LowerHirDb, SpannedHirDb}; use hir_analysis::HirAnalysisDb; #[salsa::jar(db = DriverDb)] -pub struct Jar(); +pub struct Jar(diagnostics::file_line_starts); pub trait DriverDb: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index b15c6963dc..0b27f330aa 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -111,12 +111,12 @@ impl ImportErrorKind { vec![ SubDiagnostic::new( LabelStyle::Primary, - "`{ident}` conflicts with another definition".to_string(), + format! {"`{ident}` conflicts with another definition"}, prim_span.resolve(db), ), SubDiagnostic::new( LabelStyle::Secondary, - "{ident} redefined here ".to_string(), + format! {"{ident} redefined here "}, conflict_with.resolve(db), ), ] diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 66ed3db056..abaeb83843 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -51,6 +51,7 @@ pub fn resolve_imports_with_diag( /// Performs early path resolution in the given module and checks the conflict /// of the definitions. #[salsa::tracked(return_ref)] +#[allow(unused)] pub(crate) fn resolve_path_early( db: &dyn HirAnalysisDb, top_mod: TopLevelMod, From b4df3b3df79dcd234182ae63a80d91568016d337 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 22 Jun 2023 16:07:49 +0200 Subject: [PATCH 185/678] Define analysis pass traits --- crates/hir/src/analysis_pass.rs | 16 ++++++++++++++++ crates/hir/src/lib.rs | 1 + 2 files changed, 17 insertions(+) create mode 100644 crates/hir/src/analysis_pass.rs diff --git a/crates/hir/src/analysis_pass.rs b/crates/hir/src/analysis_pass.rs new file mode 100644 index 0000000000..5c706d369a --- /dev/null +++ b/crates/hir/src/analysis_pass.rs @@ -0,0 +1,16 @@ +use crate::{ + diagnostics::DiagnosticVoucher, + hir_def::{Func, TopLevelMod}, +}; + +/// All analysis passes that run analysis on the HIR function granularity should +/// implement this trait. +pub trait FunctionPass { + fn run_on_func(&mut self, function: Func) -> Vec>; +} + +/// All analysis passes that run analysis on the HIR top level module +/// granularity should implement this trait. 
+pub trait ModulePass { + fn run_on_module(&mut self, module: TopLevelMod) -> Vec>; +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 7fd1c5865a..cb7481a59b 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -7,6 +7,7 @@ use lower::{ scope_graph_impl, }; +pub mod analysis_pass; pub mod diagnostics; pub mod hir_def; pub mod lower; From d9adf18beea223fc124aea72a89d9ad375962c55 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 22 Jun 2023 16:09:25 +0200 Subject: [PATCH 186/678] Enable to obtain `TopLevelModule` from lazy-spans --- crates/hir/src/span/mod.rs | 6 +++++- crates/hir/src/span/transition.rs | 28 +++++++++++++++++++++++++++- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 7cccb88e7e..ec1d41e275 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -11,7 +11,7 @@ use crate::{ TypeAlias, Use, }, lower::top_mod_ast, - SpannedHirDb, + HirDb, SpannedHirDb, }; pub mod attr; @@ -36,6 +36,10 @@ impl DynLazySpan { pub fn invalid() -> Self { Self(None) } + + pub fn top_mod(&self, db: &dyn HirDb) -> Option { + self.0.as_ref().map(|chain| chain.top_mod(db)) + } } impl LazySpan for DynLazySpan { fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 97667b3029..028f25520e 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -12,7 +12,7 @@ use crate::{ Trait, TypeAlias, Use, }, lower::top_mod_ast, - SpannedHirDb, + HirDb, SpannedHirDb, }; use super::{ @@ -55,6 +55,28 @@ impl SpanTransitionChain { } } + pub(super) fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { + match self.root { + ChainRoot::ItemKind(item) => item.top_mod(db), + ChainRoot::TopMod(top_mod) => top_mod, + ChainRoot::Mod(m) => m.top_mod(db), + ChainRoot::Func(f) => f.top_mod(db), + ChainRoot::Struct(s) => s.top_mod(db), + ChainRoot::Contract(c) => c.top_mod(db), + ChainRoot::Enum(e) => e.top_mod(db), + ChainRoot::TypeAlias(t) => t.top_mod(db), + ChainRoot::Impl(i) => i.top_mod(db), + ChainRoot::Trait(t) => t.top_mod(db), + ChainRoot::ImplTrait(i) => i.top_mod(db), + ChainRoot::Const(c) => c.top_mod(db), + ChainRoot::Use(u) => u.top_mod(db), + ChainRoot::Body(b) => b.top_mod(db), + ChainRoot::Stmt(s) => s.body.top_mod(db), + ChainRoot::Expr(e) => e.body.top_mod(db), + ChainRoot::Pat(p) => p.body.top_mod(db), + } + } + pub(super) fn push(&mut self, transition: LazyTransitionFn) { self.chain.push(transition); } @@ -289,6 +311,10 @@ macro_rules! define_lazy_span_node { Self(crate::span::transition::SpanTransitionChain::new(hir)) })? 
+ pub fn top_mod(&self, db: &dyn crate::HirDb) -> Option { + Some(self.0.top_mod(db)) + } + $($( pub fn $name_token(&self) -> crate::span::LazySpanAtom { let cloned = self.clone(); From d6c2750b92f153e830bccb81329c5e0c59369542 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 22 Jun 2023 18:06:26 +0200 Subject: [PATCH 187/678] Implement `ModuleAnalysisPass` for existing analyses --- .../src/name_resolution/diagnostics.rs | 6 +-- .../hir-analysis/src/name_resolution/mod.rs | 40 ++++++++++++++----- crates/hir-analysis/tests/import.rs | 10 +++-- crates/hir/src/analysis_pass.rs | 8 ++-- crates/hir/src/lib.rs | 29 ++++++++++++++ crates/hir/src/lower/mod.rs | 28 ++----------- 6 files changed, 75 insertions(+), 46 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 0b27f330aa..74fe207e65 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -13,12 +13,12 @@ use crate::HirAnalysisDb; use super::name_resolver::NameRes; #[salsa::accumulator] -pub struct NameResolutionDiagAccumulator(NameResolutionDiag); +pub struct NameResolutionDiagAccumulator(pub(super) NameResolutionDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct NameResolutionDiag { - span: DynLazySpan, - kind: ImportErrorKind, + pub span: DynLazySpan, + pub kind: ImportErrorKind, } impl NameResolutionDiag { diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index abaeb83843..b47692e422 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -1,4 +1,6 @@ use hir::{ + analysis_pass::ModuleAnalysisPass, + diagnostics::DiagnosticVoucher, hir_def::{ scope_graph::ScopeId, FieldDefListId, FuncParamListId, GenericParamListId, IngotId, ItemKind, TopLevelMod, VariantDefListId, @@ -28,8 +30,35 @@ pub mod import_resolver; pub mod name_resolver; pub mod visibility_checker; +pub struct ImportAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> ImportAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } + + pub fn resolve_imports(&self, ingot: IngotId) -> &'db ResolvedImports { + resolve_imports(self.db, ingot) + } +} + +impl<'db> ModuleAnalysisPass for ImportAnalysisPass<'db> { + fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { + let ingot = top_mod.ingot(self.db.as_hir_db()); + resolve_imports::accumulated::(self.db, ingot) + .into_iter() + .filter_map(|diag| { + (diag.span.top_mod(self.db.as_hir_db()) == Some(top_mod)) + .then(|| Box::new(diag) as _) + }) + .collect() + } +} + #[salsa::tracked(return_ref)] -pub fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> ResolvedImports { +pub(crate) fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> ResolvedImports { let resolver = import_resolver::ImportResolver::new(db, ingot); let (imports, diags) = resolver.resolve_imports(); for diag in diags { @@ -39,15 +68,6 @@ pub fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> ResolvedImport imports } -pub fn resolve_imports_with_diag( - db: &dyn HirAnalysisDb, - ingot: IngotId, -) -> (&ResolvedImports, Vec) { - let imports = resolve_imports(db, ingot); - let diagnostics = resolve_imports::accumulated::(db, ingot); - (imports, diagnostics) -} - /// Performs early path resolution in the given module and checks the conflict /// of the definitions. 
#[salsa::tracked(return_ref)] diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs index 1e2ca56d5d..6b3e9fc435 100644 --- a/crates/hir-analysis/tests/import.rs +++ b/crates/hir-analysis/tests/import.rs @@ -6,9 +6,9 @@ use std::path::Path; use dir_test::{dir_test, Fixture}; use fe_compiler_test_utils::snap_test; use fe_hir_analysis::name_resolution::{ - import_resolver::ResolvedImports, name_resolver::NameDerivation, resolve_imports_with_diag, + import_resolver::ResolvedImports, name_resolver::NameDerivation, ImportAnalysisPass, }; -use hir::hir_def::Use; +use hir::{analysis_pass::ModuleAnalysisPass, hir_def::Use}; use rustc_hash::FxHashMap; #[dir_test( @@ -21,9 +21,11 @@ fn test_standalone(fixture: Fixture<&str>) { let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); let (top_mod, mut prop_formatter) = db.new_stand_alone(file_name, fixture.content()); - let (resolved_imports, diags) = resolve_imports_with_diag(&db, top_mod.ingot(&db)); + let mut pass = ImportAnalysisPass::new(&db); + let resolved_imports = pass.resolve_imports(top_mod.ingot(&db)); + let diags = pass.run_on_module(top_mod); if !diags.is_empty() { - panic!("Failed to resolve imports: {:?}", diags); + panic!("Failed to resolve imports"); } let res = format_imports(&db, &mut prop_formatter, resolved_imports); diff --git a/crates/hir/src/analysis_pass.rs b/crates/hir/src/analysis_pass.rs index 5c706d369a..c7914613db 100644 --- a/crates/hir/src/analysis_pass.rs +++ b/crates/hir/src/analysis_pass.rs @@ -5,12 +5,12 @@ use crate::{ /// All analysis passes that run analysis on the HIR function granularity should /// implement this trait. -pub trait FunctionPass { - fn run_on_func(&mut self, function: Func) -> Vec>; +pub trait FuncAnalysisPass { + fn run_on_func(&mut self, func: Func) -> Vec>; } /// All analysis passes that run analysis on the HIR top level module /// granularity should implement this trait. -pub trait ModulePass { - fn run_on_module(&mut self, module: TopLevelMod) -> Vec>; +pub trait ModuleAnalysisPass { + fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec>; } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index cb7481a59b..b31f28067f 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,3 +1,4 @@ +use analysis_pass::ModuleAnalysisPass; use common::{InputDb, InputIngot}; use hir_def::{module_tree_impl, IdentId, TopLevelMod}; pub use lower::parse::ParserError; @@ -6,6 +7,7 @@ use lower::{ parse::{parse_file_impl, ParseErrorAccumulator}, scope_graph_impl, }; +use parser::GreenNode; pub mod analysis_pass; pub mod diagnostics; @@ -58,6 +60,33 @@ pub struct Jar( external_ingots_impl, ); +#[derive(Clone, Copy)] +pub struct ParsingPass<'db> { + db: &'db dyn HirDb, +} + +impl<'db> ParsingPass<'db> { + pub fn new(db: &'db dyn HirDb) -> Self { + Self { db } + } + + pub fn green_node(self, top_mod: TopLevelMod) -> GreenNode { + parse_file_impl(self.db, top_mod) + } +} + +impl<'db> ModuleAnalysisPass for ParsingPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod, + ) -> Vec> { + parse_file_impl::accumulated::(self.db, top_mod) + .into_iter() + .map(|d| Box::new(d) as _) + .collect() + } +} + /// Returns the root modules and names of external ingots that the given `ingot` /// depends on. 
/// From the outside of the crate, this functionality can be accessed via diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 18300715e7..43c3c570bf 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -3,7 +3,7 @@ use num_bigint::BigUint; use num_traits::Num; use parser::{ ast::{self, prelude::*}, - GreenNode, SyntaxNode, SyntaxToken, + SyntaxNode, SyntaxToken, }; use crate::{ @@ -11,14 +11,10 @@ use crate::{ module_tree_impl, scope_graph::ScopeGraph, IdentId, IngotId, IntegerId, ItemKind, LitKind, ModuleTree, Partial, StringId, TopLevelMod, TrackedItemId, }, - HirDb, LowerHirDb, ParserError, + HirDb, LowerHirDb, }; -use self::{ - item::lower_module_items, - parse::{parse_file_impl, ParseErrorAccumulator}, - scope_builder::ScopeGraphBuilder, -}; +use self::{item::lower_module_items, parse::parse_file_impl, scope_builder::ScopeGraphBuilder}; pub(crate) mod parse; @@ -48,24 +44,6 @@ pub fn scope_graph(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ScopeGraph { scope_graph_impl(db.as_hir_db(), top_mod) } -/// Returns the root node of the given top-level module. -/// This function also returns the diagnostics produced by parsing the file. -pub fn parse_file_with_diag( - db: &dyn LowerHirDb, - top_mod: TopLevelMod, -) -> (GreenNode, Vec) { - ( - parse_file_impl(db.as_hir_db(), top_mod), - parse_file_impl::accumulated::(db.as_hir_db(), top_mod), - ) -} - -/// Returns the root node of the given top-level module. -/// If diagnostics are needed, use [`parse_file_with_diag`] instead. -pub fn parse_file(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> GreenNode { - parse_file_impl(db.as_hir_db(), top_mod) -} - /// Returns the ingot module tree of the given ingot. pub fn module_tree(db: &dyn LowerHirDb, ingot: InputIngot) -> &ModuleTree { module_tree_impl(db.as_hir_db(), ingot) From 3d5b1b1f15a2403839ba6712acb20cb53fca9620 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 27 Jun 2023 12:19:37 +0200 Subject: [PATCH 188/678] Add simple analysis pass manager --- crates/hir/src/analysis_pass.rs | 38 +++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/crates/hir/src/analysis_pass.rs b/crates/hir/src/analysis_pass.rs index c7914613db..454c6f9644 100644 --- a/crates/hir/src/analysis_pass.rs +++ b/crates/hir/src/analysis_pass.rs @@ -14,3 +14,41 @@ pub trait FuncAnalysisPass { pub trait ModuleAnalysisPass { fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec>; } + +pub struct AnalysisPassManager<'db> { + module_passes: Vec>, + func_passes: Vec>, +} + +impl<'db> AnalysisPassManager<'db> { + pub fn new() -> Self { + Self { + module_passes: vec![], + func_passes: vec![], + } + } + + pub fn add_module_pass(&mut self, pass: Box) { + self.module_passes.push(pass); + } + + pub fn add_func_pass(&mut self, pass: Box) { + self.func_passes.push(pass); + } + + pub fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { + let mut diags = vec![]; + for pass in self.module_passes.iter_mut() { + diags.extend(pass.run_on_module(top_mod.clone())); + } + diags + } + + pub fn run_on_func(&mut self, func: Func) -> Vec> { + let mut diags = vec![]; + for pass in self.func_passes.iter_mut() { + diags.extend(pass.run_on_func(func.clone())); + } + diags + } +} From 41e3aa860c9be1c8526bf5b50d60db77dd058e2c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 27 Jun 2023 12:55:16 +0200 Subject: [PATCH 189/678] Implement driver for standalone usage --- crates/driver2/Cargo.toml | 4 +- crates/driver2/src/diagnostics.rs | 8 +- 
crates/driver2/src/lib.rs | 94 +++++++++++++++++-- .../src/name_resolution/diagnostics.rs | 52 +++++----- .../src/name_resolution/import_resolver.rs | 14 +-- .../hir-analysis/src/name_resolution/mod.rs | 38 +++++++- crates/hir/src/diagnostics.rs | 18 +++- crates/hir/src/lower/parse.rs | 4 +- 8 files changed, 176 insertions(+), 56 deletions(-) diff --git a/crates/driver2/Cargo.toml b/crates/driver2/Cargo.toml index 95b8c86d04..e7c87c4c9f 100644 --- a/crates/driver2/Cargo.toml +++ b/crates/driver2/Cargo.toml @@ -1,11 +1,11 @@ [package] -name = "driver2" +name = "fe-driver2" version = "0.20.0-alpha" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" repository = "https://github.com/ethereum/fe" -description = "Provides HIR semantic analysis for Fe lang" +description = "Provides Fe driver" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs index 89ede6326b..660d2ab0c3 100644 --- a/crates/driver2/src/diagnostics.rs +++ b/crates/driver2/src/diagnostics.rs @@ -12,15 +12,15 @@ use hir::diagnostics::DiagnosticVoucher; use crate::{DriverDataBase, DriverDb}; -pub trait IntoCsrDiag { - fn into_csr(self, db: &DriverDataBase) -> cs_diag::Diagnostic; +pub trait IntoCsDiag { + fn into_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic; } -impl IntoCsrDiag for T +impl IntoCsDiag for T where T: DiagnosticVoucher, { - fn into_csr(self, db: &DriverDataBase) -> cs_diag::Diagnostic { + fn into_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic { let complete = self.to_complete(db); let severity = convert_severity(complete.severity); diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index 93452a6b78..07f1b1c179 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -1,8 +1,25 @@ pub mod diagnostics; -use common::InputDb; -use hir::{HirDb, LowerHirDb, SpannedHirDb}; -use hir_analysis::HirAnalysisDb; +use std::{collections::BTreeSet, path}; + +use codespan_reporting::term::{ + self, + termcolor::{BufferWriter, ColorChoice}, +}; +use common::{ + input::{IngotKind, Version}, + InputDb, InputFile, InputIngot, +}; +use hir::{ + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, lower::map_file_to_mod, + HirDb, LowerHirDb, ParsingPass, SpannedHirDb, +}; +use hir_analysis::{ + name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass}, + HirAnalysisDb, +}; + +use crate::diagnostics::IntoCsDiag; #[salsa::jar(db = DriverDb)] pub struct Jar(diagnostics::file_line_starts); @@ -20,23 +37,88 @@ impl DriverDb for DB where #[salsa::db(common::Jar, hir::Jar, hir_analysis::Jar, Jar)] pub struct DriverDataBase { storage: salsa::Storage, + diags: Vec>, +} + +impl DriverDataBase { + // TODO: An temporary implementation for ui testing. + pub fn run_on_file(&mut self, file_path: &path::Path) { + let kind = IngotKind::StandAlone; + // We set the ingot version to 0.0.0 for stand-alone file. 
+ let version = Version::new(0, 0, 0); + let root_file = file_path; + let ingot = InputIngot::new( + self, + file_path.parent().unwrap().as_os_str().to_str().unwrap(), + kind, + version, + BTreeSet::new(), + ); + + let file_name = root_file.file_name().unwrap().to_str().unwrap(); + let file_content = std::fs::read_to_string(root_file).unwrap(); + let file = InputFile::new(self, ingot, file_name.into(), file_content); + + ingot.set_root_file(self, file); + ingot.set_files(self, [file].into()); + + let top_mod = map_file_to_mod(self, file); + + self.diags = { + let mut pass_manager = initialize_analysis_pass(self); + pass_manager.run_on_module(top_mod) + }; + } + + /// Prints accumulated diagnostics to stderr. + pub fn emit_diags(&self) { + let writer = BufferWriter::stderr(ColorChoice::Auto); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for diag in &self.diags { + term::emit(&mut buffer, &config, self, &diag.into_cs(self)).unwrap(); + } + + eprintln!("{}", std::str::from_utf8(buffer.as_slice()).unwrap()); + } + + /// Format the accumulated diagnostics to a string. + pub fn format_diags(&self) -> String { + let writer = BufferWriter::stderr(ColorChoice::Never); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for diag in &self.diags { + term::emit(&mut buffer, &config, self, &diag.into_cs(self)) + .expect("failed to emit diagnostic"); + } + std::str::from_utf8(buffer.as_slice()).unwrap().to_string() + } } impl HirDb for DriverDataBase {} impl SpannedHirDb for DriverDataBase {} impl LowerHirDb for DriverDataBase {} impl salsa::Database for DriverDataBase { - fn salsa_event(&self, _: salsa::Event) { - // TODO: logger. - } + fn salsa_event(&self, _: salsa::Event) {} } impl Default for DriverDataBase { fn default() -> Self { let db = Self { storage: Default::default(), + diags: Vec::new(), }; db.prefill(); db } } + +fn initialize_analysis_pass<'db>(db: &'db DriverDataBase) -> AnalysisPassManager<'db> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager +} diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 74fe207e65..9ef886e0c7 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -13,54 +13,54 @@ use crate::HirAnalysisDb; use super::name_resolver::NameRes; #[salsa::accumulator] -pub struct NameResolutionDiagAccumulator(pub(super) NameResolutionDiag); +pub struct NameResolutionDiagAccumulator(pub(super) NameResDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct NameResolutionDiag { +pub struct NameResDiag { pub span: DynLazySpan, - pub kind: ImportErrorKind, + pub kind: NameResErrorKind, } -impl NameResolutionDiag { - pub fn new(span: DynLazySpan, kind: ImportErrorKind) -> Self { +impl NameResDiag { + pub fn new(span: DynLazySpan, kind: NameResErrorKind) -> Self { Self { span, kind } } pub fn conflict(span: DynLazySpan, name: IdentId, conflict_with: DynLazySpan) -> Self { - Self::new(span, ImportErrorKind::Conflict(name, conflict_with)) + Self::new(span, NameResErrorKind::Conflict(name, conflict_with)) } pub fn not_found(span: DynLazySpan, ident: IdentId) -> Self { - Self::new(span, ImportErrorKind::NotFound(ident)) + Self::new(span, 
NameResErrorKind::NotFound(ident)) } pub fn invisible(db: &dyn HirAnalysisDb, span: DynLazySpan, resolved: NameRes) -> Self { let name = resolved.kind.name(db).unwrap(); let name_span = resolved.kind.name_span(db); - Self::new(span, ImportErrorKind::Invisible(name, name_span)) + Self::new(span, NameResErrorKind::Invisible(name, name_span)) } pub fn ambiguous(span: DynLazySpan, ident: IdentId, candidates: Vec) -> Self { - Self::new(span, ImportErrorKind::Ambiguous(ident, candidates)) + Self::new(span, NameResErrorKind::Ambiguous(ident, candidates)) } } -impl DiagnosticVoucher for NameResolutionDiag { +impl DiagnosticVoucher for NameResDiag { fn error_code(&self) -> GlobalErrorCode { GlobalErrorCode::new(AnalysisPass::NameResolution, self.kind.local_code()) } - fn to_complete(self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { let error_code = self.error_code(); let message = self.kind.message(db.as_hir_db()); - let sub_diags = self.kind.sub_diagnostics(db, self.span); + let sub_diags = self.kind.sub_diagnostics(db, self.span.clone()); CompleteDiagnostic::new(self.kind.severity(), message, sub_diags, vec![], error_code) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum ImportErrorKind { +pub enum NameResErrorKind { /// The import conflicts with another import. Conflict(IdentId, DynLazySpan), /// The import path segment is not found. @@ -73,13 +73,13 @@ pub enum ImportErrorKind { Ambiguous(IdentId, Vec), } -impl ImportErrorKind { +impl NameResErrorKind { fn local_code(&self) -> u16 { match self { - ImportErrorKind::Conflict(..) => 0, - ImportErrorKind::NotFound(_) => 1, - ImportErrorKind::Invisible(..) => 2, - ImportErrorKind::Ambiguous(..) => 3, + NameResErrorKind::Conflict(..) => 0, + NameResErrorKind::NotFound(_) => 1, + NameResErrorKind::Invisible(..) => 2, + NameResErrorKind::Ambiguous(..) 
=> 3, } } @@ -89,14 +89,14 @@ impl ImportErrorKind { fn message(&self, db: &dyn HirDb) -> String { match self { - ImportErrorKind::Conflict(name, _) => { + NameResErrorKind::Conflict(name, _) => { format!("{} conflicts with other definitions", name.data(db)) } - ImportErrorKind::NotFound(name) => format!("{} is not found", name.data(db)), - ImportErrorKind::Invisible(name, _) => { + NameResErrorKind::NotFound(name) => format!("{} is not found", name.data(db)), + NameResErrorKind::Invisible(name, _) => { format!("{} is not visible", name.data(db),) } - ImportErrorKind::Ambiguous(name, _) => format!("{} is ambiguous", name.data(db)), + NameResErrorKind::Ambiguous(name, _) => format!("{} is ambiguous", name.data(db)), } } @@ -106,7 +106,7 @@ impl ImportErrorKind { prim_span: DynLazySpan, ) -> Vec { match self { - ImportErrorKind::Conflict(ident, conflict_with) => { + NameResErrorKind::Conflict(ident, conflict_with) => { let ident = ident.data(db.as_hir_db()); vec![ SubDiagnostic::new( @@ -122,7 +122,7 @@ impl ImportErrorKind { ] } - ImportErrorKind::NotFound(ident) => { + NameResErrorKind::NotFound(ident) => { let ident = ident.data(db.as_hir_db()); vec![SubDiagnostic::new( LabelStyle::Primary, @@ -131,7 +131,7 @@ impl ImportErrorKind { )] } - ImportErrorKind::Invisible(ident, span) => { + NameResErrorKind::Invisible(ident, span) => { let ident = ident.data(db.as_hir_db()); let mut diags = vec![SubDiagnostic::new( @@ -149,7 +149,7 @@ impl ImportErrorKind { diags } - ImportErrorKind::Ambiguous(ident, candidates) => { + NameResErrorKind::Ambiguous(ident, candidates) => { let ident = ident.data(db.as_hir_db()); let mut diags = vec![SubDiagnostic::new( LabelStyle::Primary, diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 8c534a85c6..ab26a09a78 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -17,7 +17,7 @@ use crate::{ }; use super::{ - diagnostics::NameResolutionDiag, + diagnostics::NameResDiag, name_resolver::{ NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, NameResKind, NameResolutionError, NameResolutionResult, NameResolver, QueryDirective, @@ -37,7 +37,7 @@ pub(crate) struct ImportResolver<'db> { intermediate_uses: FxHashMap>, /// The errors that have been accumulated during the import resolution. - accumulated_errors: Vec, + accumulated_errors: Vec, /// The number of imported resolutions. 
/// This is used to judge if a import resolution doesn't change in each @@ -64,7 +64,7 @@ impl<'db> ImportResolver<'db> { } } - pub(crate) fn resolve_imports(mut self) -> (ResolvedImports, Vec) { + pub(crate) fn resolve_imports(mut self) -> (ResolvedImports, Vec) { self.initialize_i_uses(); let mut changed = true; @@ -516,7 +516,7 @@ impl<'db> ImportResolver<'db> { fn register_error(&mut self, i_use: &IntermediateUse, err: NameResolutionError) { match err { NameResolutionError::NotFound => { - self.accumulated_errors.push(NameResolutionDiag::not_found( + self.accumulated_errors.push(NameResDiag::not_found( i_use.current_segment_span(), i_use.current_segment_ident(self.db).unwrap(), )); @@ -528,7 +528,7 @@ impl<'db> ImportResolver<'db> { } NameResolutionError::Ambiguous(cands) => { - self.accumulated_errors.push(NameResolutionDiag::ambiguous( + self.accumulated_errors.push(NameResDiag::ambiguous( i_use.current_segment_span(), i_use.current_segment_ident(self.db).unwrap(), cands @@ -878,7 +878,7 @@ impl IntermediateResolvedImports { db: &dyn HirAnalysisDb, i_use: &IntermediateUse, mut bind: NameBinding, - ) -> Result<(), NameResolutionDiag> { + ) -> Result<(), NameResDiag> { let scope = i_use.original_scope; bind.set_derivation(NameDerivation::NamedImported(i_use.use_)); @@ -910,7 +910,7 @@ impl IntermediateResolvedImports { }; if i_use.use_ != use_ { - return Err(NameResolutionDiag::conflict( + return Err(NameResDiag::conflict( i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), imported_name, cand.derived_from(db).unwrap(), diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index b47692e422..161cb54a4c 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -18,7 +18,7 @@ use hir::{ use crate::HirAnalysisDb; use self::{ - diagnostics::NameResolutionDiagAccumulator, + diagnostics::{NameResErrorKind, NameResolutionDiagAccumulator}, import_resolver::{DefaultImporter, ResolvedImports}, name_resolver::{ NameDomain, NameQuery, NameResolutionError, QueryDirective, ResolvedQueryCacheStore, @@ -57,6 +57,31 @@ impl<'db> ModuleAnalysisPass for ImportAnalysisPass<'db> { } } +pub struct DefConflictAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> DefConflictAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass for DefConflictAnalysisPass<'db> { + fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { + let errors = + resolve_path_early::accumulated::(self.db, top_mod); + + // TODO: Impl collector. + errors + .into_iter() + .filter_map(|err| { + matches!(err.kind, NameResErrorKind::Conflict(..)).then(|| Box::new(err) as _) + }) + .collect() + } +} + #[salsa::tracked(return_ref)] pub(crate) fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> ResolvedImports { let resolver = import_resolver::ImportResolver::new(db, ingot); @@ -68,8 +93,11 @@ pub(crate) fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> Resolve imports } -/// Performs early path resolution in the given module and checks the conflict -/// of the definitions. +/// Performs early path resolution and cache the resolutions for paths appeared +/// in the given module. Also checks the conflict of the item definitions +/// +/// NOTE: This method doesn't check the conflict in impl blocks since it +/// requires ingot granularity analysis. 
#[salsa::tracked(return_ref)] #[allow(unused)] pub(crate) fn resolve_path_early( @@ -84,7 +112,7 @@ pub(crate) fn resolve_path_early( struct PathResolver<'db, 'a> { db: &'db dyn HirAnalysisDb, resolver: name_resolver::NameResolver<'db, 'a>, - diags: Vec, + diags: Vec, item_stack: Vec, } @@ -163,7 +191,7 @@ impl<'db, 'a> PathResolver<'db, 'a> { }) .unwrap(); - let diag = diagnostics::NameResolutionDiag::conflict( + let diag = diagnostics::NameResDiag::conflict( scope.name_span(self.db.as_hir_db()).unwrap(), scope.name(self.db.as_hir_db()).unwrap(), conflicted_span, diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index ce16b46b84..5845c33b28 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -22,8 +22,8 @@ use crate::SpannedHirDb; /// `[LazySpan]`(crate::span::LazySpan) and types that implement `LazySpan`. pub trait DiagnosticVoucher: Send { fn error_code(&self) -> GlobalErrorCode; - /// Consumes voucher and makes a [`CompleteDiagnostic`]. - fn to_complete(self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; + /// Makes a [`CompleteDiagnostic`]. + fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; } impl DiagnosticVoucher for CompleteDiagnostic { @@ -31,7 +31,17 @@ impl DiagnosticVoucher for CompleteDiagnostic { self.error_code.clone() } - fn to_complete(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { - self + fn to_complete(&self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { + self.clone() + } +} + +impl DiagnosticVoucher for Box { + fn error_code(&self) -> GlobalErrorCode { + self.as_ref().error_code() + } + + fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic { + self.as_ref().to_complete(db) } } diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index bc8db2b517..c462fdd3b0 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -39,7 +39,7 @@ impl DiagnosticVoucher for ParserError { GlobalErrorCode::new(AnalysisPass::Parse, 0) } - fn to_complete(self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { + fn to_complete(&self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { let error_code = self.error_code(); let span = Span::new(self.file, self.error.range, SpanKind::Original); CompleteDiagnostic::new( @@ -47,7 +47,7 @@ impl DiagnosticVoucher for ParserError { self.error.msg.clone(), vec![SubDiagnostic::new( LabelStyle::Primary, - self.error.msg, + self.error.msg.clone(), Some(span), )], vec![], From 7bd3c6f387944eed80e11837cf60892ada73aeb3 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 27 Jun 2023 23:26:25 +0200 Subject: [PATCH 190/678] Add uitest for definition conflicts --- Cargo.lock | 24 ++- crates/common2/src/diagnostics.rs | 39 ++++- crates/driver2/src/diagnostics.rs | 8 +- crates/driver2/src/lib.rs | 20 ++- .../src/name_resolution/diagnostics.rs | 153 +++++++++--------- .../src/name_resolution/import_resolver.rs | 6 +- .../hir-analysis/src/name_resolution/mod.rs | 79 ++++----- .../src/name_resolution/name_resolver.rs | 19 ++- crates/hir/src/analysis_pass.rs | 2 +- crates/hir/src/hir_def/scope_graph.rs | 8 +- crates/hir/src/lower/parse.rs | 4 +- crates/uitest/Cargo.toml | 15 ++ crates/uitest/build.rs | 4 + .../fixtures/name_resolution/conflict.fe | 10 ++ .../fixtures/name_resolution/conflict.snap | 28 ++++ .../name_resolution/field_conflict.fe | 4 + .../name_resolution/field_conflict.snap | 14 ++ .../name_resolution/generic_param_conflict.fe | 4 + .../generic_param_conflict.snap | 14 ++ 
.../name_resolution/variant_conflict.fe | 5 + .../name_resolution/variant_conflict.snap | 15 ++ crates/uitest/src/lib.rs | 38 +++++ 22 files changed, 361 insertions(+), 152 deletions(-) create mode 100644 crates/uitest/Cargo.toml create mode 100644 crates/uitest/build.rs create mode 100644 crates/uitest/fixtures/name_resolution/conflict.fe create mode 100644 crates/uitest/fixtures/name_resolution/conflict.snap create mode 100644 crates/uitest/fixtures/name_resolution/field_conflict.fe create mode 100644 crates/uitest/fixtures/name_resolution/field_conflict.snap create mode 100644 crates/uitest/fixtures/name_resolution/generic_param_conflict.fe create mode 100644 crates/uitest/fixtures/name_resolution/generic_param_conflict.snap create mode 100644 crates/uitest/fixtures/name_resolution/variant_conflict.fe create mode 100644 crates/uitest/fixtures/name_resolution/variant_conflict.snap create mode 100644 crates/uitest/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 9a61ec3367..9cf7228ba3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -561,13 +561,10 @@ checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" name = "driver2" version = "0.20.0-alpha" dependencies = [ - "camino", - "codespan-reporting", - "fe-common2", - "fe-hir", - "fe-hir-analysis", - "fe-macros", - "salsa-2022", + "dir-test", + "fe-compiler-test-utils", + "fe-driver2", + "wasm-bindgen-test", ] [[package]] @@ -915,6 +912,19 @@ dependencies = [ "vfs", ] +[[package]] +name = "fe-driver2" +version = "0.20.0-alpha" +dependencies = [ + "camino", + "codespan-reporting", + "fe-common2", + "fe-hir", + "fe-hir-analysis", + "fe-macros", + "salsa-2022", +] + [[package]] name = "fe-hir" version = "0.22.0" diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index be3ce3754e..042a38c5fb 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -29,16 +29,26 @@ impl CompleteDiagnostic { error_code, } } + + pub fn primary_span(&self) -> Span { + self.sub_diagnostics + .iter() + .find_map(|sub| match sub.style { + LabelStyle::Primary => Some(sub.span.clone().unwrap()), + _ => None, + }) + .unwrap() + } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct GlobalErrorCode { - pub pass: AnalysisPass, + pub pass: DiagnosticPass, pub local_code: u16, } impl GlobalErrorCode { - pub fn new(pass: AnalysisPass, local_code: u16) -> Self { + pub fn new(pass: DiagnosticPass, local_code: u16) -> Self { Self { pass, local_code } } } @@ -79,6 +89,21 @@ pub struct Span { pub kind: SpanKind, } +impl PartialOrd for Span { + fn partial_cmp(&self, other: &Self) -> Option { + match self.file.cmp(&other.file) { + std::cmp::Ordering::Equal => self.range.start().partial_cmp(&other.range.start()), + ord => return Some(ord), + } + } +} + +impl Ord for Span { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.partial_cmp(other).unwrap() + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum SpanKind { /// A node corresponding is originally written in the source code. 
@@ -113,8 +138,8 @@ pub enum Severity { Note, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum AnalysisPass { +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum DiagnosticPass { Parse, NameResolution, @@ -124,7 +149,7 @@ pub enum AnalysisPass { ExternalAnalysis(ExternalAnalysisKey), } -impl AnalysisPass { +impl DiagnosticPass { pub fn code(&self) -> u16 { match self { Self::Parse => 1, @@ -137,7 +162,7 @@ impl AnalysisPass { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct ExternalAnalysisKey { name: String, } diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs index 660d2ab0c3..30db202383 100644 --- a/crates/driver2/src/diagnostics.rs +++ b/crates/driver2/src/diagnostics.rs @@ -70,15 +70,15 @@ pub fn file_line_starts(db: &dyn DriverDb, file: InputFile) -> Vec { impl<'a> cs_files::Files<'a> for DriverDataBase { type FileId = InputFile; - type Name = &'a str; - type Source = &'a Utf8Path; + type Name = &'a Utf8Path; + type Source = &'a str; fn name(&'a self, file_id: Self::FileId) -> Result { - Ok(file_id.text(self)) + Ok(file_id.path(self).as_path()) } fn source(&'a self, file_id: Self::FileId) -> Result { - Ok(file_id.path(self).as_path()) + Ok(file_id.text(self)) } fn line_index( diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index 07f1b1c179..ddca62f782 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -7,6 +7,7 @@ use codespan_reporting::term::{ termcolor::{BufferWriter, ColorChoice}, }; use common::{ + diagnostics::CompleteDiagnostic, input::{IngotKind, Version}, InputDb, InputFile, InputIngot, }; @@ -43,6 +44,8 @@ pub struct DriverDataBase { impl DriverDataBase { // TODO: An temporary implementation for ui testing. pub fn run_on_file(&mut self, file_path: &path::Path) { + self.diags.clear(); + let kind = IngotKind::StandAlone; // We set the ingot version to 0.0.0 for stand-alone file. 
let version = Version::new(0, 0, 0); @@ -76,7 +79,7 @@ impl DriverDataBase { let mut buffer = writer.buffer(); let config = term::Config::default(); - for diag in &self.diags { + for diag in self.finalize_diags() { term::emit(&mut buffer, &config, self, &diag.into_cs(self)).unwrap(); } @@ -89,12 +92,21 @@ impl DriverDataBase { let mut buffer = writer.buffer(); let config = term::Config::default(); - for diag in &self.diags { - term::emit(&mut buffer, &config, self, &diag.into_cs(self)) - .expect("failed to emit diagnostic"); + for diag in self.finalize_diags() { + term::emit(&mut buffer, &config, self, &diag.into_cs(self)).unwrap(); } + std::str::from_utf8(buffer.as_slice()).unwrap().to_string() } + + fn finalize_diags(&self) -> Vec { + let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); + diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { + std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), + ord => ord, + }); + diags + } } impl HirDb for DriverDataBase {} diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 9ef886e0c7..1fdd0a902c 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -1,9 +1,9 @@ use common::diagnostics::{ - AnalysisPass, CompleteDiagnostic, GlobalErrorCode, LabelStyle, Severity, SubDiagnostic, + CompleteDiagnostic, DiagnosticPass, GlobalErrorCode, LabelStyle, Severity, SubDiagnostic, }; use hir::{ diagnostics::DiagnosticVoucher, - hir_def::IdentId, + hir_def::{IdentId, TopLevelMod}, span::{DynLazySpan, LazySpan}, HirDb, }; @@ -16,70 +16,59 @@ use super::name_resolver::NameRes; pub struct NameResolutionDiagAccumulator(pub(super) NameResDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct NameResDiag { - pub span: DynLazySpan, - pub kind: NameResErrorKind, +pub enum NameResDiag { + /// The definition conflicts with other definitions. + Conflict(IdentId, Vec), + + /// The name is not found. + NotFound(DynLazySpan, IdentId), + + /// The resolved name is not visible. + Invisible(DynLazySpan, IdentId, Option), + + /// The resolved name is ambiguous. 
+ Ambiguous(DynLazySpan, IdentId, Vec), } impl NameResDiag { - pub fn new(span: DynLazySpan, kind: NameResErrorKind) -> Self { - Self { span, kind } - } - - pub fn conflict(span: DynLazySpan, name: IdentId, conflict_with: DynLazySpan) -> Self { - Self::new(span, NameResErrorKind::Conflict(name, conflict_with)) + pub fn conflict(name: IdentId, conflict_with: Vec) -> Self { + Self::Conflict(name, conflict_with) } pub fn not_found(span: DynLazySpan, ident: IdentId) -> Self { - Self::new(span, NameResErrorKind::NotFound(ident)) + Self::NotFound(span, ident) } pub fn invisible(db: &dyn HirAnalysisDb, span: DynLazySpan, resolved: NameRes) -> Self { let name = resolved.kind.name(db).unwrap(); let name_span = resolved.kind.name_span(db); - Self::new(span, NameResErrorKind::Invisible(name, name_span)) + Self::Invisible(span, name, name_span) } pub fn ambiguous(span: DynLazySpan, ident: IdentId, candidates: Vec) -> Self { - Self::new(span, NameResErrorKind::Ambiguous(ident, candidates)) - } -} - -impl DiagnosticVoucher for NameResDiag { - fn error_code(&self) -> GlobalErrorCode { - GlobalErrorCode::new(AnalysisPass::NameResolution, self.kind.local_code()) + Self::Ambiguous(span, ident, candidates) } - fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { - let error_code = self.error_code(); - let message = self.kind.message(db.as_hir_db()); - let sub_diags = self.kind.sub_diagnostics(db, self.span.clone()); - - CompleteDiagnostic::new(self.kind.severity(), message, sub_diags, vec![], error_code) + // Returns the top-level module where the diagnostic is located. + pub fn top_mod(&self, db: &dyn HirAnalysisDb) -> TopLevelMod { + match self { + Self::Conflict(_, conflicts) => conflicts + .iter() + .filter_map(|span| span.top_mod(db.as_hir_db())) + .min() + .unwrap(), + Self::NotFound(span, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::Invisible(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::Ambiguous(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + } } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum NameResErrorKind { - /// The import conflicts with another import. - Conflict(IdentId, DynLazySpan), - /// The import path segment is not found. - NotFound(IdentId), - - /// The import path segment is not visible. - Invisible(IdentId, Option), - /// The import path segment is ambiguous. - Ambiguous(IdentId, Vec), -} - -impl NameResErrorKind { fn local_code(&self) -> u16 { match self { - NameResErrorKind::Conflict(..) => 0, - NameResErrorKind::NotFound(_) => 1, - NameResErrorKind::Invisible(..) => 2, - NameResErrorKind::Ambiguous(..) => 3, + Self::Conflict(..) => 1, + Self::NotFound(..) => 2, + Self::Invisible(..) => 3, + Self::Ambiguous(..) 
=> 4, } } @@ -89,40 +78,46 @@ impl NameResErrorKind { fn message(&self, db: &dyn HirDb) -> String { match self { - NameResErrorKind::Conflict(name, _) => { - format!("{} conflicts with other definitions", name.data(db)) + Self::Conflict(name, _) => { + format!("`{}` conflicts with other definitions", name.data(db)) } - NameResErrorKind::NotFound(name) => format!("{} is not found", name.data(db)), - NameResErrorKind::Invisible(name, _) => { - format!("{} is not visible", name.data(db),) + Self::NotFound(_, name) => format!("`{}` is not found", name.data(db)), + Self::Invisible(_, name, _) => { + format!("`{}` is not visible", name.data(db),) } - NameResErrorKind::Ambiguous(name, _) => format!("{} is ambiguous", name.data(db)), + Self::Ambiguous(_, name, _) => format!("`{}` is ambiguous", name.data(db)), } } - fn sub_diagnostics( - &self, - db: &dyn hir::SpannedHirDb, - prim_span: DynLazySpan, - ) -> Vec { + fn sub_diagnostics(&self, db: &dyn hir::SpannedHirDb) -> Vec { match self { - NameResErrorKind::Conflict(ident, conflict_with) => { + Self::Conflict(ident, conflicts) => { let ident = ident.data(db.as_hir_db()); - vec![ - SubDiagnostic::new( - LabelStyle::Primary, - format! {"`{ident}` conflicts with another definition"}, - prim_span.resolve(db), - ), - SubDiagnostic::new( + let mut diags = Vec::with_capacity(conflicts.len()); + let mut spans: Vec<_> = conflicts + .iter() + .filter_map(|span| span.resolve(db)) + .collect(); + spans.sort_unstable(); + let mut spans = spans.into_iter(); + + diags.push(SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is defined here"), + spans.next(), + )); + for sub_span in spans { + diags.push(SubDiagnostic::new( LabelStyle::Secondary, - format! {"{ident} redefined here "}, - conflict_with.resolve(db), - ), - ] + format! 
{"`{ident}` is redefined here"}, + Some(sub_span), + )); + } + + diags } - NameResErrorKind::NotFound(ident) => { + Self::NotFound(prim_span, ident) => { let ident = ident.data(db.as_hir_db()); vec![SubDiagnostic::new( LabelStyle::Primary, @@ -131,7 +126,7 @@ impl NameResErrorKind { )] } - NameResErrorKind::Invisible(ident, span) => { + Self::Invisible(prim_span, ident, span) => { let ident = ident.data(db.as_hir_db()); let mut diags = vec![SubDiagnostic::new( @@ -149,7 +144,7 @@ impl NameResErrorKind { diags } - NameResErrorKind::Ambiguous(ident, candidates) => { + Self::Ambiguous(prim_span, ident, candidates) => { let ident = ident.data(db.as_hir_db()); let mut diags = vec![SubDiagnostic::new( LabelStyle::Primary, @@ -169,3 +164,17 @@ impl NameResErrorKind { } } } + +impl DiagnosticVoucher for NameResDiag { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::NameResolution, self.local_code()) + } + + fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + let error_code = self.error_code(); + let message = self.message(db.as_hir_db()); + let sub_diags = self.sub_diagnostics(db); + + CompleteDiagnostic::new(self.severity(), message, sub_diags, vec![], error_code) + } +} diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index ab26a09a78..6eff42ed9b 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -911,9 +911,11 @@ impl IntermediateResolvedImports { if i_use.use_ != use_ { return Err(NameResDiag::conflict( - i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), imported_name, - cand.derived_from(db).unwrap(), + vec![ + i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), + cand.derived_from(db).unwrap(), + ], )); } } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 161cb54a4c..98423e9569 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -2,23 +2,21 @@ use hir::{ analysis_pass::ModuleAnalysisPass, diagnostics::DiagnosticVoucher, hir_def::{ - scope_graph::ScopeId, FieldDefListId, FuncParamListId, GenericParamListId, IngotId, - ItemKind, TopLevelMod, VariantDefListId, - }, - span::{ - item::{LazyFieldDefListSpan, LazyItemSpan, LazyVariantDefListSpan}, - params::LazyFuncParamListSpan, + scope_graph::ScopeId, FieldDefListId, GenericParamListId, IngotId, ItemKind, TopLevelMod, + VariantDefListId, }, + span::item::{LazyFieldDefListSpan, LazyItemSpan, LazyVariantDefListSpan}, visitor::{ - walk_field_def_list, walk_func_param_list, walk_generic_param_list, walk_item, - walk_variant_def_list, Visitor, VisitorCtxt, + walk_field_def_list, walk_generic_param_list, walk_item, walk_variant_def_list, Visitor, + VisitorCtxt, }, }; +use rustc_hash::FxHashSet; use crate::HirAnalysisDb; use self::{ - diagnostics::{NameResErrorKind, NameResolutionDiagAccumulator}, + diagnostics::{NameResDiag, NameResolutionDiagAccumulator}, import_resolver::{DefaultImporter, ResolvedImports}, name_resolver::{ NameDomain, NameQuery, NameResolutionError, QueryDirective, ResolvedQueryCacheStore, @@ -49,10 +47,7 @@ impl<'db> ModuleAnalysisPass for ImportAnalysisPass<'db> { let ingot = top_mod.ingot(self.db.as_hir_db()); resolve_imports::accumulated::(self.db, ingot) .into_iter() - .filter_map(|diag| { - (diag.span.top_mod(self.db.as_hir_db()) == Some(top_mod)) - .then(|| 
Box::new(diag) as _) - }) + .filter_map(|diag| (diag.top_mod(self.db) == top_mod).then(|| Box::new(diag) as _)) .collect() } } @@ -75,9 +70,7 @@ impl<'db> ModuleAnalysisPass for DefConflictAnalysisPass<'db> { // TODO: Impl collector. errors .into_iter() - .filter_map(|err| { - matches!(err.kind, NameResErrorKind::Conflict(..)).then(|| Box::new(err) as _) - }) + .filter_map(|err| matches!(err, NameResDiag::Conflict(..)).then(|| Box::new(err) as _)) .collect() } } @@ -105,15 +98,24 @@ pub(crate) fn resolve_path_early( top_mod: TopLevelMod, ) -> ResolvedQueryCacheStore { let importer = DefaultImporter; - PathResolver::new(db, &importer).resolve_all(top_mod); - todo!() + let mut resolver = PathResolver::new(db, &importer); + resolver.resolve_all(top_mod); + + for diag in resolver.diags { + NameResolutionDiagAccumulator::push(db, diag); + } + resolver.inner.into_cache_store() } struct PathResolver<'db, 'a> { db: &'db dyn HirAnalysisDb, - resolver: name_resolver::NameResolver<'db, 'a>, + inner: name_resolver::NameResolver<'db, 'a>, diags: Vec, item_stack: Vec, + + /// The set of scopes that have already been conflicted to avoid duplicate + /// diagnostics. + already_conflicted: FxHashSet, } impl<'db, 'a> PathResolver<'db, 'a> { @@ -121,9 +123,10 @@ impl<'db, 'a> PathResolver<'db, 'a> { let resolver = name_resolver::NameResolver::new(db, importer); Self { db: db.as_hir_analysis_db(), - resolver, + inner: resolver, diags: Vec::new(), item_stack: Vec::new(), + already_conflicted: FxHashSet::default(), } } @@ -153,14 +156,6 @@ impl<'db, 'a> PathResolver<'db, 'a> { } } - fn check_func_param_conflict(&mut self, params: FuncParamListId) { - let parent_item = *self.item_stack.last().unwrap(); - for i in 0..params.data(self.db.as_hir_db()).len() { - let scope = ScopeId::FuncParam(parent_item, i); - self.check_conflict(scope); - } - } - fn check_generic_param_conflict(&mut self, params: GenericParamListId) { let parent_item = *self.item_stack.last().unwrap(); for i in 0..params.data(self.db.as_hir_db()).len() { @@ -170,29 +165,30 @@ impl<'db, 'a> PathResolver<'db, 'a> { } fn check_conflict(&mut self, scope: ScopeId) { + if !self.already_conflicted.insert(scope) { + return; + } + let Some(query) = self.make_query_for_conflict_check(scope) else { return; }; let domain = NameDomain::from_scope(scope); - let binding = self.resolver.resolve_query(query); + let binding = self.inner.resolve_query(query); match binding.res_in_domain(domain) { Ok(_) => {} + Err(NameResolutionError::Ambiguous(cands)) => { let conflicted_span = cands .iter() - .find_map(|res| { + .filter_map(|res| { let conflicted_scope = res.scope()?; - if conflicted_scope == scope { - None - } else { - conflicted_scope.name_span(self.db.as_hir_db()) - } + self.already_conflicted.insert(conflicted_scope); + conflicted_scope.name_span(self.db.as_hir_db()) }) - .unwrap(); + .collect(); let diag = diagnostics::NameResDiag::conflict( - scope.name_span(self.db.as_hir_db()).unwrap(), scope.name(self.db.as_hir_db()).unwrap(), conflicted_span, ); @@ -247,13 +243,4 @@ impl<'db, 'a> Visitor for PathResolver<'db, 'a> { self.check_generic_param_conflict(params); walk_generic_param_list(self, ctxt, params); } - - fn visit_func_param_list( - &mut self, - ctxt: &mut VisitorCtxt<'_, LazyFuncParamListSpan>, - params: FuncParamListId, - ) { - self.check_func_param_conflict(params); - walk_func_param_list(self, ctxt, params) - } } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 
552f979577..d7597b2d10 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -51,6 +51,10 @@ impl<'db, 'a> NameResolver<'db, 'a> { cache_store, } } + + pub(super) fn into_cache_store(self) -> ResolvedQueryCacheStore { + self.cache_store + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -523,12 +527,18 @@ impl NameBinding { let domain = res.domain; match self.resolutions.entry(domain) { Entry::Occupied(mut e) => { - let old_res = match e.get() { + let old_res = match e.get_mut() { Ok(res) => res, Err(NameResolutionError::NotFound) => { e.insert(Ok(res.clone())).ok(); return; } + Err(NameResolutionError::Ambiguous(ambiguous_set)) => { + if ambiguous_set[0].derivation == res.derivation { + ambiguous_set.push(res.clone()); + } + return; + } Err(_) => { return; } @@ -538,10 +548,11 @@ impl NameBinding { match res.derivation.cmp(&old_derivation) { cmp::Ordering::Less => {} cmp::Ordering::Equal => { - if old_res.kind == res.kind { - } else { + if old_res.kind != res.kind { + let old_res_cloned = old_res.clone(); + let res = res.clone(); e.insert(Err(NameResolutionError::Ambiguous(vec![ - old_res.clone(), + old_res_cloned, res.clone(), ]))) .ok(); diff --git a/crates/hir/src/analysis_pass.rs b/crates/hir/src/analysis_pass.rs index 454c6f9644..91e917b4b7 100644 --- a/crates/hir/src/analysis_pass.rs +++ b/crates/hir/src/analysis_pass.rs @@ -32,7 +32,7 @@ impl<'db> AnalysisPassManager<'db> { self.module_passes.push(pass); } - pub fn add_func_pass(&mut self, pass: Box) { + pub fn add_func_pass(&mut self, pass: Box) { self.func_passes.push(pass); } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 186adcf242..e957124c03 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -26,9 +26,11 @@ impl ScopeGraph { /// Returns the direct child items of the scope. pub fn child_items(&self, scope: ScopeId) -> impl Iterator + '_ { self.edges(scope).filter_map(|edge| match edge.kind { - EdgeKind::Lex(_) | EdgeKind::Super(_) | EdgeKind::Ingot(_) | EdgeKind::SelfTy(_) => { - None - } + EdgeKind::Lex(_) + | EdgeKind::Super(_) + | EdgeKind::Ingot(_) + | EdgeKind::SelfTy(_) + | EdgeKind::Self_(_) => None, _ => edge.dest.to_item(), }) diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index c462fdd3b0..c1e6fed2df 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -1,6 +1,6 @@ use common::{ diagnostics::{ - AnalysisPass, CompleteDiagnostic, GlobalErrorCode, LabelStyle, Severity, Span, SpanKind, + CompleteDiagnostic, DiagnosticPass, GlobalErrorCode, LabelStyle, Severity, Span, SpanKind, SubDiagnostic, }, InputFile, @@ -36,7 +36,7 @@ pub struct ParserError { // information. 
impl DiagnosticVoucher for ParserError { fn error_code(&self) -> GlobalErrorCode { - GlobalErrorCode::new(AnalysisPass::Parse, 0) + GlobalErrorCode::new(DiagnosticPass::Parse, 1) } fn to_complete(&self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { diff --git a/crates/uitest/Cargo.toml b/crates/uitest/Cargo.toml new file mode 100644 index 0000000000..b23c5b1d2b --- /dev/null +++ b/crates/uitest/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "driver2" +version = "0.20.0-alpha" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides Fe driver" +publish = false + +[dependencies] +driver = { path = "../driver2", package = "fe-driver2" } +fe-compiler-test-utils = { path = "../test-utils" } +dir-test = "0.1" +wasm-bindgen-test = "0.3" diff --git a/crates/uitest/build.rs b/crates/uitest/build.rs new file mode 100644 index 0000000000..854eb71fab --- /dev/null +++ b/crates/uitest/build.rs @@ -0,0 +1,4 @@ +fn main() { + #[cfg(test)] + println!("cargo:rerun-if-changed=./fixtures"); +} diff --git a/crates/uitest/fixtures/name_resolution/conflict.fe b/crates/uitest/fixtures/name_resolution/conflict.fe new file mode 100644 index 0000000000..aacdd64a21 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict.fe @@ -0,0 +1,10 @@ +// Value domain. +pub fn Foo() {} +pub const Foo: i32 = 1 + +// Type domain. +pub enum Foo {} +pub struct Foo {} +mod Foo {} +enum Foo {} +type Foo = i32 \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/conflict.snap b/crates/uitest/fixtures/name_resolution/conflict.snap new file mode 100644 index 0000000000..cc692ac138 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict.snap @@ -0,0 +1,28 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/conflict.fe +--- +error[2-0001]: `Foo` conflicts with other definitions + ┌─ conflict.fe:2:8 + │ +2 │ pub fn Foo() {} + │ ^^^ `Foo` is defined here +3 │ pub const Foo: i32 = 1 + │ --- `Foo` is redefined here + +error[2-0001]: `Foo` conflicts with other definitions + ┌─ conflict.fe:6:10 + │ + 6 │ pub enum Foo {} + │ ^^^ `Foo` is defined here + 7 │ pub struct Foo {} + │ --- `Foo` is redefined here + 8 │ mod Foo {} + │ --- `Foo` is redefined here + 9 │ enum Foo {} + │ --- `Foo` is redefined here +10 │ type Foo = i32 + │ --- `Foo` is redefined here + + diff --git a/crates/uitest/fixtures/name_resolution/field_conflict.fe b/crates/uitest/fixtures/name_resolution/field_conflict.fe new file mode 100644 index 0000000000..d51f1da9d0 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/field_conflict.fe @@ -0,0 +1,4 @@ +pub struct MyS { + x: i32 + x: u32 +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/field_conflict.snap b/crates/uitest/fixtures/name_resolution/field_conflict.snap new file mode 100644 index 0000000000..829e512972 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/field_conflict.snap @@ -0,0 +1,14 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/field_conflict.fe +--- +error[2-0001]: `x` conflicts with other definitions + ┌─ field_conflict.fe:2:5 + │ +2 │ x: i32 + │ ^ `x` is defined here +3 │ x: u32 + │ - `x` is redefined here + + diff --git a/crates/uitest/fixtures/name_resolution/generic_param_conflict.fe b/crates/uitest/fixtures/name_resolution/generic_param_conflict.fe new file mode 100644 index 
0000000000..218dc861ac --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/generic_param_conflict.fe @@ -0,0 +1,4 @@ +pub struct MyS { + x: T + y: U +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/generic_param_conflict.snap b/crates/uitest/fixtures/name_resolution/generic_param_conflict.snap new file mode 100644 index 0000000000..cc8a5f5825 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/generic_param_conflict.snap @@ -0,0 +1,14 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/generic_param_conflict.fe +--- +error[2-0001]: `T` conflicts with other definitions + ┌─ generic_param_conflict.fe:1:16 + │ +1 │ pub struct MyS { + │ ^ - `T` is redefined here + │ │ + │ `T` is defined here + + diff --git a/crates/uitest/fixtures/name_resolution/variant_conflict.fe b/crates/uitest/fixtures/name_resolution/variant_conflict.fe new file mode 100644 index 0000000000..864432edcc --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/variant_conflict.fe @@ -0,0 +1,5 @@ +pub enum MyE { + Var1 + Var2 + Var1 +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/variant_conflict.snap b/crates/uitest/fixtures/name_resolution/variant_conflict.snap new file mode 100644 index 0000000000..97e706e6db --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/variant_conflict.snap @@ -0,0 +1,15 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/variant_conflict.fe +--- +error[2-0001]: `Var1` conflicts with other definitions + ┌─ variant_conflict.fe:2:5 + │ +2 │ Var1 + │ ^^^^ `Var1` is defined here +3 │ Var2 +4 │ Var1 + │ ---- `Var1` is redefined here + + diff --git a/crates/uitest/src/lib.rs b/crates/uitest/src/lib.rs new file mode 100644 index 0000000000..07da22bf9d --- /dev/null +++ b/crates/uitest/src/lib.rs @@ -0,0 +1,38 @@ +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use driver::DriverDataBase; +use fe_compiler_test_utils::snap_test; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/fixtures/name_resolution", + glob: "*.fe" +)] +#[allow(unused)] +fn run_name_resolution(fixture: Fixture<&str>) { + run_name_resolution_impl(fixture); +} + +fn run_name_resolution_impl(fixture: Fixture<&str>) { + let mut driver = DriverDataBase::default(); + let path = Path::new(fixture.path()); + driver.run_on_file(path); + let diags = driver.format_diags(); + snap_test!(diags, fixture.path()); +} + +#[cfg(target_family = "wasm")] +mod wasm { + use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/fixtures/name_resolution", + glob: "*.fe", + postfix: "wasm" + )] + #[dir_test_attr(#[wasm_bindgen_test])] + fn run_name_resolution(fixture: Fixture<&str>) { + run_name_resolution_impl(fixture); + } +} From 065288b4ca99954f3a9f7a3c7d92f5eb269334ec Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 27 Jun 2023 23:35:07 +0200 Subject: [PATCH 191/678] Make clippy happy --- crates/common2/src/diagnostics.rs | 2 +- crates/driver2/src/diagnostics.rs | 8 ++++---- crates/driver2/src/lib.rs | 8 ++++---- .../hir-analysis/src/name_resolution/name_resolver.rs | 2 +- crates/hir/src/analysis_pass.rs | 10 ++++------ 5 files changed, 14 insertions(+), 16 deletions(-) diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 042a38c5fb..564fd70a61 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -93,7 +93,7 @@ 
impl PartialOrd for Span { fn partial_cmp(&self, other: &Self) -> Option { match self.file.cmp(&other.file) { std::cmp::Ordering::Equal => self.range.start().partial_cmp(&other.range.start()), - ord => return Some(ord), + ord => Some(ord), } } } diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs index 30db202383..66f3e0e812 100644 --- a/crates/driver2/src/diagnostics.rs +++ b/crates/driver2/src/diagnostics.rs @@ -12,15 +12,15 @@ use hir::diagnostics::DiagnosticVoucher; use crate::{DriverDataBase, DriverDb}; -pub trait IntoCsDiag { - fn into_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic; +pub trait ToCsDiag { + fn to_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic; } -impl IntoCsDiag for T +impl ToCsDiag for T where T: DiagnosticVoucher, { - fn into_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic { + fn to_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic { let complete = self.to_complete(db); let severity = convert_severity(complete.severity); diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index ddca62f782..309b304b69 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -20,7 +20,7 @@ use hir_analysis::{ HirAnalysisDb, }; -use crate::diagnostics::IntoCsDiag; +use crate::diagnostics::ToCsDiag; #[salsa::jar(db = DriverDb)] pub struct Jar(diagnostics::file_line_starts); @@ -80,7 +80,7 @@ impl DriverDataBase { let config = term::Config::default(); for diag in self.finalize_diags() { - term::emit(&mut buffer, &config, self, &diag.into_cs(self)).unwrap(); + term::emit(&mut buffer, &config, self, &diag.to_cs(self)).unwrap(); } eprintln!("{}", std::str::from_utf8(buffer.as_slice()).unwrap()); @@ -93,7 +93,7 @@ impl DriverDataBase { let config = term::Config::default(); for diag in self.finalize_diags() { - term::emit(&mut buffer, &config, self, &diag.into_cs(self)).unwrap(); + term::emit(&mut buffer, &config, self, &diag.to_cs(self)).unwrap(); } std::str::from_utf8(buffer.as_slice()).unwrap().to_string() @@ -127,7 +127,7 @@ impl Default for DriverDataBase { } } -fn initialize_analysis_pass<'db>(db: &'db DriverDataBase) -> AnalysisPassManager<'db> { +fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { let mut pass_manager = AnalysisPassManager::new(); pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index d7597b2d10..1831828b62 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -553,7 +553,7 @@ impl NameBinding { let res = res.clone(); e.insert(Err(NameResolutionError::Ambiguous(vec![ old_res_cloned, - res.clone(), + res, ]))) .ok(); } diff --git a/crates/hir/src/analysis_pass.rs b/crates/hir/src/analysis_pass.rs index 91e917b4b7..4f17df45dd 100644 --- a/crates/hir/src/analysis_pass.rs +++ b/crates/hir/src/analysis_pass.rs @@ -15,6 +15,7 @@ pub trait ModuleAnalysisPass { fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec>; } +#[derive(Default)] pub struct AnalysisPassManager<'db> { module_passes: Vec>, func_passes: Vec>, @@ -22,10 +23,7 @@ pub struct AnalysisPassManager<'db> { impl<'db> AnalysisPassManager<'db> { pub fn new() -> Self { - Self { - module_passes: vec![], - func_passes: vec![], - } + Self::default() } pub fn add_module_pass(&mut self, pass: Box) { @@ -39,7 +37,7 
@@ impl<'db> AnalysisPassManager<'db> { pub fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { let mut diags = vec![]; for pass in self.module_passes.iter_mut() { - diags.extend(pass.run_on_module(top_mod.clone())); + diags.extend(pass.run_on_module(top_mod)); } diags } @@ -47,7 +45,7 @@ impl<'db> AnalysisPassManager<'db> { pub fn run_on_func(&mut self, func: Func) -> Vec> { let mut diags = vec![]; for pass in self.func_passes.iter_mut() { - diags.extend(pass.run_on_func(func.clone())); + diags.extend(pass.run_on_func(func)); } diags } From 2d4ebec1161f4a27989150bcbbad20640263d624 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 28 Jun 2023 00:07:31 +0200 Subject: [PATCH 192/678] Improve error message for private item import --- .../src/name_resolution/import_resolver.rs | 30 ++++++++++++++----- .../src/name_resolution/name_resolver.rs | 4 +-- crates/hir/src/hir_def/scope_graph.rs | 7 +++++ 3 files changed, 32 insertions(+), 9 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 6eff42ed9b..a98953eb21 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -411,21 +411,35 @@ impl<'db> ImportResolver<'db> { return true; } }; + let mut representative_invisible_res = None; binding.resolutions.retain(|_, res| { let Ok(res) = res else { return false; }; match res.scope() { - Some(scope) => is_scope_visible(self.db, i_use.original_scope, scope), + Some(scope) => { + if is_scope_visible(self.db, i_use.original_scope, scope) { + true + } else { + if scope.is_importable() { + representative_invisible_res.get_or_insert_with(|| res.clone()); + } + false + } + } None => true, } }); let n_res = binding.len(); let is_decidable = self.is_decidable(&i_use); - if n_res == 0 && is_decidable { - self.register_error(&i_use, NameResolutionError::NotFound); + let error = if let Some(invisible_res) = representative_invisible_res { + NameResolutionError::Invisible(invisible_res) + } else { + NameResolutionError::NotFound + }; + self.register_error(&i_use, error); return true; } @@ -538,10 +552,12 @@ impl<'db> ImportResolver<'db> { )); } - // `Invisible` is not expected to be returned from `resolve_query` since `NameResolver` - // doesn't care about visibility. - NameResolutionError::Invisible => { - unreachable!() + NameResolutionError::Invisible(name_res) => { + self.accumulated_errors.push(NameResDiag::invisible( + self.db, + i_use.current_segment_span(), + name_res, + )); } } } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 1831828b62..b2d1972a42 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -781,7 +781,7 @@ pub enum NameResolutionError { Invalid, /// The name is found, but it's not visible from the reference site. - Invisible, + Invisible(NameRes), /// The name is found, but it's ambiguous. 
Ambiguous(Vec), @@ -794,7 +794,7 @@ impl fmt::Display for NameResolutionError { match self { NameResolutionError::NotFound => write!(f, "name not found"), NameResolutionError::Invalid => write!(f, "invalid name"), - NameResolutionError::Invisible => write!(f, "name is not visible"), + NameResolutionError::Invisible(_) => write!(f, "name is not visible"), NameResolutionError::Ambiguous(_) => write!(f, "name is ambiguous"), } } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index e957124c03..62818149bb 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -79,6 +79,13 @@ impl ScopeId { Self::Item(top_mod.into()) } + pub fn is_importable(self) -> bool { + match self { + ScopeId::GenericParam(..) | ScopeId::FuncParam(..) | ScopeId::Field(..) => false, + _ => true, + } + } + /// Returns the scope graph containing this scope. pub fn scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { self.top_mod(db).scope_graph(db) From e15f3cd7b4a3017b632986fef5ceccb862c2034d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 28 Jun 2023 00:07:46 +0200 Subject: [PATCH 193/678] Add uitest for import error --- crates/hir/src/hir_def/scope_graph.rs | 8 +++---- .../{field_conflict.fe => conflict_field.fe} | 0 ...ield_conflict.snap => conflict_field.snap} | 4 ++-- ...param_conflict.fe => conflict_generics.fe} | 0 ...m_conflict.snap => conflict_generics.snap} | 4 ++-- ...ariant_conflict.fe => conflict_variant.fe} | 0 ...nt_conflict.snap => conflict_variant.snap} | 4 ++-- .../name_resolution/import_conflict.fe | 10 ++++++++ .../name_resolution/import_conflict.snap | 22 +++++++++++++++++ .../fixtures/name_resolution/import_cycle.fe | 5 ++++ .../name_resolution/import_cycle.snap | 24 +++++++++++++++++++ .../name_resolution/import_invisible.fe | 5 ++++ .../name_resolution/import_invisible.snap | 15 ++++++++++++ .../name_resolution/import_missing.fe | 14 +++++++++++ .../name_resolution/import_missing.snap | 24 +++++++++++++++++++ 15 files changed, 129 insertions(+), 10 deletions(-) rename crates/uitest/fixtures/name_resolution/{field_conflict.fe => conflict_field.fe} (100%) rename crates/uitest/fixtures/name_resolution/{field_conflict.snap => conflict_field.snap} (68%) rename crates/uitest/fixtures/name_resolution/{generic_param_conflict.fe => conflict_generics.fe} (100%) rename crates/uitest/fixtures/name_resolution/{generic_param_conflict.snap => conflict_generics.snap} (70%) rename crates/uitest/fixtures/name_resolution/{variant_conflict.fe => conflict_variant.fe} (100%) rename crates/uitest/fixtures/name_resolution/{variant_conflict.snap => conflict_variant.snap} (70%) create mode 100644 crates/uitest/fixtures/name_resolution/import_conflict.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_conflict.snap create mode 100644 crates/uitest/fixtures/name_resolution/import_cycle.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_cycle.snap create mode 100644 crates/uitest/fixtures/name_resolution/import_invisible.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_invisible.snap create mode 100644 crates/uitest/fixtures/name_resolution/import_missing.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_missing.snap diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 62818149bb..f7b2966736 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -80,10 +80,10 @@ impl ScopeId { 
} pub fn is_importable(self) -> bool { - match self { - ScopeId::GenericParam(..) | ScopeId::FuncParam(..) | ScopeId::Field(..) => false, - _ => true, - } + !matches!( + self, + ScopeId::GenericParam(..) | ScopeId::FuncParam(..) | ScopeId::Field(..) + ) } /// Returns the scope graph containing this scope. diff --git a/crates/uitest/fixtures/name_resolution/field_conflict.fe b/crates/uitest/fixtures/name_resolution/conflict_field.fe similarity index 100% rename from crates/uitest/fixtures/name_resolution/field_conflict.fe rename to crates/uitest/fixtures/name_resolution/conflict_field.fe diff --git a/crates/uitest/fixtures/name_resolution/field_conflict.snap b/crates/uitest/fixtures/name_resolution/conflict_field.snap similarity index 68% rename from crates/uitest/fixtures/name_resolution/field_conflict.snap rename to crates/uitest/fixtures/name_resolution/conflict_field.snap index 829e512972..ef92860faf 100644 --- a/crates/uitest/fixtures/name_resolution/field_conflict.snap +++ b/crates/uitest/fixtures/name_resolution/conflict_field.snap @@ -1,10 +1,10 @@ --- source: crates/uitest/src/lib.rs expression: diags -input_file: crates/uitest/fixtures/name_resolution/field_conflict.fe +input_file: crates/uitest/fixtures/name_resolution/conflict_field.fe --- error[2-0001]: `x` conflicts with other definitions - ┌─ field_conflict.fe:2:5 + ┌─ conflict_field.fe:2:5 │ 2 │ x: i32 │ ^ `x` is defined here diff --git a/crates/uitest/fixtures/name_resolution/generic_param_conflict.fe b/crates/uitest/fixtures/name_resolution/conflict_generics.fe similarity index 100% rename from crates/uitest/fixtures/name_resolution/generic_param_conflict.fe rename to crates/uitest/fixtures/name_resolution/conflict_generics.fe diff --git a/crates/uitest/fixtures/name_resolution/generic_param_conflict.snap b/crates/uitest/fixtures/name_resolution/conflict_generics.snap similarity index 70% rename from crates/uitest/fixtures/name_resolution/generic_param_conflict.snap rename to crates/uitest/fixtures/name_resolution/conflict_generics.snap index cc8a5f5825..c0cc9d2791 100644 --- a/crates/uitest/fixtures/name_resolution/generic_param_conflict.snap +++ b/crates/uitest/fixtures/name_resolution/conflict_generics.snap @@ -1,10 +1,10 @@ --- source: crates/uitest/src/lib.rs expression: diags -input_file: crates/uitest/fixtures/name_resolution/generic_param_conflict.fe +input_file: crates/uitest/fixtures/name_resolution/conflict_generics.fe --- error[2-0001]: `T` conflicts with other definitions - ┌─ generic_param_conflict.fe:1:16 + ┌─ conflict_generics.fe:1:16 │ 1 │ pub struct MyS { │ ^ - `T` is redefined here diff --git a/crates/uitest/fixtures/name_resolution/variant_conflict.fe b/crates/uitest/fixtures/name_resolution/conflict_variant.fe similarity index 100% rename from crates/uitest/fixtures/name_resolution/variant_conflict.fe rename to crates/uitest/fixtures/name_resolution/conflict_variant.fe diff --git a/crates/uitest/fixtures/name_resolution/variant_conflict.snap b/crates/uitest/fixtures/name_resolution/conflict_variant.snap similarity index 70% rename from crates/uitest/fixtures/name_resolution/variant_conflict.snap rename to crates/uitest/fixtures/name_resolution/conflict_variant.snap index 97e706e6db..6cfb7dd8b0 100644 --- a/crates/uitest/fixtures/name_resolution/variant_conflict.snap +++ b/crates/uitest/fixtures/name_resolution/conflict_variant.snap @@ -1,10 +1,10 @@ --- source: crates/uitest/src/lib.rs expression: diags -input_file: crates/uitest/fixtures/name_resolution/variant_conflict.fe +input_file: 
crates/uitest/fixtures/name_resolution/conflict_variant.fe --- error[2-0001]: `Var1` conflicts with other definitions - ┌─ variant_conflict.fe:2:5 + ┌─ conflict_variant.fe:2:5 │ 2 │ Var1 │ ^^^^ `Var1` is defined here diff --git a/crates/uitest/fixtures/name_resolution/import_conflict.fe b/crates/uitest/fixtures/name_resolution/import_conflict.fe new file mode 100644 index 0000000000..d98c1fb101 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_conflict.fe @@ -0,0 +1,10 @@ +use foo1::S +use foo2::S + +pub mod foo1 { + pub struct S {} +} + +pub mod foo2 { + pub struct S {} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_conflict.snap b/crates/uitest/fixtures/name_resolution/import_conflict.snap new file mode 100644 index 0000000000..8895a26b14 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_conflict.snap @@ -0,0 +1,22 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_conflict.fe +--- +error[2-0001]: `S` conflicts with other definitions + ┌─ import_conflict.fe:1:11 + │ +1 │ use foo1::S + │ ^ `S` is defined here +2 │ use foo2::S + │ - `S` is redefined here + +error[2-0001]: `S` conflicts with other definitions + ┌─ import_conflict.fe:1:11 + │ +1 │ use foo1::S + │ ^ `S` is defined here +2 │ use foo2::S + │ - `S` is redefined here + + diff --git a/crates/uitest/fixtures/name_resolution/import_cycle.fe b/crates/uitest/fixtures/name_resolution/import_cycle.fe new file mode 100644 index 0000000000..c1f0a861bc --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_cycle.fe @@ -0,0 +1,5 @@ +use Foo as Bar +use Bar as Baz +use Baz as Foo + + diff --git a/crates/uitest/fixtures/name_resolution/import_cycle.snap b/crates/uitest/fixtures/name_resolution/import_cycle.snap new file mode 100644 index 0000000000..9f0ae1e7c3 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_cycle.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_cycle.fe +--- +error[2-0002]: `Foo` is not found + ┌─ import_cycle.fe:1:5 + │ +1 │ use Foo as Bar + │ ^^^ `Foo` is not found + +error[2-0002]: `Bar` is not found + ┌─ import_cycle.fe:2:5 + │ +2 │ use Bar as Baz + │ ^^^ `Bar` is not found + +error[2-0002]: `Baz` is not found + ┌─ import_cycle.fe:3:5 + │ +3 │ use Baz as Foo + │ ^^^ `Baz` is not found + + diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.fe b/crates/uitest/fixtures/name_resolution/import_invisible.fe new file mode 100644 index 0000000000..e7b986e3cb --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_invisible.fe @@ -0,0 +1,5 @@ +use foo::Bar + +mod foo { + struct Bar {} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.snap b/crates/uitest/fixtures/name_resolution/import_invisible.snap new file mode 100644 index 0000000000..6ecebaf192 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_invisible.snap @@ -0,0 +1,15 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_invisible.fe +--- +error[2-0003]: `Bar` is not visible + ┌─ import_invisible.fe:1:10 + │ +1 │ use foo::Bar + │ ^^^ `Bar` is not visible + · +4 │ struct Bar {} + │ --- `Bar is defined here + + diff --git a/crates/uitest/fixtures/name_resolution/import_missing.fe b/crates/uitest/fixtures/name_resolution/import_missing.fe new file mode 100644 
index 0000000000..7ba3ff09c5 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_missing.fe @@ -0,0 +1,14 @@ +use foo::Bar + +use foo::{Foo, Bar} + +use foo::bar::Foo + + +mod foo { + pub struct Foo {} + + mod baz { + pub struct Baz {} + } +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_missing.snap b/crates/uitest/fixtures/name_resolution/import_missing.snap new file mode 100644 index 0000000000..03941e60b1 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_missing.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_missing.fe +--- +error[2-0002]: `Bar` is not found + ┌─ import_missing.fe:1:10 + │ +1 │ use foo::Bar + │ ^^^ `Bar` is not found + +error[2-0002]: `Bar` is not found + ┌─ import_missing.fe:3:16 + │ +3 │ use foo::{Foo, Bar} + │ ^^^ `Bar` is not found + +error[2-0002]: `bar` is not found + ┌─ import_missing.fe:5:10 + │ +5 │ use foo::bar::Foo + │ ^^^ `bar` is not found + + From b0a2b59a1c8023d1009b47ff2eca2b63c87ab936 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 29 Jun 2023 14:29:49 +0200 Subject: [PATCH 194/678] Improve error message for ambigous import --- .../src/name_resolution/diagnostics.rs | 11 +++++-- .../src/name_resolution/import_resolver.rs | 25 ++++++++++++---- .../name_resolution/import_ambiguous.fe | 16 ++++++++++ .../name_resolution/import_ambiguous.snap | 30 +++++++++++++++++++ crates/uitest/src/lib.rs | 1 - 5 files changed, 74 insertions(+), 9 deletions(-) create mode 100644 crates/uitest/fixtures/name_resolution/import_ambiguous.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_ambiguous.snap diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 1fdd0a902c..4031f2a9d0 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -151,11 +151,16 @@ impl NameResDiag { format!("`{ident}` is ambiguous"), prim_span.resolve(db), )]; - diags.extend(candidates.iter().enumerate().map(|(i, span)| { + let mut cand_spans: Vec<_> = candidates + .iter() + .filter_map(|span| span.resolve(db)) + .collect(); + cand_spans.sort_unstable(); + diags.extend(cand_spans.into_iter().enumerate().map(|(i, span)| { SubDiagnostic::new( LabelStyle::Secondary, - format!("candidate #{i}"), - span.resolve(db), + format!("candidate `#{i}`"), + Some(span), ) })); diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index a98953eb21..1e59d44f10 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -334,7 +334,25 @@ impl<'db> ImportResolver<'db> { let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); let binding = resolver.resolve_query(query); + // Try to resolve the last segment of the use path. + // We need to check the all domains of the resolution and make an appropriate + // error by considering all domain's result. + // The following cases should be considered: + // - If the resolution is not found in all domains and the use path is + // decidable, we can report an error. + // - If one of the domains has an error other than `NotFound` or `Invalid`, we + // can report the error and stop the resolution immediately. 
if i_use.is_base_resolved(self.db) { + for err in binding.errors() { + if !matches!( + err, + NameResolutionError::NotFound | NameResolutionError::Invalid + ) { + self.register_error(i_use, err.clone()); + return None; + } + } + if binding.is_empty() { if self.is_decidable(i_use) { self.register_error(i_use, NameResolutionError::NotFound); @@ -343,17 +361,14 @@ impl<'db> ImportResolver<'db> { return Some(IUseResolution::Unchanged(i_use.clone())); } } - if self.is_decidable(i_use) { - for err in binding.errors() { - self.register_error(i_use, err.clone()); - } - } + for res in binding.iter() { if res.is_external(self.db, i_use) || res.is_derived_from_glob() { self.suspicious_imports.insert(i_use.use_); break; } } + return Some(IUseResolution::Full(binding)); } diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous.fe b/crates/uitest/fixtures/name_resolution/import_ambiguous.fe new file mode 100644 index 0000000000..38b1161766 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous.fe @@ -0,0 +1,16 @@ +use foo::* +pub use S + +mod foo { + pub use inner1::* + pub use inner2::* + pub use S + + + pub mod inner1 { + struct S {} + } + mod inner2 { + struct S {} + } +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous.snap b/crates/uitest/fixtures/name_resolution/import_ambiguous.snap new file mode 100644 index 0000000000..de87ba263d --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_ambiguous.fe +--- +error[2-0004]: `S` is ambiguous + ┌─ import_ambiguous.fe:2:9 + │ + 2 │ pub use S + │ ^ `S` is ambiguous + · +11 │ struct S {} + │ - candidate `#0` + · +14 │ struct S {} + │ - candidate `#1` + +error[2-0004]: `S` is ambiguous + ┌─ import_ambiguous.fe:7:13 + │ + 7 │ pub use S + │ ^ `S` is ambiguous + · +11 │ struct S {} + │ - candidate `#0` + · +14 │ struct S {} + │ - candidate `#1` + + diff --git a/crates/uitest/src/lib.rs b/crates/uitest/src/lib.rs index 07da22bf9d..3ffd3f66e7 100644 --- a/crates/uitest/src/lib.rs +++ b/crates/uitest/src/lib.rs @@ -29,7 +29,6 @@ mod wasm { #[dir_test( dir: "$CARGO_MANIFEST_DIR/fixtures/name_resolution", glob: "*.fe", - postfix: "wasm" )] #[dir_test_attr(#[wasm_bindgen_test])] fn run_name_resolution(fixture: Fixture<&str>) { From 9ea48dc28ae592aa0dfa4b590a1355922663705b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 29 Jun 2023 17:57:20 +0200 Subject: [PATCH 195/678] Allow uitest to run on wasm --- crates/driver2/src/lib.rs | 5 ++--- .../{src/lib.rs => tests/name_resolution.rs} | 21 ++++++++++--------- 2 files changed, 13 insertions(+), 13 deletions(-) rename crates/uitest/{src/lib.rs => tests/name_resolution.rs} (61%) diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index 309b304b69..ad20c6e200 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -43,7 +43,7 @@ pub struct DriverDataBase { impl DriverDataBase { // TODO: An temporary implementation for ui testing. 
- pub fn run_on_file(&mut self, file_path: &path::Path) { + pub fn run_on_file(&mut self, file_path: &path::Path, source: &str) { self.diags.clear(); let kind = IngotKind::StandAlone; @@ -59,8 +59,7 @@ impl DriverDataBase { ); let file_name = root_file.file_name().unwrap().to_str().unwrap(); - let file_content = std::fs::read_to_string(root_file).unwrap(); - let file = InputFile::new(self, ingot, file_name.into(), file_content); + let file = InputFile::new(self, ingot, file_name.into(), source.to_string()); ingot.set_root_file(self, file); ingot.set_files(self, [file].into()); diff --git a/crates/uitest/src/lib.rs b/crates/uitest/tests/name_resolution.rs similarity index 61% rename from crates/uitest/src/lib.rs rename to crates/uitest/tests/name_resolution.rs index 3ffd3f66e7..ed3039c395 100644 --- a/crates/uitest/src/lib.rs +++ b/crates/uitest/tests/name_resolution.rs @@ -8,15 +8,10 @@ use fe_compiler_test_utils::snap_test; dir: "$CARGO_MANIFEST_DIR/fixtures/name_resolution", glob: "*.fe" )] -#[allow(unused)] fn run_name_resolution(fixture: Fixture<&str>) { - run_name_resolution_impl(fixture); -} - -fn run_name_resolution_impl(fixture: Fixture<&str>) { let mut driver = DriverDataBase::default(); let path = Path::new(fixture.path()); - driver.run_on_file(path); + driver.run_on_file(path, fixture.content()); let diags = driver.format_diags(); snap_test!(diags, fixture.path()); } @@ -27,11 +22,17 @@ mod wasm { use wasm_bindgen_test::wasm_bindgen_test; #[dir_test( - dir: "$CARGO_MANIFEST_DIR/fixtures/name_resolution", - glob: "*.fe", + dir: "$CARGO_MANIFEST_DIR/fixtures/name_resolution", + glob: "*.fe", + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] )] - #[dir_test_attr(#[wasm_bindgen_test])] fn run_name_resolution(fixture: Fixture<&str>) { - run_name_resolution_impl(fixture); + let mut driver = DriverDataBase::default(); + let path = Path::new(fixture.path()); + driver.run_on_file(path, fixture.content()); + let diags = driver.format_diags(); } } From d5057a1f71268bdbddc807d12674dcd45a4d8886 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 29 Jun 2023 21:39:09 +0200 Subject: [PATCH 196/678] Allow driver to run with custom pass manager --- crates/driver2/src/lib.rs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index ad20c6e200..a5af12f653 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -44,6 +44,17 @@ pub struct DriverDataBase { impl DriverDataBase { // TODO: An temporary implementation for ui testing. 
pub fn run_on_file(&mut self, file_path: &path::Path, source: &str) { + self.run_on_file_with_pass_manager(file_path, source, initialize_analysis_pass); + } + + pub fn run_on_file_with_pass_manager( + &mut self, + file_path: &path::Path, + source: &str, + pm_builder: F, + ) where + F: FnOnce(&DriverDataBase) -> AnalysisPassManager<'_>, + { self.diags.clear(); let kind = IngotKind::StandAlone; @@ -67,7 +78,7 @@ impl DriverDataBase { let top_mod = map_file_to_mod(self, file); self.diags = { - let mut pass_manager = initialize_analysis_pass(self); + let mut pass_manager = pm_builder(self); pass_manager.run_on_module(top_mod) }; } From 5d8f57cc48d7dc6664a8a1d5435ed8221975383c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 29 Jun 2023 23:13:54 +0200 Subject: [PATCH 197/678] Improve error message when use path contains invisible segment --- .../src/name_resolution/diagnostics.rs | 12 +- .../src/name_resolution/import_resolver.rs | 172 ++++++++---------- .../hir-analysis/src/name_resolution/mod.rs | 2 +- .../src/name_resolution/name_resolver.rs | 10 +- .../name_resolution/import_invisible.fe | 21 ++- .../name_resolution/import_invisible.snap | 24 ++- crates/uitest/tests/name_resolution.rs | 1 - 7 files changed, 130 insertions(+), 112 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 4031f2a9d0..34e2db78f5 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -10,8 +10,6 @@ use hir::{ use crate::HirAnalysisDb; -use super::name_resolver::NameRes; - #[salsa::accumulator] pub struct NameResolutionDiagAccumulator(pub(super) NameResDiag); @@ -39,10 +37,12 @@ impl NameResDiag { Self::NotFound(span, ident) } - pub fn invisible(db: &dyn HirAnalysisDb, span: DynLazySpan, resolved: NameRes) -> Self { - let name = resolved.kind.name(db).unwrap(); - let name_span = resolved.kind.name_span(db); - Self::Invisible(span, name, name_span) + pub fn invisible( + span: DynLazySpan, + name: IdentId, + invisible_span: Option, + ) -> Self { + Self::Invisible(span, name, invisible_span) } pub fn ambiguous(span: DynLazySpan, ident: IdentId, candidates: Vec) -> Self { diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 1e59d44f10..9bcf56779d 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -332,66 +332,88 @@ impl<'db> ImportResolver<'db> { }; let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); - let binding = resolver.resolve_query(query); - - // Try to resolve the last segment of the use path. - // We need to check the all domains of the resolution and make an appropriate - // error by considering all domain's result. - // The following cases should be considered: - // - If the resolution is not found in all domains and the use path is - // decidable, we can report an error. - // - If one of the domains has an error other than `NotFound` or `Invalid`, we - // can report the error and stop the resolution immediately. 
- if i_use.is_base_resolved(self.db) { - for err in binding.errors() { - if !matches!( - err, - NameResolutionError::NotFound | NameResolutionError::Invalid - ) { - self.register_error(i_use, err.clone()); - return None; - } - } + let mut binding = resolver.resolve_query(query); - if binding.is_empty() { - if self.is_decidable(i_use) { - self.register_error(i_use, NameResolutionError::NotFound); - return None; - } else { - return Some(IUseResolution::Unchanged(i_use.clone())); - } - } - - for res in binding.iter() { - if res.is_external(self.db, i_use) || res.is_derived_from_glob() { - self.suspicious_imports.insert(i_use.use_); - break; + // Filter out invisible resolutions. + let mut invisible_span = None; + binding.resolutions.retain(|_, res| { + let Ok(res) = res else { + return true; + }; + match res.scope() { + Some(scope) => { + if let NameDerivation::GlobImported(use_) + | NameDerivation::NamedImported(use_) = res.derivation + { + if !is_use_visible(self.db, i_use.original_scope, use_) { + invisible_span.get_or_insert_with(|| use_.lazy_span().into()); + false + } else { + true + } + } else if is_scope_visible(self.db, i_use.original_scope, scope) { + true + } else { + if scope.is_importable() { + if let Some(span) = res.kind.name_span(self.db) { + invisible_span.get_or_insert(span); + } + } + false + } } + None => true, } + }); - return Some(IUseResolution::Full(binding)); + // Filter out irrelevant resolutions if the segment is not the last one. + if !i_use.is_base_resolved(self.db) { + binding.filter_by_domain(NameDomain::Item); } - let res = match binding.res_in_domain(NameDomain::Item) { - Ok(res) => res, - Err(NameResolutionError::NotFound) if !self.is_decidable(i_use) => { - return Some(IUseResolution::Unchanged(i_use.clone())) - } - Err(err) => { + for err in binding.errors() { + if !matches!( + err, + NameResolutionError::NotFound | NameResolutionError::Invalid + ) { self.register_error(i_use, err.clone()); return None; } - }; + } + if binding.is_empty() { + if self.is_decidable(i_use) { + let err = if let Some(invisible_span) = invisible_span { + NameResolutionError::Invisible(invisible_span.into()) + } else { + NameResolutionError::NotFound + }; + self.register_error(i_use, err); + return None; + } else { + return Some(IUseResolution::Unchanged(i_use.clone())); + }; + } - if res.is_external(self.db, i_use) || res.is_derived_from_glob() { - self.suspicious_imports.insert(i_use.use_); + // If the resolution is derived from glob import or external crate, we have to + // insert the use into the `suspicious_imports` set to verify the ambiguity + // after the algorithm reaches the fixed point. 
+ for res in binding.iter() { + if res.is_external(self.db, i_use) || res.is_derived_from_glob() { + self.suspicious_imports.insert(i_use.use_); + break; + } } - let next_i_use = i_use.proceed(res.clone()); - if next_i_use.is_base_resolved(self.db) { - Some(IUseResolution::BasePath(next_i_use)) + if i_use.is_base_resolved(self.db) { + Some(IUseResolution::Full(binding)) } else { - Some(IUseResolution::Partial(next_i_use)) + let res = binding.res_by_domain(NameDomain::Item).clone().unwrap(); + let next_i_use = i_use.proceed(res); + if next_i_use.is_base_resolved(self.db) { + Some(IUseResolution::BasePath(next_i_use)) + } else { + Some(IUseResolution::Partial(next_i_use)) + } } } @@ -414,7 +436,7 @@ impl<'db> ImportResolver<'db> { fn try_finalize_named_use(&mut self, i_use: IntermediateUse) -> bool { debug_assert!(i_use.is_base_resolved(self.db)); - let mut binding = match self.resolve_segment(&i_use) { + let binding = match self.resolve_segment(&i_use) { Some(IUseResolution::Full(binding)) => binding, Some(IUseResolution::Unchanged(_)) => { return false; @@ -426,38 +448,9 @@ impl<'db> ImportResolver<'db> { return true; } }; - let mut representative_invisible_res = None; - binding.resolutions.retain(|_, res| { - let Ok(res) = res else { - return false; - }; - match res.scope() { - Some(scope) => { - if is_scope_visible(self.db, i_use.original_scope, scope) { - true - } else { - if scope.is_importable() { - representative_invisible_res.get_or_insert_with(|| res.clone()); - } - false - } - } - None => true, - } - }); let n_res = binding.len(); let is_decidable = self.is_decidable(&i_use); - if n_res == 0 && is_decidable { - let error = if let Some(invisible_res) = representative_invisible_res { - NameResolutionError::Invisible(invisible_res) - } else { - NameResolutionError::NotFound - }; - self.register_error(&i_use, error); - return true; - } - if *self.num_imported_res.entry(i_use.use_).or_default() == n_res { return is_decidable; } @@ -567,11 +560,11 @@ impl<'db> ImportResolver<'db> { )); } - NameResolutionError::Invisible(name_res) => { + NameResolutionError::Invisible(invisible_span) => { self.accumulated_errors.push(NameResDiag::invisible( - self.db, i_use.current_segment_span(), - name_res, + i_use.current_segment_ident(self.db).unwrap(), + invisible_span, )); } } @@ -642,15 +635,10 @@ impl<'db> ImportResolver<'db> { }; if i_uses.is_empty() { - return ScopeState::Closed; - } - for i_use in i_uses { - if i_use.is_exported(self.db) { - return ScopeState::Open; - } + ScopeState::Closed + } else { + ScopeState::Open } - - ScopeState::Semi } /// Returns `true` if the next segment of the intermediate use is @@ -759,10 +747,6 @@ enum ScopeState { // The scope is open, meaning that the scope needs further resolution. Open, - /// The scope is partially resolved, meaning that the exports in the scope - /// is fully resolved but the imports are partially resolved. - Semi, - /// The scope is closed, meaning that the all imports in the scope is fully /// resolved. 
Closed, @@ -807,10 +791,6 @@ impl IntermediateUse { } } - fn is_exported(&self, db: &dyn HirAnalysisDb) -> bool { - self.use_.vis(db.as_hir_db()).is_pub() - } - fn is_glob(&self, db: &dyn HirAnalysisDb) -> bool { self.use_.is_glob(db.as_hir_db()) } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 98423e9569..a383338d83 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -175,7 +175,7 @@ impl<'db, 'a> PathResolver<'db, 'a> { let domain = NameDomain::from_scope(scope); let binding = self.inner.resolve_query(query); - match binding.res_in_domain(domain) { + match binding.res_by_domain(domain) { Ok(_) => {} Err(NameResolutionError::Ambiguous(cands)) => { diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index b2d1972a42..a35c94d0b3 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -124,7 +124,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { }; let query = NameQuery::new(*ident, scope); let binding = self.resolve_query(query); - scope = match binding.res_in_domain(NameDomain::Item) { + scope = match binding.res_by_domain(NameDomain::Item) { Ok(res) => { if res.is_type(self.db) { return Ok(ResolvedPath::Partial { @@ -502,12 +502,16 @@ impl NameBinding { } /// Returns the resolution of the given `domain`. - pub fn res_in_domain(&self, domain: NameDomain) -> &NameResolutionResult { + pub fn res_by_domain(&self, domain: NameDomain) -> &NameResolutionResult { self.resolutions .get(&domain) .unwrap_or(&Err(NameResolutionError::NotFound)) } + pub fn filter_by_domain(&mut self, domain: NameDomain) { + self.resolutions.retain(|d, _| *d == domain); + } + /// Merge the `resolutions` into the set. If name conflict happens, the old /// resolution will be returned, otherwise `None` will be returned. pub(super) fn merge<'a>(&mut self, resolutions: impl Iterator) { @@ -781,7 +785,7 @@ pub enum NameResolutionError { Invalid, /// The name is found, but it's not visible from the reference site. - Invisible(NameRes), + Invisible(Option), /// The name is found, but it's ambiguous. 
Ambiguous(Vec), diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.fe b/crates/uitest/fixtures/name_resolution/import_invisible.fe index e7b986e3cb..1d96bf0883 100644 --- a/crates/uitest/fixtures/name_resolution/import_invisible.fe +++ b/crates/uitest/fixtures/name_resolution/import_invisible.fe @@ -1,5 +1,22 @@ use foo::Bar - mod foo { struct Bar {} -} \ No newline at end of file +} + +use foo2::Bar +mod foo2 { + use foo3::Bar + + mod foo3 { + pub struct Bar {} + } +} + +use foo3::foo4::Bar +mod foo3 { + mod foo4 { + pub struct Bar {} + } +} + + diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.snap b/crates/uitest/fixtures/name_resolution/import_invisible.snap index 6ecebaf192..4a289175bf 100644 --- a/crates/uitest/fixtures/name_resolution/import_invisible.snap +++ b/crates/uitest/fixtures/name_resolution/import_invisible.snap @@ -1,5 +1,5 @@ --- -source: crates/uitest/src/lib.rs +source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/import_invisible.fe --- @@ -8,8 +8,26 @@ error[2-0003]: `Bar` is not visible │ 1 │ use foo::Bar │ ^^^ `Bar` is not visible - · -4 │ struct Bar {} +2 │ mod foo { +3 │ struct Bar {} │ --- `Bar is defined here +error[2-0003]: `Bar` is not visible + ┌─ import_invisible.fe:6:11 + │ +6 │ use foo2::Bar + │ ^^^ `Bar` is not visible +7 │ mod foo2 { +8 │ use foo3::Bar + │ ------------- `Bar is defined here + +error[2-0003]: `foo4` is not visible + ┌─ import_invisible.fe:15:11 + │ +15 │ use foo3::foo4::Bar + │ ^^^^ `foo4` is not visible +16 │ mod foo3 { +17 │ mod foo4 { + │ ---- `foo4 is defined here + diff --git a/crates/uitest/tests/name_resolution.rs b/crates/uitest/tests/name_resolution.rs index ed3039c395..19d1ff0dce 100644 --- a/crates/uitest/tests/name_resolution.rs +++ b/crates/uitest/tests/name_resolution.rs @@ -33,6 +33,5 @@ mod wasm { let mut driver = DriverDataBase::default(); let path = Path::new(fixture.path()); driver.run_on_file(path, fixture.content()); - let diags = driver.format_diags(); } } From 0d72f63d496dea907d47cabffb41140c4667e794 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 7 Jul 2023 18:47:15 +0200 Subject: [PATCH 198/678] Implement `EarlyPathResolver` --- .../src/name_resolution/import_resolver.rs | 4 +- .../hir-analysis/src/name_resolution/mod.rs | 23 +- .../src/name_resolution/name_resolver.rs | 821 ++++++++---------- .../src/name_resolution/path_resolver.rs | 233 +++++ .../src/name_resolution/visibility_checker.rs | 16 +- crates/hir-analysis/tests/import.rs | 4 +- crates/hir/src/hir_def/ident.rs | 1 + crates/hir/src/hir_def/path.rs | 3 +- crates/hir/src/hir_def/scope_graph.rs | 2 +- crates/hir/src/visitor.rs | 2 +- 10 files changed, 629 insertions(+), 480 deletions(-) create mode 100644 crates/hir-analysis/src/name_resolution/path_resolver.rs diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 9bcf56779d..be1fe323cc 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -12,7 +12,7 @@ use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ - name_resolution::visibility_checker::{is_scope_visible, is_use_visible}, + name_resolution::visibility_checker::{is_scope_visible_from, is_use_visible}, HirAnalysisDb, }; @@ -351,7 +351,7 @@ impl<'db> ImportResolver<'db> { } else { true } - } else if is_scope_visible(self.db, 
i_use.original_scope, scope) { + } else if is_scope_visible_from(self.db, i_use.original_scope, scope) { true } else { if scope.is_importable() { diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index a383338d83..0ebfbe533d 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -1,3 +1,15 @@ +pub mod diagnostics; + +mod import_resolver; +mod name_resolver; +mod path_resolver; +mod visibility_checker; + +pub use import_resolver::ResolvedImports; +pub use name_resolver::{ + NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, QueryDirective, +}; + use hir::{ analysis_pass::ModuleAnalysisPass, diagnostics::DiagnosticVoucher, @@ -17,17 +29,10 @@ use crate::HirAnalysisDb; use self::{ diagnostics::{NameResDiag, NameResolutionDiagAccumulator}, - import_resolver::{DefaultImporter, ResolvedImports}, - name_resolver::{ - NameDomain, NameQuery, NameResolutionError, QueryDirective, ResolvedQueryCacheStore, - }, + import_resolver::DefaultImporter, + name_resolver::{NameResolutionError, ResolvedQueryCacheStore}, }; -pub mod diagnostics; -pub mod import_resolver; -pub mod name_resolver; -pub mod visibility_checker; - pub struct ImportAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index a35c94d0b3..2793a619f2 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -12,7 +12,7 @@ use hir::{ AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, ScopeId, SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, VariantEdge, }, - IdentId, ItemKind, Partial, Use, + IdentId, ItemKind, Use, }, span::DynLazySpan, }; @@ -22,367 +22,9 @@ use crate::HirAnalysisDb; use super::{ import_resolver::Importer, - visibility_checker::{is_scope_visible, is_use_visible}, + visibility_checker::{is_scope_visible_from, is_use_visible}, }; -pub struct NameResolver<'db, 'a> { - db: &'db dyn HirAnalysisDb, - importer: &'a dyn Importer, - cache_store: ResolvedQueryCacheStore, -} - -impl<'db, 'a> NameResolver<'db, 'a> { - pub(super) fn new(db: &'db dyn HirAnalysisDb, importer: &'a dyn Importer) -> Self { - Self { - db, - importer, - cache_store: Default::default(), - } - } - - pub(super) fn new_no_cache(db: &'db dyn HirAnalysisDb, importer: &'a dyn Importer) -> Self { - let cache_store = ResolvedQueryCacheStore { - no_cache: true, - ..Default::default() - }; - Self { - db, - importer, - cache_store, - } - } - - pub(super) fn into_cache_store(self) -> ResolvedQueryCacheStore { - self.cache_store - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ResolvedPath { - Full(NameBinding), - - /// The path is partially resolved; this means that the `resolved` is a type - /// and the following segments depend on type to resolve. - /// These unresolved parts are resolved in the later type inference and - /// trait solving phases. 
- Partial { - resolved: NameRes, - unresolved_from: usize, - }, -} - -impl ResolvedPath { - pub fn partial(resolved: NameRes, unresolved_from: usize) -> Self { - Self::Partial { - resolved, - unresolved_from, - } - } -} - -#[derive(Debug, derive_more::Display, Clone, PartialEq, Eq, Hash, derive_more::Error)] -#[display(fmt = "failed_at: {failed_at}, kind: {kind}")] -pub struct PathResolutionError { - pub kind: NameResolutionError, - pub failed_at: usize, -} -impl PathResolutionError { - fn new(kind: NameResolutionError, failed_at: usize) -> Self { - Self { kind, failed_at } - } -} - -impl<'db, 'a> NameResolver<'db, 'a> { - /// Resolve the path segments to a set of possible resolutions. - /// A path can be resolved to multiple resolutions because we have multiple - /// name domains. - /// - /// For example, the `foo::FOO` can be resolved to both `const - /// FOO` and `struct FOO` in the following code without any error: - /// ```fe - /// use foo::FOO - /// - /// mod foo { - /// pub const FOO: i32 = 1 - /// pub struct FOO {} - /// } - /// ``` - pub fn resolve_segments( - &mut self, - segments: Vec>, - mut scope: ScopeId, - directive: QueryDirective, - ) -> Result { - if segments.is_empty() { - return Err(PathResolutionError::new(NameResolutionError::Invalid, 0)); - } - - let last_seg_idx = segments.len() - 1; - for (i, seg) in segments[0..last_seg_idx].iter().enumerate() { - let Partial::Present(ident) = seg else { - return Err(PathResolutionError::new(NameResolutionError::Invalid, i)); - }; - let query = NameQuery::new(*ident, scope); - let binding = self.resolve_query(query); - scope = match binding.res_by_domain(NameDomain::Item) { - Ok(res) => { - if res.is_type(self.db) { - return Ok(ResolvedPath::Partial { - resolved: res.clone(), - unresolved_from: i + 1, - }); - } else if let Some(scope) = res.scope() { - scope - } else { - return Err(PathResolutionError::new( - NameResolutionError::NotFound, - i + 1, - )); - } - } - Err(err) => { - return Err(PathResolutionError::new(err.clone(), i)); - } - }; - } - - let Partial::Present(ident) = segments[last_seg_idx] else { - return Err(PathResolutionError::new(NameResolutionError::Invalid, last_seg_idx)); - }; - let query = NameQuery::with_directive(ident, scope, directive); - let resolved = self.resolve_query(query); - Ok(ResolvedPath::Full(resolved)) - } - - pub fn resolve_query(&mut self, query: NameQuery) -> NameBinding { - // If the query is already resolved, return the cached result. - if let Some(resolved) = self.cache_store.get(query) { - return resolved.clone(); - }; - - let mut binding = NameBinding::default(); - - // The shadowing rule is - // `$ > NamedImports > GlobImports > Lex > external ingot > builtin types`, - // where `$` means current scope. - // This ordering means that greater one shadows lower ones in the same domain. - let mut parent = None; - - // 1. Look for the name in the current scope. - let mut found_scopes = FxHashSet::default(); - for edge in query.scope.edges(self.db.as_hir_db()) { - match edge.kind.propagate(&query) { - PropagationResult::Terminated => { - if found_scopes.insert(edge.dest) { - let res = NameRes::new_scope( - edge.dest, - NameDomain::from_scope(edge.dest), - NameDerivation::Def, - ); - binding.push(&res); - } - } - - PropagationResult::Continuation => { - debug_assert!(parent.is_none()); - parent = Some(edge.dest); - } - - PropagationResult::UnPropagated => {} - } - } - - // 2. Look for the name in the named imports of the current scope. 
- if let Some(imported) = self - .importer - .named_imports(self.db, query.scope) - .and_then(|imports| imports.get(&query.name)) - { - binding.merge(imported.iter()); - } - - // 3. Look for the name in the glob imports. - if query.directive.allow_glob { - if let Some(imported) = self.importer.glob_imports(self.db, query.scope) { - for res in imported.name_res_for(query.name) { - binding.push(res); - } - } - } - - // 4. Look for the name in the lexical scope if it exists. - if let Some(parent) = parent { - let mut query_for_parent = query; - query_for_parent.scope = parent; - query_for_parent.directive.disallow_external(); - - let mut resolved = self.resolve_query(query_for_parent); - resolved.set_lexed_derivation(); - binding.merge(resolved.iter()); - } - - if !query.directive.allow_external { - return self.finalize_query_result(query, binding); - } - - // 5. Look for the name in the external ingots. - query - .scope - .top_mod(self.db.as_hir_db()) - .ingot(self.db.as_hir_db()) - .external_ingots(self.db.as_hir_db()) - .iter() - .for_each(|(name, root_mod)| { - if *name == query.name { - // We don't care about the result of `push` because we assume ingots are - // guaranteed to be unique. - binding.push(&NameRes::new_scope( - ScopeId::root(*root_mod), - NameDomain::Item, - NameDerivation::External, - )) - } - }); - - // 6. Look for the name in the builtin types. - for &prim in PrimTy::all_types() { - // We don't care about the result of `push` because we assume builtin types are - // guaranteed to be unique. - if query.name == prim.name() { - binding.push(&NameRes::new_prim(prim)); - } - } - - self.finalize_query_result(query, binding) - } - - /// Collect all visible resolutions in the given `target` scope. - /// - /// The function follows the shadowing rule, meaning the same name in the - /// same domain is properly shadowed. Also, this function guarantees that - /// the collected resolutions are unique in terms of its name and resolved - /// scope. - /// - /// On the other hand, the function doesn't cause any error and collect all - /// resolutions even if they are in the same domain. The reason - /// for this is - /// - Ambiguous error should be reported lazily, meaning it should be - /// reported when the resolution is actually used. - /// - The function is used for glob imports, so it's necessary to return - /// monotonously increasing results. Also, we can't arbitrarily choose the - /// possible resolution from multiple candidates to avoid hiding - /// ambiguity. That's also the reason why we can't use `NameBinding` and - /// `NameBinding::merge` in this function. - /// - /// The below examples demonstrates the second point. - /// We need to report ambiguous error at `const C: S = S` because `S` is - /// ambiguous, on the other hand, we need NOT to report ambiguous error in - /// `foo` modules because `S` is not referred to in the module. 
- /// - /// ```fe - /// use foo::* - /// const C: S = S - /// - /// mod foo { - /// pub use inner1::* - /// pub use inner2::* - /// - /// mod inner1 { - /// pub struct S {} - /// } - /// mod inner2 { - /// pub struct S {} - /// } - /// } - /// ``` - pub(super) fn collect_all_resolutions_for_glob( - &mut self, - target: ScopeId, - ref_scope: ScopeId, - unresolved_named_imports: FxHashSet, - ) -> FxHashMap> { - let mut res_collection: FxHashMap> = FxHashMap::default(); - let mut found_domains: FxHashMap = FxHashMap::default(); - let mut found_kinds: FxHashSet<(IdentId, NameResKind)> = FxHashSet::default(); - - for edge in target.edges(self.db.as_hir_db()) { - let scope = match edge.kind.propagate_glob() { - PropagationResult::Terminated => edge.dest, - _ => { - continue; - } - }; - - let name = scope.name(self.db.as_hir_db()).unwrap(); - if !found_kinds.insert((name, scope.into())) { - continue; - } - let res = NameRes::new_scope(scope, NameDomain::from_scope(scope), NameDerivation::Def); - - *found_domains.entry(name).or_default() |= res.domain as u8; - res_collection.entry(name).or_default().push(res); - } - - let mut found_domains_after_named = found_domains.clone(); - if let Some(named_imports) = self.importer.named_imports(self.db, target) { - for (&name, import) in named_imports { - let found_domain = found_domains.get(&name).copied().unwrap_or_default(); - for res in import.iter().filter(|res| { - if let NameDerivation::NamedImported(use_) = res.derivation { - is_use_visible(self.db, ref_scope, use_) - } else { - false - } - }) { - if (found_domain & res.domain as u8 != 0) - || !found_kinds.insert((name, res.kind)) - { - continue; - } - - *found_domains_after_named.entry(name).or_default() |= res.domain as u8; - res_collection.entry(name).or_default().push(res.clone()); - } - } - } - - if let Some(glob_imports) = self.importer.glob_imports(self.db, target) { - for (&use_, resolutions) in glob_imports.iter() { - if !is_use_visible(self.db, ref_scope, use_) { - continue; - } - for (&name, res_for_name) in resolutions.iter() { - if unresolved_named_imports.contains(&name) { - continue; - } - - for res in res_for_name.iter() { - let seen_domain = found_domains_after_named - .get(&name) - .copied() - .unwrap_or_default(); - - if (seen_domain & res.domain as u8 != 0) - || !found_kinds.insert((name, res.kind)) - { - continue; - } - res_collection.entry(name).or_default().push(res.clone()); - } - } - } - } - - res_collection - } - - /// Finalize the query result and cache it to the cache store. - fn finalize_query_result(&mut self, query: NameQuery, binding: NameBinding) -> NameBinding { - self.cache_store.cache_result(query, binding.clone()); - binding - } -} - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct NameQuery { /// The name to be resolved. @@ -432,8 +74,8 @@ pub struct QueryDirective { impl QueryDirective { /// Make a new query directive with the default settings. - /// The default setting is to allow lexical scope lookup and look up names - /// in the `Item` domain. + /// The default setting is to lookup the name in the lexical scope and all + /// imports and external ingots. 
pub fn new() -> Self { Self { allow_lex: true, @@ -648,124 +290,389 @@ impl NameRes { }; match scope_or_use { - Either::Left(target_scope) => is_scope_visible(db, from, target_scope), + Either::Left(target_scope) => is_scope_visible_from(db, target_scope, from), Either::Right(use_) => is_use_visible(db, from, use_), } - } + } + + pub fn pretty_path(&self, db: &dyn HirAnalysisDb) -> Option { + match self.kind { + NameResKind::Scope(scope) => scope.pretty_path(db.as_hir_db()), + NameResKind::Prim(prim) => prim.name().data(db.as_hir_db()).clone().into(), + } + } + + pub(super) fn derived_from(&self, db: &dyn HirAnalysisDb) -> Option { + match self.derivation { + NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { + self.kind.name_span(db) + } + NameDerivation::NamedImported(use_) => use_.imported_name_span(db.as_hir_db()), + NameDerivation::GlobImported(use_) => use_.glob_span(db.as_hir_db()), + NameDerivation::Lex(ref inner) => { + let mut inner = inner; + while let NameDerivation::Lex(parent) = inner.as_ref() { + inner = parent; + } + Self { + derivation: inner.as_ref().clone(), + ..self.clone() + } + .derived_from(db) + } + } + } + + pub(super) fn new_from_scope( + scope: ScopeId, + domain: NameDomain, + derivation: NameDerivation, + ) -> Self { + Self { + kind: scope.into(), + derivation, + domain, + } + } + + fn new_prim(prim: PrimTy) -> Self { + Self { + kind: prim.into(), + derivation: NameDerivation::Prim, + domain: NameDomain::Item, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, derive_more::From)] +pub enum NameResKind { + Scope(ScopeId), + Prim(PrimTy), +} + +impl NameResKind { + pub fn name_span(self, db: &dyn HirAnalysisDb) -> Option { + match self { + NameResKind::Scope(scope) => scope.name_span(db.as_hir_db()), + NameResKind::Prim(_) => None, + } + } + + pub fn name(self, db: &dyn HirAnalysisDb) -> Option { + match self { + NameResKind::Scope(scope) => scope.name(db.as_hir_db()), + NameResKind::Prim(prim) => prim.name().into(), + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum NameDerivation { + Def, + NamedImported(Use), + GlobImported(Use), + Lex(Box), + External, + Prim, +} + +impl NameDerivation { + fn lexed(&mut self) { + let inner = mem::replace(self, NameDerivation::Def); + *self = NameDerivation::Lex(Box::new(inner)); + } +} + +impl PartialOrd for NameDerivation { + fn partial_cmp(&self, other: &Self) -> Option { + match (self, other) { + (NameDerivation::Def, NameDerivation::Def) => Some(cmp::Ordering::Equal), + (NameDerivation::Def, _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::Def) => Some(cmp::Ordering::Less), + + (NameDerivation::NamedImported(_), NameDerivation::NamedImported(_)) => { + Some(cmp::Ordering::Equal) + } + (NameDerivation::NamedImported(_), _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::NamedImported(_)) => Some(cmp::Ordering::Less), + + (NameDerivation::GlobImported(_), NameDerivation::GlobImported(_)) => { + Some(cmp::Ordering::Equal) + } + (NameDerivation::GlobImported(_), _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::GlobImported(_)) => Some(cmp::Ordering::Less), + + (NameDerivation::Lex(lhs), NameDerivation::Lex(rhs)) => lhs.partial_cmp(rhs), + (NameDerivation::Lex(_), _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::Lex(_)) => Some(cmp::Ordering::Less), + + (NameDerivation::External, NameDerivation::External) => Some(cmp::Ordering::Equal), + (NameDerivation::External, _) => Some(cmp::Ordering::Greater), + (_, NameDerivation::External) => 
Some(cmp::Ordering::Less), + + (NameDerivation::Prim, NameDerivation::Prim) => Some(cmp::Ordering::Equal), + } + } +} + +pub(crate) struct NameResolver<'db, 'a> { + db: &'db dyn HirAnalysisDb, + importer: &'a dyn Importer, + cache_store: ResolvedQueryCacheStore, +} + +impl<'db, 'a> NameResolver<'db, 'a> { + pub(super) fn new(db: &'db dyn HirAnalysisDb, importer: &'a dyn Importer) -> Self { + Self { + db, + importer, + cache_store: Default::default(), + } + } + + pub(super) fn new_no_cache(db: &'db dyn HirAnalysisDb, importer: &'a dyn Importer) -> Self { + let cache_store = ResolvedQueryCacheStore { + no_cache: true, + ..Default::default() + }; + Self { + db, + importer, + cache_store, + } + } + + pub(super) fn into_cache_store(self) -> ResolvedQueryCacheStore { + self.cache_store + } + + pub(crate) fn resolve_query(&mut self, query: NameQuery) -> NameBinding { + // If the query is already resolved, return the cached result. + if let Some(resolved) = self.cache_store.get(query) { + return resolved.clone(); + }; + + let mut binding = NameBinding::default(); + + // The shadowing rule is + // `$ > NamedImports > GlobImports > Lex > external ingot > builtin types`, + // where `$` means current scope. + // This ordering means that greater one shadows lower ones in the same domain. + let mut parent = None; + + // 1. Look for the name in the current scope. + let mut found_scopes = FxHashSet::default(); + for edge in query.scope.edges(self.db.as_hir_db()) { + match edge.kind.propagate(&query) { + PropagationResult::Terminated => { + if found_scopes.insert(edge.dest) { + let res = NameRes::new_from_scope( + edge.dest, + NameDomain::from_scope(edge.dest), + NameDerivation::Def, + ); + binding.push(&res); + } + } + + PropagationResult::Continuation => { + debug_assert!(parent.is_none()); + parent = Some(edge.dest); + } + + PropagationResult::UnPropagated => {} + } + } - pub fn pretty_path(&self, db: &dyn HirAnalysisDb) -> Option { - match self.kind { - NameResKind::Scope(scope) => scope.pretty_path(db.as_hir_db()), - NameResKind::Prim(prim) => prim.name().data(db.as_hir_db()).into(), + // 2. Look for the name in the named imports of the current scope. + if let Some(imported) = self + .importer + .named_imports(self.db, query.scope) + .and_then(|imports| imports.get(&query.name)) + { + binding.merge(imported.iter()); } - } - pub(super) fn derived_from(&self, db: &dyn HirAnalysisDb) -> Option { - match self.derivation { - NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { - self.kind.name_span(db) - } - NameDerivation::NamedImported(use_) => use_.imported_name_span(db.as_hir_db()), - NameDerivation::GlobImported(use_) => use_.glob_span(db.as_hir_db()), - NameDerivation::Lex(ref inner) => { - let mut inner = inner; - while let NameDerivation::Lex(parent) = inner.as_ref() { - inner = parent; - } - Self { - derivation: inner.as_ref().clone(), - ..self.clone() + // 3. Look for the name in the glob imports. + if query.directive.allow_glob { + if let Some(imported) = self.importer.glob_imports(self.db, query.scope) { + for res in imported.name_res_for(query.name) { + binding.push(res); } - .derived_from(db) } } - } - fn new_scope(scope: ScopeId, domain: NameDomain, derivation: NameDerivation) -> Self { - Self { - kind: scope.into(), - derivation, - domain, + // 4. Look for the name in the lexical scope if it exists. 
+ if let Some(parent) = parent { + let mut query_for_parent = query; + query_for_parent.scope = parent; + query_for_parent.directive.disallow_external(); + + let mut resolved = self.resolve_query(query_for_parent); + resolved.set_lexed_derivation(); + binding.merge(resolved.iter()); } - } - fn new_prim(prim: PrimTy) -> Self { - Self { - kind: prim.into(), - derivation: NameDerivation::Prim, - domain: NameDomain::Item, + if !query.directive.allow_external { + return self.finalize_query_result(query, binding); } - } -} -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, derive_more::From)] -pub enum NameResKind { - Scope(ScopeId), - Prim(PrimTy), -} + // 5. Look for the name in the external ingots. + query + .scope + .top_mod(self.db.as_hir_db()) + .ingot(self.db.as_hir_db()) + .external_ingots(self.db.as_hir_db()) + .iter() + .for_each(|(name, root_mod)| { + if *name == query.name { + // We don't care about the result of `push` because we assume ingots are + // guaranteed to be unique. + binding.push(&NameRes::new_from_scope( + ScopeId::root(*root_mod), + NameDomain::Item, + NameDerivation::External, + )) + } + }); -impl NameResKind { - pub fn name_span(self, db: &dyn HirAnalysisDb) -> Option { - match self { - NameResKind::Scope(scope) => scope.name_span(db.as_hir_db()), - NameResKind::Prim(_) => None, + // 6. Look for the name in the builtin types. + for &prim in PrimTy::all_types() { + // We don't care about the result of `push` because we assume builtin types are + // guaranteed to be unique. + if query.name == prim.name() { + binding.push(&NameRes::new_prim(prim)); + } } - } - pub fn name(self, db: &dyn HirAnalysisDb) -> Option { - match self { - NameResKind::Scope(scope) => scope.name(db.as_hir_db()), - NameResKind::Prim(prim) => prim.name().into(), - } + self.finalize_query_result(query, binding) } -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub enum NameDerivation { - Def, - NamedImported(Use), - GlobImported(Use), - Lex(Box), - External, - Prim, -} -impl NameDerivation { - fn lexed(&mut self) { - let inner = mem::replace(self, NameDerivation::Def); - *self = NameDerivation::Lex(Box::new(inner)); - } -} + /// Collect all visible resolutions in the given `target` scope. + /// + /// The function follows the shadowing rule, meaning the same name in the + /// same domain is properly shadowed. Also, this function guarantees that + /// the collected resolutions are unique in terms of its name and resolved + /// scope. + /// + /// On the other hand, the function doesn't cause any error and collect all + /// resolutions even if they are in the same domain. The reason + /// for this is + /// - Ambiguous error should be reported lazily, meaning it should be + /// reported when the resolution is actually used. + /// - The function is used for glob imports, so it's necessary to return + /// monotonously increasing results. Also, we can't arbitrarily choose the + /// possible resolution from multiple candidates to avoid hiding + /// ambiguity. That's also the reason why we can't use `NameBinding` and + /// `NameBinding::merge` in this function. + /// + /// The below examples demonstrates the second point. + /// We need to report ambiguous error at `const C: S = S` because `S` is + /// ambiguous, on the other hand, we need NOT to report ambiguous error in + /// `foo` modules because `S` is not referred to in the module. 
+ /// + /// ```fe + /// use foo::* + /// const C: S = S + /// + /// mod foo { + /// pub use inner1::* + /// pub use inner2::* + /// + /// mod inner1 { + /// pub struct S {} + /// } + /// mod inner2 { + /// pub struct S {} + /// } + /// } + /// ``` + pub(super) fn collect_all_resolutions_for_glob( + &mut self, + target: ScopeId, + ref_scope: ScopeId, + unresolved_named_imports: FxHashSet, + ) -> FxHashMap> { + let mut res_collection: FxHashMap> = FxHashMap::default(); + let mut found_domains: FxHashMap = FxHashMap::default(); + let mut found_kinds: FxHashSet<(IdentId, NameResKind)> = FxHashSet::default(); -impl PartialOrd for NameDerivation { - fn partial_cmp(&self, other: &Self) -> Option { - match (self, other) { - (NameDerivation::Def, NameDerivation::Def) => Some(cmp::Ordering::Equal), - (NameDerivation::Def, _) => Some(cmp::Ordering::Greater), - (_, NameDerivation::Def) => Some(cmp::Ordering::Less), + for edge in target.edges(self.db.as_hir_db()) { + let scope = match edge.kind.propagate_glob() { + PropagationResult::Terminated => edge.dest, + _ => { + continue; + } + }; - (NameDerivation::NamedImported(_), NameDerivation::NamedImported(_)) => { - Some(cmp::Ordering::Equal) + let name = scope.name(self.db.as_hir_db()).unwrap(); + if !found_kinds.insert((name, scope.into())) { + continue; } - (NameDerivation::NamedImported(_), _) => Some(cmp::Ordering::Greater), - (_, NameDerivation::NamedImported(_)) => Some(cmp::Ordering::Less), + let res = + NameRes::new_from_scope(scope, NameDomain::from_scope(scope), NameDerivation::Def); - (NameDerivation::GlobImported(_), NameDerivation::GlobImported(_)) => { - Some(cmp::Ordering::Equal) + *found_domains.entry(name).or_default() |= res.domain as u8; + res_collection.entry(name).or_default().push(res); + } + + let mut found_domains_after_named = found_domains.clone(); + if let Some(named_imports) = self.importer.named_imports(self.db, target) { + for (&name, import) in named_imports { + let found_domain = found_domains.get(&name).copied().unwrap_or_default(); + for res in import.iter().filter(|res| { + if let NameDerivation::NamedImported(use_) = res.derivation { + is_use_visible(self.db, ref_scope, use_) + } else { + false + } + }) { + if (found_domain & res.domain as u8 != 0) + || !found_kinds.insert((name, res.kind)) + { + continue; + } + + *found_domains_after_named.entry(name).or_default() |= res.domain as u8; + res_collection.entry(name).or_default().push(res.clone()); + } } - (NameDerivation::GlobImported(_), _) => Some(cmp::Ordering::Greater), - (_, NameDerivation::GlobImported(_)) => Some(cmp::Ordering::Less), + } - (NameDerivation::Lex(lhs), NameDerivation::Lex(rhs)) => lhs.partial_cmp(rhs), - (NameDerivation::Lex(_), _) => Some(cmp::Ordering::Greater), - (_, NameDerivation::Lex(_)) => Some(cmp::Ordering::Less), + if let Some(glob_imports) = self.importer.glob_imports(self.db, target) { + for (&use_, resolutions) in glob_imports.iter() { + if !is_use_visible(self.db, ref_scope, use_) { + continue; + } + for (&name, res_for_name) in resolutions.iter() { + if unresolved_named_imports.contains(&name) { + continue; + } - (NameDerivation::External, NameDerivation::External) => Some(cmp::Ordering::Equal), - (NameDerivation::External, _) => Some(cmp::Ordering::Greater), - (_, NameDerivation::External) => Some(cmp::Ordering::Less), + for res in res_for_name.iter() { + let seen_domain = found_domains_after_named + .get(&name) + .copied() + .unwrap_or_default(); - (NameDerivation::Prim, NameDerivation::Prim) => Some(cmp::Ordering::Equal), + if 
(seen_domain & res.domain as u8 != 0) + || !found_kinds.insert((name, res.kind)) + { + continue; + } + res_collection.entry(name).or_default().push(res.clone()); + } + } + } } + + res_collection + } + + /// Finalize the query result and cache it to the cache store. + fn finalize_query_result(&mut self, query: NameQuery, binding: NameBinding) -> NameBinding { + self.cache_store.cache_result(query, binding.clone()); + binding } } @@ -813,16 +720,16 @@ pub(crate) struct ResolvedQueryCacheStore { } impl ResolvedQueryCacheStore { + pub(super) fn get(&self, query: NameQuery) -> Option<&NameBinding> { + self.cache.get(&query) + } + fn cache_result(&mut self, query: NameQuery, result: NameBinding) { if self.no_cache { return; } self.cache.insert(query, result); } - - fn get(&self, query: NameQuery) -> Option<&NameBinding> { - self.cache.get(&query) - } } /// Each resolved name is associated with a domain that indicates which domain diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs new file mode 100644 index 0000000000..d3b4915916 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -0,0 +1,233 @@ +#![allow(unused)] +use hir::hir_def::{scope_graph::ScopeId, IdentId, Partial, PathId}; + +use crate::{name_resolution::QueryDirective, HirAnalysisDb}; + +use super::{ + name_resolver::{ + NameBinding, NameRes, NameResolutionError, NameResolver, ResolvedQueryCacheStore, + }, + NameDomain, NameQuery, +}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum EarlyResolvedPath { + Full(NameBinding), + + /// The path is partially resolved; this means that the `resolved` is a type + /// and the following segments depend on type to resolve. + /// These unresolved parts are resolved in the later type inference and + /// trait solving phases. + Partial { + resolved: NameRes, + unresolved_from: usize, + }, +} + +pub(super) struct EarlyResolvedPathWithTrajectory { + pub(super) resolved: EarlyResolvedPath, + /// The trajectory of the resolution, which starts with the original scope + /// of the resolution, then goes through the resolution of each segment. + /// This trajectory doesn't include the final resolution of the path, which + /// is stored in `resolved`. e.g., for the query `std::foo::Bar` from + /// `crate::baz`, the trajectory is `crate::baz -> std -> foo`. + pub(super) trajectory: Vec, +} + +pub type PathResolutionResult = Result; + +#[derive(Debug, derive_more::Display, Clone, PartialEq, Eq, Hash, derive_more::Error)] +#[display(fmt = "failed_at: {failed_at}, kind: {kind}")] +pub struct PathResolutionError { + pub(crate) kind: NameResolutionError, + pub(crate) failed_at: usize, +} +impl PathResolutionError { + fn new(kind: NameResolutionError, failed_at: usize) -> Self { + Self { kind, failed_at } + } +} + +pub(super) struct EarlyPathResolver<'db, 'a, 'b> { + db: &'db dyn HirAnalysisDb, + name_resolver: &'a mut NameResolver<'db, 'a>, + cache_store: &'b ResolvedQueryCacheStore, +} + +impl<'db, 'a, 'b> EarlyPathResolver<'db, 'a, 'b> { + pub(super) fn new( + db: &'db dyn HirAnalysisDb, + name_resolver: &'a mut NameResolver<'db, 'a>, + cache_store: &'b ResolvedQueryCacheStore, + ) -> Self { + Self { + db, + name_resolver, + cache_store, + } + } + + /// Resolves the given `path` in the given `scope`. 
+ pub(super) fn resolve_path( + &mut self, + path: PathId, + scope: ScopeId, + ) -> PathResolutionResult { + self.resolve_segments(path.segments(self.db.as_hir_db()), scope) + } + + /// Resolves the given `segments` in the given `scope`. + pub(super) fn resolve_segments( + &mut self, + segments: &[Partial], + scope: ScopeId, + ) -> PathResolutionResult { + let mut i_path = IntermediatePath::new(segments, scope); + loop { + match i_path.state(self.db) { + IntermediatePathState::ReadyToFinalize => { + let binding = self.resolve_last_segment(&i_path)?; + return Ok(i_path.finalize_as_full(binding)); + } + + IntermediatePathState::TypeDependent => return Ok(i_path.finalize_as_partial()), + + IntermediatePathState::Unresolved => { + self.resolve_segment(&mut i_path)?; + } + } + } + } + + fn resolve_segment(&mut self, i_path: &mut IntermediatePath) -> PathResolutionResult<()> { + let query = i_path.make_query(self.db)?; + let binding = self.resolve_query(query); + i_path.proceed(binding) + } + + fn resolve_last_segment( + &mut self, + i_path: &IntermediatePath, + ) -> PathResolutionResult { + let query = i_path.make_query(self.db)?; + Ok(self.resolve_query(query)) + } + + fn resolve_query(&mut self, query: NameQuery) -> NameBinding { + if let Some(binding) = self.cache_store.get(query) { + binding.clone() + } else { + self.name_resolver.resolve_query(query) + } + } +} + +struct IntermediatePath<'a> { + path: &'a [Partial], + idx: usize, + current_res: NameRes, + trajectory: Vec, +} + +impl<'a> IntermediatePath<'a> { + fn new(path: &'a [Partial], scope: ScopeId) -> Self { + let domain = NameDomain::from_scope(scope); + Self { + path, + idx: 0, + current_res: NameRes::new_from_scope( + scope, + NameDomain::from_scope(scope), + super::NameDerivation::Def, + ), + trajectory: vec![], + } + } + + fn make_query(&self, db: &dyn HirAnalysisDb) -> PathResolutionResult { + debug_assert!(self.state(db) != IntermediatePathState::TypeDependent); + let Partial::Present(name) = self.path[self.idx] else { + return Err(PathResolutionError::new( + NameResolutionError::Invalid, + self.idx + )); + }; + + let Some(scope) = self.current_res.scope() else { + return Err(PathResolutionError::new( + NameResolutionError::NotFound, + self.idx, + )) + }; + + let mut directive = QueryDirective::new(); + if self.idx != 0 { + directive.disallow_external(); + directive.disallow_lex(); + } + + Ok(NameQuery::with_directive(name, scope, directive)) + } + + fn finalize_as_partial(self) -> EarlyResolvedPathWithTrajectory { + let resolved = EarlyResolvedPath::Partial { + resolved: self.current_res, + unresolved_from: self.idx, + }; + + EarlyResolvedPathWithTrajectory { + resolved, + trajectory: self.trajectory, + } + } + + fn finalize_as_full(mut self, binding: NameBinding) -> EarlyResolvedPathWithTrajectory { + let resolved = EarlyResolvedPath::Full(binding); + let mut trajectory = self.trajectory; + let current_res = self.current_res; + trajectory.push(current_res); + + EarlyResolvedPathWithTrajectory { + resolved, + trajectory, + } + } + + fn proceed(&mut self, binding: NameBinding) -> PathResolutionResult<()> { + let next_res = binding + .res_by_domain(NameDomain::Item) + .clone() + .map_err(|err| PathResolutionError::new(err, self.idx))?; + + let old_res = std::mem::replace(&mut self.current_res, next_res); + self.idx += 1; + self.trajectory.push(old_res); + Ok(()) + } + + fn state(&self, db: &dyn HirAnalysisDb) -> IntermediatePathState { + debug_assert!(self.idx < self.path.len()); + + if self.current_res.is_type(db) { + 
IntermediatePathState::TypeDependent + } else if self.idx == self.path.len() - 1 { + IntermediatePathState::ReadyToFinalize + } else { + IntermediatePathState::Unresolved + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum IntermediatePathState { + /// The intermediate path points to the last segment of the path and need to + /// be resolved to finalize the path resolution. + ReadyToFinalize, + + /// The intermediate path points to a type and the next segment need to be + /// resolved in the type context. + TypeDependent, + + /// The path resolution need to be continued further. + Unresolved, +} diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs index 5e5d5ad66a..bb30e7c999 100644 --- a/crates/hir-analysis/src/name_resolution/visibility_checker.rs +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -7,24 +7,28 @@ use crate::HirAnalysisDb; /// 1. It is declared as public, or /// 2. The `ref_scope` is a transitive reflexive child of the scope where the /// name is defined. -pub fn is_scope_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, target_scope: ScopeId) -> bool { +pub(crate) fn is_scope_visible_from( + db: &dyn HirAnalysisDb, + scope: ScopeId, + from_scope: ScopeId, +) -> bool { // If resolved is public, then it is visible. - if target_scope.data(db.as_hir_db()).vis.is_pub() { + if scope.data(db.as_hir_db()).vis.is_pub() { return true; } - let Some(def_scope) = (if matches!(target_scope, ScopeId::Field(..) | ScopeId::Variant(..)) { + let Some(def_scope) = (if matches!(scope, ScopeId::Field(..) | ScopeId::Variant(..)) { // We treat fields as if they are defined in the parent of the parent scope so // that field can be accessible from the scope where the parent is defined. - target_scope.parent(db.as_hir_db()).and_then(|scope| scope.parent(db.as_hir_db())) + scope.parent(db.as_hir_db()).and_then(|scope| scope.parent(db.as_hir_db())) } else { - target_scope.parent(db.as_hir_db()) + scope.parent(db.as_hir_db()) }) else { return false; }; - ref_scope.is_transitive_child_of(db.as_hir_db(), def_scope) + from_scope.is_transitive_child_of(db.as_hir_db(), def_scope) } /// Return `true` if the given `use_` is visible from the `ref_scope`. 
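As a concrete illustration of the visibility rule documented above, consider this small Fe sketch (an illustrative example in the spirit of the `import_invisible.fe` fixture, not part of this patch; the module and type names are made up). `Inner` is not `pub`, so it is visible from `foo::bar`, a transitive child of the defining scope `foo`, but not from the top-level module, and the top-level `use` is expected to be rejected with the `is not visible` diagnostic shown in the `import_invisible.snap` snapshot.

```fe
use foo::Inner

mod foo {
    struct Inner {}

    mod bar {
        use super::Inner
    }
}
```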
diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs index 6b3e9fc435..88ee7ff2ab 100644 --- a/crates/hir-analysis/tests/import.rs +++ b/crates/hir-analysis/tests/import.rs @@ -5,9 +5,7 @@ use std::path::Path; use dir_test::{dir_test, Fixture}; use fe_compiler_test_utils::snap_test; -use fe_hir_analysis::name_resolution::{ - import_resolver::ResolvedImports, name_resolver::NameDerivation, ImportAnalysisPass, -}; +use fe_hir_analysis::name_resolution::{ImportAnalysisPass, NameDerivation, ResolvedImports}; use hir::{analysis_pass::ModuleAnalysisPass, hir_def::Use}; use rustc_hash::FxHashMap; diff --git a/crates/hir/src/hir_def/ident.rs b/crates/hir/src/hir_def/ident.rs index ef4bb73d02..23f07c33e0 100644 --- a/crates/hir/src/hir_def/ident.rs +++ b/crates/hir/src/hir_def/ident.rs @@ -1,5 +1,6 @@ #[salsa::interned] pub struct IdentId { + #[return_ref] pub data: String, } impl IdentId { diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index f0bf88b309..e66f6bcaf2 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -4,5 +4,6 @@ use super::IdentId; #[salsa::interned] pub struct PathId { - pub data: Vec>, + #[return_ref] + pub segments: Vec>, } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index f7b2966736..4bdba58e13 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -260,7 +260,7 @@ impl ScopeId { let parent_path = parent.pretty_path(db)?; Some(format!("{}::{}", parent_path, self.name(db)?.data(db))) } else { - self.name(db).map(|name| name.data(db)) + self.name(db).map(|name| name.data(db).clone()) } } } diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 7a9ddf4f2a..396c1da6be 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1532,7 +1532,7 @@ pub fn walk_path(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, p where V: Visitor + ?Sized, { - for (idx, segment) in path.data(ctxt.db).iter().enumerate() { + for (idx, segment) in path.segments(ctxt.db).iter().enumerate() { if let Some(ident) = segment.to_opt() { ctxt.with_new_ctxt( |span| span.segment_moved(idx).into_atom(), From 4ae385396a6da16612728626f83c0ebefeeb76f4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 7 Jul 2023 19:57:27 +0200 Subject: [PATCH 199/678] Add checker method for trajectory visibility --- .../src/name_resolution/import_resolver.rs | 9 +++++--- .../src/name_resolution/path_resolver.rs | 22 +++++++++++++++++-- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index be1fe323cc..329e5d31e2 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -351,7 +351,7 @@ impl<'db> ImportResolver<'db> { } else { true } - } else if is_scope_visible_from(self.db, i_use.original_scope, scope) { + } else if is_scope_visible_from(self.db, scope, i_use.original_scope) { true } else { if scope.is_importable() { @@ -573,7 +573,7 @@ impl<'db> ImportResolver<'db> { /// Makes a query for the current segment of the intermediate use to be /// resolved. 
fn make_query(&self, i_use: &IntermediateUse) -> NameResolutionResult { - let Some(seg_name) = i_use.current_segment_ident(self.db) else { + let Some(seg_name) = i_use.current_segment_ident(self.db) else { return Err(NameResolutionError::Invalid); }; @@ -1011,7 +1011,10 @@ impl NameRes { /// Returns true if the binding contains an resolution that is not in the /// same ingot as the current resolution of the `i_use`. fn is_external(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { - let Some(current_ingot) = i_use.current_scope().map(|scope| scope.ingot(db.as_hir_db())) else { + let Some(current_ingot) = i_use + .current_scope() + .map(|scope| scope.ingot(db.as_hir_db())) + else { return false; }; diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index d3b4915916..0f1a64e509 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -34,6 +34,24 @@ pub(super) struct EarlyResolvedPathWithTrajectory { pub(super) trajectory: Vec, } +impl EarlyResolvedPathWithTrajectory { + pub(super) fn check_trajectory_visibility( + &self, + db: &dyn HirAnalysisDb, + ) -> PathResolutionResult<()> { + let original_scope = self.trajectory.first().unwrap().scope().unwrap(); + for (i, res) in self.trajectory[1..].iter().enumerate() { + if !res.is_visible(db, original_scope) { + return Err(PathResolutionError::new( + NameResolutionError::Invisible(res.derived_from(db)), + i, + )); + } + } + Ok(()) + } +} + pub type PathResolutionResult = Result; #[derive(Debug, derive_more::Display, Clone, PartialEq, Eq, Hash, derive_more::Error)] @@ -149,7 +167,7 @@ impl<'a> IntermediatePath<'a> { let Partial::Present(name) = self.path[self.idx] else { return Err(PathResolutionError::new( NameResolutionError::Invalid, - self.idx + self.idx, )); }; @@ -157,7 +175,7 @@ impl<'a> IntermediatePath<'a> { return Err(PathResolutionError::new( NameResolutionError::NotFound, self.idx, - )) + )); }; let mut directive = QueryDirective::new(); From fcc4ea4679b6d01ddde5e59e8ed65f9426fe14e7 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 10 Jul 2023 11:38:34 +0200 Subject: [PATCH 200/678] Re-exports all lazy span types from `lazy_spans` module --- crates/hir/src/span/mod.rs | 45 ++++++++++++++++++++++++++++++++++++++ crates/hir/src/visitor.rs | 26 +--------------------- 2 files changed, 46 insertions(+), 25 deletions(-) diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index ec1d41e275..5b8f7e5197 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -26,6 +26,51 @@ pub mod use_tree; pub(crate) mod transition; +pub mod lazy_spans { + pub use super::attr::{ + LazyAttrArgListSpan, LazyAttrArgSpan, LazyAttrListSpan, LazyAttrSpan, + LazyDocCommentAttrSpan, LazyNormalAttrSpan, + }; + + pub use super::expr::{ + LazyBinExprSpan, LazyCallArgListSpan, LazyCallArgSpan, LazyCallExprSpan, LazyExprSpan, + LazyFieldExprSpan, LazyFieldListSpan, LazyFieldSpan, LazyLitExprSpan, LazyMatchArmListSpan, + LazyMatchArmSpan, LazyMatchExprSpan, LazyMethodCallExprSpan, LazyPathExprSpan, + LazyRecordInitExprSpan, LazyUnExprSpan, + }; + + pub use super::item::{ + LazyBodySpan, LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFieldDefListSpan, + LazyFieldDefSpan, LazyFuncSpan, LazyImplSpan, LazyImplTraitSpan, LazyItemModifierSpan, + LazyItemSpan, LazyModSpan, LazyStructSpan, LazyTopModSpan, LazyTraitSpan, + LazyTypeAliasSpan, LazyUseSpan, 
LazyVariantDefListSpan, LazyVariantDefSpan, + }; + + pub use super::params::{ + LazyConstGenericParamSpan, LazyFuncParamListSpan, LazyFuncParamSpan, + LazyGenericArgListSpan, LazyGenericArgSpan, LazyGenericParamListSpan, LazyGenericParamSpan, + LazyTypeBoundListSpan, LazyTypeBoundSpan, LazyTypeGenericArgSpan, LazyWhereClauseSpan, + LazyWherePredicateSpan, + }; + + pub use super::pat::{ + LazyLitPatSpan, LazyPatSpan, LazyPathPatSpan, LazyPathTuplePatSpan, + LazyRecordPatFieldListSpan, LazyRecordPatFieldSpan, LazyRecordPatSpan, + }; + + pub use super::path::{LazyPathSegmentSpan, LazyPathSpan}; + + pub use super::stmt::{LazyLetStmtSpan, LazyStmtSpan}; + + pub use super::types::{ + LazyArrayTypeSpan, LazyPathTypeSpan, LazyPtrTypeSpan, LazyTupleTypeSpan, LazyTySpan, + }; + + pub use super::use_tree::{LazyUseAliasSpan, LazyUsePathSegmentSpan, LazyUsePathSpan}; + + pub use super::{DynLazySpan, LazyLitSpan, LazySpan, LazySpanAtom}; +} + /// This struct represents a dynamic lazy span, which can be converted from all /// types that implement [`LazySpan`] in this module. We want to avoid `dyn /// LazySpan` usage because it doesn't implement `Clone` and `Eq` which leads to diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 396c1da6be..06b1683a58 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -9,31 +9,7 @@ use crate::{ Trait, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, UsePathId, UsePathSegment, VariantDef, VariantDefListId, WhereClauseId, WherePredicate, }, - span::{ - attr::{LazyAttrListSpan, LazyAttrSpan}, - expr::{ - LazyCallArgListSpan, LazyCallArgSpan, LazyExprSpan, LazyFieldListSpan, LazyFieldSpan, - LazyMatchArmSpan, - }, - item::{ - LazyBodySpan, LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFieldDefListSpan, - LazyFieldDefSpan, LazyFuncSpan, LazyImplSpan, LazyImplTraitSpan, LazyItemSpan, - LazyModSpan, LazyStructSpan, LazyTopModSpan, LazyTraitSpan, LazyTypeAliasSpan, - LazyUseSpan, LazyVariantDefListSpan, LazyVariantDefSpan, - }, - params::{ - LazyFuncParamListSpan, LazyFuncParamSpan, LazyGenericArgListSpan, LazyGenericArgSpan, - LazyGenericParamListSpan, LazyGenericParamSpan, LazyTypeBoundListSpan, - LazyTypeBoundSpan, LazyWhereClauseSpan, LazyWherePredicateSpan, - }, - pat::LazyPatSpan, - path::LazyPathSpan, - stmt::LazyStmtSpan, - transition::ChainRoot, - types::LazyTySpan, - use_tree::LazyUsePathSpan, - DynLazySpan, LazyLitSpan, LazySpan, LazySpanAtom, SpanDowncast, - }, + span::{lazy_spans::*, transition::ChainRoot, SpanDowncast}, HirDb, }; From 425dd272ece64e13f801620e991939bbfdc366f1 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 10 Jul 2023 11:51:26 +0200 Subject: [PATCH 201/678] Define `prelude` for `Visitor` implementation --- crates/hir/src/visitor.rs | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 06b1683a58..8ee9a42b11 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -13,6 +13,21 @@ use crate::{ HirDb, }; +pub mod prelude { + pub use super::{ + walk_arm, walk_attribute, walk_attribute_list, walk_body, walk_call_arg, + walk_call_arg_list, walk_const, walk_contract, walk_enum, walk_expr, walk_field, + walk_field_def, walk_field_def_list, walk_field_list, walk_func, walk_func_param, + walk_func_param_list, walk_generic_arg, walk_generic_arg_list, walk_generic_param, + walk_generic_param_list, walk_impl, walk_impl_trait, walk_item, walk_mod, walk_pat, + walk_path, walk_stmt, 
walk_struct, walk_top_mod, walk_trait, walk_ty, walk_type_alias, + walk_type_bound, walk_type_bound_list, walk_use, walk_use_path, walk_variant_def, + walk_variant_def_list, walk_where_clause, walk_where_predicate, Visitor, VisitorCtxt, + }; + + pub use crate::span::lazy_spans::*; +} + /// A visitor for traversing the HIR. pub trait Visitor { fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'_, LazyItemSpan>, item: ItemKind) { @@ -84,7 +99,7 @@ pub trait Visitor { ctxt: &mut VisitorCtxt<'_, LazyAttrListSpan>, attrs: AttrListId, ) { - walk_attributes(self, ctxt, attrs); + walk_attribute_list(self, ctxt, attrs); } fn visit_attribute(&mut self, ctxt: &mut VisitorCtxt<'_, LazyAttrSpan>, attr: &Attr) { @@ -1122,7 +1137,7 @@ where } } -pub fn walk_attributes( +pub fn walk_attribute_list( visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyAttrListSpan>, attr: AttrListId, From f86f63bd9a5599c579c2a29a9ad9d2229981e4fe Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 10 Jul 2023 15:46:32 +0200 Subject: [PATCH 202/678] Rename `NameBinding` to `ResBucket` --- .../src/name_resolution/import_resolver.rs | 74 ++++++++----------- .../src/name_resolution/name_resolver.rs | 72 +++++++++--------- .../src/name_resolution/path_resolver.rs | 28 +++---- 3 files changed, 78 insertions(+), 96 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 329e5d31e2..198379538e 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -19,8 +19,8 @@ use crate::{ use super::{ diagnostics::NameResDiag, name_resolver::{ - NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, NameResKind, - NameResolutionError, NameResolutionResult, NameResolver, QueryDirective, + NameDerivation, NameDomain, NameQuery, NameRes, NameResKind, NameResolutionError, + NameResolutionResult, NameResolver, QueryDirective, ResBucket, }, }; @@ -123,7 +123,7 @@ impl<'db> ImportResolver<'db> { continue; } - // If the unresolved use is not a glob and the number of imported bindings is + // If the unresolved use is not a glob and the number of imported bucket is // not 0, then we can regard the resolution for the use as completed. // This happens if the scope that the use is referring to is not closed. if !i_use.is_glob(self.db) && *self.num_imported_res.entry(i_use.use_).or_default() != 0 @@ -215,7 +215,7 @@ impl<'db> ImportResolver<'db> { let original_scope = base_path_resolved.original_scope; let use_ = base_path_resolved.use_; - // Collect all unresolved named imports in the target scope to avoid binding a + // Collect all unresolved named imports in the target scope to avoid bucket a // name to a wrong resolution being brought by a glob. let unresolved_named_imports = match self.intermediate_uses.get(&target_scope) { Some(i_uses) => i_uses @@ -234,7 +234,7 @@ impl<'db> ImportResolver<'db> { None => FxHashSet::default(), }; - // Collect all bindings in the target scope. + // Collect all bucket in the target scope. let mut resolver = NameResolver::new(self.db, &self.resolved_imports); let resolutions = resolver.collect_all_resolutions_for_glob( target_scope, @@ -332,11 +332,11 @@ impl<'db> ImportResolver<'db> { }; let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); - let mut binding = resolver.resolve_query(query); + let mut bucket = resolver.resolve_query(query); // Filter out invisible resolutions. 
let mut invisible_span = None; - binding.resolutions.retain(|_, res| { + bucket.bucket.retain(|_, res| { let Ok(res) = res else { return true; }; @@ -368,10 +368,10 @@ impl<'db> ImportResolver<'db> { // Filter out irrelevant resolutions if the segment is not the last one. if !i_use.is_base_resolved(self.db) { - binding.filter_by_domain(NameDomain::Item); + bucket.filter_by_domain(NameDomain::Item); } - for err in binding.errors() { + for err in bucket.errors() { if !matches!( err, NameResolutionError::NotFound | NameResolutionError::Invalid @@ -380,7 +380,7 @@ impl<'db> ImportResolver<'db> { return None; } } - if binding.is_empty() { + if bucket.is_empty() { if self.is_decidable(i_use) { let err = if let Some(invisible_span) = invisible_span { NameResolutionError::Invisible(invisible_span.into()) @@ -397,7 +397,7 @@ impl<'db> ImportResolver<'db> { // If the resolution is derived from glob import or external crate, we have to // insert the use into the `suspicious_imports` set to verify the ambiguity // after the algorithm reaches the fixed point. - for res in binding.iter() { + for res in bucket.iter() { if res.is_external(self.db, i_use) || res.is_derived_from_glob() { self.suspicious_imports.insert(i_use.use_); break; @@ -405,9 +405,9 @@ impl<'db> ImportResolver<'db> { } if i_use.is_base_resolved(self.db) { - Some(IUseResolution::Full(binding)) + Some(IUseResolution::Full(bucket)) } else { - let res = binding.res_by_domain(NameDomain::Item).clone().unwrap(); + let res = bucket.res_by_domain(NameDomain::Item).clone().unwrap(); let next_i_use = i_use.proceed(res); if next_i_use.is_base_resolved(self.db) { Some(IUseResolution::BasePath(next_i_use)) @@ -436,8 +436,8 @@ impl<'db> ImportResolver<'db> { fn try_finalize_named_use(&mut self, i_use: IntermediateUse) -> bool { debug_assert!(i_use.is_base_resolved(self.db)); - let binding = match self.resolve_segment(&i_use) { - Some(IUseResolution::Full(binding)) => binding, + let bucket = match self.resolve_segment(&i_use) { + Some(IUseResolution::Full(bucket)) => bucket, Some(IUseResolution::Unchanged(_)) => { return false; } @@ -449,7 +449,7 @@ impl<'db> ImportResolver<'db> { } }; - let n_res = binding.len(); + let n_res = bucket.len(); let is_decidable = self.is_decidable(&i_use); if *self.num_imported_res.entry(i_use.use_).or_default() == n_res { return is_decidable; @@ -458,7 +458,7 @@ impl<'db> ImportResolver<'db> { self.num_imported_res.insert(i_use.use_, n_res); if let Err(err) = self .resolved_imports - .set_named_binds(self.db, &i_use, binding) + .set_named_binds(self.db, &i_use, bucket) { self.accumulated_errors.push(err); } @@ -477,7 +477,7 @@ impl<'db> ImportResolver<'db> { let ingot = scope.ingot(self.db.as_hir_db()); // The ambiguity in the first segment possibly occurs when the segment is - // resolved to either a glob imported binding or an external ingot in the + // resolved to either a glob imported bucket or an external ingot in the // `i_use` resolution. 
// // This is because: @@ -667,7 +667,7 @@ impl<'db> ImportResolver<'db> { pub struct ResolvedImports { pub named_resolved: FxHashMap, pub glob_resolved: FxHashMap, - pub unnamed_resolved: Vec, + pub unnamed_resolved: Vec, } pub(super) trait Importer { @@ -683,11 +683,7 @@ pub(super) trait Importer { scope: ScopeId, ) -> Option<&'a GlobImportSet>; - fn unnamed_imports<'a>( - &'a self, - db: &'a dyn HirAnalysisDb, - scope: ScopeId, - ) -> &'a [NameBinding]; + fn unnamed_imports<'a>(&'a self, db: &'a dyn HirAnalysisDb, scope: ScopeId) -> &'a [ResBucket]; } pub(super) struct DefaultImporter; @@ -713,16 +709,12 @@ impl Importer for DefaultImporter { .get(&scope) } - fn unnamed_imports<'a>( - &'a self, - db: &'a dyn HirAnalysisDb, - scope: ScopeId, - ) -> &'a [NameBinding] { + fn unnamed_imports<'a>(&'a self, db: &'a dyn HirAnalysisDb, scope: ScopeId) -> &'a [ResBucket] { &resolved_imports_for_scope(db, scope).unnamed_resolved } } -pub type NamedImportSet = FxHashMap; +pub type NamedImportSet = FxHashMap; #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct GlobImportSet { @@ -796,7 +788,7 @@ impl IntermediateUse { } /// Proceed the resolution of the use path to the next segment. - /// The binding must contain exactly one resolution. + /// The bucket must contain exactly one resolution. fn proceed(&self, next_res: NameRes) -> Self { Self { use_: self.use_, @@ -858,7 +850,7 @@ impl IntermediateUse { #[derive(Debug, Clone, PartialEq, Eq)] enum IUseResolution { /// The all segments are resolved. - Full(NameBinding), + Full(ResBucket), /// The all path segments except the last one are resolved. BasePath(IntermediateUse), @@ -888,7 +880,7 @@ impl IntermediateResolvedImports { &mut self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse, - mut bind: NameBinding, + mut bind: ResBucket, ) -> Result<(), NameResDiag> { let scope = i_use.original_scope; bind.set_derivation(NameDerivation::NamedImported(i_use.use_)); @@ -909,9 +901,9 @@ impl IntermediateResolvedImports { match imported_set.entry(imported_name) { Entry::Occupied(mut e) => { - let bindings = e.get_mut(); - bindings.merge(bind.iter()); - for err in bindings.errors() { + let bucket = e.get_mut(); + bucket.merge(bind.iter()); + for err in bucket.errors() { let NameResolutionError::Ambiguous(cands) = err else { continue; }; @@ -989,11 +981,7 @@ impl Importer for IntermediateResolvedImports { } } - fn unnamed_imports<'a>( - &'a self, - db: &'a dyn HirAnalysisDb, - scope: ScopeId, - ) -> &'a [NameBinding] { + fn unnamed_imports<'a>(&'a self, db: &'a dyn HirAnalysisDb, scope: ScopeId) -> &'a [ResBucket] { if scope.top_mod(db.as_hir_db()).ingot(db.as_hir_db()) != self.ingot { &resolved_imports_for_scope(db, scope).unnamed_resolved } else { @@ -1008,7 +996,7 @@ fn resolved_imports_for_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> &Resolv } impl NameRes { - /// Returns true if the binding contains an resolution that is not in the + /// Returns true if the bucket contains an resolution that is not in the /// same ingot as the current resolution of the `i_use`. fn is_external(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { let Some(current_ingot) = i_use @@ -1024,7 +1012,7 @@ impl NameRes { } } - /// Returns true if the binding contains a glob import. + /// Returns true if the bucket contains a glob import. 
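+    /// Such resolutions mark the surrounding `use` as suspicious, so its
+    /// ambiguity is re-checked once import resolution reaches a fixed point.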
fn is_derived_from_glob(&self) -> bool { matches!(self.derivation, NameDerivation::GlobImported(_)) } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 2793a619f2..82edb1cdf5 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -108,15 +108,15 @@ impl Default for QueryDirective { } /// The struct contains the lookup result of a name query. -/// The results can contain more than one resolved items which belong to +/// The results can contain more than one name resolution which belong to /// different name domains. #[derive(Clone, Debug, PartialEq, Eq, Default)] -pub struct NameBinding { - pub(super) resolutions: FxHashMap>, +pub struct ResBucket { + pub(super) bucket: FxHashMap>, } -impl NameBinding { - /// Returns the number of resolutions in the binding. +impl ResBucket { + /// Returns the number of resolutions in the bucket. pub fn len(&self) -> usize { self.iter().count() } @@ -126,32 +126,26 @@ impl NameBinding { } pub fn iter(&self) -> impl Iterator { - self.resolutions - .values() - .filter_map(|res| res.as_ref().ok()) + self.bucket.values().filter_map(|res| res.as_ref().ok()) } pub fn iter_mut(&mut self) -> impl Iterator { - self.resolutions - .values_mut() - .filter_map(|res| res.as_mut().ok()) + self.bucket.values_mut().filter_map(|res| res.as_mut().ok()) } pub fn errors(&self) -> impl Iterator { - self.resolutions - .values() - .filter_map(|res| res.as_ref().err()) + self.bucket.values().filter_map(|res| res.as_ref().err()) } /// Returns the resolution of the given `domain`. pub fn res_by_domain(&self, domain: NameDomain) -> &NameResolutionResult { - self.resolutions + self.bucket .get(&domain) .unwrap_or(&Err(NameResolutionError::NotFound)) } pub fn filter_by_domain(&mut self, domain: NameDomain) { - self.resolutions.retain(|d, _| *d == domain); + self.bucket.retain(|d, _| *d == domain); } /// Merge the `resolutions` into the set. If name conflict happens, the old @@ -171,7 +165,7 @@ impl NameBinding { /// Push the `res` into the set. fn push(&mut self, res: &NameRes) { let domain = res.domain; - match self.resolutions.entry(domain) { + match self.bucket.entry(domain) { Entry::Occupied(mut e) => { let old_res = match e.get_mut() { Ok(res) => res, @@ -223,20 +217,20 @@ impl NameBinding { } } -impl IntoIterator for NameBinding { +impl IntoIterator for ResBucket { type Item = NameResolutionResult; type IntoIter = IntoValues>; fn into_iter(self) -> Self::IntoIter { - self.resolutions.into_values() + self.bucket.into_values() } } -impl From for NameBinding { +impl From for ResBucket { fn from(res: NameRes) -> Self { let mut names = FxHashMap::default(); names.insert(res.domain, Ok(res)); - Self { resolutions: names } + Self { bucket: names } } } @@ -446,13 +440,13 @@ impl<'db, 'a> NameResolver<'db, 'a> { self.cache_store } - pub(crate) fn resolve_query(&mut self, query: NameQuery) -> NameBinding { + pub(crate) fn resolve_query(&mut self, query: NameQuery) -> ResBucket { // If the query is already resolved, return the cached result. 
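        // A minimal, self-contained sketch of the bucket idea (simplified
        // stand-in types, not the actual `ResBucket`/`NameDomain` API): at
        // most one resolution is kept per domain, so a type and a value that
        // share a name can coexist, and callers disambiguate by domain.
        //
        //     use std::collections::HashMap;
        //
        //     #[derive(PartialEq, Eq, Hash)]
        //     enum Domain { Type, Value }
        //
        //     let mut bucket = HashMap::new();
        //     bucket.insert(Domain::Type, "struct Foo");
        //     bucket.insert(Domain::Value, "const Foo");
        //     assert_eq!(bucket[&Domain::Type], "struct Foo");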
if let Some(resolved) = self.cache_store.get(query) { return resolved.clone(); }; - let mut binding = NameBinding::default(); + let mut bucket = ResBucket::default(); // The shadowing rule is // `$ > NamedImports > GlobImports > Lex > external ingot > builtin types`, @@ -471,7 +465,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { NameDomain::from_scope(edge.dest), NameDerivation::Def, ); - binding.push(&res); + bucket.push(&res); } } @@ -490,14 +484,14 @@ impl<'db, 'a> NameResolver<'db, 'a> { .named_imports(self.db, query.scope) .and_then(|imports| imports.get(&query.name)) { - binding.merge(imported.iter()); + bucket.merge(imported.iter()); } // 3. Look for the name in the glob imports. if query.directive.allow_glob { if let Some(imported) = self.importer.glob_imports(self.db, query.scope) { for res in imported.name_res_for(query.name) { - binding.push(res); + bucket.push(res); } } } @@ -510,11 +504,11 @@ impl<'db, 'a> NameResolver<'db, 'a> { let mut resolved = self.resolve_query(query_for_parent); resolved.set_lexed_derivation(); - binding.merge(resolved.iter()); + bucket.merge(resolved.iter()); } if !query.directive.allow_external { - return self.finalize_query_result(query, binding); + return self.finalize_query_result(query, bucket); } // 5. Look for the name in the external ingots. @@ -528,7 +522,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { if *name == query.name { // We don't care about the result of `push` because we assume ingots are // guaranteed to be unique. - binding.push(&NameRes::new_from_scope( + bucket.push(&NameRes::new_from_scope( ScopeId::root(*root_mod), NameDomain::Item, NameDerivation::External, @@ -541,11 +535,11 @@ impl<'db, 'a> NameResolver<'db, 'a> { // We don't care about the result of `push` because we assume builtin types are // guaranteed to be unique. if query.name == prim.name() { - binding.push(&NameRes::new_prim(prim)); + bucket.push(&NameRes::new_prim(prim)); } } - self.finalize_query_result(query, binding) + self.finalize_query_result(query, bucket) } /// Collect all visible resolutions in the given `target` scope. @@ -563,8 +557,8 @@ impl<'db, 'a> NameResolver<'db, 'a> { /// - The function is used for glob imports, so it's necessary to return /// monotonously increasing results. Also, we can't arbitrarily choose the /// possible resolution from multiple candidates to avoid hiding - /// ambiguity. That's also the reason why we can't use `NameBinding` and - /// `NameBinding::merge` in this function. + /// ambiguity. That's also the reason why we can't use [`ResBucket`] and + /// [`ResBucket::merge`] in this function. /// /// The below examples demonstrates the second point. /// We need to report ambiguous error at `const C: S = S` because `S` is @@ -670,9 +664,9 @@ impl<'db, 'a> NameResolver<'db, 'a> { } /// Finalize the query result and cache it to the cache store. 
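    /// Nothing is stored when the underlying cache store has caching
    /// disabled (`no_cache`).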
- fn finalize_query_result(&mut self, query: NameQuery, binding: NameBinding) -> NameBinding { - self.cache_store.cache_result(query, binding.clone()); - binding + fn finalize_query_result(&mut self, query: NameQuery, bucket: ResBucket) -> ResBucket { + self.cache_store.cache_result(query, bucket.clone()); + bucket } } @@ -715,16 +709,16 @@ impl std::error::Error for NameResolutionError {} #[derive(Default, Debug, PartialEq, Eq)] pub(crate) struct ResolvedQueryCacheStore { - cache: FxHashMap, + cache: FxHashMap, no_cache: bool, } impl ResolvedQueryCacheStore { - pub(super) fn get(&self, query: NameQuery) -> Option<&NameBinding> { + pub(super) fn get(&self, query: NameQuery) -> Option<&ResBucket> { self.cache.get(&query) } - fn cache_result(&mut self, query: NameQuery, result: NameBinding) { + fn cache_result(&mut self, query: NameQuery, result: ResBucket) { if self.no_cache { return; } diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index 0f1a64e509..a13ccd2694 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -5,14 +5,14 @@ use crate::{name_resolution::QueryDirective, HirAnalysisDb}; use super::{ name_resolver::{ - NameBinding, NameRes, NameResolutionError, NameResolver, ResolvedQueryCacheStore, + NameRes, NameResolutionError, NameResolver, ResBucket, ResolvedQueryCacheStore, }, NameDomain, NameQuery, }; #[derive(Debug, Clone, PartialEq, Eq)] pub enum EarlyResolvedPath { - Full(NameBinding), + Full(ResBucket), /// The path is partially resolved; this means that the `resolved` is a type /// and the following segments depend on type to resolve. @@ -104,8 +104,8 @@ impl<'db, 'a, 'b> EarlyPathResolver<'db, 'a, 'b> { loop { match i_path.state(self.db) { IntermediatePathState::ReadyToFinalize => { - let binding = self.resolve_last_segment(&i_path)?; - return Ok(i_path.finalize_as_full(binding)); + let bucket = self.resolve_last_segment(&i_path)?; + return Ok(i_path.finalize_as_full(bucket)); } IntermediatePathState::TypeDependent => return Ok(i_path.finalize_as_partial()), @@ -119,21 +119,21 @@ impl<'db, 'a, 'b> EarlyPathResolver<'db, 'a, 'b> { fn resolve_segment(&mut self, i_path: &mut IntermediatePath) -> PathResolutionResult<()> { let query = i_path.make_query(self.db)?; - let binding = self.resolve_query(query); - i_path.proceed(binding) + let bucket = self.resolve_query(query); + i_path.proceed(bucket) } fn resolve_last_segment( &mut self, i_path: &IntermediatePath, - ) -> PathResolutionResult { + ) -> PathResolutionResult { let query = i_path.make_query(self.db)?; Ok(self.resolve_query(query)) } - fn resolve_query(&mut self, query: NameQuery) -> NameBinding { - if let Some(binding) = self.cache_store.get(query) { - binding.clone() + fn resolve_query(&mut self, query: NameQuery) -> ResBucket { + if let Some(bucket) = self.cache_store.get(query) { + bucket.clone() } else { self.name_resolver.resolve_query(query) } @@ -199,8 +199,8 @@ impl<'a> IntermediatePath<'a> { } } - fn finalize_as_full(mut self, binding: NameBinding) -> EarlyResolvedPathWithTrajectory { - let resolved = EarlyResolvedPath::Full(binding); + fn finalize_as_full(mut self, bucket: ResBucket) -> EarlyResolvedPathWithTrajectory { + let resolved = EarlyResolvedPath::Full(bucket); let mut trajectory = self.trajectory; let current_res = self.current_res; trajectory.push(current_res); @@ -211,8 +211,8 @@ impl<'a> IntermediatePath<'a> { } } - fn proceed(&mut self, 
binding: NameBinding) -> PathResolutionResult<()> { - let next_res = binding + fn proceed(&mut self, bucket: ResBucket) -> PathResolutionResult<()> { + let next_res = bucket .res_by_domain(NameDomain::Item) .clone() .map_err(|err| PathResolutionError::new(err, self.idx))?; From 0816b5079a25e49a5368511ded2dc41a0ea06c07 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 11 Jul 2023 00:10:55 +0200 Subject: [PATCH 203/678] Implement `EarlyPathVisitor` --- .../src/name_resolution/diagnostics.rs | 53 ++++ .../src/name_resolution/import_resolver.rs | 4 +- .../hir-analysis/src/name_resolution/mod.rs | 288 ++++++++++++++---- .../src/name_resolution/name_resolver.rs | 41 ++- .../src/name_resolution/path_resolver.rs | 30 +- crates/hir/src/hir_def/item.rs | 23 ++ crates/hir/src/hir_def/path.rs | 12 +- crates/hir/src/hir_def/scope_graph.rs | 17 ++ 8 files changed, 385 insertions(+), 83 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 34e2db78f5..ba7d8e795f 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -10,6 +10,8 @@ use hir::{ use crate::HirAnalysisDb; +use super::NameRes; + #[salsa::accumulator] pub struct NameResolutionDiagAccumulator(pub(super) NameResDiag); @@ -26,6 +28,18 @@ pub enum NameResDiag { /// The resolved name is ambiguous. Ambiguous(DynLazySpan, IdentId, Vec), + + /// The name is found but belongs to a different name domain other than the + /// Type. + ExpectedType(DynLazySpan, IdentId, NameRes), + + /// The name is found but belongs to a different name domain other than the + /// trait. + ExpectedTrait(DynLazySpan, IdentId, NameRes), + + /// The name is found but belongs to a different name domain other than the + /// value. + ExpectedValue(DynLazySpan, IdentId, NameRes), } impl NameResDiag { @@ -60,6 +74,9 @@ impl NameResDiag { Self::NotFound(span, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::Invisible(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::Ambiguous(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::ExpectedType(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::ExpectedTrait(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::ExpectedValue(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), } } @@ -69,6 +86,9 @@ impl NameResDiag { Self::NotFound(..) => 2, Self::Invisible(..) => 3, Self::Ambiguous(..) => 4, + Self::ExpectedType(..) => 5, + Self::ExpectedTrait(..) => 6, + Self::ExpectedValue(..) 
=> 7, } } @@ -86,6 +106,9 @@ impl NameResDiag { format!("`{}` is not visible", name.data(db),) } Self::Ambiguous(_, name, _) => format!("`{}` is ambiguous", name.data(db)), + Self::ExpectedType(_, _, _) => "expected type item here".to_string(), + Self::ExpectedTrait(_, _, _) => "expected trait item here".to_string(), + Self::ExpectedValue(_, _, _) => "expected value here".to_string(), } } @@ -166,6 +189,36 @@ impl NameResDiag { diags } + + Self::ExpectedType(prim_span, name, res) => { + let res_kind_name = res.kind_name(); + let name = name.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected type here, found {} `{}`", res_kind_name, name), + prim_span.resolve(db), + )] + } + + Self::ExpectedTrait(prim_span, name, res) => { + let res_kind_name = res.kind_name(); + let name = name.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected trait here, found {} `{}`", res_kind_name, name), + prim_span.resolve(db), + )] + } + + Self::ExpectedValue(prim_span, name, res) => { + let res_kind_name = res.kind_name(); + let name = name.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected value here, found {} `{}`", res_kind_name, name), + prim_span.resolve(db), + )] + } } } } diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 198379538e..a6b831fceb 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -368,7 +368,7 @@ impl<'db> ImportResolver<'db> { // Filter out irrelevant resolutions if the segment is not the last one. if !i_use.is_base_resolved(self.db) { - bucket.filter_by_domain(NameDomain::Item); + bucket.filter_by_domain(NameDomain::Type); } for err in bucket.errors() { @@ -407,7 +407,7 @@ impl<'db> ImportResolver<'db> { if i_use.is_base_resolved(self.db) { Some(IUseResolution::Full(bucket)) } else { - let res = bucket.res_by_domain(NameDomain::Item).clone().unwrap(); + let res = bucket.pick(NameDomain::Type).clone().unwrap(); let next_i_use = i_use.proceed(res); if next_i_use.is_base_resolved(self.db) { Some(IUseResolution::BasePath(next_i_use)) diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 0ebfbe533d..bd08f801d4 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -5,23 +5,20 @@ mod name_resolver; mod path_resolver; mod visibility_checker; +use either::Either; pub use import_resolver::ResolvedImports; pub use name_resolver::{ - NameBinding, NameDerivation, NameDomain, NameQuery, NameRes, QueryDirective, + NameDerivation, NameDomain, NameQuery, NameRes, QueryDirective, ResBucket, }; use hir::{ analysis_pass::ModuleAnalysisPass, diagnostics::DiagnosticVoucher, hir_def::{ - scope_graph::ScopeId, FieldDefListId, GenericParamListId, IngotId, ItemKind, TopLevelMod, - VariantDefListId, - }, - span::item::{LazyFieldDefListSpan, LazyItemSpan, LazyVariantDefListSpan}, - visitor::{ - walk_field_def_list, walk_generic_param_list, walk_item, walk_variant_def_list, Visitor, - VisitorCtxt, + scope_graph::ScopeId, Expr, FieldDefListId, GenericParamListId, IngotId, ItemKind, Pat, + PathId, TopLevelMod, TypeBound, TypeId, VariantDefListId, }, + visitor::prelude::*, }; use rustc_hash::FxHashSet; @@ -31,6 +28,7 @@ use self::{ diagnostics::{NameResDiag, NameResolutionDiagAccumulator}, 
import_resolver::DefaultImporter, name_resolver::{NameResolutionError, ResolvedQueryCacheStore}, + path_resolver::{EarlyPathResolver, EarlyResolvedPath}, }; pub struct ImportAnalysisPass<'db> { @@ -72,7 +70,7 @@ impl<'db> ModuleAnalysisPass for DefConflictAnalysisPass<'db> { let errors = resolve_path_early::accumulated::(self.db, top_mod); - // TODO: Impl collector. + // TODO: `ImplCollector`. errors .into_iter() .filter_map(|err| matches!(err, NameResDiag::Conflict(..)).then(|| Box::new(err) as _)) @@ -92,10 +90,10 @@ pub(crate) fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> Resolve } /// Performs early path resolution and cache the resolutions for paths appeared -/// in the given module. Also checks the conflict of the item definitions +/// in the given module. Also checks the conflict of the item definitions. /// -/// NOTE: This method doesn't check the conflict in impl blocks since it -/// requires ingot granularity analysis. +/// NOTE: This method doesn't check the conflict in impl/impl-trait blocks since +/// it requires ingot granularity analysis. #[salsa::tracked(return_ref)] #[allow(unused)] pub(crate) fn resolve_path_early( @@ -103,27 +101,30 @@ pub(crate) fn resolve_path_early( top_mod: TopLevelMod, ) -> ResolvedQueryCacheStore { let importer = DefaultImporter; - let mut resolver = PathResolver::new(db, &importer); - resolver.resolve_all(top_mod); + let mut visitor = EarlyPathVisitor::new(db, &importer); + + let mut ctxt = VisitorCtxt::with_item(db.as_hir_db(), top_mod.into()); + visitor.visit_item(&mut ctxt, top_mod.into()); - for diag in resolver.diags { + for diag in visitor.diags { NameResolutionDiagAccumulator::push(db, diag); } - resolver.inner.into_cache_store() + visitor.inner.into_cache_store() } -struct PathResolver<'db, 'a> { +struct EarlyPathVisitor<'db, 'a> { db: &'db dyn HirAnalysisDb, inner: name_resolver::NameResolver<'db, 'a>, diags: Vec, item_stack: Vec, + path_ctxt: Vec, /// The set of scopes that have already been conflicted to avoid duplicate /// diagnostics. already_conflicted: FxHashSet, } -impl<'db, 'a> PathResolver<'db, 'a> { +impl<'db, 'a> EarlyPathVisitor<'db, 'a> { fn new(db: &'db dyn HirAnalysisDb, importer: &'a DefaultImporter) -> Self { let resolver = name_resolver::NameResolver::new(db, importer); Self { @@ -131,41 +132,51 @@ impl<'db, 'a> PathResolver<'db, 'a> { inner: resolver, diags: Vec::new(), item_stack: Vec::new(), + path_ctxt: Vec::new(), already_conflicted: FxHashSet::default(), } } - fn resolve_all(&mut self, top_mod: TopLevelMod) { - let mut ctxt = VisitorCtxt::with_item(self.db.as_hir_db(), top_mod.into()); - self.visit_item(&mut ctxt, top_mod.into()); - } - - fn check_item_conflict(&mut self, item: ItemKind) { - let scope = ScopeId::from_item(item); - self.check_conflict(scope); - } - - fn check_field_conflict(&mut self, fields: FieldDefListId) { - let parent_item = *self.item_stack.last().unwrap(); - for i in 0..fields.data(self.db.as_hir_db()).len() { - let scope = ScopeId::Field(parent_item, i); - self.check_conflict(scope); - } - } + fn verify_path(&mut self, path: PathId, span: LazyPathSpan, bucket: ResBucket) { + debug_assert!(!bucket.is_empty()); + + let path_kind = self.path_ctxt.last().unwrap(); + let last_seg_idx = path.segment_len(self.db.as_hir_db()) - 1; + let last_seg_ident = *path.segments(self.db.as_hir_db())[last_seg_idx].unwrap(); + let span = span.segment(last_seg_idx).into(); + + match path_kind.pick(self.db, bucket) { + // The path exists and belongs to the expected kind. 
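+            // It may still be invisible from the use site, in which case an
+            // `Invisible` diagnostic is reported.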
+ Either::Left(res) => { + if !res.is_visible( + self.db, + ScopeId::from_item(*self.item_stack.last().unwrap()), + ) { + self.diags.push(NameResDiag::invisible( + span, + last_seg_ident, + res.derived_from(self.db), + )); + } + } - fn check_variant_conflict(&mut self, variants: VariantDefListId) { - let parent_item = *self.item_stack.last().unwrap(); - for i in 0..variants.data(self.db.as_hir_db()).len() { - let scope = ScopeId::Variant(parent_item, i); - self.check_conflict(scope); - } - } - - fn check_generic_param_conflict(&mut self, params: GenericParamListId) { - let parent_item = *self.item_stack.last().unwrap(); - for i in 0..params.data(self.db.as_hir_db()).len() { - let scope = ScopeId::GenericParam(parent_item, i); - self.check_conflict(scope); + // The path exists but doesn't belong to the expected kind. + Either::Right(res) => match path_kind { + ExpectedPathKind::Type => { + self.diags + .push(NameResDiag::ExpectedType(span, last_seg_ident, res)); + } + + ExpectedPathKind::Trait => { + self.diags + .push(NameResDiag::ExpectedTrait(span, last_seg_ident, res)); + } + + ExpectedPathKind::Value => { + self.diags + .push(NameResDiag::ExpectedValue(span, last_seg_ident, res)); + } + }, } } @@ -180,7 +191,7 @@ impl<'db, 'a> PathResolver<'db, 'a> { let domain = NameDomain::from_scope(scope); let binding = self.inner.resolve_query(query); - match binding.res_by_domain(domain) { + match binding.pick(domain) { Ok(_) => {} Err(NameResolutionError::Ambiguous(cands)) => { @@ -199,6 +210,7 @@ impl<'db, 'a> PathResolver<'db, 'a> { ); self.diags.push(diag); } + Err(_) => unreachable!(), }; } @@ -213,31 +225,71 @@ impl<'db, 'a> PathResolver<'db, 'a> { } } -impl<'db, 'a> Visitor for PathResolver<'db, 'a> { +impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'_, LazyItemSpan>, item: ItemKind) { - self.check_item_conflict(item); + // We don't need to check use statements for conflicts because they are + // already checked in import resolution. + if matches!(item, ItemKind::Use(_)) { + return; + } + + let scope = ScopeId::from_item(item); + // We don't need to check impl/impl-trait blocks for conflicts because they + // needs ingot granularity analysis, the conflict checks for them is done by the + // `ImplCollector`. 
+ if !matches!(item, ItemKind::Impl(_) | ItemKind::ImplTrait(_)) { + self.check_conflict(scope); + } self.item_stack.push(item); + if matches!(item, ItemKind::Body(_)) { + self.path_ctxt.push(ExpectedPathKind::Value); + } else { + self.path_ctxt.push(ExpectedPathKind::Type); + } + walk_item(self, ctxt, item); + self.item_stack.pop(); + self.path_ctxt.pop(); + } + + fn visit_type_bound( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyTypeBoundSpan>, + bound: &TypeBound, + ) { + self.path_ctxt.push(ExpectedPathKind::Trait); + walk_type_bound(self, ctxt, bound); + self.path_ctxt.pop(); } fn visit_field_def_list( &mut self, ctxt: &mut VisitorCtxt<'_, LazyFieldDefListSpan>, - field: FieldDefListId, + fields: FieldDefListId, ) { - self.check_field_conflict(field); - walk_field_def_list(self, ctxt, field); + let parent_item = *self.item_stack.last().unwrap(); + for i in 0..fields.data(self.db.as_hir_db()).len() { + let scope = ScopeId::Field(parent_item, i); + self.check_conflict(scope); + } + + walk_field_def_list(self, ctxt, fields); } fn visit_variant_def_list( &mut self, ctxt: &mut VisitorCtxt<'_, LazyVariantDefListSpan>, - variant: VariantDefListId, + variants: VariantDefListId, ) { - self.check_variant_conflict(variant); - walk_variant_def_list(self, ctxt, variant); + let parent_item = *self.item_stack.last().unwrap(); + for i in 0..variants.data(self.db.as_hir_db()).len() { + let scope = ScopeId::Variant(parent_item, i); + self.check_conflict(scope); + } + + walk_variant_def_list(self, ctxt, variants); } fn visit_generic_param_list( @@ -245,7 +297,127 @@ impl<'db, 'a> Visitor for PathResolver<'db, 'a> { ctxt: &mut VisitorCtxt<'_, hir::span::params::LazyGenericParamListSpan>, params: GenericParamListId, ) { - self.check_generic_param_conflict(params); + let parent_item = *self.item_stack.last().unwrap(); + for i in 0..params.data(self.db.as_hir_db()).len() { + let scope = ScopeId::GenericParam(parent_item, i); + self.check_conflict(scope); + } + walk_generic_param_list(self, ctxt, params); } + + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, ty: TypeId) { + self.path_ctxt.push(ExpectedPathKind::Type); + walk_ty(self, ctxt, ty); + self.path_ctxt.pop(); + } + + fn visit_pat(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) { + if matches!(pat, Pat::Record { .. }) { + self.path_ctxt.push(ExpectedPathKind::Type); + } else { + self.path_ctxt.push(ExpectedPathKind::Value); + } + walk_pat(self, ctxt, pat); + self.path_ctxt.pop(); + } + + fn visit_expr(&mut self, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) { + if matches!(expr, Expr::RecordInit { .. 
}) { + self.path_ctxt.push(ExpectedPathKind::Type); + } else { + self.path_ctxt.push(ExpectedPathKind::Value); + } + walk_expr(self, ctxt, expr); + self.path_ctxt.pop(); + } + + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { + let scope = ScopeId::from_item(self.item_stack.last().copied().unwrap()); + let dummy_cache_store = ResolvedQueryCacheStore::no_cache(); + + let mut resolver = EarlyPathResolver::new(self.db, &mut self.inner, &dummy_cache_store); + let resolved_path = match resolver.resolve_path(path, scope) { + Ok(bucket) => bucket, + + Err(err) => { + let failed_at = err.failed_at; + let span = ctxt.span().unwrap().segment(failed_at); + let ident = path.segments(self.db.as_hir_db())[failed_at]; + let diag = match err.kind { + NameResolutionError::NotFound => { + NameResDiag::not_found(span.into(), *ident.unwrap()) + } + + NameResolutionError::Invalid => { + return; + } + + NameResolutionError::Invisible(_) => { + unreachable!("`EarlyPathResolver doesn't check visibility"); + } + + NameResolutionError::Ambiguous(cands) => NameResDiag::ambiguous( + span.into(), + *ident.unwrap(), + cands + .into_iter() + .filter_map(|res| res.derived_from(self.db)) + .collect(), + ), + }; + + self.diags.push(diag); + return; + } + }; + + if let Some((idx, res)) = resolved_path.find_invisible_segment(self.db) { + let span = ctxt.span().unwrap().segment(idx); + let ident = path.segments(self.db.as_hir_db())[idx].unwrap(); + let diag = NameResDiag::invisible(span.into(), *ident, res.derived_from(self.db)); + self.diags.push(diag); + return; + } + + let EarlyResolvedPath::Full(bucket) = resolved_path.resolved else { + return; + }; + self.verify_path(path, ctxt.span().unwrap(), bucket); + } +} + +#[derive(Debug, Clone, Copy)] +enum ExpectedPathKind { + Type, + Trait, + Value, +} + +impl ExpectedPathKind { + fn domain(self) -> NameDomain { + match self { + ExpectedPathKind::Type => NameDomain::Type, + ExpectedPathKind::Trait => NameDomain::Type, + ExpectedPathKind::Value => NameDomain::Value, + } + } + + fn pick(self, db: &dyn HirAnalysisDb, bucket: ResBucket) -> Either { + debug_assert!(!bucket.is_empty()); + + let res = match bucket.pick(self.domain()).as_ref().ok() { + Some(res) => res.clone(), + None => { + return Either::Right(bucket.into_iter().find_map(|res| res.ok()).unwrap()); + } + }; + + match self { + Self::Type if !res.is_type(db) => Either::Right(res), + Self::Trait if !res.is_trait(db) => Either::Right(res), + Self::Value if !res.is_value(db) => Either::Right(res), + _ => Either::Left(res), + } + } } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 82edb1cdf5..5dab010156 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -138,7 +138,7 @@ impl ResBucket { } /// Returns the resolution of the given `domain`. 
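    ///
    /// Falls back to a `NameResolutionError::NotFound` error when the bucket
    /// has no entry for the requested domain.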
- pub fn res_by_domain(&self, domain: NameDomain) -> &NameResolutionResult { + pub fn pick(&self, domain: NameDomain) -> &NameResolutionResult { self.bucket .get(&domain) .unwrap_or(&Err(NameResolutionError::NotFound)) @@ -249,6 +249,17 @@ impl NameRes { } } + pub fn is_trait(&self, db: &dyn HirAnalysisDb) -> bool { + match self.kind { + NameResKind::Prim(_) => false, + NameResKind::Scope(scope) => scope.is_trait(db.as_hir_db()), + } + } + + pub fn is_value(&self, db: &dyn HirAnalysisDb) -> bool { + !self.is_type(db) && !self.is_trait(db) + } + /// Returns the scope of the name resolution if the name is not a builtin /// type. pub fn scope(&self) -> Option { @@ -329,11 +340,18 @@ impl NameRes { } } + pub(super) fn kind_name(&self) -> &'static str { + match self.kind { + NameResKind::Scope(scope) => scope.kind_name(), + NameResKind::Prim(_) => "type", + } + } + fn new_prim(prim: PrimTy) -> Self { Self { kind: prim.into(), derivation: NameDerivation::Prim, - domain: NameDomain::Item, + domain: NameDomain::Type, } } } @@ -352,10 +370,10 @@ impl NameResKind { } } - pub fn name(self, db: &dyn HirAnalysisDb) -> Option { + pub fn name(self, db: &dyn HirAnalysisDb) -> IdentId { match self { - NameResKind::Scope(scope) => scope.name(db.as_hir_db()), - NameResKind::Prim(prim) => prim.name().into(), + NameResKind::Scope(scope) => scope.name(db.as_hir_db()).unwrap(), + NameResKind::Prim(prim) => prim.name(), } } } @@ -524,7 +542,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { // guaranteed to be unique. bucket.push(&NameRes::new_from_scope( ScopeId::root(*root_mod), - NameDomain::Item, + NameDomain::Type, NameDerivation::External, )) } @@ -718,6 +736,13 @@ impl ResolvedQueryCacheStore { self.cache.get(&query) } + pub(super) fn no_cache() -> Self { + Self { + cache: FxHashMap::default(), + no_cache: true, + } + } + fn cache_result(&mut self, query: NameQuery, result: ResBucket) { if self.no_cache { return; @@ -744,7 +769,7 @@ impl ResolvedQueryCacheStore { pub enum NameDomain { /// The domain is associated with all items except for items that belongs to /// the `Value` domain. - Item = 0b1, + Type = 0b1, /// The domain is associated with a local variable and items that are /// guaranteed not to have associated names. e.g., `fn`, `const` or enum /// variables. @@ -759,7 +784,7 @@ impl NameDomain { ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeId::FuncParam(..) => { Self::Value } - ScopeId::Item(_) | ScopeId::GenericParam(..) => Self::Item, + ScopeId::Item(_) | ScopeId::GenericParam(..) => Self::Type, ScopeId::Field(..) => Self::Field, ScopeId::Variant(..) 
=> Self::Value, } diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index a13ccd2694..1bc5e15cf9 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -35,20 +35,22 @@ pub(super) struct EarlyResolvedPathWithTrajectory { } impl EarlyResolvedPathWithTrajectory { - pub(super) fn check_trajectory_visibility( + pub(super) fn find_invisible_segment( &self, db: &dyn HirAnalysisDb, - ) -> PathResolutionResult<()> { + ) -> Option<(usize, &NameRes)> { let original_scope = self.trajectory.first().unwrap().scope().unwrap(); for (i, res) in self.trajectory[1..].iter().enumerate() { if !res.is_visible(db, original_scope) { - return Err(PathResolutionError::new( - NameResolutionError::Invisible(res.derived_from(db)), - i, - )); + return Some((i, res)); } } - Ok(()) + + None + } + + pub(super) fn resolved_at(&self, index: usize) -> &NameRes { + &self.trajectory[index] } } @@ -66,17 +68,17 @@ impl PathResolutionError { } } -pub(super) struct EarlyPathResolver<'db, 'a, 'b> { +pub(super) struct EarlyPathResolver<'db, 'a, 'b, 'c> { db: &'db dyn HirAnalysisDb, - name_resolver: &'a mut NameResolver<'db, 'a>, - cache_store: &'b ResolvedQueryCacheStore, + name_resolver: &'a mut NameResolver<'db, 'b>, + cache_store: &'c ResolvedQueryCacheStore, } -impl<'db, 'a, 'b> EarlyPathResolver<'db, 'a, 'b> { +impl<'db, 'a, 'b, 'c> EarlyPathResolver<'db, 'a, 'b, 'c> { pub(super) fn new( db: &'db dyn HirAnalysisDb, - name_resolver: &'a mut NameResolver<'db, 'a>, - cache_store: &'b ResolvedQueryCacheStore, + name_resolver: &'a mut NameResolver<'db, 'b>, + cache_store: &'c ResolvedQueryCacheStore, ) -> Self { Self { db, @@ -213,7 +215,7 @@ impl<'a> IntermediatePath<'a> { fn proceed(&mut self, bucket: ResBucket) -> PathResolutionResult<()> { let next_res = bucket - .res_by_domain(NameDomain::Item) + .pick(NameDomain::Type) .clone() .map_err(|err| PathResolutionError::new(err, self.idx))?; diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 0c398f5054..23f6d98009 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -78,6 +78,25 @@ impl ItemKind { } } + pub fn kind_name(self) -> &'static str { + use ItemKind::*; + match self { + TopMod(_) => "module", + Mod(_) => "module", + Func(_) => "function", + Struct(_) => "struct", + Contract(_) => "contract", + Enum(_) => "enum", + TypeAlias(_) => "type alias", + Trait(_) => "trait", + Impl(_) => "impl", + ImplTrait(_) => "impl trait", + Const(_) => "const", + Use(_) => "use", + Body(_) => "expression body", + } + } + pub fn name_span(self) -> Option { use ItemKind::*; match self { @@ -139,6 +158,10 @@ impl ItemKind { Self::Struct(_) | Self::Enum(_) | Self::Contract(_) | Self::TypeAlias(_) ) } + + pub fn is_trait(self) -> bool { + matches!(self, Self::Trait(_)) + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index e66f6bcaf2..c00bab1556 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -1,4 +1,4 @@ -use crate::hir_def::Partial; +use crate::{hir_def::Partial, HirDb}; use super::IdentId; @@ -7,3 +7,13 @@ pub struct PathId { #[return_ref] pub segments: Vec>, } + +impl PathId { + pub fn last_segment(self, db: &dyn HirDb) -> Partial { + self.segments(db).last().copied().unwrap_or_default() + } + + pub fn segment_len(self, 
db: &dyn HirDb) -> usize { + self.segments(db).len() + } +} diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 4bdba58e13..bb6c9ca7e9 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -64,6 +64,16 @@ impl ScopeId { } } + pub fn kind_name(&self) -> &'static str { + match self { + ScopeId::Item(item) => item.kind_name(), + ScopeId::GenericParam(_, _) => "type", + ScopeId::FuncParam(_, _) => "value", + ScopeId::Field(_, _) => "field", + ScopeId::Variant(_, _) => "value", + } + } + pub fn from_item(item: ItemKind) -> Self { Self::Item(item) } @@ -176,6 +186,13 @@ impl ScopeId { } } + pub fn is_trait(self, db: &dyn HirDb) -> bool { + match self.data(db).id { + ScopeId::Item(item) => item.is_trait(), + _ => false, + } + } + pub fn name(self, db: &dyn HirDb) -> Option { match self.data(db).id { ScopeId::Item(item) => item.name(db), From 417d9363f48986b626280121d572701ab38ad05f Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 11 Jul 2023 00:41:54 +0200 Subject: [PATCH 204/678] Implement `PathAnalysisPass` --- crates/driver2/src/lib.rs | 3 +- crates/hir-analysis/src/lib.rs | 1 + .../src/name_resolution/diagnostics.rs | 66 +++++++++++------- .../src/name_resolution/import_resolver.rs | 6 +- .../hir-analysis/src/name_resolution/mod.rs | 68 +++++++++++++++---- .../src/name_resolution/path_resolver.rs | 14 ++-- crates/hir/src/hir_def/item.rs | 1 + crates/hir/src/lower/item.rs | 3 +- crates/hir/src/visitor.rs | 9 +++ 9 files changed, 123 insertions(+), 48 deletions(-) diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index a5af12f653..a359974484 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -16,7 +16,7 @@ use hir::{ HirDb, LowerHirDb, ParsingPass, SpannedHirDb, }; use hir_analysis::{ - name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass}, + name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, HirAnalysisDb, }; @@ -141,6 +141,7 @@ fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { let mut pass_manager = AnalysisPassManager::new(); pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); pass_manager } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 47084cd828..fc14e8c961 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -6,6 +6,7 @@ pub struct Jar( name_resolution::resolve_path_early, name_resolution::resolve_imports, name_resolution::diagnostics::NameResolutionDiagAccumulator, + name_resolution::diagnostics::ImportResolutionDiagAccumulator, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index ba7d8e795f..0e5d6051f4 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -15,6 +15,9 @@ use super::NameRes; #[salsa::accumulator] pub struct NameResolutionDiagAccumulator(pub(super) NameResDiag); +#[salsa::accumulator] +pub struct ImportResolutionDiagAccumulator(pub(super) NameResDiag); + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum NameResDiag { /// The definition conflicts with other definitions. 
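    /// The second field holds the name span of each conflicting definition.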
@@ -43,27 +46,7 @@ pub enum NameResDiag { } impl NameResDiag { - pub fn conflict(name: IdentId, conflict_with: Vec) -> Self { - Self::Conflict(name, conflict_with) - } - - pub fn not_found(span: DynLazySpan, ident: IdentId) -> Self { - Self::NotFound(span, ident) - } - - pub fn invisible( - span: DynLazySpan, - name: IdentId, - invisible_span: Option, - ) -> Self { - Self::Invisible(span, name, invisible_span) - } - - pub fn ambiguous(span: DynLazySpan, ident: IdentId, candidates: Vec) -> Self { - Self::Ambiguous(span, ident, candidates) - } - - // Returns the top-level module where the diagnostic is located. + /// Returns the top-level module where the diagnostic is located. pub fn top_mod(&self, db: &dyn HirAnalysisDb) -> TopLevelMod { match self { Self::Conflict(_, conflicts) => conflicts @@ -80,6 +63,35 @@ impl NameResDiag { } } + pub(super) fn conflict(name: IdentId, conflict_with: Vec) -> Self { + Self::Conflict(name, conflict_with) + } + + pub(super) fn not_found(span: DynLazySpan, ident: IdentId) -> Self { + Self::NotFound(span, ident) + } + + pub(super) fn invisible( + span: DynLazySpan, + name: IdentId, + invisible_span: Option, + ) -> Self { + Self::Invisible(span, name, invisible_span) + } + + pub(super) fn ambiguous( + db: &dyn HirAnalysisDb, + span: DynLazySpan, + ident: IdentId, + cands: Vec, + ) -> Self { + let cands = cands + .into_iter() + .filter_map(|name| name.kind.name_span(db)) + .collect(); + Self::Ambiguous(span, ident, cands) + } + fn local_code(&self) -> u16 { match self { Self::Conflict(..) => 1, @@ -195,7 +207,7 @@ impl NameResDiag { let name = name.data(db.as_hir_db()); vec![SubDiagnostic::new( LabelStyle::Primary, - format!("expected type here, found {} `{}`", res_kind_name, name), + format!("expected type here, but found {} `{}`", res_kind_name, name), prim_span.resolve(db), )] } @@ -205,7 +217,10 @@ impl NameResDiag { let name = name.data(db.as_hir_db()); vec![SubDiagnostic::new( LabelStyle::Primary, - format!("expected trait here, found {} `{}`", res_kind_name, name), + format!( + "expected trait here, but found {} `{}`", + res_kind_name, name + ), prim_span.resolve(db), )] } @@ -215,7 +230,10 @@ impl NameResDiag { let name = name.data(db.as_hir_db()); vec![SubDiagnostic::new( LabelStyle::Primary, - format!("expected value here, found {} `{}`", res_kind_name, name), + format!( + "expected value here, but found {} `{}`", + res_kind_name, name + ), prim_span.resolve(db), )] } diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index a6b831fceb..d2ba2101b4 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -551,12 +551,10 @@ impl<'db> ImportResolver<'db> { NameResolutionError::Ambiguous(cands) => { self.accumulated_errors.push(NameResDiag::ambiguous( + self.db, i_use.current_segment_span(), i_use.current_segment_ident(self.db).unwrap(), - cands - .into_iter() - .filter_map(|name| name.kind.name_span(self.db)) - .collect(), + cands, )); } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index bd08f801d4..724e263a26 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -25,7 +25,7 @@ use rustc_hash::FxHashSet; use crate::HirAnalysisDb; use self::{ - diagnostics::{NameResDiag, NameResolutionDiagAccumulator}, + diagnostics::{ImportResolutionDiagAccumulator, NameResDiag, 
NameResolutionDiagAccumulator}, import_resolver::DefaultImporter, name_resolver::{NameResolutionError, ResolvedQueryCacheStore}, path_resolver::{EarlyPathResolver, EarlyResolvedPath}, @@ -48,13 +48,37 @@ impl<'db> ImportAnalysisPass<'db> { impl<'db> ModuleAnalysisPass for ImportAnalysisPass<'db> { fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { let ingot = top_mod.ingot(self.db.as_hir_db()); - resolve_imports::accumulated::(self.db, ingot) + resolve_imports::accumulated::(self.db, ingot) .into_iter() .filter_map(|diag| (diag.top_mod(self.db) == top_mod).then(|| Box::new(diag) as _)) .collect() } } +pub struct PathAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> PathAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass for PathAnalysisPass<'db> { + fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { + let errors = + resolve_path_early::accumulated::(self.db, top_mod); + + errors + .into_iter() + .filter_map(|err| { + (!matches!(err, NameResDiag::Conflict(..))).then(|| Box::new(err) as _) + }) + .collect() + } +} + pub struct DefConflictAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, } @@ -83,7 +107,7 @@ pub(crate) fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> Resolve let resolver = import_resolver::ImportResolver::new(db, ingot); let (imports, diags) = resolver.resolve_imports(); for diag in diags { - NameResolutionDiagAccumulator::push(db, diag); + ImportResolutionDiagAccumulator::push(db, diag); } imports @@ -138,13 +162,35 @@ impl<'db, 'a> EarlyPathVisitor<'db, 'a> { } fn verify_path(&mut self, path: PathId, span: LazyPathSpan, bucket: ResBucket) { - debug_assert!(!bucket.is_empty()); - let path_kind = self.path_ctxt.last().unwrap(); let last_seg_idx = path.segment_len(self.db.as_hir_db()) - 1; let last_seg_ident = *path.segments(self.db.as_hir_db())[last_seg_idx].unwrap(); let span = span.segment(last_seg_idx).into(); + if bucket.is_empty() { + let Err(err) = bucket.pick(path_kind.domain()) else { + unreachable!() + }; + + match err { + NameResolutionError::NotFound => { + self.diags + .push(NameResDiag::not_found(span, last_seg_ident)); + } + NameResolutionError::Ambiguous(cands) => { + self.diags.push(NameResDiag::ambiguous( + self.db, + span, + last_seg_ident, + cands.clone(), + )); + } + _ => {} + }; + + return; + } + match path_kind.pick(self.db, bucket) { // The path exists and belongs to the expected kind. 
Either::Left(res) => { @@ -344,6 +390,7 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { let failed_at = err.failed_at; let span = ctxt.span().unwrap().segment(failed_at); let ident = path.segments(self.db.as_hir_db())[failed_at]; + let diag = match err.kind { NameResolutionError::NotFound => { NameResDiag::not_found(span.into(), *ident.unwrap()) @@ -357,14 +404,9 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { unreachable!("`EarlyPathResolver doesn't check visibility"); } - NameResolutionError::Ambiguous(cands) => NameResDiag::ambiguous( - span.into(), - *ident.unwrap(), - cands - .into_iter() - .filter_map(|res| res.derived_from(self.db)) - .collect(), - ), + NameResolutionError::Ambiguous(cands) => { + NameResDiag::ambiguous(self.db, span.into(), *ident.unwrap(), cands) + } }; self.diags.push(diag); diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index 1bc5e15cf9..56122a43ea 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -191,13 +191,17 @@ impl<'a> IntermediatePath<'a> { fn finalize_as_partial(self) -> EarlyResolvedPathWithTrajectory { let resolved = EarlyResolvedPath::Partial { - resolved: self.current_res, + resolved: self.current_res.clone(), unresolved_from: self.idx, }; + let mut trajectory = self.trajectory; + let current_res = self.current_res; + trajectory.push(current_res); + EarlyResolvedPathWithTrajectory { resolved, - trajectory: self.trajectory, + trajectory: trajectory, } } @@ -228,10 +232,10 @@ impl<'a> IntermediatePath<'a> { fn state(&self, db: &dyn HirAnalysisDb) -> IntermediatePathState { debug_assert!(self.idx < self.path.len()); - if self.current_res.is_type(db) { + if self.idx == self.path.len() - 1 { + return IntermediatePathState::ReadyToFinalize; + } else if self.current_res.is_type(db) { IntermediatePathState::TypeDependent - } else if self.idx == self.path.len() - 1 { - IntermediatePathState::ReadyToFinalize } else { IntermediatePathState::Unresolved } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 23f6d98009..ac1d729461 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -504,6 +504,7 @@ pub struct Const { id: TrackedItemId, pub name: Partial, + pub ty: Partial, pub body: Partial, pub vis: Visibility, pub top_mod: TopLevelMod, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index c30e313c39..3c060f3f81 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -387,6 +387,7 @@ impl Const { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = TrackedItemId::Const(name).join(parent_id); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let body = ast .value() .map(|ast| Body::lower_ast(ctxt, id.clone(), ast)) @@ -394,7 +395,7 @@ impl Const { let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let origin = HirOrigin::raw(&ast); - let const_ = Self::new(ctxt.db(), id, name, body, vis, ctxt.top_mod(), origin); + let const_ = Self::new(ctxt.db(), id, name, ty, body, vis, ctxt.top_mod(), origin); ctxt.leave_scope(const_) } } diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 8ee9a42b11..1f18106ccb 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -767,6 +767,15 @@ where ) } + if let Some(ty) = const_.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + 
visitor.visit_ty(ctxt, ty); + }, + ) + } + if let Some(body) = const_.body(ctxt.db).to_opt() { visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); } From 2455f16fae691d50ab9370d5e2036c282d2d7f25 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 11 Jul 2023 01:12:17 +0200 Subject: [PATCH 205/678] Add ui-tests for PathAnalysis --- .../src/name_resolution/path_resolver.rs | 4 +- crates/hir/src/span/transition.rs | 2 + .../name_resolution/conflict_generics.snap | 12 ++++- .../name_resolution/import_conflict.snap | 10 +--- .../name_resolution/path_invalid_domain.fe | 23 +++++++++ .../name_resolution/path_invalid_domain.snap | 48 +++++++++++++++++++ .../name_resolution/path_missing_generics.fe | 11 +++++ .../path_missing_generics.snap | 18 +++++++ .../fixtures/name_resolution/path_shadow.fe | 7 +++ .../fixtures/name_resolution/path_shadow.snap | 12 +++++ 10 files changed, 135 insertions(+), 12 deletions(-) create mode 100644 crates/uitest/fixtures/name_resolution/path_invalid_domain.fe create mode 100644 crates/uitest/fixtures/name_resolution/path_invalid_domain.snap create mode 100644 crates/uitest/fixtures/name_resolution/path_missing_generics.fe create mode 100644 crates/uitest/fixtures/name_resolution/path_missing_generics.snap create mode 100644 crates/uitest/fixtures/name_resolution/path_shadow.fe create mode 100644 crates/uitest/fixtures/name_resolution/path_shadow.snap diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index 56122a43ea..5b09bb8c3c 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -201,7 +201,7 @@ impl<'a> IntermediatePath<'a> { EarlyResolvedPathWithTrajectory { resolved, - trajectory: trajectory, + trajectory, } } @@ -233,7 +233,7 @@ impl<'a> IntermediatePath<'a> { debug_assert!(self.idx < self.path.len()); if self.idx == self.path.len() - 1 { - return IntermediatePathState::ReadyToFinalize; + IntermediatePathState::ReadyToFinalize } else if self.current_res.is_type(db) { IntermediatePathState::TypeDependent } else { diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 028f25520e..030976909a 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -107,6 +107,7 @@ pub(crate) enum ChainRoot { Pat(PatRoot), } +#[derive(Debug, Clone)] pub(crate) struct ResolvedOrigin { pub(crate) file: InputFile, pub(crate) kind: ResolvedOriginKind, @@ -173,6 +174,7 @@ impl ResolvedOrigin { } } +#[derive(Debug, Clone)] pub(crate) enum ResolvedOriginKind { Node(SyntaxNode), Token(SyntaxToken), diff --git a/crates/uitest/fixtures/name_resolution/conflict_generics.snap b/crates/uitest/fixtures/name_resolution/conflict_generics.snap index c0cc9d2791..c2949fd85c 100644 --- a/crates/uitest/fixtures/name_resolution/conflict_generics.snap +++ b/crates/uitest/fixtures/name_resolution/conflict_generics.snap @@ -1,5 +1,5 @@ --- -source: crates/uitest/src/lib.rs +source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/conflict_generics.fe --- @@ -11,4 +11,14 @@ error[2-0001]: `T` conflicts with other definitions │ │ │ `T` is defined here +error[2-0004]: `T` is ambiguous + ┌─ conflict_generics.fe:2:8 + │ +1 │ pub struct MyS { + │ - - candidate `#1` + │ │ + │ candidate `#0` +2 │ x: T + │ ^ `T` is ambiguous + diff --git a/crates/uitest/fixtures/name_resolution/import_conflict.snap 
b/crates/uitest/fixtures/name_resolution/import_conflict.snap index 8895a26b14..58d1d0f5d2 100644 --- a/crates/uitest/fixtures/name_resolution/import_conflict.snap +++ b/crates/uitest/fixtures/name_resolution/import_conflict.snap @@ -1,5 +1,5 @@ --- -source: crates/uitest/src/lib.rs +source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/import_conflict.fe --- @@ -11,12 +11,4 @@ error[2-0001]: `S` conflicts with other definitions 2 │ use foo2::S │ - `S` is redefined here -error[2-0001]: `S` conflicts with other definitions - ┌─ import_conflict.fe:1:11 - │ -1 │ use foo1::S - │ ^ `S` is defined here -2 │ use foo2::S - │ - `S` is redefined here - diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe new file mode 100644 index 0000000000..2e7fe61da7 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe @@ -0,0 +1,23 @@ +pub const MyC: i32 = 1 + +pub enum MyE { + Var +} + +pub trait MyT {} +pub trait MyTWithGenerics {} + +use MyE::Var + +pub enum MyE2 +where T: MyE + U: MyTWithGenerics +{ + Variant(MyC) + Variant2(Var) +} + +pub fn foo() { + let x: MyT = MyT + let MyE::Var{ x } = MyE::Var +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap new file mode 100644 index 0000000000..951651a041 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap @@ -0,0 +1,48 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/path_invalid_domain.fe +--- +error[2-0005]: expected type item here + ┌─ path_invalid_domain.fe:14:26 + │ +14 │ U: MyTWithGenerics + │ ^^^ expected type here, but found trait `MyT` + +error[2-0005]: expected type item here + ┌─ path_invalid_domain.fe:16:13 + │ +16 │ Variant(MyC) + │ ^^^ expected type here, but found const `MyC` + +error[2-0005]: expected type item here + ┌─ path_invalid_domain.fe:17:14 + │ +17 │ Variant2(Var) + │ ^^^ expected type here, but found value `Var` + +error[2-0005]: expected type item here + ┌─ path_invalid_domain.fe:21:12 + │ +21 │ let x: MyT = MyT + │ ^^^ expected type here, but found trait `MyT` + +error[2-0005]: expected type item here + ┌─ path_invalid_domain.fe:22:14 + │ +22 │ let MyE::Var{ x } = MyE::Var + │ ^^^ expected type here, but found value `Var` + +error[2-0006]: expected trait item here + ┌─ path_invalid_domain.fe:13:10 + │ +13 │ where T: MyE + │ ^^^ expected trait here, but found enum `MyE` + +error[2-0007]: expected value here + ┌─ path_invalid_domain.fe:21:18 + │ +21 │ let x: MyT = MyT + │ ^^^ expected value here, but found trait `MyT` + + diff --git a/crates/uitest/fixtures/name_resolution/path_missing_generics.fe b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe new file mode 100644 index 0000000000..44726226c0 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe @@ -0,0 +1,11 @@ +pub trait Trait {} + +pub struct MyS + where T: Trait + U: Trait + Z: Trait +{ + t: T + u: U + z: Z +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/path_missing_generics.snap b/crates/uitest/fixtures/name_resolution/path_missing_generics.snap new file mode 100644 index 0000000000..254ff84a8e --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_missing_generics.snap @@ -0,0 +1,18 @@ 
+--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/path_missing_generics.fe +--- +error[2-0002]: `Z` is not found + ┌─ path_missing_generics.fe:6:11 + │ +6 │ Z: Trait + │ ^ `Z` is not found + +error[2-0002]: `Z` is not found + ┌─ path_missing_generics.fe:10:8 + │ +10 │ z: Z + │ ^ `Z` is not found + + diff --git a/crates/uitest/fixtures/name_resolution/path_shadow.fe b/crates/uitest/fixtures/name_resolution/path_shadow.fe new file mode 100644 index 0000000000..70d9c6124a --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_shadow.fe @@ -0,0 +1,7 @@ +pub trait T {} +pub struct MyS + where U: T +{ + t: T + u: U +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/path_shadow.snap b/crates/uitest/fixtures/name_resolution/path_shadow.snap new file mode 100644 index 0000000000..8c664920ff --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_shadow.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/path_shadow.fe +--- +error[2-0006]: expected trait item here + ┌─ path_shadow.fe:3:14 + │ +3 │ where U: T + │ ^ expected trait here, but found type `T` + + From 3e7605dd52051fece919555a243f8ea5775c7cdc Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 11 Jul 2023 14:42:52 +0200 Subject: [PATCH 206/678] Fix a bug in visitor's lazy span tracing --- crates/hir-analysis/src/name_resolution/mod.rs | 8 +++++++- crates/hir/src/hir_def/pat.rs | 18 ++++++++++++++++++ crates/hir/src/hir_def/path.rs | 2 +- crates/hir/src/span/transition.rs | 12 ++++++++---- crates/hir/src/visitor.rs | 12 +++++++++--- 5 files changed, 43 insertions(+), 9 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 724e263a26..a45dc04b8f 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -163,7 +163,7 @@ impl<'db, 'a> EarlyPathVisitor<'db, 'a> { fn verify_path(&mut self, path: PathId, span: LazyPathSpan, bucket: ResBucket) { let path_kind = self.path_ctxt.last().unwrap(); - let last_seg_idx = path.segment_len(self.db.as_hir_db()) - 1; + let last_seg_idx = path.len(self.db.as_hir_db()) - 1; let last_seg_ident = *path.segments(self.db.as_hir_db())[last_seg_idx].unwrap(); let span = span.segment(last_seg_idx).into(); @@ -359,6 +359,12 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { } fn visit_pat(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) { + // We don't need to check bind patterns here, it will be checked in pattern + // match analysis. + if pat.is_bind(self.db.as_hir_db()) { + return; + } + if matches!(pat, Pat::Record { .. }) { self.path_ctxt.push(ExpectedPathKind::Type); } else { diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs index 5839948406..1bf6156fbf 100644 --- a/crates/hir/src/hir_def/pat.rs +++ b/crates/hir/src/hir_def/pat.rs @@ -16,6 +16,16 @@ pub enum Pat { Or(PatId, PatId), } +impl Pat { + /// Return `true` if this pattern is a binding. 
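    // [Editor's note, illustrative only — not part of this patch] A "binding" here
    // means a single-segment path pattern, matching the `p.len(db) == 1` check just
    // below: in `let x = foo()` the pattern `x` lowers to a one-segment `Pat::Path`
    // and counts as a binding, while a two-segment path such as `MyE::Var` does not.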
+ pub fn is_bind(&self, db: &dyn HirDb) -> bool { + match self { + Self::Path(Partial::Present(p)) => p.len(db) == 1, + _ => false, + } + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct PatId(u32); entity_impl!(PatId); @@ -28,6 +38,14 @@ impl PatId { pub fn data(self, db: &dyn HirDb, body: Body) -> &Partial { &body.pats(db)[self] } + + /// Return `true` if this pattern is a binding. + pub fn is_bind(self, db: &dyn HirDb, body: Body) -> bool { + match self.data(db, body) { + Partial::Present(p) => p.is_bind(db), + Partial::Absent => false, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index c00bab1556..fc2444abd2 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -13,7 +13,7 @@ impl PathId { self.segments(db).last().copied().unwrap_or_default() } - pub fn segment_len(self, db: &dyn HirDb) -> usize { + pub fn len(self, db: &dyn HirDb) -> usize { self.segments(db).len() } } diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs index 030976909a..417ffe031d 100644 --- a/crates/hir/src/span/transition.rs +++ b/crates/hir/src/span/transition.rs @@ -48,6 +48,14 @@ pub(crate) struct SpanTransitionChain { } impl SpanTransitionChain { + pub(crate) fn pop_transition(&mut self) { + self.chain.pop(); + } + + pub(crate) fn len(&self) -> usize { + self.chain.len() + } + pub(super) fn new(root: impl Into) -> Self { Self { root: root.into(), @@ -80,10 +88,6 @@ impl SpanTransitionChain { pub(super) fn push(&mut self, transition: LazyTransitionFn) { self.chain.push(transition); } - - pub(crate) fn pop_transition(&mut self) { - self.chain.pop(); - } } #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, derive_more::From)] diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 1f18106ccb..9593eb4eed 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1749,9 +1749,13 @@ where F2: FnOnce(&mut VisitorCtxt), U: LazySpan + SpanDowncast + Into, { + let chain_len = self.span.0.as_ref().unwrap().len(); let mut new_ctxt = self.transition(f1); + f2(&mut new_ctxt); - *self = new_ctxt.pop(); + + let n_pop = new_ctxt.span.0.as_ref().unwrap().len() - chain_len; + *self = new_ctxt.pop(n_pop); } fn transition(&mut self, f: F) -> VisitorCtxt<'db, U> @@ -1772,11 +1776,13 @@ where .cast() } - fn pop(mut self) -> VisitorCtxt<'db, U> + fn pop(mut self, n_pop: usize) -> VisitorCtxt<'db, U> where U: LazySpan, { - self.span.0.as_mut().unwrap().pop_transition(); + for _ in 0..n_pop { + self.span.0.as_mut().unwrap().pop_transition(); + } Self { db: self.db, From ff13077a0b332a0931bc21308b1ca5d2a8914119 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 11 Jul 2023 15:18:22 +0200 Subject: [PATCH 207/678] Fix broken intra doc links --- crates/hir/src/diagnostics.rs | 4 ++-- crates/hir/src/hir_def/item.rs | 3 ++- crates/hir/src/hir_def/module_tree.rs | 2 +- crates/hir/src/span/types.rs | 8 ++++---- crates/hir/src/visitor.rs | 5 +++++ 5 files changed, 14 insertions(+), 8 deletions(-) diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 5845c33b28..4ec9530393 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -1,7 +1,7 @@ //! This module defines the diagnostics that can be accumulated inside salsa-db //! with span-agnostic forms. All diagnostics accumulated in salsa-db should //! implement [`DiagnosticVoucher`] which defines the conversion into -//! 
[`CompleteDiagnostics`]. +//! [`CompleteDiagnostic`]. use common::diagnostics::{CompleteDiagnostic, GlobalErrorCode}; @@ -13,7 +13,7 @@ use crate::SpannedHirDb; /// /// All types that implement `DiagnosticVoucher` must NOT have a span /// information which invalidates cache in salsa-db. Instead of it, the all -/// information is given by [`SpannedHirDB`] to allow evaluating span lazily. +/// information is given by [`SpannedHirDb`] to allow evaluating span lazily. /// /// The reason why we use `DiagnosticVoucher` is that we want to evaluate span /// lazily to avoid invalidating cache in salsa-db. diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index ac1d729461..3d9d8754f8 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -251,7 +251,8 @@ impl TopLevelMod { } /// Returns the top level children of this module. - /// If you need all the children, use [`children_nested`] instead. + /// If you need all the children, use + /// [`children_nested`](Self::children_nested) instead. pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { let s_graph = self.scope_graph(db); let scope = ScopeId::from_item(self.into()); diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs index 2a436fe650..0484dc2c9d 100644 --- a/crates/hir/src/hir_def/module_tree.rs +++ b/crates/hir/src/hir_def/module_tree.rs @@ -10,7 +10,7 @@ use super::{IdentId, IngotId, TopLevelMod}; /// This tree represents the structure of an ingot. /// Internal modules are not included in this tree, instead, they are included -/// in [ModuleItemTree](crate::hir_def::item_tree::ModuleItemTree). +/// in [ScopeGraph](crate::hir_def::scope_graph::ScopeGraph). /// /// This is used in later name resolution phase. /// The tree is file contents agnostic, i.e., **only** depends on project diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs index 721212a5e3..b71d55f018 100644 --- a/crates/hir/src/span/types.rs +++ b/crates/hir/src/span/types.rs @@ -6,7 +6,7 @@ use super::define_lazy_span_node; define_lazy_span_node!(LazyTySpan); impl LazyTySpan { - /// Convert this [`LazyTypeSpan`] into a [`LazyPathTypeSpan`]. + /// Convert this [`LazyTySpan`] into a [`LazyPathTypeSpan`]. /// /// If the type that is pointed to by this is not a path type, the result /// span will point to the same span of the original type. @@ -14,7 +14,7 @@ impl LazyTySpan { LazyPathTypeSpan(self.0) } - /// Convert this [`LazyTypeSpan`] into a [`LazyPtrTypeSpan`]. + /// Convert this [`LazyTySpan`] into a [`LazyPtrTypeSpan`]. /// /// If the type that is pointed to by this is not a pointer type, the result /// span will point to the same span of the original type. @@ -22,7 +22,7 @@ impl LazyTySpan { LazyPtrTypeSpan(self.0) } - /// Convert this [`LazyTypeSpan`] into a [`LazyTupleTypeSpan`]. + /// Convert this [`LazyTySpan`] into a [`LazyTupleTypeSpan`]. /// /// If the type that is pointed to by this is not a tuple type, the result /// span will point to the same span of the original type. @@ -30,7 +30,7 @@ impl LazyTySpan { LazyTupleTypeSpan(self.0) } - /// convert this [`LazyTypeSpan`] into a [`LazyArrayTypeSpan`]. + /// convert this [`LazyTySpan`] into a [`LazyArrayTypeSpan`]. /// /// If the type that is pointed to by this is not an array type, the result /// span will point to the same span of the original type. 
diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 9593eb4eed..8ea32a169a 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1721,6 +1721,10 @@ pub fn walk_where_predicate( use attr::{Attr, AttrListId}; +/// [`VisitorCtxt`] is used to track the span information of the current node +/// being visited. +/// The context is updated automatically when entering a new node. Thus, the +/// user need to only construct the context when invoking a visitor. pub struct VisitorCtxt<'db, T> where T: LazySpan, @@ -1816,6 +1820,7 @@ macro_rules! define_ctxt_ctor { $span_ty:ty, $ctor:ident($($ctor_name:ident: $ctor_ty:ty),*)),)*) => { $(impl<'db> VisitorCtxt<'db, $span_ty> { + /// Create a new [`VisitorCtxt`] with the given item as the root of the span chain. pub fn $ctor(db: &'db dyn HirDb, $($ctor_name: $ctor_ty,)*) -> Self { Self { db, From 37d94d16161cd9c027d92cc23d4a3be6bac481ea Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 11 Jul 2023 19:07:33 +0200 Subject: [PATCH 208/678] Add thin wrappers for early path resolution as an API --- crates/hir-analysis/src/lib.rs | 2 +- .../src/name_resolution/import_resolver.rs | 31 +++++--- .../hir-analysis/src/name_resolution/mod.rs | 72 ++++++++++++++++--- .../src/name_resolution/name_resolver.rs | 22 +++--- .../src/name_resolution/path_resolver.rs | 12 ++-- 5 files changed, 103 insertions(+), 36 deletions(-) diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index fc14e8c961..7c809f8e62 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -3,7 +3,7 @@ use hir::{span::DynLazySpan, HirDb}; #[salsa::jar(db = HirAnalysisDb)] pub struct Jar( /// Functions for import/name resolutions. - name_resolution::resolve_path_early, + name_resolution::resolve_path_early_impl, name_resolution::resolve_imports, name_resolution::diagnostics::NameResolutionDiagAccumulator, name_resolution::diagnostics::ImportResolutionDiagAccumulator, diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index d2ba2101b4..76908f210d 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -19,8 +19,8 @@ use crate::{ use super::{ diagnostics::NameResDiag, name_resolver::{ - NameDerivation, NameDomain, NameQuery, NameRes, NameResKind, NameResolutionError, - NameResolutionResult, NameResolver, QueryDirective, ResBucket, + NameDerivation, NameDomain, NameQuery, NameRes, NameResBucket, NameResKind, + NameResolutionError, NameResolutionResult, NameResolver, QueryDirective, }, }; @@ -665,7 +665,7 @@ impl<'db> ImportResolver<'db> { pub struct ResolvedImports { pub named_resolved: FxHashMap, pub glob_resolved: FxHashMap, - pub unnamed_resolved: Vec, + pub unnamed_resolved: Vec, } pub(super) trait Importer { @@ -681,9 +681,14 @@ pub(super) trait Importer { scope: ScopeId, ) -> Option<&'a GlobImportSet>; - fn unnamed_imports<'a>(&'a self, db: &'a dyn HirAnalysisDb, scope: ScopeId) -> &'a [ResBucket]; + fn unnamed_imports<'a>( + &'a self, + db: &'a dyn HirAnalysisDb, + scope: ScopeId, + ) -> &'a [NameResBucket]; } +#[derive(Debug, Clone, Copy, Default)] pub(super) struct DefaultImporter; impl Importer for DefaultImporter { @@ -707,12 +712,16 @@ impl Importer for DefaultImporter { .get(&scope) } - fn unnamed_imports<'a>(&'a self, db: &'a dyn HirAnalysisDb, scope: ScopeId) -> &'a [ResBucket] { + fn unnamed_imports<'a>( + &'a self, + db: 
&'a dyn HirAnalysisDb, + scope: ScopeId, + ) -> &'a [NameResBucket] { &resolved_imports_for_scope(db, scope).unnamed_resolved } } -pub type NamedImportSet = FxHashMap; +pub type NamedImportSet = FxHashMap; #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct GlobImportSet { @@ -848,7 +857,7 @@ impl IntermediateUse { #[derive(Debug, Clone, PartialEq, Eq)] enum IUseResolution { /// The all segments are resolved. - Full(ResBucket), + Full(NameResBucket), /// The all path segments except the last one are resolved. BasePath(IntermediateUse), @@ -878,7 +887,7 @@ impl IntermediateResolvedImports { &mut self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse, - mut bind: ResBucket, + mut bind: NameResBucket, ) -> Result<(), NameResDiag> { let scope = i_use.original_scope; bind.set_derivation(NameDerivation::NamedImported(i_use.use_)); @@ -979,7 +988,11 @@ impl Importer for IntermediateResolvedImports { } } - fn unnamed_imports<'a>(&'a self, db: &'a dyn HirAnalysisDb, scope: ScopeId) -> &'a [ResBucket] { + fn unnamed_imports<'a>( + &'a self, + db: &'a dyn HirAnalysisDb, + scope: ScopeId, + ) -> &'a [NameResBucket] { if scope.top_mod(db.as_hir_db()).ingot(db.as_hir_db()) != self.ingot { &resolved_imports_for_scope(db, scope).unnamed_resolved } else { diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index a45dc04b8f..c1538b8ba1 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -8,15 +8,15 @@ mod visibility_checker; use either::Either; pub use import_resolver::ResolvedImports; pub use name_resolver::{ - NameDerivation, NameDomain, NameQuery, NameRes, QueryDirective, ResBucket, + NameDerivation, NameDomain, NameQuery, NameRes, NameResBucket, QueryDirective, }; use hir::{ analysis_pass::ModuleAnalysisPass, diagnostics::DiagnosticVoucher, hir_def::{ - scope_graph::ScopeId, Expr, FieldDefListId, GenericParamListId, IngotId, ItemKind, Pat, - PathId, TopLevelMod, TypeBound, TypeId, VariantDefListId, + scope_graph::ScopeId, Expr, FieldDefListId, GenericParamListId, IdentId, IngotId, ItemKind, + Partial, Pat, PathId, TopLevelMod, TypeBound, TypeId, VariantDefListId, }, visitor::prelude::*, }; @@ -31,6 +31,49 @@ use self::{ path_resolver::{EarlyPathResolver, EarlyResolvedPath}, }; +// TODO: Implement `resolve_path` and `resolve_segments` after implementing the +// late path resolution. + +/// Resolves the given path in the given scope. +/// It's not necessary to report any error even if the `EarlyResolvedPath` +/// contains some errors; it's always reported from [`PathAnalysisPass`]. +pub fn resolve_path_early( + db: &dyn HirAnalysisDb, + path: PathId, + scope: ScopeId, +) -> EarlyResolvedPath { + resolve_segments_early(db, path.segments(db.as_hir_db()), scope) +} + +/// Resolves the given path segments in the given scope. +/// It's not necessary to report any error even if the `EarlyResolvedPath` +/// contains some errors; it's always reported from [`PathAnalysisPass`]. +pub fn resolve_segments_early( + db: &dyn HirAnalysisDb, + segments: &[Partial], + scope: ScopeId, +) -> EarlyResolvedPath { + // Obtain cache store for the given scope. + let cache_store = resolve_path_early_impl(db, scope.top_mod(db.as_hir_db())); + let importer = DefaultImporter::default(); + // We use the cache store that is returned from `resolve_path_early` to get + // cached results immediately. 
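    // [Editor's note — not part of this patch] The cache store above is produced by
    // the salsa-tracked `resolve_path_early_impl` query; the resolver below is
    // constructed with `new_no_cache`, presumably so the per-module cache is reused
    // rather than rebuilt on every call.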
+ let mut name_resolver = name_resolver::NameResolver::new_no_cache(db, &importer); + + let mut resolver = EarlyPathResolver::new(db, &mut name_resolver, &cache_store); + match resolver.resolve_segments(segments, scope) { + Ok(res) => res.resolved, + Err(_) => { + // It's ok to ignore the errors here and returns an empty bucket because the + // precise errors are reported from `PathAnalysisPass`. + let bucket = NameResBucket::default(); + EarlyResolvedPath::Full(bucket) + } + } +} + +/// Performs import resolution analysis. This pass only checks correctness of +/// the imports and doesn't emit other name resolutions errors. pub struct ImportAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, } @@ -55,6 +98,16 @@ impl<'db> ModuleAnalysisPass for ImportAnalysisPass<'db> { } } +/// Performs path resolution analysis. This pass checks all paths appeared in a +/// module for +/// - Existence +/// - Visibility +/// - Domain correctness +/// - Ambiguity +/// +/// NOTE: This pass doesn't check the conflict of item definitions or import +/// errors. If you need to check them, please consider using +/// [`ImportAnalysisPass`] or [`DefConflictAnalysisPass`]. pub struct PathAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, } @@ -68,7 +121,7 @@ impl<'db> PathAnalysisPass<'db> { impl<'db> ModuleAnalysisPass for PathAnalysisPass<'db> { fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { let errors = - resolve_path_early::accumulated::(self.db, top_mod); + resolve_path_early_impl::accumulated::(self.db, top_mod); errors .into_iter() @@ -79,6 +132,8 @@ impl<'db> ModuleAnalysisPass for PathAnalysisPass<'db> { } } +/// Performs conflict analysis. This pass checks the conflict of item +/// definitions. pub struct DefConflictAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, } @@ -92,7 +147,7 @@ impl<'db> DefConflictAnalysisPass<'db> { impl<'db> ModuleAnalysisPass for DefConflictAnalysisPass<'db> { fn run_on_module(&mut self, top_mod: TopLevelMod) -> Vec> { let errors = - resolve_path_early::accumulated::(self.db, top_mod); + resolve_path_early_impl::accumulated::(self.db, top_mod); // TODO: `ImplCollector`. errors @@ -119,8 +174,7 @@ pub(crate) fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> Resolve /// NOTE: This method doesn't check the conflict in impl/impl-trait blocks since /// it requires ingot granularity analysis. 
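// [Editor's sketch — not part of this patch] A minimal illustration of how the new
// `resolve_path_early` wrapper above is expected to be called, mirroring its use in
// the early-path-resolution tests added later in this series. The helper name
// `resolve_example` is hypothetical; `db`, `path`, and `scope` are assumed to be
// supplied by the caller, and the `Partial` field names follow the later rename in
// this series (`res`, `unresolved_from`).
fn resolve_example(db: &dyn HirAnalysisDb, path: PathId, scope: ScopeId) {
    match resolve_path_early(db, path, scope) {
        EarlyResolvedPath::Full(bucket) => {
            // Fully resolved: pick the resolution for the expected domain.
            let _res = bucket.pick(NameDomain::Type);
        }
        EarlyResolvedPath::Partial { res, unresolved_from } => {
            // Leading segments resolved to `res`; segments from `unresolved_from`
            // onward are left for later type-dependent resolution.
            let _ = (res, unresolved_from);
        }
    }
}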
#[salsa::tracked(return_ref)] -#[allow(unused)] -pub(crate) fn resolve_path_early( +pub(crate) fn resolve_path_early_impl( db: &dyn HirAnalysisDb, top_mod: TopLevelMod, ) -> ResolvedQueryCacheStore { @@ -161,7 +215,7 @@ impl<'db, 'a> EarlyPathVisitor<'db, 'a> { } } - fn verify_path(&mut self, path: PathId, span: LazyPathSpan, bucket: ResBucket) { + fn verify_path(&mut self, path: PathId, span: LazyPathSpan, bucket: NameResBucket) { let path_kind = self.path_ctxt.last().unwrap(); let last_seg_idx = path.len(self.db.as_hir_db()) - 1; let last_seg_ident = *path.segments(self.db.as_hir_db())[last_seg_idx].unwrap(); @@ -451,7 +505,7 @@ impl ExpectedPathKind { } } - fn pick(self, db: &dyn HirAnalysisDb, bucket: ResBucket) -> Either { + fn pick(self, db: &dyn HirAnalysisDb, bucket: NameResBucket) -> Either { debug_assert!(!bucket.is_empty()); let res = match bucket.pick(self.domain()).as_ref().ok() { diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 5dab010156..a16920ba17 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -110,12 +110,12 @@ impl Default for QueryDirective { /// The struct contains the lookup result of a name query. /// The results can contain more than one name resolution which belong to /// different name domains. -#[derive(Clone, Debug, PartialEq, Eq, Default)] -pub struct ResBucket { +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct NameResBucket { pub(super) bucket: FxHashMap>, } -impl ResBucket { +impl NameResBucket { /// Returns the number of resolutions in the bucket. pub fn len(&self) -> usize { self.iter().count() @@ -217,7 +217,7 @@ impl ResBucket { } } -impl IntoIterator for ResBucket { +impl IntoIterator for NameResBucket { type Item = NameResolutionResult; type IntoIter = IntoValues>; @@ -226,7 +226,7 @@ impl IntoIterator for ResBucket { } } -impl From for ResBucket { +impl From for NameResBucket { fn from(res: NameRes) -> Self { let mut names = FxHashMap::default(); names.insert(res.domain, Ok(res)); @@ -458,13 +458,13 @@ impl<'db, 'a> NameResolver<'db, 'a> { self.cache_store } - pub(crate) fn resolve_query(&mut self, query: NameQuery) -> ResBucket { + pub(crate) fn resolve_query(&mut self, query: NameQuery) -> NameResBucket { // If the query is already resolved, return the cached result. if let Some(resolved) = self.cache_store.get(query) { return resolved.clone(); }; - let mut bucket = ResBucket::default(); + let mut bucket = NameResBucket::default(); // The shadowing rule is // `$ > NamedImports > GlobImports > Lex > external ingot > builtin types`, @@ -682,7 +682,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { } /// Finalize the query result and cache it to the cache store. 
- fn finalize_query_result(&mut self, query: NameQuery, bucket: ResBucket) -> ResBucket { + fn finalize_query_result(&mut self, query: NameQuery, bucket: NameResBucket) -> NameResBucket { self.cache_store.cache_result(query, bucket.clone()); bucket } @@ -727,12 +727,12 @@ impl std::error::Error for NameResolutionError {} #[derive(Default, Debug, PartialEq, Eq)] pub(crate) struct ResolvedQueryCacheStore { - cache: FxHashMap, + cache: FxHashMap, no_cache: bool, } impl ResolvedQueryCacheStore { - pub(super) fn get(&self, query: NameQuery) -> Option<&ResBucket> { + pub(super) fn get(&self, query: NameQuery) -> Option<&NameResBucket> { self.cache.get(&query) } @@ -743,7 +743,7 @@ impl ResolvedQueryCacheStore { } } - fn cache_result(&mut self, query: NameQuery, result: ResBucket) { + fn cache_result(&mut self, query: NameQuery, result: NameResBucket) { if self.no_cache { return; } diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index 5b09bb8c3c..2f23334a45 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -5,14 +5,14 @@ use crate::{name_resolution::QueryDirective, HirAnalysisDb}; use super::{ name_resolver::{ - NameRes, NameResolutionError, NameResolver, ResBucket, ResolvedQueryCacheStore, + NameRes, NameResBucket, NameResolutionError, NameResolver, ResolvedQueryCacheStore, }, NameDomain, NameQuery, }; #[derive(Debug, Clone, PartialEq, Eq)] pub enum EarlyResolvedPath { - Full(ResBucket), + Full(NameResBucket), /// The path is partially resolved; this means that the `resolved` is a type /// and the following segments depend on type to resolve. @@ -128,12 +128,12 @@ impl<'db, 'a, 'b, 'c> EarlyPathResolver<'db, 'a, 'b, 'c> { fn resolve_last_segment( &mut self, i_path: &IntermediatePath, - ) -> PathResolutionResult { + ) -> PathResolutionResult { let query = i_path.make_query(self.db)?; Ok(self.resolve_query(query)) } - fn resolve_query(&mut self, query: NameQuery) -> ResBucket { + fn resolve_query(&mut self, query: NameQuery) -> NameResBucket { if let Some(bucket) = self.cache_store.get(query) { bucket.clone() } else { @@ -205,7 +205,7 @@ impl<'a> IntermediatePath<'a> { } } - fn finalize_as_full(mut self, bucket: ResBucket) -> EarlyResolvedPathWithTrajectory { + fn finalize_as_full(mut self, bucket: NameResBucket) -> EarlyResolvedPathWithTrajectory { let resolved = EarlyResolvedPath::Full(bucket); let mut trajectory = self.trajectory; let current_res = self.current_res; @@ -217,7 +217,7 @@ impl<'a> IntermediatePath<'a> { } } - fn proceed(&mut self, bucket: ResBucket) -> PathResolutionResult<()> { + fn proceed(&mut self, bucket: NameResBucket) -> PathResolutionResult<()> { let next_res = bucket .pick(NameDomain::Type) .clone() From c8382c398e67dc8c5de4f8fb2cc4b53cea7df16e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 11 Jul 2023 23:43:34 +0200 Subject: [PATCH 209/678] Add basic tests for early path resolution --- .../hir-analysis/src/name_resolution/mod.rs | 42 ++---- .../src/name_resolution/name_resolver.rs | 14 +- .../src/name_resolution/path_resolver.rs | 13 +- .../early_path_resolution/alias_res.fe | 10 ++ .../early_path_resolution/alias_res.snap | 18 +++ .../early_path_resolution/partial.fe | 16 +++ .../early_path_resolution/partial.snap | 18 +++ .../tests/early_path_resolution.rs | 129 ++++++++++++++++++ crates/hir-analysis/tests/import.rs | 4 +- crates/hir-analysis/tests/test_db.rs | 4 +- 
.../name_resolution/path_invalid_domain.fe | 5 - .../name_resolution/path_invalid_domain.snap | 18 --- .../name_resolution/path_missing_generics.fe | 4 +- .../path_missing_generics.snap | 6 + 14 files changed, 238 insertions(+), 63 deletions(-) create mode 100644 crates/hir-analysis/test_files/early_path_resolution/alias_res.fe create mode 100644 crates/hir-analysis/test_files/early_path_resolution/alias_res.snap create mode 100644 crates/hir-analysis/test_files/early_path_resolution/partial.fe create mode 100644 crates/hir-analysis/test_files/early_path_resolution/partial.snap create mode 100644 crates/hir-analysis/tests/early_path_resolution.rs diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index c1538b8ba1..d6429a4194 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -10,6 +10,7 @@ pub use import_resolver::ResolvedImports; pub use name_resolver::{ NameDerivation, NameDomain, NameQuery, NameRes, NameResBucket, QueryDirective, }; +pub use path_resolver::EarlyResolvedPath; use hir::{ analysis_pass::ModuleAnalysisPass, @@ -28,7 +29,7 @@ use self::{ diagnostics::{ImportResolutionDiagAccumulator, NameResDiag, NameResolutionDiagAccumulator}, import_resolver::DefaultImporter, name_resolver::{NameResolutionError, ResolvedQueryCacheStore}, - path_resolver::{EarlyPathResolver, EarlyResolvedPath}, + path_resolver::EarlyPathResolver, }; // TODO: Implement `resolve_path` and `resolve_segments` after implementing the @@ -60,7 +61,7 @@ pub fn resolve_segments_early( // cached results immediately. let mut name_resolver = name_resolver::NameResolver::new_no_cache(db, &importer); - let mut resolver = EarlyPathResolver::new(db, &mut name_resolver, &cache_store); + let mut resolver = EarlyPathResolver::new(db, &mut name_resolver, cache_store); match resolver.resolve_segments(segments, scope) { Ok(res) => res.resolved, Err(_) => { @@ -171,8 +172,11 @@ pub(crate) fn resolve_imports(db: &dyn HirAnalysisDb, ingot: IngotId) -> Resolve /// Performs early path resolution and cache the resolutions for paths appeared /// in the given module. Also checks the conflict of the item definitions. /// -/// NOTE: This method doesn't check the conflict in impl/impl-trait blocks since -/// it requires ingot granularity analysis. +/// NOTE: This method doesn't check +/// - the conflict in impl/impl-trait blocks since it requires ingot granularity +/// analysis. +/// - the path resolution errors at expression and statement level since it +/// generally requires type analysis #[salsa::tracked(return_ref)] pub(crate) fn resolve_path_early_impl( db: &dyn HirAnalysisDb, @@ -412,31 +416,11 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { self.path_ctxt.pop(); } - fn visit_pat(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) { - // We don't need to check bind patterns here, it will be checked in pattern - // match analysis. - if pat.is_bind(self.db.as_hir_db()) { - return; - } - - if matches!(pat, Pat::Record { .. }) { - self.path_ctxt.push(ExpectedPathKind::Type); - } else { - self.path_ctxt.push(ExpectedPathKind::Value); - } - walk_pat(self, ctxt, pat); - self.path_ctxt.pop(); - } - - fn visit_expr(&mut self, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) { - if matches!(expr, Expr::RecordInit { .. 
}) { - self.path_ctxt.push(ExpectedPathKind::Type); - } else { - self.path_ctxt.push(ExpectedPathKind::Value); - } - walk_expr(self, ctxt, expr); - self.path_ctxt.pop(); - } + // We don't need to run path analysis on patterns, statements and expressions in + // early path resolution. + fn visit_pat(&mut self, _: &mut VisitorCtxt<'_, LazyPatSpan>, _: &Pat) {} + fn visit_stmt(&mut self, _: &mut VisitorCtxt<'_, LazyStmtSpan>, _: &hir::hir_def::Stmt) {} + fn visit_expr(&mut self, _: &mut VisitorCtxt<'_, LazyExprSpan>, _: &Expr) {} fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { let scope = ScopeId::from_item(self.item_stack.last().copied().unwrap()); diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index a16920ba17..b79442278f 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -57,6 +57,9 @@ impl NameQuery { self.name } } + +/// The query directive is used to control the name resolution behavior, such as +/// whether to lookup the name in the lexical scope or not. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct QueryDirective { /// If `allow_lex` is `true`, then the query will be propagated to the @@ -378,13 +381,22 @@ impl NameResKind { } } +/// The name derivation indicates where a name resolution comes from. +/// Name derivation is used to track the origin of a resolution, and to +/// determine the shadowing rules. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum NameDerivation { + /// Derived from a definition in the current scope. Def, + /// Derived from a named import in the current scope. NamedImported(Use), + /// Derived from a glob import in the current scope. GlobImported(Use), + /// Derived from lexical parent scope. Lex(Box), + /// Derived from an external ingot. External, + /// Derived from a builtin primitive. Prim, } @@ -754,7 +766,7 @@ impl ResolvedQueryCacheStore { /// Each resolved name is associated with a domain that indicates which domain /// the name belongs to. /// The multiple same names can be introduced in a same scope as long as they -/// are in a different domain. +/// are in different domains. /// /// E.g., A `Foo` can be introduced in a same scope as a type and variant at the /// same time. This means the code below is valid. diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index 2f23334a45..98f6efda24 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -19,7 +19,7 @@ pub enum EarlyResolvedPath { /// These unresolved parts are resolved in the later type inference and /// trait solving phases. 
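    // [Editor's note, illustrative only — not part of this patch] For a path such as
    // `MyE::Variant1` (see the `partial.fe` fixture added later in this series), the
    // first segment resolves to the enum type `MyE`, so early resolution yields this
    // `Partial` variant with `unresolved_from == 1`; the variant segment is resolved
    // later, once type information is available.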
Partial { - resolved: NameRes, + res: NameRes, unresolved_from: usize, }, } @@ -191,7 +191,7 @@ impl<'a> IntermediatePath<'a> { fn finalize_as_partial(self) -> EarlyResolvedPathWithTrajectory { let resolved = EarlyResolvedPath::Partial { - resolved: self.current_res.clone(), + res: self.current_res.clone(), unresolved_from: self.idx, }; @@ -232,9 +232,12 @@ impl<'a> IntermediatePath<'a> { fn state(&self, db: &dyn HirAnalysisDb) -> IntermediatePathState { debug_assert!(self.idx < self.path.len()); - if self.idx == self.path.len() - 1 { + let is_type_dependent = + (self.current_res.is_type(db) || self.current_res.is_trait(db)) && self.idx != 0; + + if (self.idx == self.path.len() - 1) && !is_type_dependent { IntermediatePathState::ReadyToFinalize - } else if self.current_res.is_type(db) { + } else if is_type_dependent { IntermediatePathState::TypeDependent } else { IntermediatePathState::Unresolved @@ -249,7 +252,7 @@ enum IntermediatePathState { ReadyToFinalize, /// The intermediate path points to a type and the next segment need to be - /// resolved in the type context. + /// resolved with the type context. TypeDependent, /// The path resolution need to be continued further. diff --git a/crates/hir-analysis/test_files/early_path_resolution/alias_res.fe b/crates/hir-analysis/test_files/early_path_resolution/alias_res.fe new file mode 100644 index 0000000000..7e61e4f562 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/alias_res.fe @@ -0,0 +1,10 @@ +use foo::Bar as FooBar + +struct Foo { + x: FooBar + y: foo::Bar +} + +mod foo { + pub struct Bar {} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/early_path_resolution/alias_res.snap b/crates/hir-analysis/test_files/early_path_resolution/alias_res.snap new file mode 100644 index 0000000000..6dd3b9bd60 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/alias_res.snap @@ -0,0 +1,18 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/alias_res.fe +--- +note: + ┌─ test_file.fe:4:8 + │ +4 │ x: FooBar + │ ^^^^^^ test_file::foo::Bar + +note: + ┌─ test_file.fe:5:13 + │ +5 │ y: foo::Bar + │ ^^^ test_file::foo::Bar + + diff --git a/crates/hir-analysis/test_files/early_path_resolution/partial.fe b/crates/hir-analysis/test_files/early_path_resolution/partial.fe new file mode 100644 index 0000000000..00ce120594 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/partial.fe @@ -0,0 +1,16 @@ +pub enum MyE { + Variant1 + Variant2 +} + +pub struct MyS { + x: MyE::Variant1 + z: mod1::MyE::Variant1 +} + +mod mod1 { + pub enum MyE { + Variant1 + Variant2 + } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/early_path_resolution/partial.snap b/crates/hir-analysis/test_files/early_path_resolution/partial.snap new file mode 100644 index 0000000000..fb01f85473 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/partial.snap @@ -0,0 +1,18 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/partial.fe +--- +note: + ┌─ test_file.fe:7:8 + │ +7 │ x: MyE::Variant1 + │ ^^^ test_file::MyE + +note: + ┌─ test_file.fe:8:14 + │ +8 │ z: mod1::MyE::Variant1 + │ ^^^ test_file::mod1::MyE + + diff --git a/crates/hir-analysis/tests/early_path_resolution.rs b/crates/hir-analysis/tests/early_path_resolution.rs new file mode 100644 index 
0000000000..e1b52acbbb --- /dev/null +++ b/crates/hir-analysis/tests/early_path_resolution.rs @@ -0,0 +1,129 @@ +mod test_db; +use test_db::{HirAnalysisTestDb, HirPropertyFormatter}; + +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use fe_compiler_test_utils::snap_test; +use fe_hir_analysis::{ + name_resolution::{resolve_path_early, EarlyResolvedPath, NameDomain, PathAnalysisPass}, + HirAnalysisDb, +}; +use hir::{ + analysis_pass::ModuleAnalysisPass, + hir_def::{scope_graph::ScopeId, Expr, ItemKind, Pat, PathId, TopLevelMod, TypeId}, + visitor::prelude::*, + HirDb, SpannedHirDb, +}; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/early_path_resolution", + glob: "*.fe" +)] +fn test_standalone(fixture: Fixture<&str>) { + let mut db = HirAnalysisTestDb::default(); + let path = Path::new(fixture.path()); + let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); + let (top_mod, mut prop_formatter) = db.new_stand_alone(file_name, fixture.content()); + + let mut pass = PathAnalysisPass::new(&db); + let diags = pass.run_on_module(top_mod); + if !diags.is_empty() { + for diag in diags { + println!("{}", diag.to_complete(db.as_spanned_hir_db()).message); + } + panic!("Failed to resolve paths"); + } + + let mut ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); + PathVisitor { + db: &db, + top_mod, + domain_stack: Vec::new(), + item_stack: Vec::new(), + prop_formatter: &mut prop_formatter, + } + .visit_top_mod(&mut ctxt, top_mod); + + let res = prop_formatter.finish(db.as_spanned_hir_db()); + snap_test!(res, fixture.path()); +} + +struct PathVisitor<'db, 'a> { + db: &'db HirAnalysisTestDb, + top_mod: TopLevelMod, + domain_stack: Vec, + item_stack: Vec, + prop_formatter: &'a mut HirPropertyFormatter, +} + +impl<'db, 'a> Visitor for PathVisitor<'db, 'a> { + fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'_, LazyItemSpan>, item: ItemKind) { + if matches!(item, ItemKind::Use(_)) { + return; + } + + self.domain_stack.push(NameDomain::Type); + self.item_stack.push(item); + walk_item(self, ctxt, item); + self.domain_stack.pop(); + self.item_stack.pop(); + } + + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, ty: TypeId) { + self.domain_stack.push(NameDomain::Type); + walk_ty(self, ctxt, ty); + self.domain_stack.pop(); + } + + fn visit_pat(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) { + if pat.is_bind(self.db.as_hir_db()) { + return; + } + + if matches!(pat, Pat::Record { .. }) { + self.domain_stack.push(NameDomain::Type); + } else { + self.domain_stack.push(NameDomain::Value); + } + walk_pat(self, ctxt, pat); + self.domain_stack.pop(); + } + + fn visit_expr(&mut self, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) { + if matches!(expr, Expr::RecordInit { .. 
}) { + self.domain_stack.push(NameDomain::Type); + } else { + self.domain_stack.push(NameDomain::Value); + } + walk_expr(self, ctxt, expr); + self.domain_stack.pop(); + } + + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { + let scope = ScopeId::from_item(self.item_stack.last().copied().unwrap()); + let resolved_path = resolve_path_early(self.db.as_hir_analysis_db(), path, scope); + match resolved_path { + EarlyResolvedPath::Full(bucket) => { + let domain = self.domain_stack.last().copied().unwrap(); + let res = bucket.pick(domain).as_ref().unwrap(); + let prop = res.pretty_path(self.db.as_hir_analysis_db()).unwrap(); + let span = ctxt + .span() + .unwrap() + .segment(path.len(self.db.as_hir_db()) - 1) + .into(); + self.prop_formatter.push_prop(self.top_mod, span, prop); + } + + EarlyResolvedPath::Partial { + res, + unresolved_from, + } => { + let prop = res.pretty_path(self.db.as_hir_analysis_db()).unwrap(); + let span = ctxt.span().unwrap().segment(unresolved_from - 1).into(); + self.prop_formatter.push_prop(self.top_mod, span, prop); + } + } + } +} diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs index 88ee7ff2ab..696c221bff 100644 --- a/crates/hir-analysis/tests/import.rs +++ b/crates/hir-analysis/tests/import.rs @@ -63,8 +63,8 @@ fn format_imports( let use_span = use_.lazy_span().into(); values.sort_unstable(); let imported_names = values.join(" | "); - prop_formatter.set_properties(use_.top_mod(db), use_span, imported_names) + prop_formatter.push_prop(use_.top_mod(db), use_span, imported_names) } - prop_formatter.format_all_properties(db) + prop_formatter.finish(db) } diff --git a/crates/hir-analysis/tests/test_db.rs b/crates/hir-analysis/tests/test_db.rs index bf5a4117a7..92d866d1ad 100644 --- a/crates/hir-analysis/tests/test_db.rs +++ b/crates/hir-analysis/tests/test_db.rs @@ -76,14 +76,14 @@ pub struct HirPropertyFormatter { } impl HirPropertyFormatter { - pub fn set_properties(&mut self, top_mod: TopLevelMod, span: DynLazySpan, prop: String) { + pub fn push_prop(&mut self, top_mod: TopLevelMod, span: DynLazySpan, prop: String) { self.properties .entry(top_mod) .or_default() .push((prop, span)); } - pub fn format_all_properties(&mut self, db: &dyn SpannedHirDb) -> String { + pub fn finish(&mut self, db: &dyn SpannedHirDb) -> String { let writer = BufferWriter::stderr(ColorChoice::Never); let mut buffer = writer.buffer(); let config = term::Config::default(); diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe index 2e7fe61da7..d4af57fc20 100644 --- a/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe @@ -16,8 +16,3 @@ where T: MyE Variant(MyC) Variant2(Var) } - -pub fn foo() { - let x: MyT = MyT - let MyE::Var{ x } = MyE::Var -} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap index 951651a041..c0bc639bc6 100644 --- a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap @@ -21,28 +21,10 @@ error[2-0005]: expected type item here 17 │ Variant2(Var) │ ^^^ expected type here, but found value `Var` -error[2-0005]: expected type item here - ┌─ path_invalid_domain.fe:21:12 - │ -21 │ let x: MyT = MyT - │ ^^^ expected type here, but found trait `MyT` - 
-error[2-0005]: expected type item here - ┌─ path_invalid_domain.fe:22:14 - │ -22 │ let MyE::Var{ x } = MyE::Var - │ ^^^ expected type here, but found value `Var` - error[2-0006]: expected trait item here ┌─ path_invalid_domain.fe:13:10 │ 13 │ where T: MyE │ ^^^ expected trait here, but found enum `MyE` -error[2-0007]: expected value here - ┌─ path_invalid_domain.fe:21:18 - │ -21 │ let x: MyT = MyT - │ ^^^ expected value here, but found trait `MyT` - diff --git a/crates/uitest/fixtures/name_resolution/path_missing_generics.fe b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe index 44726226c0..59d31614bd 100644 --- a/crates/uitest/fixtures/name_resolution/path_missing_generics.fe +++ b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe @@ -8,4 +8,6 @@ pub struct MyS t: T u: U z: Z -} \ No newline at end of file +} + +impl MyS {} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/path_missing_generics.snap b/crates/uitest/fixtures/name_resolution/path_missing_generics.snap index 254ff84a8e..b2690d29f7 100644 --- a/crates/uitest/fixtures/name_resolution/path_missing_generics.snap +++ b/crates/uitest/fixtures/name_resolution/path_missing_generics.snap @@ -15,4 +15,10 @@ error[2-0002]: `Z` is not found 10 │ z: Z │ ^ `Z` is not found +error[2-0002]: `V` is not found + ┌─ path_missing_generics.fe:13:22 + │ +13 │ impl MyS {} + │ ^ `V` is not found + From b92f2147c49ddf845ec6be6d672903d9dd0a8384 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 13 Jul 2023 01:50:11 +0200 Subject: [PATCH 210/678] Integrate function block concept to scope graph --- .../src/name_resolution/name_resolver.rs | 6 +- crates/hir/src/hir_def/body.rs | 4 ++ crates/hir/src/hir_def/expr.rs | 2 +- crates/hir/src/hir_def/scope_graph.rs | 12 +++- crates/hir/src/lower/body.rs | 17 ++--- crates/hir/src/lower/expr.rs | 5 +- crates/hir/src/lower/item.rs | 20 +++--- crates/hir/src/lower/mod.rs | 18 +++-- crates/hir/src/lower/scope_builder.rs | 69 +++++++++++++++---- crates/hir/src/lower/use_tree.rs | 8 +-- 10 files changed, 113 insertions(+), 48 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index b79442278f..c2b134ca91 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -793,9 +793,9 @@ pub enum NameDomain { impl NameDomain { pub(super) fn from_scope(scope: ScopeId) -> Self { match scope { - ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) | ScopeId::FuncParam(..) => { - Self::Value - } + ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) + | ScopeId::FuncParam(..) + | ScopeId::Block(..) => Self::Value, ScopeId::Item(_) | ScopeId::GenericParam(..) => Self::Type, ScopeId::Field(..) => Self::Field, ScopeId::Variant(..) => Self::Value, diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 178a32a490..107727172e 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -18,6 +18,10 @@ pub struct Body { #[id] id: TrackedBodyId, + /// The expression that evaluates to the value of the body. + /// In case of a function body, this is always be the block expression. 
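    // [Editor's note, illustrative only — not part of this patch] For example, for
    // `fn foo() { x }` the lowered `Body` stores the `Expr::Block` covering `{ x }`
    // here; nameless and nested bodies likewise record whichever root expression the
    // lowering produced.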
+ pub body_expr: ExprId, + #[return_ref] pub stmts: NodeStore>, #[return_ref] diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs index 57fc99f2cf..9d518bf854 100644 --- a/crates/hir/src/hir_def/expr.rs +++ b/crates/hir/src/hir_def/expr.rs @@ -42,7 +42,7 @@ pub enum Expr { Match(ExprId, Partial>), } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct ExprId(u32); entity_impl!(ExprId); diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index bb6c9ca7e9..2b172c3523 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -4,7 +4,10 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::{hir_def::GenericParamOwner, span::DynLazySpan, HirDb}; -use super::{Enum, Func, FuncParamLabel, IdentId, IngotId, ItemKind, TopLevelMod, Use, Visibility}; +use super::{ + Body, Enum, ExprId, Func, FuncParamLabel, IdentId, IngotId, ItemKind, TopLevelMod, Use, + Visibility, +}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct ScopeGraph { @@ -52,6 +55,7 @@ pub enum ScopeId { FuncParam(ItemKind, usize), Field(ItemKind, usize), Variant(ItemKind, usize), + Block(Body, ExprId), } impl ScopeId { pub fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { @@ -61,6 +65,7 @@ impl ScopeId { ScopeId::FuncParam(item, _) => item.top_mod(db), ScopeId::Field(item, _) => item.top_mod(db), ScopeId::Variant(item, _) => item.top_mod(db), + ScopeId::Block(body, _) => body.top_mod(db), } } @@ -71,6 +76,7 @@ impl ScopeId { ScopeId::FuncParam(_, _) => "value", ScopeId::Field(_, _) => "field", ScopeId::Variant(_, _) => "value", + ScopeId::Block(_, _) => "block", } } @@ -224,6 +230,8 @@ impl ScopeId { let params = &parent.params(db).data(db)[idx]; params.name().to_opt() } + + ScopeId::Block(..) => None, } } @@ -269,6 +277,8 @@ impl ScopeId { Some(parent.params_span().param(idx).into()) } + + ScopeId::Block(..) 
=> None, } } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 78f42d607e..ce2996ff3a 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -18,8 +18,8 @@ impl Body { ) -> Self { let bid = TrackedBodyId::ItemBody(parent_id.into()); let mut ctxt = BodyCtxt::new(f_ctxt, bid); - Expr::lower_ast(&mut ctxt, ast.clone()); - ctxt.build(&ast) + let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast, body_expr) } pub(super) fn lower_ast_nested( @@ -29,15 +29,15 @@ impl Body { ) -> Self { let bid = TrackedBodyId::NestedBody(bid.into()); let mut ctxt = BodyCtxt::new(f_ctxt, bid); - Expr::lower_ast(&mut ctxt, ast.clone()); - ctxt.build(&ast) + let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast, body_expr) } pub(super) fn lower_ast_nameless(f_ctxt: &mut FileLowerCtxt<'_>, ast: ast::Expr) -> Self { let bid = TrackedBodyId::NamelessBody; let mut ctxt = BodyCtxt::new(f_ctxt, bid); - Expr::lower_ast(&mut ctxt, ast.clone()); - ctxt.build(&ast) + let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast, body_expr) } } @@ -103,11 +103,12 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { } } - fn build(self, ast: &ast::Expr) -> Body { + fn build(self, ast: &ast::Expr, body_expr: ExprId) -> Body { let origin = HirOrigin::raw(ast); let body = Body::new( self.f_ctxt.db(), self.bid, + body_expr, self.stmts, self.exprs, self.pats, @@ -116,7 +117,7 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { origin, ); - self.f_ctxt.leave_scope(body); + self.f_ctxt.leave_item_scope(body); body } } diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index b9f7008dde..356da2ce48 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -20,12 +20,15 @@ impl Expr { } ast::ExprKind::Block(block) => { + ctxt.f_ctxt.enter_block_scope(); let mut stmts = vec![]; for stmt in block.stmts() { let stmt = Stmt::push_to_body(ctxt, stmt); stmts.push(stmt); } - Self::Block(stmts) + let expr_id = ctxt.push_expr(Self::Block(stmts), HirOrigin::raw(&ast)); + ctxt.f_ctxt.leave_block_scope(expr_id); + return expr_id; } ast::ExprKind::Bin(bin) => { diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 3c060f3f81..4c47f8b970 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -79,7 +79,7 @@ impl Mod { let origin = HirOrigin::raw(&ast); let mod_ = Self::new(ctxt.db(), id, name, attributes, vis, ctxt.top_mod(), origin); - ctxt.leave_scope(mod_) + ctxt.leave_item_scope(mod_) } } @@ -128,7 +128,7 @@ impl Func { ctxt.top_mod(), origin, ); - ctxt.leave_scope(fn_) + ctxt.leave_item_scope(fn_) } } @@ -162,7 +162,7 @@ impl Struct { ctxt.top_mod(), origin, ); - ctxt.leave_scope(struct_) + ctxt.leave_item_scope(struct_) } } @@ -192,7 +192,7 @@ impl Contract { ctxt.top_mod(), origin, ); - ctxt.leave_scope(contract) + ctxt.leave_item_scope(contract) } } @@ -226,7 +226,7 @@ impl Enum { ctxt.top_mod(), origin, ); - ctxt.leave_scope(enum_) + ctxt.leave_item_scope(enum_) } } @@ -260,7 +260,7 @@ impl TypeAlias { ctxt.top_mod(), origin, ); - ctxt.leave_scope(alias) + ctxt.leave_item_scope(alias) } } @@ -296,7 +296,7 @@ impl Impl { ctxt.top_mod(), origin, ); - ctxt.leave_scope(impl_) + ctxt.leave_item_scope(impl_) } } @@ -335,7 +335,7 @@ impl Trait { origin, ); - ctxt.leave_scope(trait_) + ctxt.leave_item_scope(trait_) } } @@ -373,7 +373,7 @@ impl ImplTrait { ctxt.top_mod(), origin, ); - ctxt.leave_scope(impl_trait) + ctxt.leave_item_scope(impl_trait) } } @@ -396,7 +396,7 @@ 
impl Const { let origin = HirOrigin::raw(&ast); let const_ = Self::new(ctxt.db(), id, name, ty, body, vis, ctxt.top_mod(), origin); - ctxt.leave_scope(const_) + ctxt.leave_item_scope(const_) } } diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 43c3c570bf..75df8d73b6 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -8,8 +8,8 @@ use parser::{ use crate::{ hir_def::{ - module_tree_impl, scope_graph::ScopeGraph, IdentId, IngotId, IntegerId, ItemKind, LitKind, - ModuleTree, Partial, StringId, TopLevelMod, TrackedItemId, + module_tree_impl, scope_graph::ScopeGraph, ExprId, IdentId, IngotId, IntegerId, ItemKind, + LitKind, ModuleTree, Partial, StringId, TopLevelMod, TrackedItemId, }, HirDb, LowerHirDb, }; @@ -66,7 +66,7 @@ pub(crate) fn scope_graph_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> ScopeGra if let Some(items) = ast.items() { lower_module_items(&mut ctxt, id, items); } - ctxt.leave_scope(top_mod); + ctxt.leave_item_scope(top_mod); ctxt.build() } @@ -100,6 +100,14 @@ impl<'db> FileLowerCtxt<'db> { self.builder.top_mod } + pub(super) fn enter_block_scope(&mut self) { + self.builder.enter_block_scope(); + } + + pub(super) fn leave_block_scope(&mut self, block: ExprId) { + self.builder.leave_block_scope(block); + } + /// Creates a new scope for an item. fn enter_scope(&mut self, is_mod: bool) { self.builder.enter_scope(is_mod); @@ -107,11 +115,11 @@ impl<'db> FileLowerCtxt<'db> { /// Leaves the current scope, `item` should be the generated item which owns /// the scope. - fn leave_scope(&mut self, item: I) -> I + fn leave_item_scope(&mut self, item: I) -> I where I: Into + Copy, { - self.builder.leave_scope(item.into()); + self.builder.leave_item_scope(item.into()); item } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 156b4f505d..7b0420b4ed 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -1,52 +1,50 @@ +use std::mem; + use cranelift_entity::{entity_impl, PrimaryMap}; use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ hir_def::{ scope_graph::{EdgeKind, Scope, ScopeEdge, ScopeGraph, ScopeId}, - FieldDefListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, TopLevelMod, - Use, VariantDefListId, Visibility, + Body, ExprId, FieldDefListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, + TopLevelMod, Use, VariantDefListId, Visibility, }, HirDb, }; -pub struct ScopeGraphBuilder<'db> { +pub(super) struct ScopeGraphBuilder<'db> { pub(super) db: &'db dyn HirDb, pub(super) top_mod: TopLevelMod, graph: IntermediateScopeGraph, scope_stack: Vec, module_stack: Vec, + declared_blocks: FxHashMap>, } impl<'db> ScopeGraphBuilder<'db> { - pub(crate) fn enter_top_mod(db: &'db dyn HirDb, top_mod: TopLevelMod) -> Self { + pub(super) fn enter_top_mod(db: &'db dyn HirDb, top_mod: TopLevelMod) -> Self { let mut builder = Self { db, top_mod, graph: IntermediateScopeGraph::default(), scope_stack: Default::default(), module_stack: Default::default(), + declared_blocks: FxHashMap::default(), }; builder.enter_scope(true); builder } - pub fn build(self) -> ScopeGraph { + pub(super) fn build(self) -> ScopeGraph { self.graph.build(self.top_mod) } - pub fn enter_scope(&mut self, is_mod: bool) { - // Create dummy scope, the scope kind is initialized in `leave_scope`. 
- let (dummy_scope_id, dummy_scope) = self.dummy_scope(); - let id = self.graph.push(dummy_scope_id, dummy_scope); - self.scope_stack.push(id); - if is_mod { - self.module_stack.push(id); - } + pub(super) fn enter_scope(&mut self, is_mod: bool) { + self.enter_scope_impl(is_mod); } - pub fn leave_scope(&mut self, item: ItemKind) { + pub(super) fn leave_item_scope(&mut self, item: ItemKind) { use ItemKind::*; let item_node = self.scope_stack.pop().unwrap(); @@ -229,8 +227,12 @@ impl<'db> ScopeGraphBuilder<'db> { EdgeKind::anon() } - Body(_) => { + Body(body) => { self.graph.add_lex_edge(item_node, parent_node); + for (node, block) in mem::take(&mut self.declared_blocks) { + let block = block.unwrap(); + self.finalize_block_scope(node, body, block); + } EdgeKind::anon() } @@ -241,10 +243,39 @@ impl<'db> ScopeGraphBuilder<'db> { .add_edge(parent_node, item_node, parent_to_child_edge); } + pub(super) fn enter_block_scope(&mut self) { + let node = self.enter_scope_impl(false); + self.declared_blocks.insert(node, None); + } + + pub(super) fn leave_block_scope(&mut self, block: ExprId) { + let block_node = self.scope_stack.pop().unwrap(); + let parent_node = *self.scope_stack.last().unwrap(); + *self.declared_blocks.get_mut(&block_node).unwrap() = Some(block); + self.graph.add_lex_edge(block_node, parent_node); + self.graph + .add_edge(parent_node, block_node, EdgeKind::anon()); + } + + fn enter_scope_impl(&mut self, is_mod: bool) -> NodeId { + // Create dummy scope, the scope kind is initialized in `leave_scope`. + let (dummy_scope_id, dummy_scope) = self.dummy_scope(); + let id = self.graph.push(dummy_scope_id, dummy_scope); + self.scope_stack.push(id); + if is_mod { + self.module_stack.push(id); + } + id + } + fn initialize_item_scope(&mut self, node: NodeId, item: ItemKind) { self.graph.initialize_item_scope(self.db, node, item) } + fn finalize_block_scope(&mut self, node: NodeId, body: Body, block: ExprId) { + self.graph.finalize_block_scope(node, body, block); + } + fn add_field_scope( &mut self, parent_node: NodeId, @@ -394,6 +425,14 @@ impl IntermediateScopeGraph { scope_data.1.vis = item.vis(db); } + fn finalize_block_scope(&mut self, node: NodeId, body: Body, block: ExprId) { + let scope_id = ScopeId::Block(body, block); + let scope_data = &mut self.nodes[node]; + scope_data.0 = scope_id; + scope_data.1.id = scope_id; + scope_data.1.vis = Visibility::Private; + } + fn add_lex_edge(&mut self, child: NodeId, parent: NodeId) { self.edges .entry(child) diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index 502ec0ae1b..fd8e2d582c 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -22,8 +22,8 @@ impl Use { let alias = None; let top_mod = ctxt.top_mod(); let origin = HirOrigin::raw(&ast); - let use_ = Self::new(ctxt.db(),id, path, alias, vis, top_mod, origin); - ctxt.leave_scope(use_); + let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); + ctxt.leave_item_scope(use_); return vec![use_]; }; @@ -38,7 +38,7 @@ impl Use { let top_mod = ctxt.top_mod(); let origin = HirOrigin::raw(&ast); let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); - ctxt.leave_scope(use_); + ctxt.leave_item_scope(use_); return vec![use_]; } @@ -52,7 +52,7 @@ impl Use { let alias = alias; let origin = HirOrigin::desugared(origin); let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); - ctxt.leave_scope(use_) + ctxt.leave_item_scope(use_) }) .collect() } From 42817ec2e660e28e3542726ad2222e6f01f1751b 
Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 13 Jul 2023 16:49:19 +0200 Subject: [PATCH 211/678] Add `Item` syntax node --- crates/hir/src/lower/item.rs | 7 +- crates/hir/src/span/expr.rs | 2 +- crates/hir/src/span/stmt.rs | 3 +- crates/parser2/src/ast/item.rs | 56 +- crates/parser2/src/parser/expr_atom.rs | 11 + crates/parser2/src/parser/item.rs | 146 +-- crates/parser2/src/syntax_kind.rs | 23 + .../error_recovery/items/const_.snap | 99 +- .../error_recovery/items/enum_.snap | 123 +-- .../error_recovery/items/extern_.snap | 111 +-- .../test_files/error_recovery/items/func.snap | 280 +++--- .../error_recovery/items/impl_.snap | 146 +-- .../error_recovery/items/impl_trait.snap | 287 +++--- .../error_recovery/items/struct_.snap | 133 +-- .../error_recovery/items/trait_.snap | 281 +++--- .../error_recovery/items/type_.snap | 69 +- .../test_files/error_recovery/items/use_.snap | 119 +-- .../test_files/syntax_node/exprs/block.fe | 7 + .../test_files/syntax_node/exprs/block.snap | 66 ++ .../test_files/syntax_node/items/const.snap | 268 ++--- .../syntax_node/items/contract.snap | 113 +-- .../test_files/syntax_node/items/enums.snap | 374 +++---- .../test_files/syntax_node/items/extern.snap | 214 ++-- .../test_files/syntax_node/items/func.snap | 675 ++++++------- .../test_files/syntax_node/items/impl.snap | 488 ++++----- .../syntax_node/items/impl_trait.snap | 715 +++++++------- .../test_files/syntax_node/items/mod.snap | 207 ++-- .../test_files/syntax_node/items/trait.snap | 933 +++++++++--------- .../test_files/syntax_node/items/type.snap | 161 +-- .../test_files/syntax_node/items/use.snap | 569 +++++------ .../test_files/syntax_node/structs/attr.snap | 143 +-- .../test_files/syntax_node/structs/empty.snap | 28 +- .../syntax_node/structs/generics.snap | 900 ++++++++--------- .../syntax_node/structs/tupel_field.snap | 146 +-- 34 files changed, 4045 insertions(+), 3858 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/exprs/block.fe create mode 100644 crates/parser2/test_files/syntax_node/exprs/block.snap diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index 4c47f8b970..bc9965e2dd 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -16,11 +16,14 @@ pub(crate) fn lower_module_items( items: ast::ItemList, ) { for item in items { - match item.kind() { + let Some(kind) = item.kind() else { + continue; + }; + match kind { ast::ItemKind::Mod(mod_) => { Mod::lower_ast(ctxt, id.clone(), mod_); } - ast::ItemKind::Fn(fn_) => { + ast::ItemKind::Func(fn_) => { Func::lower_ast(ctxt, id.clone(), fn_, false); } ast::ItemKind::Struct(struct_) => { diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs index 0dd76249ff..dde7b4dc1a 100644 --- a/crates/hir/src/span/expr.rs +++ b/crates/hir/src/span/expr.rs @@ -202,7 +202,7 @@ mod tests { fn aug_assign() { let mut db = TestDb::default(); - let text = r#" { + let text = r#" fn foo(mut x: i32) { x += 1 } diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs index 9c20c3cc3a..015c483554 100644 --- a/crates/hir/src/span/stmt.rs +++ b/crates/hir/src/span/stmt.rs @@ -54,7 +54,7 @@ mod tests { fn aug_assign() { let mut db = TestDb::default(); - let text = r#" { + let text = r#" fn foo() { let mut x = 0 x += 1 @@ -63,6 +63,7 @@ mod tests { let body: Body = db.expect_item::(text); let top_mod = body.top_mod(db.as_hir_db()); + assert!(body.stmts(db.as_hir_db()).len() == 2); for (i, stmt) in body.stmts(db.as_hir_db()).keys().enumerate() { match i { 0 => { 
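The `Item` wrapper node introduced in this patch makes `Item::kind()` fallible: it returns `Option<ItemKind>`, so callers such as `lower_module_items` above skip items whose kind could not be recovered instead of panicking. A minimal sketch of the same calling pattern, assuming the AST types are reachable as `parser2::ast::{ItemKind, ItemList}` (the `count_funcs` helper is illustrative only, not part of the patch):

    // Counts `fn` items in a parsed item list, skipping items whose kind
    // could not be recovered after a parse error. `count_funcs` is a
    // hypothetical helper, not an API added by this patch.
    use parser2::ast::{ItemKind, ItemList};

    fn count_funcs(items: ItemList) -> usize {
        let mut n = 0;
        for item in items {
            // `Item::kind` is now fallible; malformed items are skipped
            // instead of hitting the old `unreachable!()` arm.
            let Some(kind) = item.kind() else {
                continue;
            };
            if matches!(kind, ItemKind::Func(_)) {
                n += 1;
            }
        }
        n
    }

Returning `Option` keeps error-recovery nodes flowing through lowering without the removed `unreachable!()` arm, which is what the `ast/item.rs` change below relies on.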
diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 3ff5d7e528..ee13bb336d 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -25,36 +25,24 @@ ast_node! { /// A single item in a module. /// Use `[Item::kind]` to get the specific type of item. pub struct Item, - SK::Mod - | SK::Func - | SK::Struct - | SK::Contract - | SK::Enum - | SK::TypeAlias - | SK::Impl - | SK::Trait - | SK::ImplTrait - | SK::Const - | SK::Use - | SK::Extern, + SK::Item } impl Item { - pub fn kind(&self) -> ItemKind { - match self.syntax().kind() { - SK::Mod => ItemKind::Mod(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Func => ItemKind::Fn(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Struct => ItemKind::Struct(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Contract => ItemKind::Contract(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Enum => ItemKind::Enum(AstNode::cast(self.syntax().clone()).unwrap()), - SK::TypeAlias => ItemKind::TypeAlias(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Impl => ItemKind::Impl(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Trait => ItemKind::Trait(AstNode::cast(self.syntax().clone()).unwrap()), - SK::ImplTrait => ItemKind::ImplTrait(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Const => ItemKind::Const(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Use => ItemKind::Use(AstNode::cast(self.syntax().clone()).unwrap()), - SK::Extern => ItemKind::Extern(AstNode::cast(self.syntax().clone()).unwrap()), - _ => unreachable!(), - } + pub fn kind(&self) -> Option { + dbg!(self.syntax()); + support::child(self.syntax()) + .map(ItemKind::Mod) + .or_else(|| support::child(self.syntax()).map(ItemKind::Func)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Struct)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Contract)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Enum)) + .or_else(|| support::child(self.syntax()).map(ItemKind::TypeAlias)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Impl)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Trait)) + .or_else(|| support::child(self.syntax()).map(ItemKind::ImplTrait)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Const)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Use)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Extern)) } } @@ -425,7 +413,7 @@ pub trait ItemModifierOwner: AstNode { #[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] pub enum ItemKind { Mod(Mod), - Fn(Func), + Func(Func), Struct(Struct), Contract(Contract), Enum(Enum), @@ -461,7 +449,7 @@ mod tests { let item_list = ItemList::cast(parser.finish_to_node().0).unwrap(); let mut items = item_list.into_iter().collect::>(); assert_eq!(items.len(), 1); - items.pop().unwrap().kind().try_into().unwrap() + items.pop().unwrap().kind().unwrap().try_into().unwrap() } #[test] @@ -479,13 +467,13 @@ mod tests { for item in mod_.items().unwrap().into_iter() { match i { 0 => { - assert!(matches!(item.kind(), ItemKind::Fn(_))); - let func: Func = item.kind().try_into().unwrap(); + assert!(matches!(item.kind().unwrap(), ItemKind::Func(_))); + let func: Func = item.kind().unwrap().try_into().unwrap(); assert_eq!(func.name().unwrap().text(), "bar"); } 1 => { - assert!(matches!(item.kind(), ItemKind::Struct(_))); - let struct_: Struct = item.kind().try_into().unwrap(); + assert!(matches!(item.kind().unwrap(), ItemKind::Struct(_))); + let struct_: Struct = 
item.kind().unwrap().try_into().unwrap(); assert_eq!(struct_.name().unwrap().text(), "Baz"); } _ => panic!(), diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs index 9e54d3c208..1e0cf5c9b0 100644 --- a/crates/parser2/src/parser/expr_atom.rs +++ b/crates/parser2/src/parser/expr_atom.rs @@ -8,6 +8,7 @@ use crate::{ use super::{ define_scope, expr::{parse_expr, parse_expr_no_struct}, + item::ItemScope, parse_pat, stmt::parse_stmt, token_stream::TokenStream, @@ -60,6 +61,16 @@ impl super::Parse for BlockExprScope { { break; } + + if parser + .current_kind() + .map(SyntaxKind::is_item_head) + .unwrap_or_default() + { + parser.parse(ItemScope::default(), None); + continue; + } + if !parse_stmt(parser, None) { continue; } diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 9ad3a0ee3b..25f11e2a55 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -56,75 +56,89 @@ impl super::Parse for ItemListScope { break; } - let mut checkpoint = attr::parse_attr_list(parser); - let modifier_scope = ItemModifierScope::default(); - let modifier = match parser.current_kind() { - Some(kind) if is_modifier_head(kind) => { - let (_, modifier_checkpoint) = parser.parse(modifier_scope.clone(), None); - checkpoint.get_or_insert(modifier_checkpoint); - modifier_scope.kind.get() - } - _ => ModifierKind::None, - }; + parser.parse(ItemScope::default(), None); + } + } +} - if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { - parser.error("expected `fn` after `unsafe` keyword"); - } else if modifier.is_pub() && matches!(parser.current_kind(), Some(ImplKw | ExternKw)) - { - let error_msg = format!( - "`pub` can't be used for `{}`", - parser.current_token().unwrap().text() - ); - parser.error(&error_msg); - } +define_scope! { + #[doc(hidden)] + pub(super) ItemScope, + Item, + Inheritance +} +impl super::Parse for ItemScope { + fn parse(&mut self, parser: &mut Parser) { + use crate::SyntaxKind::*; - match parser.current_kind() { - Some(ModKw) => { - parser.parse(ModScope::default(), checkpoint); - } - Some(FnKw) => { - parser.parse(FuncScope::default(), checkpoint); - } - Some(StructKw) => { - parser.parse(super::struct_::StructScope::default(), checkpoint); - } - Some(ContractKw) => { - parser.parse(ContractScope::default(), checkpoint); - } - Some(EnumKw) => { - parser.parse(EnumScope::default(), checkpoint); - } - Some(TraitKw) => { - parser.parse(TraitScope::default(), checkpoint); - } - Some(ImplKw) => { - parser.parse(ImplScope::default(), checkpoint); - } - Some(UseKw) => { - parser.parse(UseScope::default(), checkpoint); - } - Some(ConstKw) => { - parser.parse(ConstScope::default(), checkpoint); - } - Some(ExternKw) => { - parser.parse(ExternScope::default(), checkpoint); - } - Some(TypeKw) => { - parser.parse(TypeAliasScope::default(), checkpoint); - } - tok => parser - .error_and_recover(&format! 
{"expected item: but got {tok:?}"}, checkpoint), + let mut checkpoint = attr::parse_attr_list(parser); + let modifier_scope = ItemModifierScope::default(); + let modifier = match parser.current_kind() { + Some(kind) if kind.is_modifier_head() => { + let (_, modifier_checkpoint) = parser.parse(modifier_scope.clone(), None); + checkpoint.get_or_insert(modifier_checkpoint); + modifier_scope.kind.get() } + _ => ModifierKind::None, + }; + + if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { + parser.error("expected `fn` after `unsafe` keyword"); + } else if modifier.is_pub() && matches!(parser.current_kind(), Some(ImplKw | ExternKw)) { + let error_msg = format!( + "`pub` can't be used for `{}`", + parser.current_token().unwrap().text() + ); + parser.error(&error_msg); + } - parser.set_newline_as_trivia(false); - if parser.current_kind().is_some() && !parser.bump_if(SyntaxKind::Newline) { - parser.bump_or_recover( - SyntaxKind::Newline, - "expected newline after item definition", - checkpoint, - ) + match parser.current_kind() { + Some(ModKw) => { + parser.parse(ModScope::default(), checkpoint); + } + Some(FnKw) => { + parser.parse(FuncScope::default(), checkpoint); + } + Some(StructKw) => { + parser.parse(super::struct_::StructScope::default(), checkpoint); + } + Some(ContractKw) => { + parser.parse(ContractScope::default(), checkpoint); + } + Some(EnumKw) => { + parser.parse(EnumScope::default(), checkpoint); + } + Some(TraitKw) => { + parser.parse(TraitScope::default(), checkpoint); + } + Some(ImplKw) => { + parser.parse(ImplScope::default(), checkpoint); + } + Some(UseKw) => { + parser.parse(UseScope::default(), checkpoint); + } + Some(ConstKw) => { + parser.parse(ConstScope::default(), checkpoint); + } + Some(ExternKw) => { + parser.parse(ExternScope::default(), checkpoint); + } + Some(TypeKw) => { + parser.parse(TypeAliasScope::default(), checkpoint); + } + tok => { + parser.error_and_recover(&format! {"expected item: but got {tok:?}"}, checkpoint) } } + + parser.set_newline_as_trivia(false); + if parser.current_kind().is_some() && !parser.bump_if(SyntaxKind::Newline) { + parser.bump_or_recover( + SyntaxKind::Newline, + "expected newline after item definition", + checkpoint, + ) + } } } @@ -507,7 +521,7 @@ fn parse_fn_item_block( let mut checkpoint = attr::parse_attr_list(parser); let modifier_scope = ItemModifierScope::default(); match parser.current_kind() { - Some(kind) if is_modifier_head(kind) && allow_modifier => { + Some(kind) if kind.is_modifier_head() && allow_modifier => { if allow_modifier { let (_, modifier_checkpoint) = parser.parse(modifier_scope, None); checkpoint.get_or_insert(modifier_checkpoint); @@ -538,7 +552,3 @@ fn parse_fn_item_block( parser.bump_or_recover(SyntaxKind::RBrace, "expected `}` to close the block", None); } - -fn is_modifier_head(kind: SyntaxKind) -> bool { - matches!(kind, SyntaxKind::PubKw | SyntaxKind::UnsafeKw) -} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index a517dd71ec..44d6e36856 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -338,6 +338,7 @@ pub enum SyntaxKind { MatchArmList, // Items. These are non-leaf nodes. + Item, /// `mod s { .. }` Mod, /// `fn foo(x: i32) -> i32 { .. 
}` @@ -486,6 +487,28 @@ impl SyntaxKind { _ => None, } } + + pub(crate) fn is_modifier_head(self) -> bool { + matches!(self, SyntaxKind::PubKw | SyntaxKind::UnsafeKw) + } + + pub(crate) fn is_item_head(self) -> bool { + self.is_modifier_head() + || matches!( + self, + SyntaxKind::ModKw + | SyntaxKind::FnKw + | SyntaxKind::StructKw + | SyntaxKind::ContractKw + | SyntaxKind::EnumKw + | SyntaxKind::TypeKw + | SyntaxKind::ImplKw + | SyntaxKind::TraitKw + | SyntaxKind::ConstKw + | SyntaxKind::UseKw + | SyntaxKind::ExternKw + ) + } } impl From for rowan::SyntaxKind { diff --git a/crates/parser2/test_files/error_recovery/items/const_.snap b/crates/parser2/test_files/error_recovery/items/const_.snap index 090df6462f..582943fa80 100644 --- a/crates/parser2/test_files/error_recovery/items/const_.snap +++ b/crates/parser2/test_files/error_recovery/items/const_.snap @@ -5,52 +5,55 @@ input_file: crates/parser2/test_files/error_recovery/items/const_.fe --- Root@0..44 ItemList@0..44 - Const@0..12 - ConstKw@0..5 "const" - WhiteSpace@5..6 " " - Ident@6..7 "X" - WhiteSpace@7..8 " " - Error@8..8 - PathType@8..8 - Path@8..8 - PathSegment@8..8 - Error@8..8 - Eq@8..9 "=" - WhiteSpace@9..10 " " - LitExpr@10..12 - Lit@10..12 - Int@10..12 "10" - Newline@12..14 "\n\n" - Const@14..27 - ConstKw@14..19 "const" - WhiteSpace@19..20 " " - Ident@20..21 "X" - Colon@21..22 ":" - WhiteSpace@22..23 " " - PathType@23..26 - Path@23..26 - PathSegment@23..26 - Ident@23..26 "i32" - WhiteSpace@26..27 " " - Error@27..27 - Error@27..27 - Newline@27..29 "\n\n" - Const@29..44 - ConstKw@29..34 "const" - WhiteSpace@34..35 " " - Ident@35..36 "X" - Colon@36..37 ":" - WhiteSpace@37..38 " " - PathType@38..40 - Path@38..40 - PathSegment@38..40 - Error@38..40 - RBracket@38..39 "]" - InvalidToken@39..40 "@" - WhiteSpace@40..41 " " - Eq@41..42 "=" - WhiteSpace@42..43 " " - LitExpr@43..44 - Lit@43..44 - Int@43..44 "1" + Item@0..14 + Const@0..12 + ConstKw@0..5 "const" + WhiteSpace@5..6 " " + Ident@6..7 "X" + WhiteSpace@7..8 " " + Error@8..8 + PathType@8..8 + Path@8..8 + PathSegment@8..8 + Error@8..8 + Eq@8..9 "=" + WhiteSpace@9..10 " " + LitExpr@10..12 + Lit@10..12 + Int@10..12 "10" + Newline@12..14 "\n\n" + Item@14..29 + Const@14..27 + ConstKw@14..19 "const" + WhiteSpace@19..20 " " + Ident@20..21 "X" + Colon@21..22 ":" + WhiteSpace@22..23 " " + PathType@23..26 + Path@23..26 + PathSegment@23..26 + Ident@23..26 "i32" + WhiteSpace@26..27 " " + Error@27..27 + Error@27..27 + Newline@27..29 "\n\n" + Item@29..44 + Const@29..44 + ConstKw@29..34 "const" + WhiteSpace@34..35 " " + Ident@35..36 "X" + Colon@36..37 ":" + WhiteSpace@37..38 " " + PathType@38..40 + Path@38..40 + PathSegment@38..40 + Error@38..40 + RBracket@38..39 "]" + InvalidToken@39..40 "@" + WhiteSpace@40..41 " " + Eq@41..42 "=" + WhiteSpace@42..43 " " + LitExpr@43..44 + Lit@43..44 + Int@43..44 "1" diff --git a/crates/parser2/test_files/error_recovery/items/enum_.snap b/crates/parser2/test_files/error_recovery/items/enum_.snap index 02c56b01d5..8d85dfc88f 100644 --- a/crates/parser2/test_files/error_recovery/items/enum_.snap +++ b/crates/parser2/test_files/error_recovery/items/enum_.snap @@ -5,65 +5,66 @@ input_file: crates/parser2/test_files/error_recovery/items/enum_.fe --- Root@0..63 ItemList@0..63 - Enum@0..63 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - EnumKw@4..8 "enum" - WhiteSpace@8..9 " " - Ident@9..15 "MyEnum" - GenericParamList@15..18 - Lt@15..16 "<" - TypeGenericParam@16..17 - Ident@16..17 "T" - Gt@17..18 ">" - WhiteSpace@18..19 " " - VariantDefList@19..63 - 
LBrace@19..20 "{" - Newline@20..21 "\n" - WhiteSpace@21..25 " " - VariantDef@25..39 - Ident@25..26 "X" - TupleType@26..39 - LParen@26..27 "(" - PathType@27..30 - Path@27..30 - PathSegment@27..30 - Ident@27..30 "u32" - Comma@30..31 "," - WhiteSpace@31..32 " " - PathType@32..33 - Path@32..33 - PathSegment@32..33 - Ident@32..33 "T" - Newline@33..34 "\n" - WhiteSpace@34..38 " " - Error@38..39 - Ident@38..39 "A" - Newline@39..40 "\n" - WhiteSpace@40..44 " " - VariantDef@44..53 - Ident@44..45 "Y" - TupleType@45..53 - LParen@45..46 "(" - PathType@46..47 - Path@46..47 - PathSegment@46..47 - Ident@46..47 "T" - Comma@47..48 "," - WhiteSpace@48..49 " " - PathType@49..52 - Path@49..52 - PathSegment@49..52 - Ident@49..52 "u32" - RParen@52..53 ")" - WhiteSpace@53..54 " " - Error@54..55 - Ident@54..55 "A" - Newline@55..56 "\n" - WhiteSpace@56..60 " " - VariantDef@60..61 - Ident@60..61 "Z" - Newline@61..62 "\n" - RBrace@62..63 "}" + Item@0..63 + Enum@0..63 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + EnumKw@4..8 "enum" + WhiteSpace@8..9 " " + Ident@9..15 "MyEnum" + GenericParamList@15..18 + Lt@15..16 "<" + TypeGenericParam@16..17 + Ident@16..17 "T" + Gt@17..18 ">" + WhiteSpace@18..19 " " + VariantDefList@19..63 + LBrace@19..20 "{" + Newline@20..21 "\n" + WhiteSpace@21..25 " " + VariantDef@25..39 + Ident@25..26 "X" + TupleType@26..39 + LParen@26..27 "(" + PathType@27..30 + Path@27..30 + PathSegment@27..30 + Ident@27..30 "u32" + Comma@30..31 "," + WhiteSpace@31..32 " " + PathType@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "T" + Newline@33..34 "\n" + WhiteSpace@34..38 " " + Error@38..39 + Ident@38..39 "A" + Newline@39..40 "\n" + WhiteSpace@40..44 " " + VariantDef@44..53 + Ident@44..45 "Y" + TupleType@45..53 + LParen@45..46 "(" + PathType@46..47 + Path@46..47 + PathSegment@46..47 + Ident@46..47 "T" + Comma@47..48 "," + WhiteSpace@48..49 " " + PathType@49..52 + Path@49..52 + PathSegment@49..52 + Ident@49..52 "u32" + RParen@52..53 ")" + WhiteSpace@53..54 " " + Error@54..55 + Ident@54..55 "A" + Newline@55..56 "\n" + WhiteSpace@56..60 " " + VariantDef@60..61 + Ident@60..61 "Z" + Newline@61..62 "\n" + RBrace@62..63 "}" diff --git a/crates/parser2/test_files/error_recovery/items/extern_.snap b/crates/parser2/test_files/error_recovery/items/extern_.snap index 23459991e9..8547a5fc96 100644 --- a/crates/parser2/test_files/error_recovery/items/extern_.snap +++ b/crates/parser2/test_files/error_recovery/items/extern_.snap @@ -5,59 +5,60 @@ input_file: crates/parser2/test_files/error_recovery/items/extern_.fe --- Root@0..90 ItemList@0..90 - Extern@0..90 - ExternKw@0..6 "extern" - WhiteSpace@6..7 " " - ExternItemList@7..90 - LBrace@7..8 "{" - Newline@8..9 "\n" - WhiteSpace@9..13 " " - Func@13..41 - ItemModifier@13..23 - PubKw@13..16 "pub" - WhiteSpace@16..17 " " - UnsafeKw@17..23 "unsafe" - WhiteSpace@23..24 " " - FnKw@24..26 "fn" - WhiteSpace@26..27 " " - Ident@27..30 "Foo" - FuncParamList@30..41 - LParen@30..31 "(" - FnParam@31..40 - Ident@31..32 "x" - Colon@32..33 ":" - WhiteSpace@33..34 " " - PtrType@34..40 - Star@34..35 "*" - PathType@35..40 - Path@35..40 - PathSegment@35..40 - Ident@35..40 "usize" - RParen@40..41 ")" - Newline@41..42 "\n" - WhiteSpace@42..46 " " - Newline@46..47 "\n" - WhiteSpace@47..51 " " - Error@51..63 - StructKw@51..57 "struct" - WhiteSpace@57..58 " " - Ident@58..61 "Foo" - WhiteSpace@61..62 " " - LBrace@62..63 "{" - Newline@63..65 "\n\n" - WhiteSpace@65..69 " " - Func@69..88 - ItemModifier@69..79 - PubKw@69..72 "pub" - WhiteSpace@72..73 " " - UnsafeKw@73..79 
"unsafe" - WhiteSpace@79..80 " " - FnKw@80..82 "fn" - WhiteSpace@82..83 " " - Ident@83..86 "foo" - FuncParamList@86..88 - LParen@86..87 "(" - RParen@87..88 ")" - Newline@88..89 "\n" - RBrace@89..90 "}" + Item@0..90 + Extern@0..90 + ExternKw@0..6 "extern" + WhiteSpace@6..7 " " + ExternItemList@7..90 + LBrace@7..8 "{" + Newline@8..9 "\n" + WhiteSpace@9..13 " " + Func@13..41 + ItemModifier@13..23 + PubKw@13..16 "pub" + WhiteSpace@16..17 " " + UnsafeKw@17..23 "unsafe" + WhiteSpace@23..24 " " + FnKw@24..26 "fn" + WhiteSpace@26..27 " " + Ident@27..30 "Foo" + FuncParamList@30..41 + LParen@30..31 "(" + FnParam@31..40 + Ident@31..32 "x" + Colon@32..33 ":" + WhiteSpace@33..34 " " + PtrType@34..40 + Star@34..35 "*" + PathType@35..40 + Path@35..40 + PathSegment@35..40 + Ident@35..40 "usize" + RParen@40..41 ")" + Newline@41..42 "\n" + WhiteSpace@42..46 " " + Newline@46..47 "\n" + WhiteSpace@47..51 " " + Error@51..63 + StructKw@51..57 "struct" + WhiteSpace@57..58 " " + Ident@58..61 "Foo" + WhiteSpace@61..62 " " + LBrace@62..63 "{" + Newline@63..65 "\n\n" + WhiteSpace@65..69 " " + Func@69..88 + ItemModifier@69..79 + PubKw@69..72 "pub" + WhiteSpace@72..73 " " + UnsafeKw@73..79 "unsafe" + WhiteSpace@79..80 " " + FnKw@80..82 "fn" + WhiteSpace@82..83 " " + Ident@83..86 "foo" + FuncParamList@86..88 + LParen@86..87 "(" + RParen@87..88 ")" + Newline@88..89 "\n" + RBrace@89..90 "}" diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index 69afea3379..28095d3eba 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -5,144 +5,146 @@ input_file: crates/parser2/test_files/error_recovery/items/func.fe --- Root@0..133 ItemList@0..133 - Func@0..78 - FnKw@0..2 "fn" - WhiteSpace@2..3 " " - Ident@3..6 "foo" - GenericParamList@6..16 - Lt@6..7 "<" - TypeGenericParam@7..15 - Ident@7..8 "T" - TypeBoundList@8..15 - Colon@8..9 ":" - WhiteSpace@9..10 " " - TypeBound@10..15 - Path@10..15 - PathSegment@10..15 - Ident@10..15 "Trait" - Gt@15..16 ">" - Error@16..17 - Gt@16..17 ">" - FuncParamList@17..47 - LParen@17..18 "(" - FnParam@18..24 - Ident@18..19 "x" - Colon@19..20 ":" - WhiteSpace@20..21 " " - PathType@21..24 - Path@21..24 - PathSegment@21..24 - Ident@21..24 "i32" - Comma@24..25 "," - WhiteSpace@25..26 " " - FnParam@26..38 - Underscore@26..27 "_" - WhiteSpace@27..28 " " - Error@28..33 - MutKw@28..31 "mut" - WhiteSpace@31..32 " " - Ident@32..33 "y" - Colon@33..34 ":" - WhiteSpace@34..35 " " - PathType@35..38 - Path@35..38 - PathSegment@35..38 - Ident@35..38 "u32" - Comma@38..39 "," - WhiteSpace@39..40 " " - FnParam@40..46 - Ident@40..41 "z" - Colon@41..42 ":" - WhiteSpace@42..43 " " - PathType@43..46 - Path@43..46 - PathSegment@43..46 - Ident@43..46 "u32" - RParen@46..47 ")" - WhiteSpace@47..48 " " - Arrow@48..50 "->" - WhiteSpace@50..52 " " - PathType@52..53 - Path@52..53 - PathSegment@52..53 - Ident@52..53 "T" - Error@53..56 - Comma@53..54 "," - WhiteSpace@54..55 " " - Ident@55..56 "u" - WhiteSpace@56..57 " " - WhereClause@57..74 - WhereKw@57..62 "where" - WhiteSpace@62..63 " " - WherePredicate@63..74 - PathType@63..64 - Path@63..64 - PathSegment@63..64 - Ident@63..64 "T" - TypeBoundList@64..72 - Colon@64..65 ":" - WhiteSpace@65..66 " " - TypeBound@66..72 - Path@66..72 - PathSegment@66..72 - Ident@66..72 "Trait2" - WhiteSpace@72..73 " " - Newline@73..74 "\n" - BlockExpr@74..78 - LBrace@74..75 "{" - Newline@75..77 "\n\n" - RBrace@77..78 "}" - Newline@78..80 "\n\n" - 
Func@80..132 - FnKw@80..82 "fn" - WhiteSpace@82..83 " " - Ident@83..86 "foo" - GenericParamList@86..98 - Lt@86..87 "<" - TypeGenericParam@87..98 - Error@87..98 - Lt@87..88 "<" - Lt@88..89 "<" - Ident@89..90 "T" - Colon@90..91 ":" - WhiteSpace@91..92 " " - Ident@92..97 "Trait" - Gt@97..98 ">" + Item@0..80 + Func@0..78 + FnKw@0..2 "fn" + WhiteSpace@2..3 " " + Ident@3..6 "foo" + GenericParamList@6..16 + Lt@6..7 "<" + TypeGenericParam@7..15 + Ident@7..8 "T" + TypeBoundList@8..15 + Colon@8..9 ":" + WhiteSpace@9..10 " " + TypeBound@10..15 + Path@10..15 + PathSegment@10..15 + Ident@10..15 "Trait" + Gt@15..16 ">" + Error@16..17 + Gt@16..17 ">" + FuncParamList@17..47 + LParen@17..18 "(" + FnParam@18..24 + Ident@18..19 "x" + Colon@19..20 ":" + WhiteSpace@20..21 " " + PathType@21..24 + Path@21..24 + PathSegment@21..24 + Ident@21..24 "i32" + Comma@24..25 "," + WhiteSpace@25..26 " " + FnParam@26..38 + Underscore@26..27 "_" + WhiteSpace@27..28 " " + Error@28..33 + MutKw@28..31 "mut" + WhiteSpace@31..32 " " + Ident@32..33 "y" + Colon@33..34 ":" + WhiteSpace@34..35 " " + PathType@35..38 + Path@35..38 + PathSegment@35..38 + Ident@35..38 "u32" + Comma@38..39 "," + WhiteSpace@39..40 " " + FnParam@40..46 + Ident@40..41 "z" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "u32" + RParen@46..47 ")" + WhiteSpace@47..48 " " + Arrow@48..50 "->" + WhiteSpace@50..52 " " + PathType@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "T" + Error@53..56 + Comma@53..54 "," + WhiteSpace@54..55 " " + Ident@55..56 "u" + WhiteSpace@56..57 " " + WhereClause@57..74 + WhereKw@57..62 "where" + WhiteSpace@62..63 " " + WherePredicate@63..74 + PathType@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "T" + TypeBoundList@64..72 + Colon@64..65 ":" + WhiteSpace@65..66 " " + TypeBound@66..72 + Path@66..72 + PathSegment@66..72 + Ident@66..72 "Trait2" + WhiteSpace@72..73 " " + Newline@73..74 "\n" + BlockExpr@74..78 + LBrace@74..75 "{" + Newline@75..77 "\n\n" + RBrace@77..78 "}" + Newline@78..80 "\n\n" + Item@80..133 + Func@80..132 + FnKw@80..82 "fn" + WhiteSpace@82..83 " " + Ident@83..86 "foo" + GenericParamList@86..98 + Lt@86..87 "<" + TypeGenericParam@87..98 + Error@87..98 + Lt@87..88 "<" + Lt@88..89 "<" + Ident@89..90 "T" + Colon@90..91 ":" + WhiteSpace@91..92 " " + Ident@92..97 "Trait" + Gt@97..98 ">" + Error@98..98 Error@98..98 - Error@98..98 - FuncParamList@98..106 - LParen@98..99 "(" - FnParam@99..105 - Ident@99..100 "x" - Colon@100..101 ":" - WhiteSpace@101..102 " " - PathType@102..105 - Path@102..105 - PathSegment@102..105 - Ident@102..105 "i32" - RParen@105..106 ")" - Newline@106..107 "\n" - WhiteSpace@107..111 " " - WhereClause@111..128 - WhereKw@111..116 "where" - WhiteSpace@116..117 " " - WherePredicate@117..128 - PathType@117..118 - Path@117..118 - PathSegment@117..118 - Ident@117..118 "T" - TypeBoundList@118..126 - Colon@118..119 ":" - WhiteSpace@119..120 " " - TypeBound@120..126 - Path@120..126 - PathSegment@120..126 - Ident@120..126 "Trait2" - WhiteSpace@126..127 " " - Newline@127..128 "\n" - BlockExpr@128..132 - LBrace@128..129 "{" - Newline@129..131 "\n\n" - RBrace@131..132 "}" - Newline@132..133 "\n" + FuncParamList@98..106 + LParen@98..99 "(" + FnParam@99..105 + Ident@99..100 "x" + Colon@100..101 ":" + WhiteSpace@101..102 " " + PathType@102..105 + Path@102..105 + PathSegment@102..105 + Ident@102..105 "i32" + RParen@105..106 ")" + Newline@106..107 "\n" + WhiteSpace@107..111 " " + WhereClause@111..128 + WhereKw@111..116 "where" + 
WhiteSpace@116..117 " " + WherePredicate@117..128 + PathType@117..118 + Path@117..118 + PathSegment@117..118 + Ident@117..118 "T" + TypeBoundList@118..126 + Colon@118..119 ":" + WhiteSpace@119..120 " " + TypeBound@120..126 + Path@120..126 + PathSegment@120..126 + Ident@120..126 "Trait2" + WhiteSpace@126..127 " " + Newline@127..128 "\n" + BlockExpr@128..132 + LBrace@128..129 "{" + Newline@129..131 "\n\n" + RBrace@131..132 "}" + Newline@132..133 "\n" diff --git a/crates/parser2/test_files/error_recovery/items/impl_.snap b/crates/parser2/test_files/error_recovery/items/impl_.snap index ea8bdf1119..786cdb94da 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_.snap @@ -5,77 +5,79 @@ input_file: crates/parser2/test_files/error_recovery/items/impl_.fe --- Root@0..56 ItemList@0..56 - Impl@0..37 - ImplKw@0..4 "impl" - WhiteSpace@4..5 " " - PathType@5..17 - Path@5..8 - PathSegment@5..8 - Ident@5..8 "Foo" - GenericArgList@8..17 - Lt@8..9 "<" - TypeGenericArg@9..10 - PathType@9..10 - Path@9..10 - PathSegment@9..10 - Ident@9..10 "T" - Comma@10..11 "," - WhiteSpace@11..12 " " - Newline@12..13 "\n" - WhiteSpace@13..17 " " - TypeGenericArg@17..17 - PathType@17..17 - Path@17..17 - PathSegment@17..17 - Error@17..17 + Item@0..39 + Impl@0..37 + ImplKw@0..4 "impl" + WhiteSpace@4..5 " " + PathType@5..17 + Path@5..8 + PathSegment@5..8 + Ident@5..8 "Foo" + GenericArgList@8..17 + Lt@8..9 "<" + TypeGenericArg@9..10 + PathType@9..10 + Path@9..10 + PathSegment@9..10 + Ident@9..10 "T" + Comma@10..11 "," + WhiteSpace@11..12 " " + Newline@12..13 "\n" + WhiteSpace@13..17 " " + TypeGenericArg@17..17 + PathType@17..17 + Path@17..17 + PathSegment@17..17 + Error@17..17 + Error@17..17 Error@17..17 - Error@17..17 - WhereClause@17..34 - WhereKw@17..22 "where" - WhiteSpace@22..23 " " - WherePredicate@23..34 - PathType@23..24 - Path@23..24 - PathSegment@23..24 - Ident@23..24 "T" - TypeBoundList@24..33 - Colon@24..25 ":" - WhiteSpace@25..26 " " - TypeBound@26..33 - Path@26..33 - PathSegment@26..33 - Ident@26..33 "Integer" - Newline@33..34 "\n" - ImplItemList@34..37 - LBrace@34..35 "{" - WhiteSpace@35..36 " " - RBrace@36..37 "}" - Newline@37..39 "\n\n" - Impl@39..56 - ImplKw@39..43 "impl" - WhiteSpace@43..44 " " - PathType@44..52 - Path@44..47 - PathSegment@44..47 - Ident@44..47 "Foo" - GenericArgList@47..52 - Lt@47..48 "<" - TypeGenericArg@48..49 - PathType@48..49 - Path@48..49 - PathSegment@48..49 - Ident@48..49 "T" - Comma@49..50 "," - WhiteSpace@50..51 " " - TypeGenericArg@51..51 - PathType@51..51 - Path@51..51 - PathSegment@51..51 - Error@51..51 - Gt@51..52 ">" - Newline@52..53 "\n" - ImplItemList@53..56 - LBrace@53..54 "{" - WhiteSpace@54..55 " " - RBrace@55..56 "}" + WhereClause@17..34 + WhereKw@17..22 "where" + WhiteSpace@22..23 " " + WherePredicate@23..34 + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + TypeBoundList@24..33 + Colon@24..25 ":" + WhiteSpace@25..26 " " + TypeBound@26..33 + Path@26..33 + PathSegment@26..33 + Ident@26..33 "Integer" + Newline@33..34 "\n" + ImplItemList@34..37 + LBrace@34..35 "{" + WhiteSpace@35..36 " " + RBrace@36..37 "}" + Newline@37..39 "\n\n" + Item@39..56 + Impl@39..56 + ImplKw@39..43 "impl" + WhiteSpace@43..44 " " + PathType@44..52 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "Foo" + GenericArgList@47..52 + Lt@47..48 "<" + TypeGenericArg@48..49 + PathType@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "T" + Comma@49..50 "," + WhiteSpace@50..51 " " + TypeGenericArg@51..51 + 
PathType@51..51 + Path@51..51 + PathSegment@51..51 + Error@51..51 + Gt@51..52 ">" + Newline@52..53 "\n" + ImplItemList@53..56 + LBrace@53..54 "{" + WhiteSpace@54..55 " " + RBrace@55..56 "}" diff --git a/crates/parser2/test_files/error_recovery/items/impl_trait.snap b/crates/parser2/test_files/error_recovery/items/impl_trait.snap index 1e1b017b2a..4bc909b1e6 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_trait.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_trait.snap @@ -5,149 +5,152 @@ input_file: crates/parser2/test_files/error_recovery/items/impl_trait.fe --- Root@0..90 ItemList@0..90 - ImplTrait@0..36 - ImplKw@0..4 "impl" - WhiteSpace@4..5 " " - PathType@5..14 - Path@5..6 - PathSegment@5..6 - Ident@5..6 "X" - GenericArgList@6..14 - Lt@6..7 "<" - TypeGenericArg@7..8 - PathType@7..8 - Path@7..8 - PathSegment@7..8 - Ident@7..8 "T" - Comma@8..9 "," - WhiteSpace@9..10 " " - TypeGenericArg@10..13 - PathType@10..11 - Path@10..11 - PathSegment@10..11 - Ident@10..11 "u" - Error@11..13 - RParen@11..12 ")" - RParen@12..13 ")" - Gt@13..14 ">" - WhiteSpace@14..15 " " - ForKw@15..18 "for" - WhiteSpace@18..19 " " - PathType@19..23 - Path@19..20 - PathSegment@19..20 - Ident@19..20 "Y" - GenericArgList@20..23 - Lt@20..21 "<" - TypeGenericArg@21..23 - PathType@21..22 - Path@21..22 - PathSegment@21..22 - Ident@21..22 "T" - WhiteSpace@22..23 " " + Item@0..38 + ImplTrait@0..36 + ImplKw@0..4 "impl" + WhiteSpace@4..5 " " + PathType@5..14 + Path@5..6 + PathSegment@5..6 + Ident@5..6 "X" + GenericArgList@6..14 + Lt@6..7 "<" + TypeGenericArg@7..8 + PathType@7..8 + Path@7..8 + PathSegment@7..8 + Ident@7..8 "T" + Comma@8..9 "," + WhiteSpace@9..10 " " + TypeGenericArg@10..13 + PathType@10..11 + Path@10..11 + PathSegment@10..11 + Ident@10..11 "u" + Error@11..13 + RParen@11..12 ")" + RParen@12..13 ")" + Gt@13..14 ">" + WhiteSpace@14..15 " " + ForKw@15..18 "for" + WhiteSpace@18..19 " " + PathType@19..23 + Path@19..20 + PathSegment@19..20 + Ident@19..20 "Y" + GenericArgList@20..23 + Lt@20..21 "<" + TypeGenericArg@21..23 + PathType@21..22 + Path@21..22 + PathSegment@21..22 + Ident@21..22 "T" + WhiteSpace@22..23 " " + Error@23..23 Error@23..23 - Error@23..23 - WhereClause@23..34 - WhereKw@23..28 "where" - WhiteSpace@28..29 " " - WherePredicate@29..34 - PathType@29..30 - Path@29..30 - PathSegment@29..30 - Ident@29..30 "T" - TypeBoundList@30..33 - Colon@30..31 ":" - WhiteSpace@31..32 " " - TypeBound@32..33 - Path@32..33 - PathSegment@32..33 - Ident@32..33 "X" - WhiteSpace@33..34 " " - Error@34..34 - ImplTraitItemList@34..36 - LBrace@34..35 "{" - RBrace@35..36 "}" - Newline@36..38 "\n\n" - ImplTrait@38..71 - ImplKw@38..42 "impl" - WhiteSpace@42..43 " " - PathType@43..50 - Path@43..44 - PathSegment@43..44 - Ident@43..44 "X" - GenericArgList@44..50 - Lt@44..45 "<" - TypeGenericArg@45..46 - PathType@45..46 - Path@45..46 - PathSegment@45..46 - Ident@45..46 "T" - Comma@46..47 "," - WhiteSpace@47..48 " " - TypeGenericArg@48..50 - PathType@48..49 - Path@48..49 - PathSegment@48..49 - Ident@48..49 "u" - WhiteSpace@49..50 " " + WhereClause@23..34 + WhereKw@23..28 "where" + WhiteSpace@28..29 " " + WherePredicate@29..34 + PathType@29..30 + Path@29..30 + PathSegment@29..30 + Ident@29..30 "T" + TypeBoundList@30..33 + Colon@30..31 ":" + WhiteSpace@31..32 " " + TypeBound@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "X" + WhiteSpace@33..34 " " + Error@34..34 + ImplTraitItemList@34..36 + LBrace@34..35 "{" + RBrace@35..36 "}" + Newline@36..38 "\n\n" + Item@38..73 + ImplTrait@38..71 + ImplKw@38..42 
"impl" + WhiteSpace@42..43 " " + PathType@43..50 + Path@43..44 + PathSegment@43..44 + Ident@43..44 "X" + GenericArgList@44..50 + Lt@44..45 "<" + TypeGenericArg@45..46 + PathType@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "T" + Comma@46..47 "," + WhiteSpace@47..48 " " + TypeGenericArg@48..50 + PathType@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "u" + WhiteSpace@49..50 " " + Error@50..50 Error@50..50 - Error@50..50 - ForKw@50..53 "for" - WhiteSpace@53..54 " " - PathType@54..58 - Path@54..55 - PathSegment@54..55 - Ident@54..55 "Y" - GenericArgList@55..58 - Lt@55..56 "<" - TypeGenericArg@56..58 - PathType@56..57 - Path@56..57 - PathSegment@56..57 - Ident@56..57 "T" - WhiteSpace@57..58 " " + ForKw@50..53 "for" + WhiteSpace@53..54 " " + PathType@54..58 + Path@54..55 + PathSegment@54..55 + Ident@54..55 "Y" + GenericArgList@55..58 + Lt@55..56 "<" + TypeGenericArg@56..58 + PathType@56..57 + Path@56..57 + PathSegment@56..57 + Ident@56..57 "T" + WhiteSpace@57..58 " " + Error@58..58 Error@58..58 - Error@58..58 - WhereClause@58..69 - WhereKw@58..63 "where" - WhiteSpace@63..64 " " - WherePredicate@64..69 - PathType@64..65 - Path@64..65 - PathSegment@64..65 - Ident@64..65 "T" - TypeBoundList@65..68 - Colon@65..66 ":" - WhiteSpace@66..67 " " - TypeBound@67..68 - Path@67..68 - PathSegment@67..68 - Ident@67..68 "X" - WhiteSpace@68..69 " " - Error@69..69 - ImplTraitItemList@69..71 - LBrace@69..70 "{" - RBrace@70..71 "}" - Newline@71..73 "\n\n" - ImplTrait@73..90 - ImplKw@73..77 "impl" - WhiteSpace@77..78 " " - PathType@78..79 - Path@78..79 - PathSegment@78..79 - Ident@78..79 "X" - WhiteSpace@79..80 " " - Error@80..81 - InvalidToken@80..81 "@" - WhiteSpace@81..82 " " - ForKw@82..85 "for" - WhiteSpace@85..86 " " - PathType@86..87 - Path@86..87 - PathSegment@86..87 - Ident@86..87 "Y" - WhiteSpace@87..88 " " - ImplTraitItemList@88..90 - LBrace@88..89 "{" - RBrace@89..90 "}" + WhereClause@58..69 + WhereKw@58..63 "where" + WhiteSpace@63..64 " " + WherePredicate@64..69 + PathType@64..65 + Path@64..65 + PathSegment@64..65 + Ident@64..65 "T" + TypeBoundList@65..68 + Colon@65..66 ":" + WhiteSpace@66..67 " " + TypeBound@67..68 + Path@67..68 + PathSegment@67..68 + Ident@67..68 "X" + WhiteSpace@68..69 " " + Error@69..69 + ImplTraitItemList@69..71 + LBrace@69..70 "{" + RBrace@70..71 "}" + Newline@71..73 "\n\n" + Item@73..90 + ImplTrait@73..90 + ImplKw@73..77 "impl" + WhiteSpace@77..78 " " + PathType@78..79 + Path@78..79 + PathSegment@78..79 + Ident@78..79 "X" + WhiteSpace@79..80 " " + Error@80..81 + InvalidToken@80..81 "@" + WhiteSpace@81..82 " " + ForKw@82..85 "for" + WhiteSpace@85..86 " " + PathType@86..87 + Path@86..87 + PathSegment@86..87 + Ident@86..87 "Y" + WhiteSpace@87..88 " " + ImplTraitItemList@88..90 + LBrace@88..89 "{" + RBrace@89..90 "}" diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index 05f61427bb..eaeab2cf22 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -5,71 +5,72 @@ input_file: crates/parser2/test_files/error_recovery/items/struct_.fe --- Root@0..74 ItemList@0..74 - Struct@0..74 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - StructKw@4..10 "struct" - Error@10..10 - GenericParamList@10..16 - Lt@10..11 "<" - TypeGenericParam@11..12 - Ident@11..12 "T" - Comma@12..13 "," - WhiteSpace@13..14 " " - TypeGenericParam@14..16 - Ident@14..15 "U" - Newline@15..16 "\n" + Item@0..74 + 
Struct@0..74 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + Error@10..10 + GenericParamList@10..16 + Lt@10..11 "<" + TypeGenericParam@11..12 + Ident@11..12 "T" + Comma@12..13 "," + WhiteSpace@13..14 " " + TypeGenericParam@14..16 + Ident@14..15 "U" + Newline@15..16 "\n" + Error@16..16 Error@16..16 - Error@16..16 - WhereClause@16..40 - WhereKw@16..21 "where" - WhiteSpace@21..22 " " - WherePredicate@22..24 - PathType@22..23 - Path@22..23 - PathSegment@22..23 - Ident@22..23 "T" - WhiteSpace@23..24 " " - Error@24..24 - Newline@24..25 "\n" - WhiteSpace@25..31 " " - WherePredicate@31..40 - PathType@31..32 - Path@31..32 - PathSegment@31..32 - Ident@31..32 "U" - TypeBoundList@32..39 - Colon@32..33 ":" - WhiteSpace@33..34 " " - TypeBound@34..39 - Path@34..39 - PathSegment@34..39 - Ident@34..39 "Trait" - Newline@39..40 "\n" - WhiteSpace@40..44 " " - Newline@44..45 "\n" - RecordFieldDefList@45..74 - LBrace@45..46 "{" - Newline@46..47 "\n" - WhiteSpace@47..51 " " - RecordFieldDef@51..54 - Ident@51..54 "foo" - Error@54..54 - Error@54..54 - Newline@54..55 "\n" - WhiteSpace@55..59 " " - RecordFieldDef@59..72 - Ident@59..62 "bar" - Colon@62..63 ":" - WhiteSpace@63..64 " " - PathType@64..72 - Path@64..72 - PathSegment@64..67 - Ident@64..67 "i32" - Colon2@67..69 "::" - PathSegment@69..72 - Ident@69..72 "foo" - Newline@72..73 "\n" - RBrace@73..74 "}" + WhereClause@16..40 + WhereKw@16..21 "where" + WhiteSpace@21..22 " " + WherePredicate@22..24 + PathType@22..23 + Path@22..23 + PathSegment@22..23 + Ident@22..23 "T" + WhiteSpace@23..24 " " + Error@24..24 + Newline@24..25 "\n" + WhiteSpace@25..31 " " + WherePredicate@31..40 + PathType@31..32 + Path@31..32 + PathSegment@31..32 + Ident@31..32 "U" + TypeBoundList@32..39 + Colon@32..33 ":" + WhiteSpace@33..34 " " + TypeBound@34..39 + Path@34..39 + PathSegment@34..39 + Ident@34..39 "Trait" + Newline@39..40 "\n" + WhiteSpace@40..44 " " + Newline@44..45 "\n" + RecordFieldDefList@45..74 + LBrace@45..46 "{" + Newline@46..47 "\n" + WhiteSpace@47..51 " " + RecordFieldDef@51..54 + Ident@51..54 "foo" + Error@54..54 + Error@54..54 + Newline@54..55 "\n" + WhiteSpace@55..59 " " + RecordFieldDef@59..72 + Ident@59..62 "bar" + Colon@62..63 ":" + WhiteSpace@63..64 " " + PathType@64..72 + Path@64..72 + PathSegment@64..67 + Ident@64..67 "i32" + Colon2@67..69 "::" + PathSegment@69..72 + Ident@69..72 "foo" + Newline@72..73 "\n" + RBrace@73..74 "}" diff --git a/crates/parser2/test_files/error_recovery/items/trait_.snap b/crates/parser2/test_files/error_recovery/items/trait_.snap index 8df2b3d109..82d40f797e 100644 --- a/crates/parser2/test_files/error_recovery/items/trait_.snap +++ b/crates/parser2/test_files/error_recovery/items/trait_.snap @@ -5,144 +5,149 @@ input_file: crates/parser2/test_files/error_recovery/items/trait_.fe --- Root@0..133 ItemList@0..133 - Trait@0..18 - TraitKw@0..5 "trait" - WhiteSpace@5..6 " " - Ident@6..9 "Foo" - GenericParamList@9..16 - Lt@9..10 "<" - TypeGenericParam@10..11 - Ident@10..11 "T" - Comma@11..12 "," - WhiteSpace@12..13 " " - TypeGenericParam@13..14 - Ident@13..14 "Y" - Comma@14..15 "," - TypeGenericParam@15..15 - Error@15..15 - Gt@15..16 ">" - TraitItemList@16..18 - LBrace@16..17 "{" - RBrace@17..18 "}" - Newline@18..20 "\n\n" - Trait@20..35 - TraitKw@20..25 "trait" - WhiteSpace@25..26 " " - Ident@26..29 "Bar" - GenericParamList@29..33 - Lt@29..30 "<" - TypeGenericParam@30..31 - Ident@30..31 "Y" - Comma@31..32 "," - WhiteSpace@32..33 " " - TypeGenericParam@33..33 + Item@0..20 + Trait@0..18 + TraitKw@0..5 
"trait" + WhiteSpace@5..6 " " + Ident@6..9 "Foo" + GenericParamList@9..16 + Lt@9..10 "<" + TypeGenericParam@10..11 + Ident@10..11 "T" + Comma@11..12 "," + WhiteSpace@12..13 " " + TypeGenericParam@13..14 + Ident@13..14 "Y" + Comma@14..15 "," + TypeGenericParam@15..15 + Error@15..15 + Gt@15..16 ">" + TraitItemList@16..18 + LBrace@16..17 "{" + RBrace@17..18 "}" + Newline@18..20 "\n\n" + Item@20..37 + Trait@20..35 + TraitKw@20..25 "trait" + WhiteSpace@25..26 " " + Ident@26..29 "Bar" + GenericParamList@29..33 + Lt@29..30 "<" + TypeGenericParam@30..31 + Ident@30..31 "Y" + Comma@31..32 "," + WhiteSpace@32..33 " " + TypeGenericParam@33..33 + Error@33..33 + Error@33..33 Error@33..33 - Error@33..33 - Error@33..33 - TraitItemList@33..35 - LBrace@33..34 "{" - RBrace@34..35 "}" - Newline@35..37 "\n\n" - Trait@37..53 - TraitKw@37..42 "trait" - WhiteSpace@42..43 " " - Ident@43..46 "Bar" - GenericParamList@46..51 - Lt@46..47 "<" - TypeGenericParam@47..48 - Ident@47..48 "T" - Comma@48..49 "," - WhiteSpace@49..50 " " - TypeGenericParam@50..50 - Error@50..50 - Gt@50..51 ">" - Newline@51..53 "\n\n" - Error@53..53 - Error@53..53 + TraitItemList@33..35 + LBrace@33..34 "{" + RBrace@34..35 "}" + Newline@35..37 "\n\n" + Item@37..53 + Trait@37..53 + TraitKw@37..42 "trait" + WhiteSpace@42..43 " " + Ident@43..46 "Bar" + GenericParamList@46..51 + Lt@46..47 "<" + TypeGenericParam@47..48 + Ident@47..48 "T" + Comma@48..49 "," + WhiteSpace@49..50 " " + TypeGenericParam@50..50 + Error@50..50 + Gt@50..51 ">" + Newline@51..53 "\n\n" + Error@53..53 + Error@53..53 + Error@53..53 Error@53..53 - Error@53..53 - Trait@53..85 - TraitKw@53..58 "trait" - WhiteSpace@58..59 " " - Ident@59..62 "Bar" - GenericParamList@62..69 - Lt@62..63 "<" - TypeGenericParam@63..64 - Ident@63..64 "Y" - Comma@64..65 "," - WhiteSpace@65..66 " " - TypeGenericParam@66..67 - Ident@66..67 "T" - Comma@67..68 "," - TypeGenericParam@68..68 - Error@68..68 - Gt@68..69 ">" - WhiteSpace@69..70 " " - WhereClause@70..83 - WhereKw@70..75 "where" - WhiteSpace@75..76 " " - WherePredicate@76..83 - PathType@76..77 - Path@76..77 - PathSegment@76..77 - Ident@76..77 "T" - TypeBoundList@77..82 - Colon@77..78 ":" - WhiteSpace@78..79 " " - TypeBound@79..82 - Path@79..82 - PathSegment@79..82 - Ident@79..82 "Add" - WhiteSpace@82..83 " " - Error@83..83 - TraitItemList@83..85 - LBrace@83..84 "{" - RBrace@84..85 "}" - Newline@85..87 "\n\n" - Trait@87..133 - TraitKw@87..92 "trait" - WhiteSpace@92..93 " " - Ident@93..96 "Bar" - GenericParamList@96..108 - Lt@96..97 "<" - TypeGenericParam@97..99 - Error@97..99 - Lt@97..98 "<" - Ident@98..99 "Y" - Comma@99..100 "," - WhiteSpace@100..101 " " - TypeGenericParam@101..107 - Ident@101..102 "K" - TypeBoundList@102..107 - Colon@102..103 ":" - WhiteSpace@103..104 " " - TypeBound@104..107 - Path@104..107 - PathSegment@104..107 - Ident@104..107 "Sub" - Gt@107..108 ">" - WhiteSpace@108..110 " " - Newline@110..111 "\n" - WhiteSpace@111..115 " " - WhereClause@115..129 - WhereKw@115..120 "where" - WhiteSpace@120..121 " " - WherePredicate@121..129 - PathType@121..122 - Path@121..122 - PathSegment@121..122 - Ident@121..122 "T" - TypeBoundList@122..127 - Colon@122..123 ":" - WhiteSpace@123..124 " " - TypeBound@124..127 - Path@124..127 - PathSegment@124..127 - Ident@124..127 "Add" - WhiteSpace@127..128 " " - Newline@128..129 "\n" - TraitItemList@129..133 - LBrace@129..130 "{" - Newline@130..132 "\n\n" - RBrace@132..133 "}" + Item@53..87 + Trait@53..85 + TraitKw@53..58 "trait" + WhiteSpace@58..59 " " + Ident@59..62 "Bar" + GenericParamList@62..69 + 
Lt@62..63 "<" + TypeGenericParam@63..64 + Ident@63..64 "Y" + Comma@64..65 "," + WhiteSpace@65..66 " " + TypeGenericParam@66..67 + Ident@66..67 "T" + Comma@67..68 "," + TypeGenericParam@68..68 + Error@68..68 + Gt@68..69 ">" + WhiteSpace@69..70 " " + WhereClause@70..83 + WhereKw@70..75 "where" + WhiteSpace@75..76 " " + WherePredicate@76..83 + PathType@76..77 + Path@76..77 + PathSegment@76..77 + Ident@76..77 "T" + TypeBoundList@77..82 + Colon@77..78 ":" + WhiteSpace@78..79 " " + TypeBound@79..82 + Path@79..82 + PathSegment@79..82 + Ident@79..82 "Add" + WhiteSpace@82..83 " " + Error@83..83 + TraitItemList@83..85 + LBrace@83..84 "{" + RBrace@84..85 "}" + Newline@85..87 "\n\n" + Item@87..133 + Trait@87..133 + TraitKw@87..92 "trait" + WhiteSpace@92..93 " " + Ident@93..96 "Bar" + GenericParamList@96..108 + Lt@96..97 "<" + TypeGenericParam@97..99 + Error@97..99 + Lt@97..98 "<" + Ident@98..99 "Y" + Comma@99..100 "," + WhiteSpace@100..101 " " + TypeGenericParam@101..107 + Ident@101..102 "K" + TypeBoundList@102..107 + Colon@102..103 ":" + WhiteSpace@103..104 " " + TypeBound@104..107 + Path@104..107 + PathSegment@104..107 + Ident@104..107 "Sub" + Gt@107..108 ">" + WhiteSpace@108..110 " " + Newline@110..111 "\n" + WhiteSpace@111..115 " " + WhereClause@115..129 + WhereKw@115..120 "where" + WhiteSpace@120..121 " " + WherePredicate@121..129 + PathType@121..122 + Path@121..122 + PathSegment@121..122 + Ident@121..122 "T" + TypeBoundList@122..127 + Colon@122..123 ":" + WhiteSpace@123..124 " " + TypeBound@124..127 + Path@124..127 + PathSegment@124..127 + Ident@124..127 "Add" + WhiteSpace@127..128 " " + Newline@128..129 "\n" + TraitItemList@129..133 + LBrace@129..130 "{" + Newline@130..132 "\n\n" + RBrace@132..133 "}" diff --git a/crates/parser2/test_files/error_recovery/items/type_.snap b/crates/parser2/test_files/error_recovery/items/type_.snap index 47dfa1c535..c6e00fea09 100644 --- a/crates/parser2/test_files/error_recovery/items/type_.snap +++ b/crates/parser2/test_files/error_recovery/items/type_.snap @@ -5,39 +5,40 @@ input_file: crates/parser2/test_files/error_recovery/items/type_.fe --- Root@0..29 ItemList@0..29 - TypeAlias@0..29 - TypeKw@0..4 "type" - WhiteSpace@4..5 " " - Ident@5..11 "Result" - GenericParamList@11..15 - Lt@11..12 "<" - TypeGenericParam@12..13 - Ident@12..13 "T" - Comma@13..14 "," - WhiteSpace@14..15 " " - TypeGenericParam@15..15 + Item@0..29 + TypeAlias@0..29 + TypeKw@0..4 "type" + WhiteSpace@4..5 " " + Ident@5..11 "Result" + GenericParamList@11..15 + Lt@11..12 "<" + TypeGenericParam@12..13 + Ident@12..13 "T" + Comma@13..14 "," + WhiteSpace@14..15 " " + TypeGenericParam@15..15 + Error@15..15 + Error@15..15 Error@15..15 - Error@15..15 - Error@15..15 - Eq@15..16 "=" - WhiteSpace@16..17 " " - PathType@17..29 - Path@17..23 - PathSegment@17..23 - Ident@17..23 "Result" - GenericArgList@23..29 - Lt@23..24 "<" - TypeGenericArg@24..25 - PathType@24..25 - Path@24..25 - PathSegment@24..25 - Ident@24..25 "T" - Comma@25..26 "," - WhiteSpace@26..27 " " - TypeGenericArg@27..28 - PathType@27..28 - Path@27..28 - PathSegment@27..28 - Ident@27..28 "E" - Gt@28..29 ">" + Eq@15..16 "=" + WhiteSpace@16..17 " " + PathType@17..29 + Path@17..23 + PathSegment@17..23 + Ident@17..23 "Result" + GenericArgList@23..29 + Lt@23..24 "<" + TypeGenericArg@24..25 + PathType@24..25 + Path@24..25 + PathSegment@24..25 + Ident@24..25 "T" + Comma@25..26 "," + WhiteSpace@26..27 " " + TypeGenericArg@27..28 + PathType@27..28 + Path@27..28 + PathSegment@27..28 + Ident@27..28 "E" + Gt@28..29 ">" diff --git 
a/crates/parser2/test_files/error_recovery/items/use_.snap b/crates/parser2/test_files/error_recovery/items/use_.snap index 5c3a51ab9b..e9f2c8d85a 100644 --- a/crates/parser2/test_files/error_recovery/items/use_.snap +++ b/crates/parser2/test_files/error_recovery/items/use_.snap @@ -5,62 +5,65 @@ input_file: crates/parser2/test_files/error_recovery/items/use_.fe --- Root@0..63 ItemList@0..63 - Use@0..18 - UseKw@0..3 "use" - WhiteSpace@3..4 " " - UseTree@4..18 - UsePath@4..18 - UsePathSegment@4..7 - Ident@4..7 "foo" - Colon2@7..9 "::" - UsePathSegment@9..12 - Ident@9..12 "bar" - Colon2@12..14 "::" - UsePathSegment@14..15 - Star@14..15 "*" - Error@15..15 - Colon2@15..17 "::" - UsePathSegment@17..18 - Ident@17..18 "A" - Newline@18..19 "\n" - Use@19..42 - UseKw@19..22 "use" - WhiteSpace@22..23 " " - UseTree@23..42 - UsePath@23..34 - UsePathSegment@23..26 - Ident@23..26 "foo" - Colon2@26..28 "::" - UsePathSegment@28..31 - Ident@28..31 "bar" - Colon2@31..33 "::" - UsePathSegment@33..34 - Star@33..34 "*" - Colon2@34..36 "::" - Error@36..42 - LBrace@36..37 "{" - Ident@37..38 "A" - Comma@38..39 "," - WhiteSpace@39..40 " " - Ident@40..41 "B" - RBrace@41..42 "}" - Newline@42..43 "\n" - Use@43..63 - UseKw@43..46 "use" - WhiteSpace@46..47 " " - UseTree@47..63 - UsePath@47..58 - UsePathSegment@47..50 - Ident@47..50 "foo" - Colon2@50..52 "::" - UsePathSegment@52..55 - Ident@52..55 "bar" - Colon2@55..57 "::" - UsePathSegment@57..58 - Star@57..58 "*" - WhiteSpace@58..59 " " - Error@59..63 - AsKw@59..61 "as" - WhiteSpace@61..62 " " - Ident@62..63 "B" + Item@0..19 + Use@0..18 + UseKw@0..3 "use" + WhiteSpace@3..4 " " + UseTree@4..18 + UsePath@4..18 + UsePathSegment@4..7 + Ident@4..7 "foo" + Colon2@7..9 "::" + UsePathSegment@9..12 + Ident@9..12 "bar" + Colon2@12..14 "::" + UsePathSegment@14..15 + Star@14..15 "*" + Error@15..15 + Colon2@15..17 "::" + UsePathSegment@17..18 + Ident@17..18 "A" + Newline@18..19 "\n" + Item@19..43 + Use@19..42 + UseKw@19..22 "use" + WhiteSpace@22..23 " " + UseTree@23..42 + UsePath@23..34 + UsePathSegment@23..26 + Ident@23..26 "foo" + Colon2@26..28 "::" + UsePathSegment@28..31 + Ident@28..31 "bar" + Colon2@31..33 "::" + UsePathSegment@33..34 + Star@33..34 "*" + Colon2@34..36 "::" + Error@36..42 + LBrace@36..37 "{" + Ident@37..38 "A" + Comma@38..39 "," + WhiteSpace@39..40 " " + Ident@40..41 "B" + RBrace@41..42 "}" + Newline@42..43 "\n" + Item@43..63 + Use@43..63 + UseKw@43..46 "use" + WhiteSpace@46..47 " " + UseTree@47..63 + UsePath@47..58 + UsePathSegment@47..50 + Ident@47..50 "foo" + Colon2@50..52 "::" + UsePathSegment@52..55 + Ident@52..55 "bar" + Colon2@55..57 "::" + UsePathSegment@57..58 + Star@57..58 "*" + WhiteSpace@58..59 " " + Error@59..63 + AsKw@59..61 "as" + WhiteSpace@61..62 " " + Ident@62..63 "B" diff --git a/crates/parser2/test_files/syntax_node/exprs/block.fe b/crates/parser2/test_files/syntax_node/exprs/block.fe new file mode 100644 index 0000000000..bd6c0aed13 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/block.fe @@ -0,0 +1,7 @@ +{ + use super::Foo + struct Foo {} + fn foo() {} + + let x = 1 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/block.snap b/crates/parser2/test_files/syntax_node/exprs/block.snap new file mode 100644 index 0000000000..b19de81ef4 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/block.snap @@ -0,0 +1,66 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/block.fe +--- +Root@0..75 + BlockExpr@0..75 
+ LBrace@0..1 "{" + Newline@1..2 "\n" + WhiteSpace@2..6 " " + Item@6..21 + Use@6..20 + UseKw@6..9 "use" + WhiteSpace@9..10 " " + UseTree@10..20 + UsePath@10..20 + UsePathSegment@10..15 + SuperKw@10..15 "super" + Colon2@15..17 "::" + UsePathSegment@17..20 + Ident@17..20 "Foo" + Newline@20..21 "\n" + WhiteSpace@21..25 " " + Item@25..39 + Struct@25..38 + StructKw@25..31 "struct" + WhiteSpace@31..32 " " + Ident@32..35 "Foo" + WhiteSpace@35..36 " " + RecordFieldDefList@36..38 + LBrace@36..37 "{" + RBrace@37..38 "}" + Newline@38..39 "\n" + WhiteSpace@39..43 " " + Item@43..55 + Func@43..54 + FnKw@43..45 "fn" + WhiteSpace@45..46 " " + Ident@46..49 "foo" + FuncParamList@49..51 + LParen@49..50 "(" + RParen@50..51 ")" + WhiteSpace@51..52 " " + BlockExpr@52..54 + LBrace@52..53 "{" + RBrace@53..54 "}" + Newline@54..55 "\n" + WhiteSpace@55..59 " " + Newline@59..60 "\n" + WhiteSpace@60..64 " " + LetStmt@64..73 + LetKw@64..67 "let" + WhiteSpace@67..68 " " + PathPat@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "x" + WhiteSpace@69..70 " " + Eq@70..71 "=" + WhiteSpace@71..72 " " + LitExpr@72..73 + Lit@72..73 + Int@72..73 "1" + Newline@73..74 "\n" + RBrace@74..75 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/const.snap b/crates/parser2/test_files/syntax_node/items/const.snap index 02f1b689c2..8808db0b79 100644 --- a/crates/parser2/test_files/syntax_node/items/const.snap +++ b/crates/parser2/test_files/syntax_node/items/const.snap @@ -5,137 +5,139 @@ input_file: crates/parser2/test_files/syntax_node/items/const.fe --- Root@0..160 ItemList@0..160 - Const@0..22 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - ConstKw@4..9 "const" - WhiteSpace@9..10 " " - Ident@10..13 "FOO" - Colon@13..14 ":" - WhiteSpace@14..15 " " - PathType@15..18 - Path@15..18 - PathSegment@15..18 - Ident@15..18 "i32" - WhiteSpace@18..19 " " - Eq@19..20 "=" - WhiteSpace@20..21 " " - LitExpr@21..22 - Lit@21..22 - Int@21..22 "1" - Newline@22..24 "\n\n" - Const@24..159 - ConstKw@24..29 "const" - WhiteSpace@29..30 " " - Ident@30..33 "BAR" - Colon@33..34 ":" - WhiteSpace@34..35 " " - PathType@35..39 - Path@35..39 - PathSegment@35..39 - Ident@35..39 "u256" - WhiteSpace@39..40 " " - Eq@40..41 "=" - WhiteSpace@41..42 " " - BlockExpr@42..159 - LBrace@42..43 "{" - Newline@43..44 "\n" - WhiteSpace@44..48 " " - LetStmt@48..60 - LetKw@48..51 "let" - WhiteSpace@51..52 " " - PathPat@52..53 - Path@52..53 - PathSegment@52..53 - Ident@52..53 "b" - WhiteSpace@53..54 " " - Eq@54..55 "=" - WhiteSpace@55..56 " " - LitExpr@56..60 - Lit@56..60 - TrueKw@56..60 "true" - Newline@60..61 "\n" - WhiteSpace@61..65 " " - LetStmt@65..74 - LetKw@65..68 "let" - WhiteSpace@68..69 " " - PathPat@69..70 - Path@69..70 - PathSegment@69..70 - Ident@69..70 "x" - WhiteSpace@70..71 " " - Eq@71..72 "=" - WhiteSpace@72..73 " " - LitExpr@73..74 - Lit@73..74 - Int@73..74 "1" - Newline@74..75 "\n" - WhiteSpace@75..79 " " - ExprStmt@79..157 - IfExpr@79..157 - IfKw@79..81 "if" - WhiteSpace@81..82 " " - PathExpr@82..83 - Path@82..83 - PathSegment@82..83 - Ident@82..83 "b" - WhiteSpace@83..84 " " - BlockExpr@84..101 - LBrace@84..85 "{" - Newline@85..86 "\n" - WhiteSpace@86..94 " " - ExprStmt@94..95 - LitExpr@94..95 - Lit@94..95 - Int@94..95 "1" - Newline@95..96 "\n" - WhiteSpace@96..100 " " - RBrace@100..101 "}" - WhiteSpace@101..102 " " - ElseKw@102..106 "else" - WhiteSpace@106..107 " " - IfExpr@107..157 - IfKw@107..109 "if" - WhiteSpace@109..110 " " - BinExpr@110..116 - PathExpr@110..111 - Path@110..111 - PathSegment@110..111 - Ident@110..111 "x" - 
WhiteSpace@111..112 " " - Eq2@112..114 "==" - WhiteSpace@114..115 " " - LitExpr@115..116 - Lit@115..116 - Int@115..116 "1" - WhiteSpace@116..117 " " - BlockExpr@117..134 - LBrace@117..118 "{" - Newline@118..119 "\n" - WhiteSpace@119..127 " " - ExprStmt@127..128 - LitExpr@127..128 - Lit@127..128 - Int@127..128 "2" - Newline@128..129 "\n" - WhiteSpace@129..133 " " - RBrace@133..134 "}" - WhiteSpace@134..135 " " - ElseKw@135..139 "else" - WhiteSpace@139..140 " " - BlockExpr@140..157 - LBrace@140..141 "{" - Newline@141..142 "\n" - WhiteSpace@142..150 " " - ExprStmt@150..151 - LitExpr@150..151 - Lit@150..151 - Int@150..151 "3" - Newline@151..152 "\n" - WhiteSpace@152..156 " " - RBrace@156..157 "}" - Newline@157..158 "\n" - RBrace@158..159 "}" - Newline@159..160 "\n" + Item@0..24 + Const@0..22 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + ConstKw@4..9 "const" + WhiteSpace@9..10 " " + Ident@10..13 "FOO" + Colon@13..14 ":" + WhiteSpace@14..15 " " + PathType@15..18 + Path@15..18 + PathSegment@15..18 + Ident@15..18 "i32" + WhiteSpace@18..19 " " + Eq@19..20 "=" + WhiteSpace@20..21 " " + LitExpr@21..22 + Lit@21..22 + Int@21..22 "1" + Newline@22..24 "\n\n" + Item@24..160 + Const@24..159 + ConstKw@24..29 "const" + WhiteSpace@29..30 " " + Ident@30..33 "BAR" + Colon@33..34 ":" + WhiteSpace@34..35 " " + PathType@35..39 + Path@35..39 + PathSegment@35..39 + Ident@35..39 "u256" + WhiteSpace@39..40 " " + Eq@40..41 "=" + WhiteSpace@41..42 " " + BlockExpr@42..159 + LBrace@42..43 "{" + Newline@43..44 "\n" + WhiteSpace@44..48 " " + LetStmt@48..60 + LetKw@48..51 "let" + WhiteSpace@51..52 " " + PathPat@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "b" + WhiteSpace@53..54 " " + Eq@54..55 "=" + WhiteSpace@55..56 " " + LitExpr@56..60 + Lit@56..60 + TrueKw@56..60 "true" + Newline@60..61 "\n" + WhiteSpace@61..65 " " + LetStmt@65..74 + LetKw@65..68 "let" + WhiteSpace@68..69 " " + PathPat@69..70 + Path@69..70 + PathSegment@69..70 + Ident@69..70 "x" + WhiteSpace@70..71 " " + Eq@71..72 "=" + WhiteSpace@72..73 " " + LitExpr@73..74 + Lit@73..74 + Int@73..74 "1" + Newline@74..75 "\n" + WhiteSpace@75..79 " " + ExprStmt@79..157 + IfExpr@79..157 + IfKw@79..81 "if" + WhiteSpace@81..82 " " + PathExpr@82..83 + Path@82..83 + PathSegment@82..83 + Ident@82..83 "b" + WhiteSpace@83..84 " " + BlockExpr@84..101 + LBrace@84..85 "{" + Newline@85..86 "\n" + WhiteSpace@86..94 " " + ExprStmt@94..95 + LitExpr@94..95 + Lit@94..95 + Int@94..95 "1" + Newline@95..96 "\n" + WhiteSpace@96..100 " " + RBrace@100..101 "}" + WhiteSpace@101..102 " " + ElseKw@102..106 "else" + WhiteSpace@106..107 " " + IfExpr@107..157 + IfKw@107..109 "if" + WhiteSpace@109..110 " " + BinExpr@110..116 + PathExpr@110..111 + Path@110..111 + PathSegment@110..111 + Ident@110..111 "x" + WhiteSpace@111..112 " " + Eq2@112..114 "==" + WhiteSpace@114..115 " " + LitExpr@115..116 + Lit@115..116 + Int@115..116 "1" + WhiteSpace@116..117 " " + BlockExpr@117..134 + LBrace@117..118 "{" + Newline@118..119 "\n" + WhiteSpace@119..127 " " + ExprStmt@127..128 + LitExpr@127..128 + Lit@127..128 + Int@127..128 "2" + Newline@128..129 "\n" + WhiteSpace@129..133 " " + RBrace@133..134 "}" + WhiteSpace@134..135 " " + ElseKw@135..139 "else" + WhiteSpace@139..140 " " + BlockExpr@140..157 + LBrace@140..141 "{" + Newline@141..142 "\n" + WhiteSpace@142..150 " " + ExprStmt@150..151 + LitExpr@150..151 + Lit@150..151 + Int@150..151 "3" + Newline@151..152 "\n" + WhiteSpace@152..156 " " + RBrace@156..157 "}" + Newline@157..158 "\n" + RBrace@158..159 "}" + Newline@159..160 "\n" diff --git 
a/crates/parser2/test_files/syntax_node/items/contract.snap b/crates/parser2/test_files/syntax_node/items/contract.snap index e214038046..61421f5eb6 100644 --- a/crates/parser2/test_files/syntax_node/items/contract.snap +++ b/crates/parser2/test_files/syntax_node/items/contract.snap @@ -1,61 +1,64 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/items/contract.fe --- Root@0..87 ItemList@0..87 - Contract@0..17 - ContractKw@0..8 "contract" - WhiteSpace@8..9 " " - Ident@9..14 "Empty" - WhiteSpace@14..15 " " - RecordFieldDefList@15..17 - LBrace@15..16 "{" - RBrace@16..17 "}" - Newline@17..19 "\n\n" - Contract@19..87 - ItemModifier@19..22 - PubKw@19..22 "pub" - WhiteSpace@22..23 " " - ContractKw@23..31 "contract" - WhiteSpace@31..32 " " - Ident@32..33 "C" - WhiteSpace@33..34 " " - RecordFieldDefList@34..87 - LBrace@34..35 "{" - Newline@35..36 "\n" - WhiteSpace@36..40 " " - RecordFieldDef@40..46 - Ident@40..41 "x" - Colon@41..42 ":" - WhiteSpace@42..43 " " - PathType@43..46 - Path@43..46 - PathSegment@43..46 - Ident@43..46 "i32" - Newline@46..47 "\n" - WhiteSpace@47..51 " " - RecordFieldDef@51..58 - Ident@51..52 "y" - Colon@52..53 ":" - WhiteSpace@53..54 " " - PathType@54..58 - Path@54..58 - PathSegment@54..58 - Ident@54..58 "u256" - Newline@58..59 "\n" - WhiteSpace@59..63 " " - RecordFieldDef@63..85 - Ident@63..64 "z" - Colon@64..65 ":" - WhiteSpace@65..66 " " - PathType@66..85 - Path@66..85 - PathSegment@66..74 - Ident@66..74 "MyStruct" - Colon2@74..76 "::" - PathSegment@76..85 - Ident@76..85 "Encodable" - Newline@85..86 "\n" - RBrace@86..87 "}" + Item@0..19 + Contract@0..17 + ContractKw@0..8 "contract" + WhiteSpace@8..9 " " + Ident@9..14 "Empty" + WhiteSpace@14..15 " " + RecordFieldDefList@15..17 + LBrace@15..16 "{" + RBrace@16..17 "}" + Newline@17..19 "\n\n" + Item@19..87 + Contract@19..87 + ItemModifier@19..22 + PubKw@19..22 "pub" + WhiteSpace@22..23 " " + ContractKw@23..31 "contract" + WhiteSpace@31..32 " " + Ident@32..33 "C" + WhiteSpace@33..34 " " + RecordFieldDefList@34..87 + LBrace@34..35 "{" + Newline@35..36 "\n" + WhiteSpace@36..40 " " + RecordFieldDef@40..46 + Ident@40..41 "x" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "i32" + Newline@46..47 "\n" + WhiteSpace@47..51 " " + RecordFieldDef@51..58 + Ident@51..52 "y" + Colon@52..53 ":" + WhiteSpace@53..54 " " + PathType@54..58 + Path@54..58 + PathSegment@54..58 + Ident@54..58 "u256" + Newline@58..59 "\n" + WhiteSpace@59..63 " " + RecordFieldDef@63..85 + Ident@63..64 "z" + Colon@64..65 ":" + WhiteSpace@65..66 " " + PathType@66..85 + Path@66..85 + PathSegment@66..74 + Ident@66..74 "MyStruct" + Colon2@74..76 "::" + PathSegment@76..85 + Ident@76..85 "Encodable" + Newline@85..86 "\n" + RBrace@86..87 "}" diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 071ab01ac8..ff5101ebf2 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -5,189 +5,193 @@ input_file: crates/parser2/test_files/syntax_node/items/enums.fe --- Root@0..220 ItemList@0..220 - Enum@0..13 - EnumKw@0..4 "enum" - WhiteSpace@4..5 " " - Ident@5..10 "Empty" - WhiteSpace@10..11 " " - VariantDefList@11..13 - LBrace@11..12 "{" - RBrace@12..13 "}" - Newline@13..15 "\n\n" - Enum@15..56 - EnumKw@15..19 "enum" - WhiteSpace@19..20 " " - Ident@20..25 "Basic" - WhiteSpace@25..26 
" " - VariantDefList@26..56 - LBrace@26..27 "{" - Newline@27..28 "\n" - WhiteSpace@28..32 " " - VariantDef@32..36 - Ident@32..36 "Unit" - Newline@36..37 "\n" - WhiteSpace@37..41 " " - VariantDef@41..54 - Ident@41..44 "Tup" - TupleType@44..54 - LParen@44..45 "(" - PathType@45..48 - Path@45..48 - PathSegment@45..48 - Ident@45..48 "i32" - Comma@48..49 "," - WhiteSpace@49..50 " " - PathType@50..53 - Path@50..53 - PathSegment@50..53 - Ident@50..53 "u32" - RParen@53..54 ")" - Newline@54..55 "\n" - RBrace@55..56 "}" - Newline@56..58 "\n\n" - Enum@58..117 - EnumKw@58..62 "enum" - WhiteSpace@62..63 " " - Ident@63..69 "Option" - GenericParamList@69..72 - Lt@69..70 "<" - TypeGenericParam@70..71 - Ident@70..71 "T" - Gt@71..72 ">" - WhiteSpace@72..73 " " - Newline@73..74 "\n" - WhiteSpace@74..78 " " - WhereClause@78..93 - WhereKw@78..83 "where" - WhiteSpace@83..84 " " - WherePredicate@84..93 - PathType@84..85 - Path@84..85 - PathSegment@84..85 - Ident@84..85 "T" - TypeBoundList@85..92 - Colon@85..86 ":" - WhiteSpace@86..87 " " - TypeBound@87..92 - Path@87..92 - PathSegment@87..92 - Ident@87..92 "Clone" - Newline@92..93 "\n" - VariantDefList@93..117 - LBrace@93..94 "{" - Newline@94..95 "\n" - WhiteSpace@95..99 " " - VariantDef@99..106 - Ident@99..103 "Some" - TupleType@103..106 - LParen@103..104 "(" - PathType@104..105 - Path@104..105 - PathSegment@104..105 - Ident@104..105 "T" - RParen@105..106 ")" - Newline@106..107 "\n" - WhiteSpace@107..111 " " - VariantDef@111..115 - Ident@111..115 "None" - Newline@115..116 "\n" - RBrace@116..117 "}" - Newline@117..119 "\n\n" - Enum@119..220 - EnumKw@119..123 "enum" - WhiteSpace@123..124 " " - Ident@124..133 "BoundEnum" - GenericParamList@133..162 - Lt@133..134 "<" - TypeGenericParam@134..146 - Ident@134..135 "T" - TypeBoundList@135..146 - Colon@135..136 ":" - WhiteSpace@136..137 " " - TypeBound@137..140 - Path@137..140 - PathSegment@137..140 - Ident@137..140 "Add" - WhiteSpace@140..141 " " - Plus@141..142 "+" - WhiteSpace@142..143 " " - TypeBound@143..146 - Path@143..146 - PathSegment@143..146 - Ident@143..146 "Mul" - WhiteSpace@146..147 " " - Comma@147..148 "," - WhiteSpace@148..149 " " - TypeGenericParam@149..161 - Ident@149..150 "U" - TypeBoundList@150..161 - Colon@150..151 ":" - WhiteSpace@151..152 " " - TypeBound@152..155 - Path@152..155 - PathSegment@152..155 - Ident@152..155 "Sub" - WhiteSpace@155..156 " " - Plus@156..157 "+" - WhiteSpace@157..158 " " - TypeBound@158..161 - Path@158..161 - PathSegment@158..161 - Ident@158..161 "Div" - Gt@161..162 ">" - WhiteSpace@162..163 " " - Newline@163..164 "\n" - WhereClause@164..189 - WhereKw@164..169 "where" - WhiteSpace@169..170 " " - WherePredicate@170..189 - PathType@170..181 - Path@170..178 - PathSegment@170..173 - Ident@170..173 "Foo" - Colon2@173..175 "::" - PathSegment@175..178 - Ident@175..178 "Bar" - GenericArgList@178..181 - Lt@178..179 "<" - TypeGenericArg@179..180 - PathType@179..180 - Path@179..180 - PathSegment@179..180 - Ident@179..180 "T" - Gt@180..181 ">" - TypeBoundList@181..188 - Colon@181..182 ":" - WhiteSpace@182..183 " " - TypeBound@183..188 - Path@183..188 - PathSegment@183..188 - Ident@183..188 "Trait" - Newline@188..189 "\n" - VariantDefList@189..220 - LBrace@189..190 "{" - Newline@190..191 "\n" - WhiteSpace@191..195 " " - VariantDef@195..204 - Ident@195..201 "AddMul" - TupleType@201..204 - LParen@201..202 "(" - PathType@202..203 - Path@202..203 - PathSegment@202..203 - Ident@202..203 "T" - RParen@203..204 ")" - Newline@204..205 "\n" - WhiteSpace@205..209 " " - VariantDef@209..218 - 
Ident@209..215 "SubDiv" - TupleType@215..218 - LParen@215..216 "(" - PathType@216..217 - Path@216..217 - PathSegment@216..217 - Ident@216..217 "U" - RParen@217..218 ")" - Newline@218..219 "\n" - RBrace@219..220 "}" + Item@0..15 + Enum@0..13 + EnumKw@0..4 "enum" + WhiteSpace@4..5 " " + Ident@5..10 "Empty" + WhiteSpace@10..11 " " + VariantDefList@11..13 + LBrace@11..12 "{" + RBrace@12..13 "}" + Newline@13..15 "\n\n" + Item@15..58 + Enum@15..56 + EnumKw@15..19 "enum" + WhiteSpace@19..20 " " + Ident@20..25 "Basic" + WhiteSpace@25..26 " " + VariantDefList@26..56 + LBrace@26..27 "{" + Newline@27..28 "\n" + WhiteSpace@28..32 " " + VariantDef@32..36 + Ident@32..36 "Unit" + Newline@36..37 "\n" + WhiteSpace@37..41 " " + VariantDef@41..54 + Ident@41..44 "Tup" + TupleType@44..54 + LParen@44..45 "(" + PathType@45..48 + Path@45..48 + PathSegment@45..48 + Ident@45..48 "i32" + Comma@48..49 "," + WhiteSpace@49..50 " " + PathType@50..53 + Path@50..53 + PathSegment@50..53 + Ident@50..53 "u32" + RParen@53..54 ")" + Newline@54..55 "\n" + RBrace@55..56 "}" + Newline@56..58 "\n\n" + Item@58..119 + Enum@58..117 + EnumKw@58..62 "enum" + WhiteSpace@62..63 " " + Ident@63..69 "Option" + GenericParamList@69..72 + Lt@69..70 "<" + TypeGenericParam@70..71 + Ident@70..71 "T" + Gt@71..72 ">" + WhiteSpace@72..73 " " + Newline@73..74 "\n" + WhiteSpace@74..78 " " + WhereClause@78..93 + WhereKw@78..83 "where" + WhiteSpace@83..84 " " + WherePredicate@84..93 + PathType@84..85 + Path@84..85 + PathSegment@84..85 + Ident@84..85 "T" + TypeBoundList@85..92 + Colon@85..86 ":" + WhiteSpace@86..87 " " + TypeBound@87..92 + Path@87..92 + PathSegment@87..92 + Ident@87..92 "Clone" + Newline@92..93 "\n" + VariantDefList@93..117 + LBrace@93..94 "{" + Newline@94..95 "\n" + WhiteSpace@95..99 " " + VariantDef@99..106 + Ident@99..103 "Some" + TupleType@103..106 + LParen@103..104 "(" + PathType@104..105 + Path@104..105 + PathSegment@104..105 + Ident@104..105 "T" + RParen@105..106 ")" + Newline@106..107 "\n" + WhiteSpace@107..111 " " + VariantDef@111..115 + Ident@111..115 "None" + Newline@115..116 "\n" + RBrace@116..117 "}" + Newline@117..119 "\n\n" + Item@119..220 + Enum@119..220 + EnumKw@119..123 "enum" + WhiteSpace@123..124 " " + Ident@124..133 "BoundEnum" + GenericParamList@133..162 + Lt@133..134 "<" + TypeGenericParam@134..146 + Ident@134..135 "T" + TypeBoundList@135..146 + Colon@135..136 ":" + WhiteSpace@136..137 " " + TypeBound@137..140 + Path@137..140 + PathSegment@137..140 + Ident@137..140 "Add" + WhiteSpace@140..141 " " + Plus@141..142 "+" + WhiteSpace@142..143 " " + TypeBound@143..146 + Path@143..146 + PathSegment@143..146 + Ident@143..146 "Mul" + WhiteSpace@146..147 " " + Comma@147..148 "," + WhiteSpace@148..149 " " + TypeGenericParam@149..161 + Ident@149..150 "U" + TypeBoundList@150..161 + Colon@150..151 ":" + WhiteSpace@151..152 " " + TypeBound@152..155 + Path@152..155 + PathSegment@152..155 + Ident@152..155 "Sub" + WhiteSpace@155..156 " " + Plus@156..157 "+" + WhiteSpace@157..158 " " + TypeBound@158..161 + Path@158..161 + PathSegment@158..161 + Ident@158..161 "Div" + Gt@161..162 ">" + WhiteSpace@162..163 " " + Newline@163..164 "\n" + WhereClause@164..189 + WhereKw@164..169 "where" + WhiteSpace@169..170 " " + WherePredicate@170..189 + PathType@170..181 + Path@170..178 + PathSegment@170..173 + Ident@170..173 "Foo" + Colon2@173..175 "::" + PathSegment@175..178 + Ident@175..178 "Bar" + GenericArgList@178..181 + Lt@178..179 "<" + TypeGenericArg@179..180 + PathType@179..180 + Path@179..180 + PathSegment@179..180 + Ident@179..180 "T" + 
Gt@180..181 ">" + TypeBoundList@181..188 + Colon@181..182 ":" + WhiteSpace@182..183 " " + TypeBound@183..188 + Path@183..188 + PathSegment@183..188 + Ident@183..188 "Trait" + Newline@188..189 "\n" + VariantDefList@189..220 + LBrace@189..190 "{" + Newline@190..191 "\n" + WhiteSpace@191..195 " " + VariantDef@195..204 + Ident@195..201 "AddMul" + TupleType@201..204 + LParen@201..202 "(" + PathType@202..203 + Path@202..203 + PathSegment@202..203 + Ident@202..203 "T" + RParen@203..204 ")" + Newline@204..205 "\n" + WhiteSpace@205..209 " " + VariantDef@209..218 + Ident@209..215 "SubDiv" + TupleType@215..218 + LParen@215..216 "(" + PathType@216..217 + Path@216..217 + PathSegment@216..217 + Ident@216..217 "U" + RParen@217..218 ")" + Newline@218..219 "\n" + RBrace@219..220 "}" diff --git a/crates/parser2/test_files/syntax_node/items/extern.snap b/crates/parser2/test_files/syntax_node/items/extern.snap index fe27a22685..05a72a60d2 100644 --- a/crates/parser2/test_files/syntax_node/items/extern.snap +++ b/crates/parser2/test_files/syntax_node/items/extern.snap @@ -5,110 +5,112 @@ input_file: crates/parser2/test_files/syntax_node/items/extern.fe --- Root@0..146 ItemList@0..146 - Extern@0..11 - ExternKw@0..6 "extern" - WhiteSpace@6..7 " " - ExternItemList@7..11 - LBrace@7..8 "{" - Newline@8..10 "\n\n" - RBrace@10..11 "}" - Newline@11..13 "\n\n" - Extern@13..146 - ExternKw@13..19 "extern" - WhiteSpace@19..20 " " - ExternItemList@20..146 - LBrace@20..21 "{" - Newline@21..22 "\n" - WhiteSpace@22..26 " " - Func@26..76 - ItemModifier@26..36 - PubKw@26..29 "pub" - WhiteSpace@29..30 " " - UnsafeKw@30..36 "unsafe" - WhiteSpace@36..37 " " - FnKw@37..39 "fn" - WhiteSpace@39..40 " " - Ident@40..45 "write" - FuncParamList@45..68 - LParen@45..46 "(" - FnParam@46..55 - Ident@46..49 "loc" - Colon@49..50 ":" - WhiteSpace@50..51 " " - PtrType@51..55 - Star@51..52 "*" - PathType@52..55 - Path@52..55 - PathSegment@52..55 - Ident@52..55 "u32" - Comma@55..56 "," - WhiteSpace@56..57 " " - FnParam@57..67 - Ident@57..62 "value" - Colon@62..63 ":" - WhiteSpace@63..64 " " - PathType@64..67 - Path@64..67 - PathSegment@64..67 - Ident@64..67 "u32" - RParen@67..68 ")" - WhiteSpace@68..69 " " - Arrow@69..71 "->" - WhiteSpace@71..72 " " - PathType@72..76 - Path@72..76 - PathSegment@72..76 - Ident@72..76 "bool" - Newline@76..77 "\n" - WhiteSpace@77..81 " " - Func@81..131 - ItemModifier@81..91 - PubKw@81..84 "pub" - WhiteSpace@84..85 " " - UnsafeKw@85..91 "unsafe" - WhiteSpace@91..92 " " - FnKw@92..94 "fn" - WhiteSpace@94..95 " " - Ident@95..99 "read" - FuncParamList@99..122 - LParen@99..100 "(" - FnParam@100..109 - Ident@100..103 "loc" - Colon@103..104 ":" - WhiteSpace@104..105 " " - PtrType@105..109 - Star@105..106 "*" - PathType@106..109 - Path@106..109 - PathSegment@106..109 - Ident@106..109 "u32" - Comma@109..110 "," - WhiteSpace@110..111 " " - FnParam@111..121 - Ident@111..114 "len" - Colon@114..115 ":" - WhiteSpace@115..116 " " - PathType@116..121 - Path@116..121 - PathSegment@116..121 - Ident@116..121 "usize" - RParen@121..122 ")" - WhiteSpace@122..123 " " - Arrow@123..125 "->" - WhiteSpace@125..126 " " - PathType@126..131 - Path@126..131 - PathSegment@126..131 - Ident@126..131 "usize" - Newline@131..132 "\n" - WhiteSpace@132..136 " " - Func@136..144 - FnKw@136..138 "fn" - WhiteSpace@138..139 " " - Ident@139..142 "foo" - FuncParamList@142..144 - LParen@142..143 "(" - RParen@143..144 ")" - Newline@144..145 "\n" - RBrace@145..146 "}" + Item@0..13 + Extern@0..11 + ExternKw@0..6 "extern" + WhiteSpace@6..7 " " + ExternItemList@7..11 + 
LBrace@7..8 "{" + Newline@8..10 "\n\n" + RBrace@10..11 "}" + Newline@11..13 "\n\n" + Item@13..146 + Extern@13..146 + ExternKw@13..19 "extern" + WhiteSpace@19..20 " " + ExternItemList@20..146 + LBrace@20..21 "{" + Newline@21..22 "\n" + WhiteSpace@22..26 " " + Func@26..76 + ItemModifier@26..36 + PubKw@26..29 "pub" + WhiteSpace@29..30 " " + UnsafeKw@30..36 "unsafe" + WhiteSpace@36..37 " " + FnKw@37..39 "fn" + WhiteSpace@39..40 " " + Ident@40..45 "write" + FuncParamList@45..68 + LParen@45..46 "(" + FnParam@46..55 + Ident@46..49 "loc" + Colon@49..50 ":" + WhiteSpace@50..51 " " + PtrType@51..55 + Star@51..52 "*" + PathType@52..55 + Path@52..55 + PathSegment@52..55 + Ident@52..55 "u32" + Comma@55..56 "," + WhiteSpace@56..57 " " + FnParam@57..67 + Ident@57..62 "value" + Colon@62..63 ":" + WhiteSpace@63..64 " " + PathType@64..67 + Path@64..67 + PathSegment@64..67 + Ident@64..67 "u32" + RParen@67..68 ")" + WhiteSpace@68..69 " " + Arrow@69..71 "->" + WhiteSpace@71..72 " " + PathType@72..76 + Path@72..76 + PathSegment@72..76 + Ident@72..76 "bool" + Newline@76..77 "\n" + WhiteSpace@77..81 " " + Func@81..131 + ItemModifier@81..91 + PubKw@81..84 "pub" + WhiteSpace@84..85 " " + UnsafeKw@85..91 "unsafe" + WhiteSpace@91..92 " " + FnKw@92..94 "fn" + WhiteSpace@94..95 " " + Ident@95..99 "read" + FuncParamList@99..122 + LParen@99..100 "(" + FnParam@100..109 + Ident@100..103 "loc" + Colon@103..104 ":" + WhiteSpace@104..105 " " + PtrType@105..109 + Star@105..106 "*" + PathType@106..109 + Path@106..109 + PathSegment@106..109 + Ident@106..109 "u32" + Comma@109..110 "," + WhiteSpace@110..111 " " + FnParam@111..121 + Ident@111..114 "len" + Colon@114..115 ":" + WhiteSpace@115..116 " " + PathType@116..121 + Path@116..121 + PathSegment@116..121 + Ident@116..121 "usize" + RParen@121..122 ")" + WhiteSpace@122..123 " " + Arrow@123..125 "->" + WhiteSpace@125..126 " " + PathType@126..131 + Path@126..131 + PathSegment@126..131 + Ident@126..131 "usize" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + Func@136..144 + FnKw@136..138 "fn" + WhiteSpace@138..139 " " + Ident@139..142 "foo" + FuncParamList@142..144 + LParen@142..143 "(" + RParen@143..144 ")" + Newline@144..145 "\n" + RBrace@145..146 "}" diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index ab98a0d767..8d3658b37f 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -5,339 +5,344 @@ input_file: crates/parser2/test_files/syntax_node/items/func.fe --- Root@0..361 ItemList@0..361 - Func@0..30 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - FnKw@4..6 "fn" - WhiteSpace@6..7 " " - Ident@7..10 "foo" - FuncParamList@10..12 - LParen@10..11 "(" - RParen@11..12 ")" - WhiteSpace@12..13 " " - BlockExpr@13..30 - LBrace@13..14 "{" - Newline@14..15 "\n" - WhiteSpace@15..19 " " - LetStmt@19..28 - LetKw@19..22 "let" - WhiteSpace@22..23 " " - PathPat@23..24 - Path@23..24 - PathSegment@23..24 - Ident@23..24 "x" - WhiteSpace@24..25 " " - Eq@25..26 "=" - WhiteSpace@26..27 " " - LitExpr@27..28 - Lit@27..28 - Int@27..28 "1" - Newline@28..29 "\n" - RBrace@29..30 "}" - Newline@30..32 "\n\n" - Func@32..80 - FnKw@32..34 "fn" - WhiteSpace@34..35 " " - Ident@35..38 "bar" - FuncParamList@38..63 - LParen@38..39 "(" - FnParam@39..47 - Ident@39..42 "bar" - Colon@42..43 ":" - WhiteSpace@43..44 " " - PathType@44..47 - Path@44..47 - PathSegment@44..47 - Ident@44..47 "i32" - Comma@47..48 "," - WhiteSpace@48..49 " " - FnParam@49..62 - 
MutKw@49..52 "mut" - WhiteSpace@52..53 " " - Ident@53..56 "baz" - Colon@56..57 ":" - WhiteSpace@57..58 " " - PathType@58..62 - Path@58..62 - PathSegment@58..62 - Ident@58..62 "u256" - RParen@62..63 ")" - WhiteSpace@63..64 " " - Arrow@64..66 "->" - WhiteSpace@66..67 " " - PathType@67..70 - Path@67..70 - PathSegment@67..70 - Ident@67..70 "i32" - WhiteSpace@70..71 " " - BlockExpr@71..80 - LBrace@71..72 "{" - Newline@72..73 "\n" - WhiteSpace@73..77 " " - ExprStmt@77..78 - LitExpr@77..78 - Lit@77..78 - Int@77..78 "1" - Newline@78..79 "\n" - RBrace@79..80 "}" - Newline@80..82 "\n\n" - Func@82..178 - FnKw@82..84 "fn" - WhiteSpace@84..85 " " - Ident@85..88 "baz" - FuncParamList@88..161 - LParen@88..89 "(" - FnParam@89..109 - Ident@89..93 "from" - WhiteSpace@93..94 " " - Ident@94..100 "sender" - Colon@100..101 ":" - WhiteSpace@101..102 " " - PathType@102..109 - Path@102..109 - PathSegment@102..109 - Ident@102..109 "address" - Comma@109..110 "," - WhiteSpace@110..111 " " - FnParam@111..136 - MutKw@111..114 "mut" - WhiteSpace@114..115 " " - Ident@115..117 "to" - WhiteSpace@117..118 " " - Ident@118..127 "recipient" - Colon@127..128 ":" - WhiteSpace@128..129 " " - PathType@129..136 - Path@129..136 - PathSegment@129..136 - Ident@129..136 "address" - Comma@136..137 "," - WhiteSpace@137..138 " " - FnParam@138..149 - Underscore@138..139 "_" - WhiteSpace@139..140 " " - Ident@140..143 "val" - Colon@143..144 ":" - WhiteSpace@144..145 " " - PathType@145..149 - Path@145..149 - PathSegment@145..149 - Ident@145..149 "u256" - Comma@149..150 "," - WhiteSpace@150..151 " " - FnParam@151..160 - Underscore@151..152 "_" - WhiteSpace@152..153 " " - Underscore@153..154 "_" - Colon@154..155 ":" - WhiteSpace@155..156 " " - PathType@156..160 - Path@156..160 - PathSegment@156..160 - Ident@156..160 "u256" - RParen@160..161 ")" - WhiteSpace@161..162 " " - Arrow@162..164 "->" - WhiteSpace@164..165 " " - PathType@165..168 - Path@165..168 - PathSegment@165..168 - Ident@165..168 "i32" - WhiteSpace@168..169 " " - BlockExpr@169..178 - LBrace@169..170 "{" - Newline@170..171 "\n" - WhiteSpace@171..175 " " - ExprStmt@175..176 - LitExpr@175..176 - Lit@175..176 - Int@175..176 "1" - Newline@176..177 "\n" - RBrace@177..178 "}" - Newline@178..180 "\n\n" - Func@180..306 - FnKw@180..182 "fn" - WhiteSpace@182..183 " " - Ident@183..192 "generics1" - GenericParamList@192..205 - Lt@192..193 "<" - TypeGenericParam@193..201 - Ident@193..194 "T" - TypeBoundList@194..201 - Colon@194..195 ":" - WhiteSpace@195..196 " " - TypeBound@196..201 - Path@196..201 - PathSegment@196..201 - Ident@196..201 "Trait" - Comma@201..202 "," - WhiteSpace@202..203 " " - TypeGenericParam@203..204 - Ident@203..204 "U" - Gt@204..205 ">" - FuncParamList@205..225 - LParen@205..206 "(" - FnParam@206..210 - Ident@206..207 "t" - Colon@207..208 ":" - WhiteSpace@208..209 " " - PathType@209..210 - Path@209..210 - PathSegment@209..210 - Ident@209..210 "T" - Comma@210..211 "," - WhiteSpace@211..212 " " - FnParam@212..224 - Ident@212..213 "u" - Colon@213..214 ":" - WhiteSpace@214..215 " " - PathType@215..224 - Path@215..221 - PathSegment@215..221 - Ident@215..221 "Option" - GenericArgList@221..224 - Lt@221..222 "<" - TypeGenericArg@222..223 - PathType@222..223 - Path@222..223 - PathSegment@222..223 - Ident@222..223 "U" - Gt@223..224 ">" - RParen@224..225 ")" - WhiteSpace@225..226 " " - Arrow@226..228 "->" - WhiteSpace@228..229 " " - PathType@229..230 - Path@229..230 - PathSegment@229..230 - Ident@229..230 "T" - Newline@230..231 "\n" - WhiteSpace@231..235 " " - WhereClause@235..286 - 
WhereKw@235..240 "where" - WhiteSpace@240..241 " " - WherePredicate@241..259 - PathType@241..250 - Path@241..247 - PathSegment@241..247 - Ident@241..247 "Result" - GenericArgList@247..250 - Lt@247..248 "<" - TypeGenericArg@248..249 - PathType@248..249 - Path@248..249 - PathSegment@248..249 - Ident@248..249 "T" - Gt@249..250 ">" - TypeBoundList@250..257 - Colon@250..251 ":" - WhiteSpace@251..252 " " - TypeBound@252..257 - Path@252..257 - PathSegment@252..257 - Ident@252..257 "Trait" - WhiteSpace@257..258 " " - Newline@258..259 "\n" - WhiteSpace@259..269 " " - WherePredicate@269..286 - PathType@269..278 - Path@269..275 - PathSegment@269..275 - Ident@269..275 "Option" - GenericArgList@275..278 - Lt@275..276 "<" - TypeGenericArg@276..277 - PathType@276..277 - Path@276..277 - PathSegment@276..277 - Ident@276..277 "U" - Gt@277..278 ">" - TypeBoundList@278..285 - Colon@278..279 ":" - WhiteSpace@279..280 " " - TypeBound@280..285 - Path@280..285 - PathSegment@280..285 - Ident@280..285 "Clone" - Newline@285..286 "\n" - WhiteSpace@286..296 " " - Newline@296..297 "\n" - BlockExpr@297..306 - LBrace@297..298 "{" - Newline@298..299 "\n" - WhiteSpace@299..303 " " - ExprStmt@303..304 - PathExpr@303..304 - Path@303..304 - PathSegment@303..304 - Ident@303..304 "t" - Newline@304..305 "\n" - RBrace@305..306 "}" - Newline@306..308 "\n\n" - Func@308..361 - FnKw@308..310 "fn" - WhiteSpace@310..311 " " - Ident@311..315 "decl" - GenericParamList@315..321 - Lt@315..316 "<" - TypeGenericParam@316..317 - Ident@316..317 "T" - Comma@317..318 "," - WhiteSpace@318..319 " " - TypeGenericParam@319..320 - Ident@319..320 "U" - Gt@320..321 ">" - FuncParamList@321..340 - LParen@321..322 "(" - FnParam@322..339 - Ident@322..323 "t" - Colon@323..324 ":" - WhiteSpace@324..325 " " - PathType@325..339 - Path@325..333 - PathSegment@325..333 - Ident@325..333 "MyStruct" - GenericArgList@333..339 - Lt@333..334 "<" - TypeGenericArg@334..335 - PathType@334..335 - Path@334..335 - PathSegment@334..335 - Ident@334..335 "T" - Comma@335..336 "," - WhiteSpace@336..337 " " - TypeGenericArg@337..338 - PathType@337..338 - Path@337..338 - PathSegment@337..338 - Ident@337..338 "U" - Gt@338..339 ">" - RParen@339..340 ")" - WhiteSpace@340..341 " " - Arrow@341..343 "->" - WhiteSpace@343..344 " " - PathType@344..358 - Path@344..350 - PathSegment@344..350 - Ident@344..350 "Result" - GenericArgList@350..358 - Lt@350..351 "<" - TypeGenericArg@351..352 - PathType@351..352 - Path@351..352 - PathSegment@351..352 - Ident@351..352 "T" - Comma@352..353 "," - WhiteSpace@353..354 " " - TypeGenericArg@354..357 - PathType@354..357 - Path@354..357 - PathSegment@354..357 - Ident@354..357 "Err" - Gt@357..358 ">" - WhiteSpace@358..359 " " - BlockExpr@359..361 - LBrace@359..360 "{" - RBrace@360..361 "}" + Item@0..32 + Func@0..30 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + FnKw@4..6 "fn" + WhiteSpace@6..7 " " + Ident@7..10 "foo" + FuncParamList@10..12 + LParen@10..11 "(" + RParen@11..12 ")" + WhiteSpace@12..13 " " + BlockExpr@13..30 + LBrace@13..14 "{" + Newline@14..15 "\n" + WhiteSpace@15..19 " " + LetStmt@19..28 + LetKw@19..22 "let" + WhiteSpace@22..23 " " + PathPat@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "x" + WhiteSpace@24..25 " " + Eq@25..26 "=" + WhiteSpace@26..27 " " + LitExpr@27..28 + Lit@27..28 + Int@27..28 "1" + Newline@28..29 "\n" + RBrace@29..30 "}" + Newline@30..32 "\n\n" + Item@32..82 + Func@32..80 + FnKw@32..34 "fn" + WhiteSpace@34..35 " " + Ident@35..38 "bar" + FuncParamList@38..63 + LParen@38..39 "(" + FnParam@39..47 + 
Ident@39..42 "bar" + Colon@42..43 ":" + WhiteSpace@43..44 " " + PathType@44..47 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "i32" + Comma@47..48 "," + WhiteSpace@48..49 " " + FnParam@49..62 + MutKw@49..52 "mut" + WhiteSpace@52..53 " " + Ident@53..56 "baz" + Colon@56..57 ":" + WhiteSpace@57..58 " " + PathType@58..62 + Path@58..62 + PathSegment@58..62 + Ident@58..62 "u256" + RParen@62..63 ")" + WhiteSpace@63..64 " " + Arrow@64..66 "->" + WhiteSpace@66..67 " " + PathType@67..70 + Path@67..70 + PathSegment@67..70 + Ident@67..70 "i32" + WhiteSpace@70..71 " " + BlockExpr@71..80 + LBrace@71..72 "{" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + ExprStmt@77..78 + LitExpr@77..78 + Lit@77..78 + Int@77..78 "1" + Newline@78..79 "\n" + RBrace@79..80 "}" + Newline@80..82 "\n\n" + Item@82..180 + Func@82..178 + FnKw@82..84 "fn" + WhiteSpace@84..85 " " + Ident@85..88 "baz" + FuncParamList@88..161 + LParen@88..89 "(" + FnParam@89..109 + Ident@89..93 "from" + WhiteSpace@93..94 " " + Ident@94..100 "sender" + Colon@100..101 ":" + WhiteSpace@101..102 " " + PathType@102..109 + Path@102..109 + PathSegment@102..109 + Ident@102..109 "address" + Comma@109..110 "," + WhiteSpace@110..111 " " + FnParam@111..136 + MutKw@111..114 "mut" + WhiteSpace@114..115 " " + Ident@115..117 "to" + WhiteSpace@117..118 " " + Ident@118..127 "recipient" + Colon@127..128 ":" + WhiteSpace@128..129 " " + PathType@129..136 + Path@129..136 + PathSegment@129..136 + Ident@129..136 "address" + Comma@136..137 "," + WhiteSpace@137..138 " " + FnParam@138..149 + Underscore@138..139 "_" + WhiteSpace@139..140 " " + Ident@140..143 "val" + Colon@143..144 ":" + WhiteSpace@144..145 " " + PathType@145..149 + Path@145..149 + PathSegment@145..149 + Ident@145..149 "u256" + Comma@149..150 "," + WhiteSpace@150..151 " " + FnParam@151..160 + Underscore@151..152 "_" + WhiteSpace@152..153 " " + Underscore@153..154 "_" + Colon@154..155 ":" + WhiteSpace@155..156 " " + PathType@156..160 + Path@156..160 + PathSegment@156..160 + Ident@156..160 "u256" + RParen@160..161 ")" + WhiteSpace@161..162 " " + Arrow@162..164 "->" + WhiteSpace@164..165 " " + PathType@165..168 + Path@165..168 + PathSegment@165..168 + Ident@165..168 "i32" + WhiteSpace@168..169 " " + BlockExpr@169..178 + LBrace@169..170 "{" + Newline@170..171 "\n" + WhiteSpace@171..175 " " + ExprStmt@175..176 + LitExpr@175..176 + Lit@175..176 + Int@175..176 "1" + Newline@176..177 "\n" + RBrace@177..178 "}" + Newline@178..180 "\n\n" + Item@180..308 + Func@180..306 + FnKw@180..182 "fn" + WhiteSpace@182..183 " " + Ident@183..192 "generics1" + GenericParamList@192..205 + Lt@192..193 "<" + TypeGenericParam@193..201 + Ident@193..194 "T" + TypeBoundList@194..201 + Colon@194..195 ":" + WhiteSpace@195..196 " " + TypeBound@196..201 + Path@196..201 + PathSegment@196..201 + Ident@196..201 "Trait" + Comma@201..202 "," + WhiteSpace@202..203 " " + TypeGenericParam@203..204 + Ident@203..204 "U" + Gt@204..205 ">" + FuncParamList@205..225 + LParen@205..206 "(" + FnParam@206..210 + Ident@206..207 "t" + Colon@207..208 ":" + WhiteSpace@208..209 " " + PathType@209..210 + Path@209..210 + PathSegment@209..210 + Ident@209..210 "T" + Comma@210..211 "," + WhiteSpace@211..212 " " + FnParam@212..224 + Ident@212..213 "u" + Colon@213..214 ":" + WhiteSpace@214..215 " " + PathType@215..224 + Path@215..221 + PathSegment@215..221 + Ident@215..221 "Option" + GenericArgList@221..224 + Lt@221..222 "<" + TypeGenericArg@222..223 + PathType@222..223 + Path@222..223 + PathSegment@222..223 + Ident@222..223 "U" + Gt@223..224 ">" + RParen@224..225 ")" + 
WhiteSpace@225..226 " " + Arrow@226..228 "->" + WhiteSpace@228..229 " " + PathType@229..230 + Path@229..230 + PathSegment@229..230 + Ident@229..230 "T" + Newline@230..231 "\n" + WhiteSpace@231..235 " " + WhereClause@235..286 + WhereKw@235..240 "where" + WhiteSpace@240..241 " " + WherePredicate@241..259 + PathType@241..250 + Path@241..247 + PathSegment@241..247 + Ident@241..247 "Result" + GenericArgList@247..250 + Lt@247..248 "<" + TypeGenericArg@248..249 + PathType@248..249 + Path@248..249 + PathSegment@248..249 + Ident@248..249 "T" + Gt@249..250 ">" + TypeBoundList@250..257 + Colon@250..251 ":" + WhiteSpace@251..252 " " + TypeBound@252..257 + Path@252..257 + PathSegment@252..257 + Ident@252..257 "Trait" + WhiteSpace@257..258 " " + Newline@258..259 "\n" + WhiteSpace@259..269 " " + WherePredicate@269..286 + PathType@269..278 + Path@269..275 + PathSegment@269..275 + Ident@269..275 "Option" + GenericArgList@275..278 + Lt@275..276 "<" + TypeGenericArg@276..277 + PathType@276..277 + Path@276..277 + PathSegment@276..277 + Ident@276..277 "U" + Gt@277..278 ">" + TypeBoundList@278..285 + Colon@278..279 ":" + WhiteSpace@279..280 " " + TypeBound@280..285 + Path@280..285 + PathSegment@280..285 + Ident@280..285 "Clone" + Newline@285..286 "\n" + WhiteSpace@286..296 " " + Newline@296..297 "\n" + BlockExpr@297..306 + LBrace@297..298 "{" + Newline@298..299 "\n" + WhiteSpace@299..303 " " + ExprStmt@303..304 + PathExpr@303..304 + Path@303..304 + PathSegment@303..304 + Ident@303..304 "t" + Newline@304..305 "\n" + RBrace@305..306 "}" + Newline@306..308 "\n\n" + Item@308..361 + Func@308..361 + FnKw@308..310 "fn" + WhiteSpace@310..311 " " + Ident@311..315 "decl" + GenericParamList@315..321 + Lt@315..316 "<" + TypeGenericParam@316..317 + Ident@316..317 "T" + Comma@317..318 "," + WhiteSpace@318..319 " " + TypeGenericParam@319..320 + Ident@319..320 "U" + Gt@320..321 ">" + FuncParamList@321..340 + LParen@321..322 "(" + FnParam@322..339 + Ident@322..323 "t" + Colon@323..324 ":" + WhiteSpace@324..325 " " + PathType@325..339 + Path@325..333 + PathSegment@325..333 + Ident@325..333 "MyStruct" + GenericArgList@333..339 + Lt@333..334 "<" + TypeGenericArg@334..335 + PathType@334..335 + Path@334..335 + PathSegment@334..335 + Ident@334..335 "T" + Comma@335..336 "," + WhiteSpace@336..337 " " + TypeGenericArg@337..338 + PathType@337..338 + Path@337..338 + PathSegment@337..338 + Ident@337..338 "U" + Gt@338..339 ">" + RParen@339..340 ")" + WhiteSpace@340..341 " " + Arrow@341..343 "->" + WhiteSpace@343..344 " " + PathType@344..358 + Path@344..350 + PathSegment@344..350 + Ident@344..350 "Result" + GenericArgList@350..358 + Lt@350..351 "<" + TypeGenericArg@351..352 + PathType@351..352 + Path@351..352 + PathSegment@351..352 + Ident@351..352 "T" + Comma@352..353 "," + WhiteSpace@353..354 " " + TypeGenericArg@354..357 + PathType@354..357 + Path@354..357 + PathSegment@354..357 + Ident@354..357 "Err" + Gt@357..358 ">" + WhiteSpace@358..359 " " + BlockExpr@359..361 + LBrace@359..360 "{" + RBrace@360..361 "}" diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index d2485f2d2a..b9b5f92d81 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -5,247 +5,249 @@ input_file: crates/parser2/test_files/syntax_node/items/impl.fe --- Root@0..272 ItemList@0..272 - Impl@0..137 - ImplKw@0..4 "impl" - GenericParamList@4..12 - Lt@4..5 "<" - TypeGenericParam@5..11 - Ident@5..6 "T" - TypeBoundList@6..11 - 
Colon@6..7 ":" - WhiteSpace@7..8 " " - TypeBound@8..11 - Path@8..11 - PathSegment@8..11 - Ident@8..11 "Add" - Gt@11..12 ">" - WhiteSpace@12..13 " " - PathType@13..24 - Path@13..21 - PathSegment@13..16 - Ident@13..16 "Foo" - Colon2@16..18 "::" - PathSegment@18..21 - Ident@18..21 "Bar" - GenericArgList@21..24 - Lt@21..22 "<" - TypeGenericArg@22..23 - PathType@22..23 - Path@22..23 - PathSegment@22..23 - Ident@22..23 "T" - Gt@23..24 ">" - WhiteSpace@24..25 " " - ImplItemList@25..137 - LBrace@25..26 "{" - Newline@26..27 "\n" - WhiteSpace@27..31 " " - Func@31..135 - ItemModifier@31..34 - PubKw@31..34 "pub" - WhiteSpace@34..35 " " - FnKw@35..37 "fn" - WhiteSpace@37..38 " " - Ident@38..41 "add" - FuncParamList@41..58 - LParen@41..42 "(" - FnParam@42..46 - SelfKw@42..46 "self" - Comma@46..47 "," - WhiteSpace@47..48 " " - FnParam@48..57 - Ident@48..51 "rhs" - Colon@51..52 ":" - WhiteSpace@52..53 " " - SelfType@53..57 - SelfTypeKw@53..57 "Self" - RParen@57..58 ")" - WhiteSpace@58..59 " " - Arrow@59..61 "->" - WhiteSpace@61..62 " " - SelfType@62..66 - SelfTypeKw@62..66 "Self" - WhiteSpace@66..67 " " - BlockExpr@67..135 - LBrace@67..68 "{" - Newline@68..69 "\n" - WhiteSpace@69..77 " " - ExprStmt@77..129 - RecordInitExpr@77..129 - Path@77..81 - PathSegment@77..81 - SelfTypeKw@77..81 "Self" - WhiteSpace@81..82 " " - RecordFieldList@82..129 - LBrace@82..83 "{" - Newline@83..84 "\n" - WhiteSpace@84..96 " " - RecordField@96..119 - Ident@96..99 "val" - Colon@99..100 ":" - WhiteSpace@100..101 " " - BinExpr@101..119 - FieldExpr@101..109 - PathExpr@101..105 - Path@101..105 - PathSegment@101..105 - SelfKw@101..105 "self" - Dot@105..106 "." - Ident@106..109 "val" - WhiteSpace@109..110 " " - Plus@110..111 "+" - WhiteSpace@111..112 " " - FieldExpr@112..119 - PathExpr@112..115 - Path@112..115 - PathSegment@112..115 - Ident@112..115 "rhs" - Dot@115..116 "." 
- Ident@116..119 "val" - Newline@119..120 "\n" - WhiteSpace@120..128 " " - RBrace@128..129 "}" - Newline@129..130 "\n" - WhiteSpace@130..134 " " - RBrace@134..135 "}" - Newline@135..136 "\n" - RBrace@136..137 "}" - Newline@137..139 "\n\n" - Impl@139..272 - ImplKw@139..143 "impl" - GenericParamList@143..146 - Lt@143..144 "<" - TypeGenericParam@144..145 - Ident@144..145 "T" - Gt@145..146 ">" - WhiteSpace@146..147 " " - PathType@147..153 - Path@147..150 - PathSegment@147..150 - Ident@147..150 "Foo" - GenericArgList@150..153 - Lt@150..151 "<" - TypeGenericArg@151..152 - PathType@151..152 - Path@151..152 - PathSegment@151..152 - Ident@151..152 "T" - Gt@152..153 ">" - WhiteSpace@153..154 " " - Newline@154..155 "\n" - WhereClause@155..175 - WhereKw@155..160 "where" - WhiteSpace@160..161 " " - WherePredicate@161..175 - PathType@161..167 - Path@161..164 - PathSegment@161..164 - Ident@161..164 "Foo" - GenericArgList@164..167 - Lt@164..165 "<" - TypeGenericArg@165..166 - PathType@165..166 - Path@165..166 - PathSegment@165..166 - Ident@165..166 "T" - Gt@166..167 ">" - TypeBoundList@167..174 - Colon@167..168 ":" - WhiteSpace@168..169 " " - TypeBound@169..174 - Path@169..174 - PathSegment@169..174 - Ident@169..174 "Clone" - Newline@174..175 "\n" - ImplItemList@175..272 - LBrace@175..176 "{" - Newline@176..177 "\n" - WhiteSpace@177..181 " " - Func@181..270 - FnKw@181..183 "fn" - WhiteSpace@183..184 " " - Ident@184..187 "add" - GenericParamList@187..198 - Lt@187..188 "<" - TypeGenericParam@188..197 - Ident@188..189 "U" - TypeBoundList@189..197 - Colon@189..190 ":" - WhiteSpace@190..191 " " - TypeBound@191..197 - Path@191..194 - PathSegment@191..194 - Ident@191..194 "Add" - GenericArgList@194..197 - Lt@194..195 "<" - TypeGenericArg@195..196 - PathType@195..196 - Path@195..196 - PathSegment@195..196 - Ident@195..196 "T" - Gt@196..197 ">" - Gt@197..198 ">" - FuncParamList@198..212 - LParen@198..199 "(" - FnParam@199..203 - SelfKw@199..203 "self" - Comma@203..204 "," - WhiteSpace@204..205 " " - FnParam@205..211 - Ident@205..208 "rhs" - Colon@208..209 ":" - WhiteSpace@209..210 " " - PathType@210..211 - Path@210..211 - PathSegment@210..211 - Ident@210..211 "U" - RParen@211..212 ")" - WhiteSpace@212..213 " " - Newline@213..214 "\n" - WhiteSpace@214..222 " " - WhereClause@222..236 - WhereKw@222..227 "where" - WhiteSpace@227..228 " " - WherePredicate@228..236 - PathType@228..229 - Path@228..229 - PathSegment@228..229 - Ident@228..229 "T" - TypeBoundList@229..235 - Colon@229..230 ":" - WhiteSpace@230..231 " " - TypeBound@231..235 - Path@231..235 - PathSegment@231..235 - Ident@231..235 "Copy" - Newline@235..236 "\n" - WhiteSpace@236..240 " " - BlockExpr@240..270 - LBrace@240..241 "{" - Newline@241..242 "\n" - WhiteSpace@242..250 " " - ExprStmt@250..264 - ParenExpr@250..264 - LParen@250..251 "(" - BinExpr@251..263 - PathExpr@251..254 - Path@251..254 - PathSegment@251..254 - Ident@251..254 "rhs" - WhiteSpace@254..255 " " - Minus@255..256 "-" - WhiteSpace@256..257 " " - FieldExpr@257..263 - PathExpr@257..261 - Path@257..261 - PathSegment@257..261 - SelfKw@257..261 "self" - Dot@261..262 "." 
- Ident@262..263 "t" - RParen@263..264 ")" - Newline@264..265 "\n" - WhiteSpace@265..269 " " - RBrace@269..270 "}" - Newline@270..271 "\n" - RBrace@271..272 "}" + Item@0..139 + Impl@0..137 + ImplKw@0..4 "impl" + GenericParamList@4..12 + Lt@4..5 "<" + TypeGenericParam@5..11 + Ident@5..6 "T" + TypeBoundList@6..11 + Colon@6..7 ":" + WhiteSpace@7..8 " " + TypeBound@8..11 + Path@8..11 + PathSegment@8..11 + Ident@8..11 "Add" + Gt@11..12 ">" + WhiteSpace@12..13 " " + PathType@13..24 + Path@13..21 + PathSegment@13..16 + Ident@13..16 "Foo" + Colon2@16..18 "::" + PathSegment@18..21 + Ident@18..21 "Bar" + GenericArgList@21..24 + Lt@21..22 "<" + TypeGenericArg@22..23 + PathType@22..23 + Path@22..23 + PathSegment@22..23 + Ident@22..23 "T" + Gt@23..24 ">" + WhiteSpace@24..25 " " + ImplItemList@25..137 + LBrace@25..26 "{" + Newline@26..27 "\n" + WhiteSpace@27..31 " " + Func@31..135 + ItemModifier@31..34 + PubKw@31..34 "pub" + WhiteSpace@34..35 " " + FnKw@35..37 "fn" + WhiteSpace@37..38 " " + Ident@38..41 "add" + FuncParamList@41..58 + LParen@41..42 "(" + FnParam@42..46 + SelfKw@42..46 "self" + Comma@46..47 "," + WhiteSpace@47..48 " " + FnParam@48..57 + Ident@48..51 "rhs" + Colon@51..52 ":" + WhiteSpace@52..53 " " + SelfType@53..57 + SelfTypeKw@53..57 "Self" + RParen@57..58 ")" + WhiteSpace@58..59 " " + Arrow@59..61 "->" + WhiteSpace@61..62 " " + SelfType@62..66 + SelfTypeKw@62..66 "Self" + WhiteSpace@66..67 " " + BlockExpr@67..135 + LBrace@67..68 "{" + Newline@68..69 "\n" + WhiteSpace@69..77 " " + ExprStmt@77..129 + RecordInitExpr@77..129 + Path@77..81 + PathSegment@77..81 + SelfTypeKw@77..81 "Self" + WhiteSpace@81..82 " " + RecordFieldList@82..129 + LBrace@82..83 "{" + Newline@83..84 "\n" + WhiteSpace@84..96 " " + RecordField@96..119 + Ident@96..99 "val" + Colon@99..100 ":" + WhiteSpace@100..101 " " + BinExpr@101..119 + FieldExpr@101..109 + PathExpr@101..105 + Path@101..105 + PathSegment@101..105 + SelfKw@101..105 "self" + Dot@105..106 "." + Ident@106..109 "val" + WhiteSpace@109..110 " " + Plus@110..111 "+" + WhiteSpace@111..112 " " + FieldExpr@112..119 + PathExpr@112..115 + Path@112..115 + PathSegment@112..115 + Ident@112..115 "rhs" + Dot@115..116 "." 
+ Ident@116..119 "val" + Newline@119..120 "\n" + WhiteSpace@120..128 " " + RBrace@128..129 "}" + Newline@129..130 "\n" + WhiteSpace@130..134 " " + RBrace@134..135 "}" + Newline@135..136 "\n" + RBrace@136..137 "}" + Newline@137..139 "\n\n" + Item@139..272 + Impl@139..272 + ImplKw@139..143 "impl" + GenericParamList@143..146 + Lt@143..144 "<" + TypeGenericParam@144..145 + Ident@144..145 "T" + Gt@145..146 ">" + WhiteSpace@146..147 " " + PathType@147..153 + Path@147..150 + PathSegment@147..150 + Ident@147..150 "Foo" + GenericArgList@150..153 + Lt@150..151 "<" + TypeGenericArg@151..152 + PathType@151..152 + Path@151..152 + PathSegment@151..152 + Ident@151..152 "T" + Gt@152..153 ">" + WhiteSpace@153..154 " " + Newline@154..155 "\n" + WhereClause@155..175 + WhereKw@155..160 "where" + WhiteSpace@160..161 " " + WherePredicate@161..175 + PathType@161..167 + Path@161..164 + PathSegment@161..164 + Ident@161..164 "Foo" + GenericArgList@164..167 + Lt@164..165 "<" + TypeGenericArg@165..166 + PathType@165..166 + Path@165..166 + PathSegment@165..166 + Ident@165..166 "T" + Gt@166..167 ">" + TypeBoundList@167..174 + Colon@167..168 ":" + WhiteSpace@168..169 " " + TypeBound@169..174 + Path@169..174 + PathSegment@169..174 + Ident@169..174 "Clone" + Newline@174..175 "\n" + ImplItemList@175..272 + LBrace@175..176 "{" + Newline@176..177 "\n" + WhiteSpace@177..181 " " + Func@181..270 + FnKw@181..183 "fn" + WhiteSpace@183..184 " " + Ident@184..187 "add" + GenericParamList@187..198 + Lt@187..188 "<" + TypeGenericParam@188..197 + Ident@188..189 "U" + TypeBoundList@189..197 + Colon@189..190 ":" + WhiteSpace@190..191 " " + TypeBound@191..197 + Path@191..194 + PathSegment@191..194 + Ident@191..194 "Add" + GenericArgList@194..197 + Lt@194..195 "<" + TypeGenericArg@195..196 + PathType@195..196 + Path@195..196 + PathSegment@195..196 + Ident@195..196 "T" + Gt@196..197 ">" + Gt@197..198 ">" + FuncParamList@198..212 + LParen@198..199 "(" + FnParam@199..203 + SelfKw@199..203 "self" + Comma@203..204 "," + WhiteSpace@204..205 " " + FnParam@205..211 + Ident@205..208 "rhs" + Colon@208..209 ":" + WhiteSpace@209..210 " " + PathType@210..211 + Path@210..211 + PathSegment@210..211 + Ident@210..211 "U" + RParen@211..212 ")" + WhiteSpace@212..213 " " + Newline@213..214 "\n" + WhiteSpace@214..222 " " + WhereClause@222..236 + WhereKw@222..227 "where" + WhiteSpace@227..228 " " + WherePredicate@228..236 + PathType@228..229 + Path@228..229 + PathSegment@228..229 + Ident@228..229 "T" + TypeBoundList@229..235 + Colon@229..230 ":" + WhiteSpace@230..231 " " + TypeBound@231..235 + Path@231..235 + PathSegment@231..235 + Ident@231..235 "Copy" + Newline@235..236 "\n" + WhiteSpace@236..240 " " + BlockExpr@240..270 + LBrace@240..241 "{" + Newline@241..242 "\n" + WhiteSpace@242..250 " " + ExprStmt@250..264 + ParenExpr@250..264 + LParen@250..251 "(" + BinExpr@251..263 + PathExpr@251..254 + Path@251..254 + PathSegment@251..254 + Ident@251..254 "rhs" + WhiteSpace@254..255 " " + Minus@255..256 "-" + WhiteSpace@256..257 " " + FieldExpr@257..263 + PathExpr@257..261 + Path@257..261 + PathSegment@257..261 + SelfKw@257..261 "self" + Dot@261..262 "." 
+ Ident@262..263 "t" + RParen@263..264 ")" + Newline@264..265 "\n" + WhiteSpace@265..269 " " + RBrace@269..270 "}" + Newline@270..271 "\n" + RBrace@271..272 "}" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index a80d601968..6df8df7c7a 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -5,360 +5,363 @@ input_file: crates/parser2/test_files/syntax_node/items/impl_trait.fe --- Root@0..335 ItemList@0..335 - ImplTrait@0..67 - ImplKw@0..4 "impl" - GenericParamList@4..7 - Lt@4..5 "<" - TypeGenericParam@5..6 - Ident@5..6 "T" - Gt@6..7 ">" - WhiteSpace@7..8 " " - PathType@8..16 - Path@8..13 - PathSegment@8..13 - Ident@8..13 "Trait" - GenericArgList@13..16 - Lt@13..14 "<" - TypeGenericArg@14..15 - PathType@14..15 - Path@14..15 - PathSegment@14..15 - Ident@14..15 "T" - Gt@15..16 ">" - WhiteSpace@16..17 " " - ForKw@17..20 "for" - WhiteSpace@20..21 " " - PathType@21..25 - Path@21..22 - PathSegment@21..22 - Ident@21..22 "F" - GenericArgList@22..25 - Lt@22..23 "<" - TypeGenericArg@23..24 - PathType@23..24 - Path@23..24 - PathSegment@23..24 - Ident@23..24 "T" - Gt@24..25 ">" - WhiteSpace@25..26 " " - ImplTraitItemList@26..67 - LBrace@26..27 "{" - Newline@27..28 "\n" - WhiteSpace@28..32 " " - Func@32..65 - FnKw@32..34 "fn" - WhiteSpace@34..35 " " - Ident@35..38 "foo" - FuncParamList@38..40 - LParen@38..39 "(" - RParen@39..40 ")" - WhiteSpace@40..41 " " - BlockExpr@41..65 - LBrace@41..42 "{" - Newline@42..43 "\n" - WhiteSpace@43..51 " " - ReturnStmt@51..59 - ReturnKw@51..57 "return" - WhiteSpace@57..58 " " - LitExpr@58..59 - Lit@58..59 - Int@58..59 "1" - Newline@59..60 "\n" - WhiteSpace@60..64 " " - RBrace@64..65 "}" - Newline@65..66 "\n" - RBrace@66..67 "}" - Newline@67..69 "\n\n" - ImplTrait@69..205 - ImplKw@69..73 "impl" - GenericParamList@73..79 - Lt@73..74 "<" - TypeGenericParam@74..75 - Ident@74..75 "T" - Comma@75..76 "," - WhiteSpace@76..77 " " - TypeGenericParam@77..78 - Ident@77..78 "U" - Gt@78..79 ">" - WhiteSpace@79..80 " " - PathType@80..91 - Path@80..85 - PathSegment@80..85 - Ident@80..85 "Trait" - GenericArgList@85..91 - Lt@85..86 "<" - TypeGenericArg@86..87 - PathType@86..87 - Path@86..87 - PathSegment@86..87 - Ident@86..87 "T" - Comma@87..88 "," - WhiteSpace@88..89 " " - TypeGenericArg@89..90 - PathType@89..90 - Path@89..90 - PathSegment@89..90 - Ident@89..90 "U" - Gt@90..91 ">" - WhiteSpace@91..92 " " - ForKw@92..95 "for" - WhiteSpace@95..96 " " - PathType@96..100 - Path@96..97 - PathSegment@96..97 - Ident@96..97 "F" - GenericArgList@97..100 - Lt@97..98 "<" - TypeGenericArg@98..99 - PathType@98..99 - Path@98..99 - PathSegment@98..99 - Ident@98..99 "T" - Gt@99..100 ">" - WhiteSpace@100..101 " " - Newline@101..102 "\n" - WhereClause@102..130 - WhereKw@102..107 "where" - WhiteSpace@107..108 " " - WherePredicate@108..117 - PathType@108..109 - Path@108..109 - PathSegment@108..109 - Ident@108..109 "T" - TypeBoundList@109..116 - Colon@109..110 ":" - WhiteSpace@110..111 " " - TypeBound@111..116 - Path@111..116 - PathSegment@111..116 - Ident@111..116 "Clone" - Newline@116..117 "\n" - WhiteSpace@117..123 " " - WherePredicate@123..130 - PathType@123..124 - Path@123..124 - PathSegment@123..124 - Ident@123..124 "U" - TypeBoundList@124..129 - Colon@124..125 ":" - WhiteSpace@125..126 " " - TypeBound@126..129 - Path@126..129 - PathSegment@126..129 - Ident@126..129 "Bar" - Newline@129..130 "\n" - ImplTraitItemList@130..205 - 
LBrace@130..131 "{" - Newline@131..132 "\n" - WhiteSpace@132..136 " " - Func@136..203 - FnKw@136..138 "fn" - WhiteSpace@138..139 " " - Ident@139..142 "foo" - GenericParamList@142..160 - Lt@142..143 "<" - TypeGenericParam@143..159 - Ident@143..144 "T" - TypeBoundList@144..159 - Colon@144..145 ":" - WhiteSpace@145..146 " " - TypeBound@146..159 - Path@146..156 - PathSegment@146..156 - Ident@146..156 "OtherTrait" - GenericArgList@156..159 - Lt@156..157 "<" - TypeGenericArg@157..158 - PathType@157..158 - Path@157..158 - PathSegment@157..158 - Ident@157..158 "U" - Gt@158..159 ">" - Gt@159..160 ">" - FuncParamList@160..166 - LParen@160..161 "(" - FnParam@161..165 - Ident@161..162 "t" - Colon@162..163 ":" - WhiteSpace@163..164 " " - PathType@164..165 - Path@164..165 - PathSegment@164..165 - Ident@164..165 "T" - RParen@165..166 ")" - WhiteSpace@166..167 " " - BlockExpr@167..203 - LBrace@167..168 "{" - Newline@168..169 "\n" - WhiteSpace@169..177 " " - ExprStmt@177..197 - CallExpr@177..197 - PathExpr@177..189 - Path@177..189 - PathSegment@177..189 - Ident@177..189 "do_something" - GenericArgList@189..194 - Lt@189..190 "<" - TypeGenericArg@190..193 - PathType@190..193 - Path@190..193 - PathSegment@190..193 - Ident@190..193 "i32" - Gt@193..194 ">" - CallArgList@194..197 - LParen@194..195 "(" - CallArg@195..196 - PathExpr@195..196 - Path@195..196 - PathSegment@195..196 - Ident@195..196 "t" - RParen@196..197 ")" - Newline@197..198 "\n" - WhiteSpace@198..202 " " - RBrace@202..203 "}" - Newline@203..204 "\n" - RBrace@204..205 "}" - Newline@205..207 "\n\n" - ImplTrait@207..335 - ImplKw@207..211 "impl" - GenericParamList@211..224 - Lt@211..212 "<" - TypeGenericParam@212..220 - Ident@212..213 "T" - TypeBoundList@213..220 - Colon@213..214 ":" - WhiteSpace@214..215 " " - TypeBound@215..220 - Path@215..220 - PathSegment@215..220 - Ident@215..220 "Clone" - Comma@220..221 "," - WhiteSpace@221..222 " " - TypeGenericParam@222..223 - Ident@222..223 "U" - Gt@223..224 ">" - WhiteSpace@224..225 " " - PathType@225..236 - Path@225..230 - PathSegment@225..230 - Ident@225..230 "Trait" - GenericArgList@230..236 - Lt@230..231 "<" - TypeGenericArg@231..232 - PathType@231..232 - Path@231..232 - PathSegment@231..232 - Ident@231..232 "T" - Comma@232..233 "," - WhiteSpace@233..234 " " - TypeGenericArg@234..235 - PathType@234..235 - Path@234..235 - PathSegment@234..235 - Ident@234..235 "U" - Gt@235..236 ">" - WhiteSpace@236..237 " " - ForKw@237..240 "for" - WhiteSpace@240..241 " " - PathType@241..245 - Path@241..242 - PathSegment@241..242 - Ident@241..242 "F" - GenericArgList@242..245 - Lt@242..243 "<" - TypeGenericArg@243..244 - PathType@243..244 - Path@243..244 - PathSegment@243..244 - Ident@243..244 "U" - Gt@244..245 ">" - WhiteSpace@245..246 " " - Newline@246..247 "\n" - WhereClause@247..260 - WhereKw@247..252 "where" - WhiteSpace@252..253 " " - WherePredicate@253..260 - PathType@253..254 - Path@253..254 - PathSegment@253..254 - Ident@253..254 "U" - TypeBoundList@254..259 - Colon@254..255 ":" - WhiteSpace@255..256 " " - TypeBound@256..259 - Path@256..259 - PathSegment@256..259 - Ident@256..259 "Bar" - Newline@259..260 "\n" - ImplTraitItemList@260..335 - LBrace@260..261 "{" - Newline@261..262 "\n" - WhiteSpace@262..266 " " - Func@266..333 - FnKw@266..268 "fn" - WhiteSpace@268..269 " " - Ident@269..272 "foo" - GenericParamList@272..290 - Lt@272..273 "<" - TypeGenericParam@273..289 - Ident@273..274 "T" - TypeBoundList@274..289 - Colon@274..275 ":" - WhiteSpace@275..276 " " - TypeBound@276..289 - Path@276..286 - 
PathSegment@276..286 - Ident@276..286 "OtherTrait" - GenericArgList@286..289 - Lt@286..287 "<" - TypeGenericArg@287..288 - PathType@287..288 - Path@287..288 - PathSegment@287..288 - Ident@287..288 "U" - Gt@288..289 ">" - Gt@289..290 ">" - FuncParamList@290..296 - LParen@290..291 "(" - FnParam@291..295 - Ident@291..292 "t" - Colon@292..293 ":" - WhiteSpace@293..294 " " - PathType@294..295 - Path@294..295 - PathSegment@294..295 - Ident@294..295 "T" - RParen@295..296 ")" - WhiteSpace@296..297 " " - BlockExpr@297..333 - LBrace@297..298 "{" - Newline@298..299 "\n" - WhiteSpace@299..307 " " - ExprStmt@307..327 - CallExpr@307..327 - PathExpr@307..319 - Path@307..319 - PathSegment@307..319 - Ident@307..319 "do_something" - GenericArgList@319..324 - Lt@319..320 "<" - TypeGenericArg@320..323 - PathType@320..323 - Path@320..323 - PathSegment@320..323 - Ident@320..323 "i32" - Gt@323..324 ">" - CallArgList@324..327 - LParen@324..325 "(" - CallArg@325..326 - PathExpr@325..326 - Path@325..326 - PathSegment@325..326 - Ident@325..326 "t" - RParen@326..327 ")" - Newline@327..328 "\n" - WhiteSpace@328..332 " " - RBrace@332..333 "}" - Newline@333..334 "\n" - RBrace@334..335 "}" + Item@0..69 + ImplTrait@0..67 + ImplKw@0..4 "impl" + GenericParamList@4..7 + Lt@4..5 "<" + TypeGenericParam@5..6 + Ident@5..6 "T" + Gt@6..7 ">" + WhiteSpace@7..8 " " + PathType@8..16 + Path@8..13 + PathSegment@8..13 + Ident@8..13 "Trait" + GenericArgList@13..16 + Lt@13..14 "<" + TypeGenericArg@14..15 + PathType@14..15 + Path@14..15 + PathSegment@14..15 + Ident@14..15 "T" + Gt@15..16 ">" + WhiteSpace@16..17 " " + ForKw@17..20 "for" + WhiteSpace@20..21 " " + PathType@21..25 + Path@21..22 + PathSegment@21..22 + Ident@21..22 "F" + GenericArgList@22..25 + Lt@22..23 "<" + TypeGenericArg@23..24 + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + Gt@24..25 ">" + WhiteSpace@25..26 " " + ImplTraitItemList@26..67 + LBrace@26..27 "{" + Newline@27..28 "\n" + WhiteSpace@28..32 " " + Func@32..65 + FnKw@32..34 "fn" + WhiteSpace@34..35 " " + Ident@35..38 "foo" + FuncParamList@38..40 + LParen@38..39 "(" + RParen@39..40 ")" + WhiteSpace@40..41 " " + BlockExpr@41..65 + LBrace@41..42 "{" + Newline@42..43 "\n" + WhiteSpace@43..51 " " + ReturnStmt@51..59 + ReturnKw@51..57 "return" + WhiteSpace@57..58 " " + LitExpr@58..59 + Lit@58..59 + Int@58..59 "1" + Newline@59..60 "\n" + WhiteSpace@60..64 " " + RBrace@64..65 "}" + Newline@65..66 "\n" + RBrace@66..67 "}" + Newline@67..69 "\n\n" + Item@69..207 + ImplTrait@69..205 + ImplKw@69..73 "impl" + GenericParamList@73..79 + Lt@73..74 "<" + TypeGenericParam@74..75 + Ident@74..75 "T" + Comma@75..76 "," + WhiteSpace@76..77 " " + TypeGenericParam@77..78 + Ident@77..78 "U" + Gt@78..79 ">" + WhiteSpace@79..80 " " + PathType@80..91 + Path@80..85 + PathSegment@80..85 + Ident@80..85 "Trait" + GenericArgList@85..91 + Lt@85..86 "<" + TypeGenericArg@86..87 + PathType@86..87 + Path@86..87 + PathSegment@86..87 + Ident@86..87 "T" + Comma@87..88 "," + WhiteSpace@88..89 " " + TypeGenericArg@89..90 + PathType@89..90 + Path@89..90 + PathSegment@89..90 + Ident@89..90 "U" + Gt@90..91 ">" + WhiteSpace@91..92 " " + ForKw@92..95 "for" + WhiteSpace@95..96 " " + PathType@96..100 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "F" + GenericArgList@97..100 + Lt@97..98 "<" + TypeGenericArg@98..99 + PathType@98..99 + Path@98..99 + PathSegment@98..99 + Ident@98..99 "T" + Gt@99..100 ">" + WhiteSpace@100..101 " " + Newline@101..102 "\n" + WhereClause@102..130 + WhereKw@102..107 "where" + WhiteSpace@107..108 " " + 
WherePredicate@108..117 + PathType@108..109 + Path@108..109 + PathSegment@108..109 + Ident@108..109 "T" + TypeBoundList@109..116 + Colon@109..110 ":" + WhiteSpace@110..111 " " + TypeBound@111..116 + Path@111..116 + PathSegment@111..116 + Ident@111..116 "Clone" + Newline@116..117 "\n" + WhiteSpace@117..123 " " + WherePredicate@123..130 + PathType@123..124 + Path@123..124 + PathSegment@123..124 + Ident@123..124 "U" + TypeBoundList@124..129 + Colon@124..125 ":" + WhiteSpace@125..126 " " + TypeBound@126..129 + Path@126..129 + PathSegment@126..129 + Ident@126..129 "Bar" + Newline@129..130 "\n" + ImplTraitItemList@130..205 + LBrace@130..131 "{" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + Func@136..203 + FnKw@136..138 "fn" + WhiteSpace@138..139 " " + Ident@139..142 "foo" + GenericParamList@142..160 + Lt@142..143 "<" + TypeGenericParam@143..159 + Ident@143..144 "T" + TypeBoundList@144..159 + Colon@144..145 ":" + WhiteSpace@145..146 " " + TypeBound@146..159 + Path@146..156 + PathSegment@146..156 + Ident@146..156 "OtherTrait" + GenericArgList@156..159 + Lt@156..157 "<" + TypeGenericArg@157..158 + PathType@157..158 + Path@157..158 + PathSegment@157..158 + Ident@157..158 "U" + Gt@158..159 ">" + Gt@159..160 ">" + FuncParamList@160..166 + LParen@160..161 "(" + FnParam@161..165 + Ident@161..162 "t" + Colon@162..163 ":" + WhiteSpace@163..164 " " + PathType@164..165 + Path@164..165 + PathSegment@164..165 + Ident@164..165 "T" + RParen@165..166 ")" + WhiteSpace@166..167 " " + BlockExpr@167..203 + LBrace@167..168 "{" + Newline@168..169 "\n" + WhiteSpace@169..177 " " + ExprStmt@177..197 + CallExpr@177..197 + PathExpr@177..189 + Path@177..189 + PathSegment@177..189 + Ident@177..189 "do_something" + GenericArgList@189..194 + Lt@189..190 "<" + TypeGenericArg@190..193 + PathType@190..193 + Path@190..193 + PathSegment@190..193 + Ident@190..193 "i32" + Gt@193..194 ">" + CallArgList@194..197 + LParen@194..195 "(" + CallArg@195..196 + PathExpr@195..196 + Path@195..196 + PathSegment@195..196 + Ident@195..196 "t" + RParen@196..197 ")" + Newline@197..198 "\n" + WhiteSpace@198..202 " " + RBrace@202..203 "}" + Newline@203..204 "\n" + RBrace@204..205 "}" + Newline@205..207 "\n\n" + Item@207..335 + ImplTrait@207..335 + ImplKw@207..211 "impl" + GenericParamList@211..224 + Lt@211..212 "<" + TypeGenericParam@212..220 + Ident@212..213 "T" + TypeBoundList@213..220 + Colon@213..214 ":" + WhiteSpace@214..215 " " + TypeBound@215..220 + Path@215..220 + PathSegment@215..220 + Ident@215..220 "Clone" + Comma@220..221 "," + WhiteSpace@221..222 " " + TypeGenericParam@222..223 + Ident@222..223 "U" + Gt@223..224 ">" + WhiteSpace@224..225 " " + PathType@225..236 + Path@225..230 + PathSegment@225..230 + Ident@225..230 "Trait" + GenericArgList@230..236 + Lt@230..231 "<" + TypeGenericArg@231..232 + PathType@231..232 + Path@231..232 + PathSegment@231..232 + Ident@231..232 "T" + Comma@232..233 "," + WhiteSpace@233..234 " " + TypeGenericArg@234..235 + PathType@234..235 + Path@234..235 + PathSegment@234..235 + Ident@234..235 "U" + Gt@235..236 ">" + WhiteSpace@236..237 " " + ForKw@237..240 "for" + WhiteSpace@240..241 " " + PathType@241..245 + Path@241..242 + PathSegment@241..242 + Ident@241..242 "F" + GenericArgList@242..245 + Lt@242..243 "<" + TypeGenericArg@243..244 + PathType@243..244 + Path@243..244 + PathSegment@243..244 + Ident@243..244 "U" + Gt@244..245 ">" + WhiteSpace@245..246 " " + Newline@246..247 "\n" + WhereClause@247..260 + WhereKw@247..252 "where" + WhiteSpace@252..253 " " + WherePredicate@253..260 + PathType@253..254 + 
Path@253..254 + PathSegment@253..254 + Ident@253..254 "U" + TypeBoundList@254..259 + Colon@254..255 ":" + WhiteSpace@255..256 " " + TypeBound@256..259 + Path@256..259 + PathSegment@256..259 + Ident@256..259 "Bar" + Newline@259..260 "\n" + ImplTraitItemList@260..335 + LBrace@260..261 "{" + Newline@261..262 "\n" + WhiteSpace@262..266 " " + Func@266..333 + FnKw@266..268 "fn" + WhiteSpace@268..269 " " + Ident@269..272 "foo" + GenericParamList@272..290 + Lt@272..273 "<" + TypeGenericParam@273..289 + Ident@273..274 "T" + TypeBoundList@274..289 + Colon@274..275 ":" + WhiteSpace@275..276 " " + TypeBound@276..289 + Path@276..286 + PathSegment@276..286 + Ident@276..286 "OtherTrait" + GenericArgList@286..289 + Lt@286..287 "<" + TypeGenericArg@287..288 + PathType@287..288 + Path@287..288 + PathSegment@287..288 + Ident@287..288 "U" + Gt@288..289 ">" + Gt@289..290 ">" + FuncParamList@290..296 + LParen@290..291 "(" + FnParam@291..295 + Ident@291..292 "t" + Colon@292..293 ":" + WhiteSpace@293..294 " " + PathType@294..295 + Path@294..295 + PathSegment@294..295 + Ident@294..295 "T" + RParen@295..296 ")" + WhiteSpace@296..297 " " + BlockExpr@297..333 + LBrace@297..298 "{" + Newline@298..299 "\n" + WhiteSpace@299..307 " " + ExprStmt@307..327 + CallExpr@307..327 + PathExpr@307..319 + Path@307..319 + PathSegment@307..319 + Ident@307..319 "do_something" + GenericArgList@319..324 + Lt@319..320 "<" + TypeGenericArg@320..323 + PathType@320..323 + Path@320..323 + PathSegment@320..323 + Ident@320..323 "i32" + Gt@323..324 ">" + CallArgList@324..327 + LParen@324..325 "(" + CallArg@325..326 + PathExpr@325..326 + Path@325..326 + PathSegment@325..326 + Ident@325..326 "t" + RParen@326..327 ")" + Newline@327..328 "\n" + WhiteSpace@328..332 " " + RBrace@332..333 "}" + Newline@333..334 "\n" + RBrace@334..335 "}" diff --git a/crates/parser2/test_files/syntax_node/items/mod.snap b/crates/parser2/test_files/syntax_node/items/mod.snap index 15f6d25ddc..d5cd3c1e7b 100644 --- a/crates/parser2/test_files/syntax_node/items/mod.snap +++ b/crates/parser2/test_files/syntax_node/items/mod.snap @@ -5,105 +5,110 @@ input_file: crates/parser2/test_files/syntax_node/items/mod.fe --- Root@0..146 ItemList@0..146 - Mod@0..107 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - ModKw@4..7 "mod" - WhiteSpace@7..8 " " - Ident@8..11 "foo" - WhiteSpace@11..12 " " - ItemList@12..107 - LBrace@12..13 "{" - Newline@13..14 "\n" - WhiteSpace@14..18 " " - Func@18..78 - FnKw@18..20 "fn" - WhiteSpace@20..21 " " - Ident@21..28 "foo_foo" - FuncParamList@28..53 - LParen@28..29 "(" - FnParam@29..37 - Ident@29..32 "bar" - Colon@32..33 ":" - WhiteSpace@33..34 " " - PathType@34..37 - Path@34..37 - PathSegment@34..37 - Ident@34..37 "i32" - Comma@37..38 "," - WhiteSpace@38..39 " " - FnParam@39..52 - MutKw@39..42 "mut" - WhiteSpace@42..43 " " - Ident@43..46 "baz" - Colon@46..47 ":" - WhiteSpace@47..48 " " - PathType@48..52 - Path@48..52 - PathSegment@48..52 - Ident@48..52 "u256" - RParen@52..53 ")" - WhiteSpace@53..54 " " - Arrow@54..56 "->" - WhiteSpace@56..57 " " - PathType@57..60 - Path@57..60 - PathSegment@57..60 - Ident@57..60 "i32" - WhiteSpace@60..61 " " - BlockExpr@61..78 - LBrace@61..62 "{" - Newline@62..63 "\n" - WhiteSpace@63..71 " " - ExprStmt@71..72 - LitExpr@71..72 - Lit@71..72 - Int@71..72 "1" - Newline@72..73 "\n" - WhiteSpace@73..77 " " - RBrace@77..78 "}" - Newline@78..79 "\n" - WhiteSpace@79..83 " " - Newline@83..84 "\n" - WhiteSpace@84..88 " " - Struct@88..105 - ItemModifier@88..91 - PubKw@88..91 "pub" - WhiteSpace@91..92 " " - 
StructKw@92..98 "struct" - WhiteSpace@98..99 " " - Ident@99..102 "Foo" - WhiteSpace@102..103 " " - RecordFieldDefList@103..105 - LBrace@103..104 "{" - RBrace@104..105 "}" - Newline@105..106 "\n" - RBrace@106..107 "}" - Newline@107..109 "\n\n" - Mod@109..146 - ItemModifier@109..112 - PubKw@109..112 "pub" - WhiteSpace@112..113 " " - ModKw@113..116 "mod" - WhiteSpace@116..117 " " - Ident@117..120 "bar" - WhiteSpace@120..121 " " - ItemList@121..146 - LBrace@121..122 "{" - Newline@122..123 "\n" - WhiteSpace@123..127 " " - Struct@127..144 - ItemModifier@127..130 - PubKw@127..130 "pub" - WhiteSpace@130..131 " " - StructKw@131..137 "struct" - WhiteSpace@137..138 " " - Ident@138..141 "Bar" - WhiteSpace@141..142 " " - RecordFieldDefList@142..144 - LBrace@142..143 "{" - RBrace@143..144 "}" - Newline@144..145 "\n" - RBrace@145..146 "}" + Item@0..109 + Mod@0..107 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + ModKw@4..7 "mod" + WhiteSpace@7..8 " " + Ident@8..11 "foo" + WhiteSpace@11..12 " " + ItemList@12..107 + LBrace@12..13 "{" + Newline@13..14 "\n" + WhiteSpace@14..18 " " + Item@18..79 + Func@18..78 + FnKw@18..20 "fn" + WhiteSpace@20..21 " " + Ident@21..28 "foo_foo" + FuncParamList@28..53 + LParen@28..29 "(" + FnParam@29..37 + Ident@29..32 "bar" + Colon@32..33 ":" + WhiteSpace@33..34 " " + PathType@34..37 + Path@34..37 + PathSegment@34..37 + Ident@34..37 "i32" + Comma@37..38 "," + WhiteSpace@38..39 " " + FnParam@39..52 + MutKw@39..42 "mut" + WhiteSpace@42..43 " " + Ident@43..46 "baz" + Colon@46..47 ":" + WhiteSpace@47..48 " " + PathType@48..52 + Path@48..52 + PathSegment@48..52 + Ident@48..52 "u256" + RParen@52..53 ")" + WhiteSpace@53..54 " " + Arrow@54..56 "->" + WhiteSpace@56..57 " " + PathType@57..60 + Path@57..60 + PathSegment@57..60 + Ident@57..60 "i32" + WhiteSpace@60..61 " " + BlockExpr@61..78 + LBrace@61..62 "{" + Newline@62..63 "\n" + WhiteSpace@63..71 " " + ExprStmt@71..72 + LitExpr@71..72 + Lit@71..72 + Int@71..72 "1" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + RBrace@77..78 "}" + Newline@78..79 "\n" + WhiteSpace@79..83 " " + Newline@83..84 "\n" + WhiteSpace@84..88 " " + Item@88..106 + Struct@88..105 + ItemModifier@88..91 + PubKw@88..91 "pub" + WhiteSpace@91..92 " " + StructKw@92..98 "struct" + WhiteSpace@98..99 " " + Ident@99..102 "Foo" + WhiteSpace@102..103 " " + RecordFieldDefList@103..105 + LBrace@103..104 "{" + RBrace@104..105 "}" + Newline@105..106 "\n" + RBrace@106..107 "}" + Newline@107..109 "\n\n" + Item@109..146 + Mod@109..146 + ItemModifier@109..112 + PubKw@109..112 "pub" + WhiteSpace@112..113 " " + ModKw@113..116 "mod" + WhiteSpace@116..117 " " + Ident@117..120 "bar" + WhiteSpace@120..121 " " + ItemList@121..146 + LBrace@121..122 "{" + Newline@122..123 "\n" + WhiteSpace@123..127 " " + Item@127..145 + Struct@127..144 + ItemModifier@127..130 + PubKw@127..130 "pub" + WhiteSpace@130..131 " " + StructKw@131..137 "struct" + WhiteSpace@137..138 " " + Ident@138..141 "Bar" + WhiteSpace@141..142 " " + RecordFieldDefList@142..144 + LBrace@142..143 "{" + RBrace@143..144 "}" + Newline@144..145 "\n" + RBrace@145..146 "}" diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index cc4f36d9c1..7e6d9685e5 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -5,468 +5,473 @@ input_file: crates/parser2/test_files/syntax_node/items/trait.fe --- Root@0..592 ItemList@0..592 - Trait@0..15 - TraitKw@0..5 "trait" - WhiteSpace@5..6 " " 
- Ident@6..12 "Marker" - WhiteSpace@12..13 " " - TraitItemList@13..15 - LBrace@13..14 "{" - RBrace@14..15 "}" - Newline@15..17 "\n\n" - Trait@17..182 - ItemModifier@17..20 - PubKw@17..20 "pub" - WhiteSpace@20..21 " " - TraitKw@21..26 "trait" - WhiteSpace@26..27 " " - Ident@27..30 "Foo" - WhiteSpace@30..31 " " - TraitItemList@31..182 - LBrace@31..32 "{" - Newline@32..33 "\n" - WhiteSpace@33..37 " " - Func@37..79 - FnKw@37..39 "fn" - WhiteSpace@39..40 " " - Ident@40..43 "foo" - GenericParamList@43..67 - Lt@43..44 "<" - TypeGenericParam@44..52 - Ident@44..45 "T" - TypeBoundList@45..52 - Colon@45..46 ":" - WhiteSpace@46..47 " " - TypeBound@47..52 - Path@47..52 - PathSegment@47..52 - Ident@47..52 "Trait" - Comma@52..53 "," - WhiteSpace@53..54 " " - ConstGenericParam@54..66 - ConstKw@54..59 "const" - WhiteSpace@59..60 " " - Ident@60..61 "U" - Colon@61..62 ":" - WhiteSpace@62..63 " " - PathType@63..66 - Path@63..66 - PathSegment@63..66 - Ident@63..66 "i32" - Gt@66..67 ">" - FuncParamList@67..79 - LParen@67..68 "(" - FnParam@68..72 - Ident@68..69 "t" - Colon@69..70 ":" - WhiteSpace@70..71 " " - PathType@71..72 - Path@71..72 - PathSegment@71..72 - Ident@71..72 "T" - Comma@72..73 "," - WhiteSpace@73..74 " " - FnParam@74..78 - Ident@74..75 "u" - Colon@75..76 ":" - WhiteSpace@76..77 " " - PathType@77..78 - Path@77..78 - PathSegment@77..78 - Ident@77..78 "U" - RParen@78..79 ")" - Newline@79..81 "\n\n" - WhiteSpace@81..85 " " - Func@85..180 - FnKw@85..87 "fn" - WhiteSpace@87..88 " " - Ident@88..102 "default_method" - GenericParamList@102..116 - Lt@102..103 "<" - TypeGenericParam@103..115 - Ident@103..104 "T" - TypeBoundList@104..115 - Colon@104..105 ":" - WhiteSpace@105..106 " " - TypeBound@106..109 - Path@106..109 - PathSegment@106..109 - Ident@106..109 "Add" - WhiteSpace@109..110 " " - Plus@110..111 "+" - WhiteSpace@111..112 " " - TypeBound@112..115 - Path@112..115 - PathSegment@112..115 - Ident@112..115 "Sub" - Gt@115..116 ">" - FuncParamList@116..132 - LParen@116..117 "(" - FnParam@117..123 - Ident@117..120 "lhs" - Colon@120..121 ":" - WhiteSpace@121..122 " " - PathType@122..123 - Path@122..123 - PathSegment@122..123 - Ident@122..123 "T" - Comma@123..124 "," - WhiteSpace@124..125 " " - FnParam@125..131 - Ident@125..128 "rhs" - Colon@128..129 ":" - WhiteSpace@129..130 " " - PathType@130..131 - Path@130..131 - PathSegment@130..131 - Ident@130..131 "T" - RParen@131..132 ")" - WhiteSpace@132..134 " " - Arrow@134..136 "->" - WhiteSpace@136..137 " " - PathType@137..140 - Path@137..140 - PathSegment@137..140 - Ident@137..140 "i32" - WhiteSpace@140..141 " " - BlockExpr@141..180 - LBrace@141..142 "{" - Newline@142..143 "\n" - WhiteSpace@143..151 " " - ExprStmt@151..174 - BinExpr@151..174 - BinExpr@151..160 - PathExpr@151..154 - Path@151..154 - PathSegment@151..154 - Ident@151..154 "lhs" - WhiteSpace@154..155 " " - Plus@155..156 "+" - WhiteSpace@156..157 " " - PathExpr@157..160 - Path@157..160 - PathSegment@157..160 - Ident@157..160 "lhs" - WhiteSpace@160..161 " " - Minus@161..162 "-" - WhiteSpace@162..163 " " - ParenExpr@163..174 - LParen@163..164 "(" - BinExpr@164..173 - PathExpr@164..167 - Path@164..167 - PathSegment@164..167 - Ident@164..167 "rhs" - WhiteSpace@167..168 " " - Plus@168..169 "+" - WhiteSpace@169..170 " " - PathExpr@170..173 - Path@170..173 - PathSegment@170..173 - Ident@170..173 "rhs" - RParen@173..174 ")" - Newline@174..175 "\n" - WhiteSpace@175..179 " " - RBrace@179..180 "}" - Newline@180..181 "\n" - RBrace@181..182 "}" - Newline@182..184 "\n\n" - Trait@184..271 - ItemModifier@184..187 - 
PubKw@184..187 "pub" - WhiteSpace@187..188 " " - TraitKw@188..193 "trait" - WhiteSpace@193..194 " " - Ident@194..197 "Add" - GenericParamList@197..207 - Lt@197..198 "<" - TypeGenericParam@198..206 - Ident@198..201 "RHS" - TypeBoundList@201..206 - Colon@201..202 ":" - WhiteSpace@202..203 " " - TypeBound@203..206 - Path@203..206 - PathSegment@203..206 - Ident@203..206 "Add" - Gt@206..207 ">" - WhiteSpace@207..208 " " - Newline@208..209 "\n" - TraitItemList@209..271 - LBrace@209..210 "{" - Newline@210..211 "\n" - WhiteSpace@211..215 " " - Func@215..270 - FnKw@215..217 "fn" - WhiteSpace@217..218 " " - Ident@218..221 "add" - FuncParamList@221..237 - LParen@221..222 "(" - FnParam@222..226 - SelfKw@222..226 "self" - Comma@226..227 "," - WhiteSpace@227..228 " " - FnParam@228..236 - Ident@228..231 "rhs" - Colon@231..232 ":" - WhiteSpace@232..233 " " - PathType@233..236 - Path@233..236 - PathSegment@233..236 - Ident@233..236 "Rhs" - RParen@236..237 ")" - WhiteSpace@237..238 " " - Arrow@238..240 "->" - WhiteSpace@240..241 " " - SelfType@241..245 - SelfTypeKw@241..245 "Self" - WhiteSpace@245..246 " " - Newline@246..247 "\n" - WhiteSpace@247..255 " " - WhereClause@255..270 - WhereKw@255..260 "where" - WhiteSpace@260..261 " " - WherePredicate@261..270 - PathType@261..264 - Path@261..264 - PathSegment@261..264 - Ident@261..264 "RHS" - TypeBoundList@264..269 - Colon@264..265 ":" - WhiteSpace@265..266 " " - TypeBound@266..269 - Path@266..269 - PathSegment@266..269 - Ident@266..269 "Sub" - Newline@269..270 "\n" - RBrace@270..271 "}" - Newline@271..274 "\n\n\n" - Trait@274..355 - ItemModifier@274..277 - PubKw@274..277 "pub" - WhiteSpace@277..278 " " - TraitKw@278..283 "trait" - WhiteSpace@283..284 " " - Ident@284..289 "Parse" - WhiteSpace@289..290 " " - TraitItemList@290..355 - LBrace@290..291 "{" - Newline@291..292 "\n" - WhiteSpace@292..296 " " - Func@296..353 - FnKw@296..298 "fn" - WhiteSpace@298..299 " " - Ident@299..304 "parse" - GenericParamList@304..320 - Lt@304..305 "<" - TypeGenericParam@305..319 - Ident@305..306 "S" - TypeBoundList@306..319 - Colon@306..307 ":" - WhiteSpace@307..308 " " - TypeBound@308..319 - Path@308..319 - PathSegment@308..319 - Ident@308..319 "TokenStream" - Gt@319..320 ">" - FuncParamList@320..353 - LParen@320..321 "(" - FnParam@321..329 - MutKw@321..324 "mut" - WhiteSpace@324..325 " " - SelfKw@325..329 "self" - Comma@329..330 "," - WhiteSpace@330..331 " " - FnParam@331..352 - MutKw@331..334 "mut" - WhiteSpace@334..335 " " - Ident@335..341 "parser" - Colon@341..342 ":" - WhiteSpace@342..343 " " - PathType@343..352 - Path@343..349 - PathSegment@343..349 - Ident@343..349 "Parser" - GenericArgList@349..352 - Lt@349..350 "<" - TypeGenericArg@350..351 - PathType@350..351 - Path@350..351 - PathSegment@350..351 - Ident@350..351 "S" - Gt@351..352 ">" - RParen@352..353 ")" - Newline@353..354 "\n" - RBrace@354..355 "}" - Newline@355..357 "\n\n" - Impl@357..592 - ImplKw@357..361 "impl" - GenericParamList@361..364 - Lt@361..362 "<" - TypeGenericParam@362..363 - Ident@362..363 "S" - Gt@363..364 ">" - WhiteSpace@364..365 " " - PathType@365..374 - Path@365..371 - PathSegment@365..371 - Ident@365..371 "Parser" - GenericArgList@371..374 - Lt@371..372 "<" - TypeGenericArg@372..373 - PathType@372..373 - Path@372..373 - PathSegment@372..373 - Ident@372..373 "S" - Gt@373..374 ">" - WhiteSpace@374..375 " " - Newline@375..376 "\n" - WhiteSpace@376..380 " " - WhereClause@380..409 - WhereKw@380..385 "where" - WhiteSpace@385..386 " " - WherePredicate@386..409 - PathType@386..387 - Path@386..387 - 
PathSegment@386..387 - Ident@386..387 "S" - TypeBoundList@387..408 - Colon@387..388 ":" - WhiteSpace@388..389 " " - TypeBound@389..400 - Path@389..400 - PathSegment@389..400 - Ident@389..400 "TokenStream" - WhiteSpace@400..401 " " - Plus@401..402 "+" - WhiteSpace@402..403 " " - TypeBound@403..408 - Path@403..408 - PathSegment@403..408 - Ident@403..408 "Clone" - Newline@408..409 "\n" - ImplItemList@409..592 - LBrace@409..410 "{" - Newline@410..411 "\n" - WhiteSpace@411..415 " " - Func@415..590 - ItemModifier@415..418 - PubKw@415..418 "pub" - WhiteSpace@418..419 " " - FnKw@419..421 "fn" - WhiteSpace@421..422 " " - Ident@422..427 "parse" - GenericParamList@427..437 - Lt@427..428 "<" - TypeGenericParam@428..436 - Ident@428..429 "T" - TypeBoundList@429..436 - Colon@429..430 ":" - WhiteSpace@430..431 " " - TypeBound@431..436 - Path@431..436 - PathSegment@431..436 - Ident@431..436 "Parse" - Gt@436..437 ">" - FuncParamList@437..493 - LParen@437..438 "(" - FnParam@438..446 - MutKw@438..441 "mut" - WhiteSpace@441..442 " " - SelfKw@442..446 "self" - Comma@446..447 "," - WhiteSpace@447..448 " " - FnParam@448..460 - MutKw@448..451 "mut" - WhiteSpace@451..452 " " - Ident@452..457 "scope" - Colon@457..458 ":" - WhiteSpace@458..459 " " - PathType@459..460 - Path@459..460 - PathSegment@459..460 - Ident@459..460 "T" - Comma@460..461 "," - WhiteSpace@461..462 " " - FnParam@462..492 - Ident@462..472 "checkpoint" - Colon@472..473 ":" - WhiteSpace@473..474 " " - PathType@474..492 - Path@474..480 - PathSegment@474..480 - Ident@474..480 "Option" - GenericArgList@480..492 - Lt@480..481 "<" - TypeGenericArg@481..491 - PathType@481..491 - Path@481..491 - PathSegment@481..491 - Ident@481..491 "Checkpoint" - Gt@491..492 ">" - RParen@492..493 ")" - WhiteSpace@493..494 " " - Arrow@494..496 "->" - WhiteSpace@496..497 " " - TupleType@497..515 - LParen@497..498 "(" - PathType@498..502 - Path@498..502 - PathSegment@498..502 - Ident@498..502 "bool" - Comma@502..503 "," - WhiteSpace@503..504 " " - PathType@504..514 - Path@504..514 - PathSegment@504..514 - Ident@504..514 "Checkpoint" - RParen@514..515 ")" - WhiteSpace@515..516 " " - BlockExpr@516..590 - LBrace@516..517 "{" - Newline@517..518 "\n" - WhiteSpace@518..526 " " - ExprStmt@526..584 - TupleExpr@526..584 - LParen@526..527 "(" - CallExpr@527..570 - PathExpr@527..547 - Path@527..547 - PathSegment@527..537 - Ident@527..537 "SyntaxNode" - Colon2@537..539 "::" - PathSegment@539..547 - Ident@539..547 "new_root" - CallArgList@547..570 - LParen@547..548 "(" - CallArg@548..569 - MethodCallExpr@548..569 - FieldExpr@548..560 - PathExpr@548..552 - Path@548..552 - PathSegment@548..552 - SelfKw@548..552 "self" - Dot@552..553 "." - Ident@553..560 "builder" - Dot@560..561 "." - Ident@561..567 "finish" - CallArgList@567..569 - LParen@567..568 "(" - RParen@568..569 ")" - RParen@569..570 ")" - Comma@570..571 "," - WhiteSpace@571..572 " " - FieldExpr@572..583 - PathExpr@572..576 - Path@572..576 - PathSegment@572..576 - SelfKw@572..576 "self" - Dot@576..577 "." 
- Ident@577..583 "errors" - RParen@583..584 ")" - Newline@584..585 "\n" - WhiteSpace@585..589 " " - RBrace@589..590 "}" - Newline@590..591 "\n" - RBrace@591..592 "}" + Item@0..17 + Trait@0..15 + TraitKw@0..5 "trait" + WhiteSpace@5..6 " " + Ident@6..12 "Marker" + WhiteSpace@12..13 " " + TraitItemList@13..15 + LBrace@13..14 "{" + RBrace@14..15 "}" + Newline@15..17 "\n\n" + Item@17..184 + Trait@17..182 + ItemModifier@17..20 + PubKw@17..20 "pub" + WhiteSpace@20..21 " " + TraitKw@21..26 "trait" + WhiteSpace@26..27 " " + Ident@27..30 "Foo" + WhiteSpace@30..31 " " + TraitItemList@31..182 + LBrace@31..32 "{" + Newline@32..33 "\n" + WhiteSpace@33..37 " " + Func@37..79 + FnKw@37..39 "fn" + WhiteSpace@39..40 " " + Ident@40..43 "foo" + GenericParamList@43..67 + Lt@43..44 "<" + TypeGenericParam@44..52 + Ident@44..45 "T" + TypeBoundList@45..52 + Colon@45..46 ":" + WhiteSpace@46..47 " " + TypeBound@47..52 + Path@47..52 + PathSegment@47..52 + Ident@47..52 "Trait" + Comma@52..53 "," + WhiteSpace@53..54 " " + ConstGenericParam@54..66 + ConstKw@54..59 "const" + WhiteSpace@59..60 " " + Ident@60..61 "U" + Colon@61..62 ":" + WhiteSpace@62..63 " " + PathType@63..66 + Path@63..66 + PathSegment@63..66 + Ident@63..66 "i32" + Gt@66..67 ">" + FuncParamList@67..79 + LParen@67..68 "(" + FnParam@68..72 + Ident@68..69 "t" + Colon@69..70 ":" + WhiteSpace@70..71 " " + PathType@71..72 + Path@71..72 + PathSegment@71..72 + Ident@71..72 "T" + Comma@72..73 "," + WhiteSpace@73..74 " " + FnParam@74..78 + Ident@74..75 "u" + Colon@75..76 ":" + WhiteSpace@76..77 " " + PathType@77..78 + Path@77..78 + PathSegment@77..78 + Ident@77..78 "U" + RParen@78..79 ")" + Newline@79..81 "\n\n" + WhiteSpace@81..85 " " + Func@85..180 + FnKw@85..87 "fn" + WhiteSpace@87..88 " " + Ident@88..102 "default_method" + GenericParamList@102..116 + Lt@102..103 "<" + TypeGenericParam@103..115 + Ident@103..104 "T" + TypeBoundList@104..115 + Colon@104..105 ":" + WhiteSpace@105..106 " " + TypeBound@106..109 + Path@106..109 + PathSegment@106..109 + Ident@106..109 "Add" + WhiteSpace@109..110 " " + Plus@110..111 "+" + WhiteSpace@111..112 " " + TypeBound@112..115 + Path@112..115 + PathSegment@112..115 + Ident@112..115 "Sub" + Gt@115..116 ">" + FuncParamList@116..132 + LParen@116..117 "(" + FnParam@117..123 + Ident@117..120 "lhs" + Colon@120..121 ":" + WhiteSpace@121..122 " " + PathType@122..123 + Path@122..123 + PathSegment@122..123 + Ident@122..123 "T" + Comma@123..124 "," + WhiteSpace@124..125 " " + FnParam@125..131 + Ident@125..128 "rhs" + Colon@128..129 ":" + WhiteSpace@129..130 " " + PathType@130..131 + Path@130..131 + PathSegment@130..131 + Ident@130..131 "T" + RParen@131..132 ")" + WhiteSpace@132..134 " " + Arrow@134..136 "->" + WhiteSpace@136..137 " " + PathType@137..140 + Path@137..140 + PathSegment@137..140 + Ident@137..140 "i32" + WhiteSpace@140..141 " " + BlockExpr@141..180 + LBrace@141..142 "{" + Newline@142..143 "\n" + WhiteSpace@143..151 " " + ExprStmt@151..174 + BinExpr@151..174 + BinExpr@151..160 + PathExpr@151..154 + Path@151..154 + PathSegment@151..154 + Ident@151..154 "lhs" + WhiteSpace@154..155 " " + Plus@155..156 "+" + WhiteSpace@156..157 " " + PathExpr@157..160 + Path@157..160 + PathSegment@157..160 + Ident@157..160 "lhs" + WhiteSpace@160..161 " " + Minus@161..162 "-" + WhiteSpace@162..163 " " + ParenExpr@163..174 + LParen@163..164 "(" + BinExpr@164..173 + PathExpr@164..167 + Path@164..167 + PathSegment@164..167 + Ident@164..167 "rhs" + WhiteSpace@167..168 " " + Plus@168..169 "+" + WhiteSpace@169..170 " " + PathExpr@170..173 + Path@170..173 + 
PathSegment@170..173 + Ident@170..173 "rhs" + RParen@173..174 ")" + Newline@174..175 "\n" + WhiteSpace@175..179 " " + RBrace@179..180 "}" + Newline@180..181 "\n" + RBrace@181..182 "}" + Newline@182..184 "\n\n" + Item@184..274 + Trait@184..271 + ItemModifier@184..187 + PubKw@184..187 "pub" + WhiteSpace@187..188 " " + TraitKw@188..193 "trait" + WhiteSpace@193..194 " " + Ident@194..197 "Add" + GenericParamList@197..207 + Lt@197..198 "<" + TypeGenericParam@198..206 + Ident@198..201 "RHS" + TypeBoundList@201..206 + Colon@201..202 ":" + WhiteSpace@202..203 " " + TypeBound@203..206 + Path@203..206 + PathSegment@203..206 + Ident@203..206 "Add" + Gt@206..207 ">" + WhiteSpace@207..208 " " + Newline@208..209 "\n" + TraitItemList@209..271 + LBrace@209..210 "{" + Newline@210..211 "\n" + WhiteSpace@211..215 " " + Func@215..270 + FnKw@215..217 "fn" + WhiteSpace@217..218 " " + Ident@218..221 "add" + FuncParamList@221..237 + LParen@221..222 "(" + FnParam@222..226 + SelfKw@222..226 "self" + Comma@226..227 "," + WhiteSpace@227..228 " " + FnParam@228..236 + Ident@228..231 "rhs" + Colon@231..232 ":" + WhiteSpace@232..233 " " + PathType@233..236 + Path@233..236 + PathSegment@233..236 + Ident@233..236 "Rhs" + RParen@236..237 ")" + WhiteSpace@237..238 " " + Arrow@238..240 "->" + WhiteSpace@240..241 " " + SelfType@241..245 + SelfTypeKw@241..245 "Self" + WhiteSpace@245..246 " " + Newline@246..247 "\n" + WhiteSpace@247..255 " " + WhereClause@255..270 + WhereKw@255..260 "where" + WhiteSpace@260..261 " " + WherePredicate@261..270 + PathType@261..264 + Path@261..264 + PathSegment@261..264 + Ident@261..264 "RHS" + TypeBoundList@264..269 + Colon@264..265 ":" + WhiteSpace@265..266 " " + TypeBound@266..269 + Path@266..269 + PathSegment@266..269 + Ident@266..269 "Sub" + Newline@269..270 "\n" + RBrace@270..271 "}" + Newline@271..274 "\n\n\n" + Item@274..357 + Trait@274..355 + ItemModifier@274..277 + PubKw@274..277 "pub" + WhiteSpace@277..278 " " + TraitKw@278..283 "trait" + WhiteSpace@283..284 " " + Ident@284..289 "Parse" + WhiteSpace@289..290 " " + TraitItemList@290..355 + LBrace@290..291 "{" + Newline@291..292 "\n" + WhiteSpace@292..296 " " + Func@296..353 + FnKw@296..298 "fn" + WhiteSpace@298..299 " " + Ident@299..304 "parse" + GenericParamList@304..320 + Lt@304..305 "<" + TypeGenericParam@305..319 + Ident@305..306 "S" + TypeBoundList@306..319 + Colon@306..307 ":" + WhiteSpace@307..308 " " + TypeBound@308..319 + Path@308..319 + PathSegment@308..319 + Ident@308..319 "TokenStream" + Gt@319..320 ">" + FuncParamList@320..353 + LParen@320..321 "(" + FnParam@321..329 + MutKw@321..324 "mut" + WhiteSpace@324..325 " " + SelfKw@325..329 "self" + Comma@329..330 "," + WhiteSpace@330..331 " " + FnParam@331..352 + MutKw@331..334 "mut" + WhiteSpace@334..335 " " + Ident@335..341 "parser" + Colon@341..342 ":" + WhiteSpace@342..343 " " + PathType@343..352 + Path@343..349 + PathSegment@343..349 + Ident@343..349 "Parser" + GenericArgList@349..352 + Lt@349..350 "<" + TypeGenericArg@350..351 + PathType@350..351 + Path@350..351 + PathSegment@350..351 + Ident@350..351 "S" + Gt@351..352 ">" + RParen@352..353 ")" + Newline@353..354 "\n" + RBrace@354..355 "}" + Newline@355..357 "\n\n" + Item@357..592 + Impl@357..592 + ImplKw@357..361 "impl" + GenericParamList@361..364 + Lt@361..362 "<" + TypeGenericParam@362..363 + Ident@362..363 "S" + Gt@363..364 ">" + WhiteSpace@364..365 " " + PathType@365..374 + Path@365..371 + PathSegment@365..371 + Ident@365..371 "Parser" + GenericArgList@371..374 + Lt@371..372 "<" + TypeGenericArg@372..373 + PathType@372..373 
+ Path@372..373 + PathSegment@372..373 + Ident@372..373 "S" + Gt@373..374 ">" + WhiteSpace@374..375 " " + Newline@375..376 "\n" + WhiteSpace@376..380 " " + WhereClause@380..409 + WhereKw@380..385 "where" + WhiteSpace@385..386 " " + WherePredicate@386..409 + PathType@386..387 + Path@386..387 + PathSegment@386..387 + Ident@386..387 "S" + TypeBoundList@387..408 + Colon@387..388 ":" + WhiteSpace@388..389 " " + TypeBound@389..400 + Path@389..400 + PathSegment@389..400 + Ident@389..400 "TokenStream" + WhiteSpace@400..401 " " + Plus@401..402 "+" + WhiteSpace@402..403 " " + TypeBound@403..408 + Path@403..408 + PathSegment@403..408 + Ident@403..408 "Clone" + Newline@408..409 "\n" + ImplItemList@409..592 + LBrace@409..410 "{" + Newline@410..411 "\n" + WhiteSpace@411..415 " " + Func@415..590 + ItemModifier@415..418 + PubKw@415..418 "pub" + WhiteSpace@418..419 " " + FnKw@419..421 "fn" + WhiteSpace@421..422 " " + Ident@422..427 "parse" + GenericParamList@427..437 + Lt@427..428 "<" + TypeGenericParam@428..436 + Ident@428..429 "T" + TypeBoundList@429..436 + Colon@429..430 ":" + WhiteSpace@430..431 " " + TypeBound@431..436 + Path@431..436 + PathSegment@431..436 + Ident@431..436 "Parse" + Gt@436..437 ">" + FuncParamList@437..493 + LParen@437..438 "(" + FnParam@438..446 + MutKw@438..441 "mut" + WhiteSpace@441..442 " " + SelfKw@442..446 "self" + Comma@446..447 "," + WhiteSpace@447..448 " " + FnParam@448..460 + MutKw@448..451 "mut" + WhiteSpace@451..452 " " + Ident@452..457 "scope" + Colon@457..458 ":" + WhiteSpace@458..459 " " + PathType@459..460 + Path@459..460 + PathSegment@459..460 + Ident@459..460 "T" + Comma@460..461 "," + WhiteSpace@461..462 " " + FnParam@462..492 + Ident@462..472 "checkpoint" + Colon@472..473 ":" + WhiteSpace@473..474 " " + PathType@474..492 + Path@474..480 + PathSegment@474..480 + Ident@474..480 "Option" + GenericArgList@480..492 + Lt@480..481 "<" + TypeGenericArg@481..491 + PathType@481..491 + Path@481..491 + PathSegment@481..491 + Ident@481..491 "Checkpoint" + Gt@491..492 ">" + RParen@492..493 ")" + WhiteSpace@493..494 " " + Arrow@494..496 "->" + WhiteSpace@496..497 " " + TupleType@497..515 + LParen@497..498 "(" + PathType@498..502 + Path@498..502 + PathSegment@498..502 + Ident@498..502 "bool" + Comma@502..503 "," + WhiteSpace@503..504 " " + PathType@504..514 + Path@504..514 + PathSegment@504..514 + Ident@504..514 "Checkpoint" + RParen@514..515 ")" + WhiteSpace@515..516 " " + BlockExpr@516..590 + LBrace@516..517 "{" + Newline@517..518 "\n" + WhiteSpace@518..526 " " + ExprStmt@526..584 + TupleExpr@526..584 + LParen@526..527 "(" + CallExpr@527..570 + PathExpr@527..547 + Path@527..547 + PathSegment@527..537 + Ident@527..537 "SyntaxNode" + Colon2@537..539 "::" + PathSegment@539..547 + Ident@539..547 "new_root" + CallArgList@547..570 + LParen@547..548 "(" + CallArg@548..569 + MethodCallExpr@548..569 + FieldExpr@548..560 + PathExpr@548..552 + Path@548..552 + PathSegment@548..552 + SelfKw@548..552 "self" + Dot@552..553 "." + Ident@553..560 "builder" + Dot@560..561 "." + Ident@561..567 "finish" + CallArgList@567..569 + LParen@567..568 "(" + RParen@568..569 ")" + RParen@569..570 ")" + Comma@570..571 "," + WhiteSpace@571..572 " " + FieldExpr@572..583 + PathExpr@572..576 + Path@572..576 + PathSegment@572..576 + SelfKw@572..576 "self" + Dot@576..577 "." 
+ Ident@577..583 "errors" + RParen@583..584 ")" + Newline@584..585 "\n" + WhiteSpace@585..589 " " + RBrace@589..590 "}" + Newline@590..591 "\n" + RBrace@591..592 "}" diff --git a/crates/parser2/test_files/syntax_node/items/type.snap b/crates/parser2/test_files/syntax_node/items/type.snap index 9a4a5c48ab..cf1e51d3ca 100644 --- a/crates/parser2/test_files/syntax_node/items/type.snap +++ b/crates/parser2/test_files/syntax_node/items/type.snap @@ -5,83 +5,86 @@ input_file: crates/parser2/test_files/syntax_node/items/type.fe --- Root@0..98 ItemList@0..98 - TypeAlias@0..18 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - TypeKw@4..8 "type" - WhiteSpace@8..9 " " - Ident@9..12 "Int" - WhiteSpace@12..13 " " - Eq@13..14 "=" - WhiteSpace@14..15 " " - PathType@15..18 - Path@15..18 - PathSegment@15..18 - Ident@15..18 "i32" - WhiteSpace@18..19 " " - Newline@19..21 "\n\n" - TypeAlias@21..54 - TypeKw@21..25 "type" - WhiteSpace@25..26 " " - Ident@26..32 "Result" - GenericParamList@32..35 - Lt@32..33 "<" - TypeGenericParam@33..34 - Ident@33..34 "T" - Gt@34..35 ">" - WhiteSpace@35..36 " " - Eq@36..37 "=" - WhiteSpace@37..38 " " - PathType@38..54 - Path@38..44 - PathSegment@38..44 - Ident@38..44 "Result" - GenericArgList@44..54 - Lt@44..45 "<" - TypeGenericArg@45..46 - PathType@45..46 - Path@45..46 - PathSegment@45..46 - Ident@45..46 "T" - Comma@46..47 "," - WhiteSpace@47..48 " " - TypeGenericArg@48..53 - PathType@48..53 - Path@48..53 - PathSegment@48..53 - Ident@48..53 "Error" - Gt@53..54 ">" - Newline@54..56 "\n\n" - TypeAlias@56..98 - TypeKw@56..60 "type" - WhiteSpace@60..61 " " - Ident@61..70 "WithBound" - GenericParamList@70..85 - Lt@70..71 "<" - TypeGenericParam@71..84 - Ident@71..72 "T" - TypeBoundList@72..84 - Colon@72..73 ":" - WhiteSpace@73..74 " " - TypeBound@74..84 - Path@74..84 - PathSegment@74..84 - Ident@74..84 "TraitBound" - Gt@84..85 ">" - WhiteSpace@85..86 " " - Eq@86..87 "=" - WhiteSpace@87..88 " " - PathType@88..98 - Path@88..95 - PathSegment@88..95 - Ident@88..95 "NoBound" - GenericArgList@95..98 - Lt@95..96 "<" - TypeGenericArg@96..97 - PathType@96..97 - Path@96..97 - PathSegment@96..97 - Ident@96..97 "T" - Gt@97..98 ">" + Item@0..21 + TypeAlias@0..18 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + TypeKw@4..8 "type" + WhiteSpace@8..9 " " + Ident@9..12 "Int" + WhiteSpace@12..13 " " + Eq@13..14 "=" + WhiteSpace@14..15 " " + PathType@15..18 + Path@15..18 + PathSegment@15..18 + Ident@15..18 "i32" + WhiteSpace@18..19 " " + Newline@19..21 "\n\n" + Item@21..56 + TypeAlias@21..54 + TypeKw@21..25 "type" + WhiteSpace@25..26 " " + Ident@26..32 "Result" + GenericParamList@32..35 + Lt@32..33 "<" + TypeGenericParam@33..34 + Ident@33..34 "T" + Gt@34..35 ">" + WhiteSpace@35..36 " " + Eq@36..37 "=" + WhiteSpace@37..38 " " + PathType@38..54 + Path@38..44 + PathSegment@38..44 + Ident@38..44 "Result" + GenericArgList@44..54 + Lt@44..45 "<" + TypeGenericArg@45..46 + PathType@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "T" + Comma@46..47 "," + WhiteSpace@47..48 " " + TypeGenericArg@48..53 + PathType@48..53 + Path@48..53 + PathSegment@48..53 + Ident@48..53 "Error" + Gt@53..54 ">" + Newline@54..56 "\n\n" + Item@56..98 + TypeAlias@56..98 + TypeKw@56..60 "type" + WhiteSpace@60..61 " " + Ident@61..70 "WithBound" + GenericParamList@70..85 + Lt@70..71 "<" + TypeGenericParam@71..84 + Ident@71..72 "T" + TypeBoundList@72..84 + Colon@72..73 ":" + WhiteSpace@73..74 " " + TypeBound@74..84 + Path@74..84 + PathSegment@74..84 + Ident@74..84 "TraitBound" + Gt@84..85 ">" + 
WhiteSpace@85..86 " " + Eq@86..87 "=" + WhiteSpace@87..88 " " + PathType@88..98 + Path@88..95 + PathSegment@88..95 + Ident@88..95 "NoBound" + GenericArgList@95..98 + Lt@95..96 "<" + TypeGenericArg@96..97 + PathType@96..97 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "T" + Gt@97..98 ">" diff --git a/crates/parser2/test_files/syntax_node/items/use.snap b/crates/parser2/test_files/syntax_node/items/use.snap index 102ff6d924..c7ceedab25 100644 --- a/crates/parser2/test_files/syntax_node/items/use.snap +++ b/crates/parser2/test_files/syntax_node/items/use.snap @@ -5,282 +5,295 @@ input_file: crates/parser2/test_files/syntax_node/items/use.fe --- Root@0..308 ItemList@0..308 - Use@0..12 - UseKw@0..3 "use" - WhiteSpace@3..4 " " - UseTree@4..12 - UsePath@4..12 - UsePathSegment@4..7 - Ident@4..7 "Foo" - Colon2@7..9 "::" - UsePathSegment@9..12 - Ident@9..12 "Bar" - Newline@12..13 "\n" - Use@13..29 - ItemModifier@13..16 - PubKw@13..16 "pub" - WhiteSpace@16..17 " " - UseKw@17..20 "use" - WhiteSpace@20..21 " " - UseTree@21..29 - UsePath@21..29 - UsePathSegment@21..24 - Ident@21..24 "Foo" - Colon2@24..26 "::" - UsePathSegment@26..29 - Ident@26..29 "Bar" - Newline@29..30 "\n" - Use@30..40 - UseKw@30..33 "use" - WhiteSpace@33..34 " " - UseTree@34..40 - UsePath@34..40 - UsePathSegment@34..37 - Ident@34..37 "Foo" - Colon2@37..39 "::" - UsePathSegment@39..40 - Star@39..40 "*" - Newline@40..41 "\n" - Use@41..61 - UseKw@41..44 "use" - WhiteSpace@44..45 " " - UseTree@45..61 - UsePath@45..53 - UsePathSegment@45..48 - Ident@45..48 "Foo" - Colon2@48..50 "::" - UsePathSegment@50..53 - Ident@50..53 "Bar" - WhiteSpace@53..54 " " - UseTreeRename@54..61 - AsKw@54..56 "as" - WhiteSpace@56..57 " " - Ident@57..61 "Bar1" - Newline@61..62 "\n" - Use@62..81 - UseKw@62..65 "use" - WhiteSpace@65..66 " " - UseTree@66..81 - UsePath@66..76 - UsePathSegment@66..69 - Ident@66..69 "Foo" - Colon2@69..71 "::" - UsePathSegment@71..76 - Ident@71..76 "Trait" - WhiteSpace@76..77 " " - UseTreeRename@77..81 - AsKw@77..79 "as" - WhiteSpace@79..80 " " - Underscore@80..81 "_" - Newline@81..83 "\n\n" - Use@83..102 - UseKw@83..86 "use" - WhiteSpace@86..87 " " - UseTree@87..102 - UsePath@87..90 - UsePathSegment@87..90 - Ident@87..90 "Foo" - Colon2@90..92 "::" - UseTreeList@92..102 - LBrace@92..93 "{" - UseTree@93..96 - UsePath@93..96 - UsePathSegment@93..96 - Ident@93..96 "Foo" - Comma@96..97 "," - WhiteSpace@97..98 " " - UseTree@98..101 - UsePath@98..101 - UsePathSegment@98..101 - Ident@98..101 "Bar" - RBrace@101..102 "}" - Newline@102..103 "\n" - Use@103..123 - UseKw@103..106 "use" - WhiteSpace@106..107 " " - UseTree@107..123 - UsePath@107..110 - UsePathSegment@107..110 - Ident@107..110 "Foo" - Colon2@110..112 "::" - UseTreeList@112..123 - LBrace@112..113 "{" - UseTree@113..117 - UsePath@113..117 - UsePathSegment@113..117 - SelfKw@113..117 "self" - Comma@117..118 "," - WhiteSpace@118..119 " " - UseTree@119..122 - UsePath@119..122 - UsePathSegment@119..122 - Ident@119..122 "Bar" - RBrace@122..123 "}" - Newline@123..124 "\n" - Use@124..152 - UseKw@124..127 "use" - WhiteSpace@127..128 " " - UseTree@128..152 - UsePath@128..131 - UsePathSegment@128..131 - Ident@128..131 "Foo" - Colon2@131..133 "::" - UseTreeList@133..152 - LBrace@133..134 "{" - UseTree@134..138 - UsePath@134..138 - UsePathSegment@134..138 - SelfKw@134..138 "self" - Comma@138..139 "," - WhiteSpace@139..140 " " - UseTree@140..151 - UsePath@140..143 - UsePathSegment@140..143 - Ident@140..143 "Bar" - WhiteSpace@143..144 " " - UseTreeRename@144..151 - AsKw@144..146 "as" - 
WhiteSpace@146..147 " " - Ident@147..151 "Bar1" - RBrace@151..152 "}" - Newline@152..153 "\n" - Use@153..202 - UseKw@153..156 "use" - WhiteSpace@156..157 " " - UseTree@157..202 - UsePath@157..160 - UsePathSegment@157..160 - Ident@157..160 "Foo" - Colon2@160..162 "::" - UseTreeList@162..202 - LBrace@162..163 "{" - UseTree@163..176 - UsePath@163..167 - UsePathSegment@163..167 - SelfKw@163..167 "self" - WhiteSpace@167..168 " " - UseTreeRename@168..176 - AsKw@168..170 "as" - WhiteSpace@170..171 " " - Ident@171..176 "self_" - Comma@176..177 "," - WhiteSpace@177..178 " " - UseTree@178..198 - UsePath@178..181 - UsePathSegment@178..181 - Ident@178..181 "Bar" - Colon2@181..183 "::" - UseTreeList@183..198 - LBrace@183..184 "{" - UseTree@184..192 - UsePath@184..187 - UsePathSegment@184..187 - Ident@184..187 "Bar" - WhiteSpace@187..188 " " - UseTreeRename@188..192 - AsKw@188..190 "as" - WhiteSpace@190..191 " " - Underscore@191..192 "_" - Comma@192..193 "," - WhiteSpace@193..194 " " - UseTree@194..197 - UsePath@194..197 - UsePathSegment@194..197 - Ident@194..197 "Baz" - RBrace@197..198 "}" - Comma@198..199 "," - WhiteSpace@199..200 " " - UseTree@200..201 - UsePath@200..201 - UsePathSegment@200..201 - Star@200..201 "*" - RBrace@201..202 "}" - Newline@202..204 "\n\n" - Use@204..272 - UseKw@204..207 "use" - WhiteSpace@207..208 " " - UseTree@208..272 - UseTreeList@208..272 - LBrace@208..209 "{" - UseTree@209..225 - UsePath@209..217 - UsePathSegment@209..212 - Ident@209..212 "Foo" - Colon2@212..214 "::" - UsePathSegment@214..217 - Ident@214..217 "Bar" - WhiteSpace@217..218 " " - UseTreeRename@218..225 - AsKw@218..220 "as" - WhiteSpace@220..221 " " - Ident@221..225 "Bar1" - Comma@225..226 "," - WhiteSpace@226..227 " " - UseTree@227..243 - UsePath@227..235 - UsePathSegment@227..230 - Ident@227..230 "Bar" - Colon2@230..232 "::" - UsePathSegment@232..235 - Ident@232..235 "Bar" - WhiteSpace@235..236 " " - UseTreeRename@236..243 - AsKw@236..238 "as" - WhiteSpace@238..239 " " - Ident@239..243 "Bar2" - Comma@243..244 "," - WhiteSpace@244..245 " " - UseTree@245..261 - UsePath@245..253 - UsePathSegment@245..248 - Ident@245..248 "Baz" - Colon2@248..250 "::" - UsePathSegment@250..253 - Ident@250..253 "Bar" - WhiteSpace@253..254 " " - UseTreeRename@254..261 - AsKw@254..256 "as" - WhiteSpace@256..257 " " - Ident@257..261 "Bar3" - Comma@261..262 "," - WhiteSpace@262..263 " " - UseTree@263..271 - UsePath@263..271 - UsePathSegment@263..268 - Ident@263..268 "Trait" - Colon2@268..270 "::" - UsePathSegment@270..271 - Ident@270..271 "T" - RBrace@271..272 "}" - Newline@272..273 "\n" - Use@273..278 - UseKw@273..276 "use" - WhiteSpace@276..277 " " - UseTree@277..278 - UsePath@277..278 - UsePathSegment@277..278 - Star@277..278 "*" - Newline@278..280 "\n\n" - Use@280..292 - UseKw@280..283 "use" - WhiteSpace@283..284 " " - UseTree@284..292 - UsePath@284..292 - UsePathSegment@284..289 - SuperKw@284..289 "super" - Colon2@289..291 "::" - UsePathSegment@291..292 - Star@291..292 "*" - Newline@292..293 "\n" - Use@293..307 - UseKw@293..296 "use" - WhiteSpace@296..297 " " - UseTree@297..307 - UsePath@297..307 - UsePathSegment@297..302 - IngotKw@297..302 "ingot" - Colon2@302..304 "::" - UsePathSegment@304..307 - Ident@304..307 "Foo" - Newline@307..308 "\n" + Item@0..13 + Use@0..12 + UseKw@0..3 "use" + WhiteSpace@3..4 " " + UseTree@4..12 + UsePath@4..12 + UsePathSegment@4..7 + Ident@4..7 "Foo" + Colon2@7..9 "::" + UsePathSegment@9..12 + Ident@9..12 "Bar" + Newline@12..13 "\n" + Item@13..30 + Use@13..29 + ItemModifier@13..16 + PubKw@13..16 
"pub" + WhiteSpace@16..17 " " + UseKw@17..20 "use" + WhiteSpace@20..21 " " + UseTree@21..29 + UsePath@21..29 + UsePathSegment@21..24 + Ident@21..24 "Foo" + Colon2@24..26 "::" + UsePathSegment@26..29 + Ident@26..29 "Bar" + Newline@29..30 "\n" + Item@30..41 + Use@30..40 + UseKw@30..33 "use" + WhiteSpace@33..34 " " + UseTree@34..40 + UsePath@34..40 + UsePathSegment@34..37 + Ident@34..37 "Foo" + Colon2@37..39 "::" + UsePathSegment@39..40 + Star@39..40 "*" + Newline@40..41 "\n" + Item@41..62 + Use@41..61 + UseKw@41..44 "use" + WhiteSpace@44..45 " " + UseTree@45..61 + UsePath@45..53 + UsePathSegment@45..48 + Ident@45..48 "Foo" + Colon2@48..50 "::" + UsePathSegment@50..53 + Ident@50..53 "Bar" + WhiteSpace@53..54 " " + UseTreeRename@54..61 + AsKw@54..56 "as" + WhiteSpace@56..57 " " + Ident@57..61 "Bar1" + Newline@61..62 "\n" + Item@62..83 + Use@62..81 + UseKw@62..65 "use" + WhiteSpace@65..66 " " + UseTree@66..81 + UsePath@66..76 + UsePathSegment@66..69 + Ident@66..69 "Foo" + Colon2@69..71 "::" + UsePathSegment@71..76 + Ident@71..76 "Trait" + WhiteSpace@76..77 " " + UseTreeRename@77..81 + AsKw@77..79 "as" + WhiteSpace@79..80 " " + Underscore@80..81 "_" + Newline@81..83 "\n\n" + Item@83..103 + Use@83..102 + UseKw@83..86 "use" + WhiteSpace@86..87 " " + UseTree@87..102 + UsePath@87..90 + UsePathSegment@87..90 + Ident@87..90 "Foo" + Colon2@90..92 "::" + UseTreeList@92..102 + LBrace@92..93 "{" + UseTree@93..96 + UsePath@93..96 + UsePathSegment@93..96 + Ident@93..96 "Foo" + Comma@96..97 "," + WhiteSpace@97..98 " " + UseTree@98..101 + UsePath@98..101 + UsePathSegment@98..101 + Ident@98..101 "Bar" + RBrace@101..102 "}" + Newline@102..103 "\n" + Item@103..124 + Use@103..123 + UseKw@103..106 "use" + WhiteSpace@106..107 " " + UseTree@107..123 + UsePath@107..110 + UsePathSegment@107..110 + Ident@107..110 "Foo" + Colon2@110..112 "::" + UseTreeList@112..123 + LBrace@112..113 "{" + UseTree@113..117 + UsePath@113..117 + UsePathSegment@113..117 + SelfKw@113..117 "self" + Comma@117..118 "," + WhiteSpace@118..119 " " + UseTree@119..122 + UsePath@119..122 + UsePathSegment@119..122 + Ident@119..122 "Bar" + RBrace@122..123 "}" + Newline@123..124 "\n" + Item@124..153 + Use@124..152 + UseKw@124..127 "use" + WhiteSpace@127..128 " " + UseTree@128..152 + UsePath@128..131 + UsePathSegment@128..131 + Ident@128..131 "Foo" + Colon2@131..133 "::" + UseTreeList@133..152 + LBrace@133..134 "{" + UseTree@134..138 + UsePath@134..138 + UsePathSegment@134..138 + SelfKw@134..138 "self" + Comma@138..139 "," + WhiteSpace@139..140 " " + UseTree@140..151 + UsePath@140..143 + UsePathSegment@140..143 + Ident@140..143 "Bar" + WhiteSpace@143..144 " " + UseTreeRename@144..151 + AsKw@144..146 "as" + WhiteSpace@146..147 " " + Ident@147..151 "Bar1" + RBrace@151..152 "}" + Newline@152..153 "\n" + Item@153..204 + Use@153..202 + UseKw@153..156 "use" + WhiteSpace@156..157 " " + UseTree@157..202 + UsePath@157..160 + UsePathSegment@157..160 + Ident@157..160 "Foo" + Colon2@160..162 "::" + UseTreeList@162..202 + LBrace@162..163 "{" + UseTree@163..176 + UsePath@163..167 + UsePathSegment@163..167 + SelfKw@163..167 "self" + WhiteSpace@167..168 " " + UseTreeRename@168..176 + AsKw@168..170 "as" + WhiteSpace@170..171 " " + Ident@171..176 "self_" + Comma@176..177 "," + WhiteSpace@177..178 " " + UseTree@178..198 + UsePath@178..181 + UsePathSegment@178..181 + Ident@178..181 "Bar" + Colon2@181..183 "::" + UseTreeList@183..198 + LBrace@183..184 "{" + UseTree@184..192 + UsePath@184..187 + UsePathSegment@184..187 + Ident@184..187 "Bar" + WhiteSpace@187..188 " " + 
UseTreeRename@188..192 + AsKw@188..190 "as" + WhiteSpace@190..191 " " + Underscore@191..192 "_" + Comma@192..193 "," + WhiteSpace@193..194 " " + UseTree@194..197 + UsePath@194..197 + UsePathSegment@194..197 + Ident@194..197 "Baz" + RBrace@197..198 "}" + Comma@198..199 "," + WhiteSpace@199..200 " " + UseTree@200..201 + UsePath@200..201 + UsePathSegment@200..201 + Star@200..201 "*" + RBrace@201..202 "}" + Newline@202..204 "\n\n" + Item@204..273 + Use@204..272 + UseKw@204..207 "use" + WhiteSpace@207..208 " " + UseTree@208..272 + UseTreeList@208..272 + LBrace@208..209 "{" + UseTree@209..225 + UsePath@209..217 + UsePathSegment@209..212 + Ident@209..212 "Foo" + Colon2@212..214 "::" + UsePathSegment@214..217 + Ident@214..217 "Bar" + WhiteSpace@217..218 " " + UseTreeRename@218..225 + AsKw@218..220 "as" + WhiteSpace@220..221 " " + Ident@221..225 "Bar1" + Comma@225..226 "," + WhiteSpace@226..227 " " + UseTree@227..243 + UsePath@227..235 + UsePathSegment@227..230 + Ident@227..230 "Bar" + Colon2@230..232 "::" + UsePathSegment@232..235 + Ident@232..235 "Bar" + WhiteSpace@235..236 " " + UseTreeRename@236..243 + AsKw@236..238 "as" + WhiteSpace@238..239 " " + Ident@239..243 "Bar2" + Comma@243..244 "," + WhiteSpace@244..245 " " + UseTree@245..261 + UsePath@245..253 + UsePathSegment@245..248 + Ident@245..248 "Baz" + Colon2@248..250 "::" + UsePathSegment@250..253 + Ident@250..253 "Bar" + WhiteSpace@253..254 " " + UseTreeRename@254..261 + AsKw@254..256 "as" + WhiteSpace@256..257 " " + Ident@257..261 "Bar3" + Comma@261..262 "," + WhiteSpace@262..263 " " + UseTree@263..271 + UsePath@263..271 + UsePathSegment@263..268 + Ident@263..268 "Trait" + Colon2@268..270 "::" + UsePathSegment@270..271 + Ident@270..271 "T" + RBrace@271..272 "}" + Newline@272..273 "\n" + Item@273..280 + Use@273..278 + UseKw@273..276 "use" + WhiteSpace@276..277 " " + UseTree@277..278 + UsePath@277..278 + UsePathSegment@277..278 + Star@277..278 "*" + Newline@278..280 "\n\n" + Item@280..293 + Use@280..292 + UseKw@280..283 "use" + WhiteSpace@283..284 " " + UseTree@284..292 + UsePath@284..292 + UsePathSegment@284..289 + SuperKw@284..289 "super" + Colon2@289..291 "::" + UsePathSegment@291..292 + Star@291..292 "*" + Newline@292..293 "\n" + Item@293..308 + Use@293..307 + UseKw@293..296 "use" + WhiteSpace@296..297 " " + UseTree@297..307 + UsePath@297..307 + UsePathSegment@297..302 + IngotKw@297..302 "ingot" + Colon2@302..304 "::" + UsePathSegment@304..307 + Ident@304..307 "Foo" + Newline@307..308 "\n" diff --git a/crates/parser2/test_files/syntax_node/structs/attr.snap b/crates/parser2/test_files/syntax_node/structs/attr.snap index 2de2602739..9b8e1c8aa7 100644 --- a/crates/parser2/test_files/syntax_node/structs/attr.snap +++ b/crates/parser2/test_files/syntax_node/structs/attr.snap @@ -5,75 +5,76 @@ input_file: crates/parser2/test_files/syntax_node/structs/attr.fe --- Root@0..170 ItemList@0..170 - Struct@0..170 - AttrList@0..56 - DocCommentAttr@0..15 - DocComment@0..15 "/// DocComment1" - Newline@15..16 "\n" - Attr@16..21 - Pound@16..17 "#" - Ident@17..21 "attr" - Newline@21..22 "\n" - Comment@22..39 "// normal comment" - Newline@39..40 "\n" - DocCommentAttr@40..55 - DocComment@40..55 "/// DocComment2" - Newline@55..56 "\n" - ItemModifier@56..59 - PubKw@56..59 "pub" - WhiteSpace@59..60 " " - StructKw@60..66 "struct" - WhiteSpace@66..67 " " - Ident@67..77 "StructAttr" - WhiteSpace@77..78 " " - RecordFieldDefList@78..170 - LBrace@78..79 "{" - Newline@79..80 "\n" - WhiteSpace@80..84 " " - RecordFieldDef@84..115 - AttrList@84..100 - 
DocCommentAttr@84..99 - DocComment@84..99 "/// This is `x`" - Newline@99..100 "\n" - WhiteSpace@100..104 " " - Ident@104..105 "x" - Colon@105..106 ":" - WhiteSpace@106..107 " " - PathType@107..115 - Path@107..115 - PathSegment@107..110 - Ident@107..110 "foo" - Colon2@110..112 "::" - PathSegment@112..115 - Ident@112..115 "Bar" - Newline@115..116 "\n" - WhiteSpace@116..120 " " - RecordFieldDef@120..168 - AttrList@120..158 - DocCommentAttr@120..135 - DocComment@120..135 "/// This is `y`" - Newline@135..136 "\n" - WhiteSpace@136..140 " " - Attr@140..157 - Pound@140..141 "#" - Ident@141..144 "cfg" - AttrArgList@144..157 - LParen@144..145 "(" - AttrArg@145..156 - Ident@145..151 "target" - Colon@151..152 ":" - WhiteSpace@152..153 " " - Ident@153..156 "evm" - RParen@156..157 ")" - Newline@157..158 "\n" - WhiteSpace@158..162 " " - Ident@162..163 "y" - Colon@163..164 ":" - WhiteSpace@164..165 " " - PathType@165..168 - Path@165..168 - PathSegment@165..168 - Ident@165..168 "i32" - Newline@168..169 "\n" - RBrace@169..170 "}" + Item@0..170 + Struct@0..170 + AttrList@0..56 + DocCommentAttr@0..15 + DocComment@0..15 "/// DocComment1" + Newline@15..16 "\n" + Attr@16..21 + Pound@16..17 "#" + Ident@17..21 "attr" + Newline@21..22 "\n" + Comment@22..39 "// normal comment" + Newline@39..40 "\n" + DocCommentAttr@40..55 + DocComment@40..55 "/// DocComment2" + Newline@55..56 "\n" + ItemModifier@56..59 + PubKw@56..59 "pub" + WhiteSpace@59..60 " " + StructKw@60..66 "struct" + WhiteSpace@66..67 " " + Ident@67..77 "StructAttr" + WhiteSpace@77..78 " " + RecordFieldDefList@78..170 + LBrace@78..79 "{" + Newline@79..80 "\n" + WhiteSpace@80..84 " " + RecordFieldDef@84..115 + AttrList@84..100 + DocCommentAttr@84..99 + DocComment@84..99 "/// This is `x`" + Newline@99..100 "\n" + WhiteSpace@100..104 " " + Ident@104..105 "x" + Colon@105..106 ":" + WhiteSpace@106..107 " " + PathType@107..115 + Path@107..115 + PathSegment@107..110 + Ident@107..110 "foo" + Colon2@110..112 "::" + PathSegment@112..115 + Ident@112..115 "Bar" + Newline@115..116 "\n" + WhiteSpace@116..120 " " + RecordFieldDef@120..168 + AttrList@120..158 + DocCommentAttr@120..135 + DocComment@120..135 "/// This is `y`" + Newline@135..136 "\n" + WhiteSpace@136..140 " " + Attr@140..157 + Pound@140..141 "#" + Ident@141..144 "cfg" + AttrArgList@144..157 + LParen@144..145 "(" + AttrArg@145..156 + Ident@145..151 "target" + Colon@151..152 ":" + WhiteSpace@152..153 " " + Ident@153..156 "evm" + RParen@156..157 ")" + Newline@157..158 "\n" + WhiteSpace@158..162 " " + Ident@162..163 "y" + Colon@163..164 ":" + WhiteSpace@164..165 " " + PathType@165..168 + Path@165..168 + PathSegment@165..168 + Ident@165..168 "i32" + Newline@168..169 "\n" + RBrace@169..170 "}" diff --git a/crates/parser2/test_files/syntax_node/structs/empty.snap b/crates/parser2/test_files/syntax_node/structs/empty.snap index fa786daf5c..a633350661 100644 --- a/crates/parser2/test_files/syntax_node/structs/empty.snap +++ b/crates/parser2/test_files/syntax_node/structs/empty.snap @@ -1,19 +1,21 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/structs/empty.fe --- Root@0..26 ItemList@0..26 - Struct@0..26 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - StructKw@4..10 "struct" - WhiteSpace@10..11 " " - Ident@11..22 "EmptyStruct" - WhiteSpace@22..23 " " - RecordFieldDefList@23..26 - LBrace@23..24 "{" - Newline@24..25 "\n" - RBrace@25..26 "}" + Item@0..26 + Struct@0..26 + ItemModifier@0..3 + PubKw@0..3 "pub" + 
WhiteSpace@3..4 " " + StructKw@4..10 "struct" + WhiteSpace@10..11 " " + Ident@11..22 "EmptyStruct" + WhiteSpace@22..23 " " + RecordFieldDefList@23..26 + LBrace@23..24 "{" + Newline@24..25 "\n" + RBrace@25..26 "}" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index f414ac2ebe..100902ec57 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -5,454 +5,458 @@ input_file: crates/parser2/test_files/syntax_node/structs/generics.fe --- Root@0..553 ItemList@0..553 - Struct@0..74 - ItemModifier@0..3 - PubKw@0..3 "pub" - WhiteSpace@3..4 " " - StructKw@4..10 "struct" - WhiteSpace@10..11 " " - Ident@11..33 "StructWithGenericParam" - GenericParamList@33..42 - Lt@33..34 "<" - TypeGenericParam@34..35 - Ident@34..35 "S" - Comma@35..36 "," - WhiteSpace@36..37 " " - TypeGenericParam@37..38 - Ident@37..38 "T" - Comma@38..39 "," - WhiteSpace@39..40 " " - TypeGenericParam@40..41 - Ident@40..41 "U" - Gt@41..42 ">" - WhiteSpace@42..43 " " - Newline@43..44 "\n" - RecordFieldDefList@44..74 - LBrace@44..45 "{" - Newline@45..46 "\n" - WhiteSpace@46..50 " " - RecordFieldDef@50..54 - Ident@50..51 "x" - Colon@51..52 ":" - WhiteSpace@52..53 " " - PathType@53..54 - Path@53..54 - PathSegment@53..54 - Ident@53..54 "S" - Newline@54..55 "\n" - WhiteSpace@55..59 " " - RecordFieldDef@59..63 - Ident@59..60 "y" - Colon@60..61 ":" - WhiteSpace@61..62 " " - PathType@62..63 - Path@62..63 - PathSegment@62..63 - Ident@62..63 "T" - Newline@63..64 "\n" - WhiteSpace@64..68 " " - RecordFieldDef@68..72 - Ident@68..69 "z" - Colon@69..70 ":" - WhiteSpace@70..71 " " - PathType@71..72 - Path@71..72 - PathSegment@71..72 - Ident@71..72 "U" - Newline@72..73 "\n" - RBrace@73..74 "}" - Newline@74..75 "\n" + Item@0..75 + Struct@0..74 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + WhiteSpace@10..11 " " + Ident@11..33 "StructWithGenericParam" + GenericParamList@33..42 + Lt@33..34 "<" + TypeGenericParam@34..35 + Ident@34..35 "S" + Comma@35..36 "," + WhiteSpace@36..37 " " + TypeGenericParam@37..38 + Ident@37..38 "T" + Comma@38..39 "," + WhiteSpace@39..40 " " + TypeGenericParam@40..41 + Ident@40..41 "U" + Gt@41..42 ">" + WhiteSpace@42..43 " " + Newline@43..44 "\n" + RecordFieldDefList@44..74 + LBrace@44..45 "{" + Newline@45..46 "\n" + WhiteSpace@46..50 " " + RecordFieldDef@50..54 + Ident@50..51 "x" + Colon@51..52 ":" + WhiteSpace@52..53 " " + PathType@53..54 + Path@53..54 + PathSegment@53..54 + Ident@53..54 "S" + Newline@54..55 "\n" + WhiteSpace@55..59 " " + RecordFieldDef@59..63 + Ident@59..60 "y" + Colon@60..61 ":" + WhiteSpace@61..62 " " + PathType@62..63 + Path@62..63 + PathSegment@62..63 + Ident@62..63 "T" + Newline@63..64 "\n" + WhiteSpace@64..68 " " + RecordFieldDef@68..72 + Ident@68..69 "z" + Colon@69..70 ":" + WhiteSpace@70..71 " " + PathType@71..72 + Path@71..72 + PathSegment@71..72 + Ident@71..72 "U" + Newline@72..73 "\n" + RBrace@73..74 "}" + Newline@74..75 "\n" WhiteSpace@75..76 " " Newline@76..77 "\n" - Struct@77..186 - ItemModifier@77..80 - PubKw@77..80 "pub" - WhiteSpace@80..81 " " - StructKw@81..87 "struct" - WhiteSpace@87..88 " " - Ident@88..111 "StructWithGenericParam2" - GenericParamList@111..146 - Lt@111..112 "<" - Newline@112..113 "\n" - WhiteSpace@113..117 " " - TypeGenericParam@117..118 - Ident@117..118 "S" - Comma@118..119 "," - Newline@119..120 "\n" - WhiteSpace@120..124 " " - TypeGenericParam@124..137 - 
Ident@124..125 "T" - TypeBoundList@125..137 - Colon@125..126 ":" - WhiteSpace@126..127 " " - TypeBound@127..137 - Path@127..137 - PathSegment@127..130 - Ident@127..130 "foo" - Colon2@130..132 "::" - PathSegment@132..137 - Ident@132..137 "Trait" - Comma@137..138 "," - Newline@138..139 "\n" - WhiteSpace@139..143 " " - TypeGenericParam@143..144 - Ident@143..144 "U" - Newline@144..145 "\n" - Gt@145..146 ">" - WhiteSpace@146..147 " " - RecordFieldDefList@147..186 - LBrace@147..148 "{" - Newline@148..149 "\n" - WhiteSpace@149..153 " " - RecordFieldDef@153..166 - Ident@153..154 "x" - Colon@154..155 ":" - WhiteSpace@155..156 " " - PtrType@156..166 - Star@156..157 "*" - TupleType@157..166 - LParen@157..158 "(" - PathType@158..159 - Path@158..159 - PathSegment@158..159 - Ident@158..159 "S" - Comma@159..160 "," - WhiteSpace@160..161 " " - PtrType@161..165 - Star@161..162 "*" - PathType@162..165 - Path@162..165 - PathSegment@162..165 - Ident@162..165 "i32" - RParen@165..166 ")" - Newline@166..167 "\n" - WhiteSpace@167..171 " " - RecordFieldDef@171..175 - Ident@171..172 "y" - Colon@172..173 ":" - WhiteSpace@173..174 " " - PathType@174..175 - Path@174..175 - PathSegment@174..175 - Ident@174..175 "T" - Newline@175..176 "\n" - WhiteSpace@176..180 " " - RecordFieldDef@180..184 - Ident@180..181 "z" - Colon@181..182 ":" - WhiteSpace@182..183 " " - PathType@183..184 - Path@183..184 - PathSegment@183..184 - Ident@183..184 "U" - Newline@184..185 "\n" - RBrace@185..186 "}" - Newline@186..188 "\n\n" - Struct@188..404 - ItemModifier@188..191 - PubKw@188..191 "pub" - WhiteSpace@191..192 " " - StructKw@192..198 "struct" - WhiteSpace@198..199 " " - Ident@199..222 "StructWithGenericParam3" - GenericParamList@222..282 - Lt@222..223 "<" - Newline@223..224 "\n" - WhiteSpace@224..228 " " - TypeGenericParam@228..254 - Ident@228..229 "S" - TypeBoundList@229..254 - Colon@229..230 ":" - WhiteSpace@230..231 " " - TypeBound@231..241 - Path@231..241 - PathSegment@231..234 - Ident@231..234 "foo" - Colon2@234..236 "::" - PathSegment@236..241 - Ident@236..241 "Trait" - WhiteSpace@241..242 " " - Plus@242..243 "+" - WhiteSpace@243..244 " " - TypeBound@244..254 - Path@244..254 - PathSegment@244..247 - Ident@244..247 "bar" - Colon2@247..249 "::" - PathSegment@249..254 - Ident@249..254 "Trait" - Comma@254..255 "," - Newline@255..256 "\n" - WhiteSpace@256..260 " " - TypeGenericParam@260..261 - Ident@260..261 "T" - Comma@261..262 "," - Newline@262..263 "\n" - WhiteSpace@263..267 " " - TypeGenericParam@267..280 - Ident@267..268 "U" - TypeBoundList@268..280 - Colon@268..269 ":" - WhiteSpace@269..270 " " - TypeBound@270..280 - Path@270..280 - PathSegment@270..273 - Ident@270..273 "bar" - Colon2@273..275 "::" - PathSegment@275..280 - Ident@275..280 "Trait" - Newline@280..281 "\n" - Gt@281..282 ">" - WhiteSpace@282..283 " " - WhereClause@283..374 - WhereKw@283..288 "where" - Newline@288..289 "\n" - WhiteSpace@289..293 " " - WherePredicate@293..312 - PathType@293..294 - Path@293..294 - PathSegment@293..294 - Ident@293..294 "T" - TypeBoundList@294..311 - Colon@294..295 ":" - WhiteSpace@295..296 " " - TypeBound@296..302 - Path@296..302 - PathSegment@296..302 - Ident@296..302 "Trait1" - WhiteSpace@302..303 " " - Plus@303..304 "+" - WhiteSpace@304..305 " " - TypeBound@305..311 - Path@305..311 - PathSegment@305..311 - Ident@305..311 "Trait2" - Newline@311..312 "\n" - WhiteSpace@312..316 " " - WherePredicate@316..343 - PathType@316..325 - Path@316..322 - PathSegment@316..322 - Ident@316..322 "Option" - GenericArgList@322..325 - Lt@322..323 "<" - 
TypeGenericArg@323..324 - PathType@323..324 - Path@323..324 - PathSegment@323..324 - Ident@323..324 "T" - Gt@324..325 ">" - TypeBoundList@325..342 - Colon@325..326 ":" - WhiteSpace@326..327 " " - TypeBound@327..333 - Path@327..333 - PathSegment@327..333 - Ident@327..333 "Trait1" - WhiteSpace@333..334 " " - Plus@334..335 "+" - WhiteSpace@335..336 " " - TypeBound@336..342 - Path@336..342 - PathSegment@336..342 - Ident@336..342 "Trait2" - Newline@342..343 "\n" - WhiteSpace@343..347 " " - WherePredicate@347..374 - PathType@347..356 - Path@347..353 - PathSegment@347..353 - Ident@347..353 "Result" - GenericArgList@353..356 - Lt@353..354 "<" - TypeGenericArg@354..355 - PathType@354..355 - Path@354..355 - PathSegment@354..355 - Ident@354..355 "U" - Gt@355..356 ">" - TypeBoundList@356..373 - Colon@356..357 ":" - WhiteSpace@357..358 " " - TypeBound@358..364 - Path@358..364 - PathSegment@358..364 - Ident@358..364 "Trait2" - WhiteSpace@364..365 " " - Plus@365..366 "+" - WhiteSpace@366..367 " " - TypeBound@367..373 - Path@367..373 - PathSegment@367..373 - Ident@367..373 "Trait3" - Newline@373..374 "\n" - RecordFieldDefList@374..404 - LBrace@374..375 "{" - Newline@375..376 "\n" - WhiteSpace@376..380 " " - RecordFieldDef@380..384 - Ident@380..381 "x" - Colon@381..382 ":" - WhiteSpace@382..383 " " - PathType@383..384 - Path@383..384 - PathSegment@383..384 - Ident@383..384 "S" - Newline@384..385 "\n" - WhiteSpace@385..389 " " - RecordFieldDef@389..393 - Ident@389..390 "y" - Colon@390..391 ":" - WhiteSpace@391..392 " " - PathType@392..393 - Path@392..393 - PathSegment@392..393 - Ident@392..393 "T" - Newline@393..394 "\n" - WhiteSpace@394..398 " " - RecordFieldDef@398..402 - Ident@398..399 "z" - Colon@399..400 ":" - WhiteSpace@400..401 " " - PathType@401..402 - Path@401..402 - PathSegment@401..402 - Ident@401..402 "U" - Newline@402..403 "\n" - RBrace@403..404 "}" - Newline@404..406 "\n\n" - Struct@406..553 - ItemModifier@406..409 - PubKw@406..409 "pub" - WhiteSpace@409..410 " " - StructKw@410..416 "struct" - WhiteSpace@416..417 " " - Ident@417..422 "MyArr" - GenericParamList@422..459 - Lt@422..423 "<" - TypeGenericParam@423..439 - Ident@423..424 "T" - TypeBoundList@424..439 - Colon@424..425 ":" - WhiteSpace@425..426 " " - TypeBound@426..439 - Path@426..439 - PathSegment@426..429 - Ident@426..429 "std" - Colon2@429..431 "::" - PathSegment@431..434 - Ident@431..434 "ops" - Colon2@434..436 "::" - PathSegment@436..439 - Ident@436..439 "Add" - Comma@439..440 "," - WhiteSpace@440..441 " " - TypeGenericParam@441..442 - Ident@441..442 "U" - Comma@442..443 "," - WhiteSpace@443..444 " " - ConstGenericParam@444..458 - ConstKw@444..449 "const" - WhiteSpace@449..450 " " - Ident@450..451 "N" - Colon@451..452 ":" - WhiteSpace@452..453 " " - PathType@453..458 - Path@453..458 - PathSegment@453..458 - Ident@453..458 "usize" - Gt@458..459 ">" - WhiteSpace@459..460 " " - Newline@460..461 "\n" - WhiteSpace@461..465 " " - WhereClause@465..509 - WhereKw@465..470 "where" - Newline@470..471 "\n" - WhiteSpace@471..479 " " - WherePredicate@479..509 - TupleType@479..485 - LParen@479..480 "(" - PathType@480..481 - Path@480..481 - PathSegment@480..481 - Ident@480..481 "T" - Comma@481..482 "," - WhiteSpace@482..483 " " - PathType@483..484 - Path@483..484 - PathSegment@483..484 - Ident@483..484 "U" - RParen@484..485 ")" - TypeBoundList@485..508 - Colon@485..486 ":" - WhiteSpace@486..487 " " - TypeBound@487..492 - Path@487..492 - PathSegment@487..492 - Ident@487..492 "Trait" - WhiteSpace@492..493 " " - Plus@493..494 "+" - 
WhiteSpace@494..495 " " - TypeBound@495..508 - Path@495..500 - PathSegment@495..500 - Ident@495..500 "Trait" - GenericArgList@500..508 - Lt@500..501 "<" - TypeGenericArg@501..504 - PathType@501..504 - Path@501..504 - PathSegment@501..504 - Ident@501..504 "i32" - Comma@504..505 "," - WhiteSpace@505..506 " " - TypeGenericArg@506..507 - PathType@506..507 - Path@506..507 - PathSegment@506..507 - Ident@506..507 "Y" - Gt@507..508 ">" - Newline@508..509 "\n" - RecordFieldDefList@509..553 - LBrace@509..510 "{" - Newline@510..511 "\n" - WhiteSpace@511..515 " " - RecordFieldDef@515..530 - Ident@515..522 "__inner" - Colon@522..523 ":" - WhiteSpace@523..524 " " - ArrayType@524..530 - LBracket@524..525 "[" - PathType@525..526 - Path@525..526 - PathSegment@525..526 - Ident@525..526 "T" - SemiColon@526..527 ";" - WhiteSpace@527..528 " " - PathExpr@528..529 - Path@528..529 - PathSegment@528..529 - Ident@528..529 "N" - RBracket@529..530 "]" - Newline@530..531 "\n" - WhiteSpace@531..535 " " - RecordFieldDef@535..551 - Ident@535..543 "__inner2" - Colon@543..544 ":" - WhiteSpace@544..545 " " - TupleType@545..551 - LParen@545..546 "(" - PathType@546..547 - Path@546..547 - PathSegment@546..547 - Ident@546..547 "T" - Comma@547..548 "," - WhiteSpace@548..549 " " - PathType@549..550 - Path@549..550 - PathSegment@549..550 - Ident@549..550 "U" - RParen@550..551 ")" - Newline@551..552 "\n" - RBrace@552..553 "}" + Item@77..188 + Struct@77..186 + ItemModifier@77..80 + PubKw@77..80 "pub" + WhiteSpace@80..81 " " + StructKw@81..87 "struct" + WhiteSpace@87..88 " " + Ident@88..111 "StructWithGenericParam2" + GenericParamList@111..146 + Lt@111..112 "<" + Newline@112..113 "\n" + WhiteSpace@113..117 " " + TypeGenericParam@117..118 + Ident@117..118 "S" + Comma@118..119 "," + Newline@119..120 "\n" + WhiteSpace@120..124 " " + TypeGenericParam@124..137 + Ident@124..125 "T" + TypeBoundList@125..137 + Colon@125..126 ":" + WhiteSpace@126..127 " " + TypeBound@127..137 + Path@127..137 + PathSegment@127..130 + Ident@127..130 "foo" + Colon2@130..132 "::" + PathSegment@132..137 + Ident@132..137 "Trait" + Comma@137..138 "," + Newline@138..139 "\n" + WhiteSpace@139..143 " " + TypeGenericParam@143..144 + Ident@143..144 "U" + Newline@144..145 "\n" + Gt@145..146 ">" + WhiteSpace@146..147 " " + RecordFieldDefList@147..186 + LBrace@147..148 "{" + Newline@148..149 "\n" + WhiteSpace@149..153 " " + RecordFieldDef@153..166 + Ident@153..154 "x" + Colon@154..155 ":" + WhiteSpace@155..156 " " + PtrType@156..166 + Star@156..157 "*" + TupleType@157..166 + LParen@157..158 "(" + PathType@158..159 + Path@158..159 + PathSegment@158..159 + Ident@158..159 "S" + Comma@159..160 "," + WhiteSpace@160..161 " " + PtrType@161..165 + Star@161..162 "*" + PathType@162..165 + Path@162..165 + PathSegment@162..165 + Ident@162..165 "i32" + RParen@165..166 ")" + Newline@166..167 "\n" + WhiteSpace@167..171 " " + RecordFieldDef@171..175 + Ident@171..172 "y" + Colon@172..173 ":" + WhiteSpace@173..174 " " + PathType@174..175 + Path@174..175 + PathSegment@174..175 + Ident@174..175 "T" + Newline@175..176 "\n" + WhiteSpace@176..180 " " + RecordFieldDef@180..184 + Ident@180..181 "z" + Colon@181..182 ":" + WhiteSpace@182..183 " " + PathType@183..184 + Path@183..184 + PathSegment@183..184 + Ident@183..184 "U" + Newline@184..185 "\n" + RBrace@185..186 "}" + Newline@186..188 "\n\n" + Item@188..406 + Struct@188..404 + ItemModifier@188..191 + PubKw@188..191 "pub" + WhiteSpace@191..192 " " + StructKw@192..198 "struct" + WhiteSpace@198..199 " " + Ident@199..222 "StructWithGenericParam3" + 
GenericParamList@222..282 + Lt@222..223 "<" + Newline@223..224 "\n" + WhiteSpace@224..228 " " + TypeGenericParam@228..254 + Ident@228..229 "S" + TypeBoundList@229..254 + Colon@229..230 ":" + WhiteSpace@230..231 " " + TypeBound@231..241 + Path@231..241 + PathSegment@231..234 + Ident@231..234 "foo" + Colon2@234..236 "::" + PathSegment@236..241 + Ident@236..241 "Trait" + WhiteSpace@241..242 " " + Plus@242..243 "+" + WhiteSpace@243..244 " " + TypeBound@244..254 + Path@244..254 + PathSegment@244..247 + Ident@244..247 "bar" + Colon2@247..249 "::" + PathSegment@249..254 + Ident@249..254 "Trait" + Comma@254..255 "," + Newline@255..256 "\n" + WhiteSpace@256..260 " " + TypeGenericParam@260..261 + Ident@260..261 "T" + Comma@261..262 "," + Newline@262..263 "\n" + WhiteSpace@263..267 " " + TypeGenericParam@267..280 + Ident@267..268 "U" + TypeBoundList@268..280 + Colon@268..269 ":" + WhiteSpace@269..270 " " + TypeBound@270..280 + Path@270..280 + PathSegment@270..273 + Ident@270..273 "bar" + Colon2@273..275 "::" + PathSegment@275..280 + Ident@275..280 "Trait" + Newline@280..281 "\n" + Gt@281..282 ">" + WhiteSpace@282..283 " " + WhereClause@283..374 + WhereKw@283..288 "where" + Newline@288..289 "\n" + WhiteSpace@289..293 " " + WherePredicate@293..312 + PathType@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "T" + TypeBoundList@294..311 + Colon@294..295 ":" + WhiteSpace@295..296 " " + TypeBound@296..302 + Path@296..302 + PathSegment@296..302 + Ident@296..302 "Trait1" + WhiteSpace@302..303 " " + Plus@303..304 "+" + WhiteSpace@304..305 " " + TypeBound@305..311 + Path@305..311 + PathSegment@305..311 + Ident@305..311 "Trait2" + Newline@311..312 "\n" + WhiteSpace@312..316 " " + WherePredicate@316..343 + PathType@316..325 + Path@316..322 + PathSegment@316..322 + Ident@316..322 "Option" + GenericArgList@322..325 + Lt@322..323 "<" + TypeGenericArg@323..324 + PathType@323..324 + Path@323..324 + PathSegment@323..324 + Ident@323..324 "T" + Gt@324..325 ">" + TypeBoundList@325..342 + Colon@325..326 ":" + WhiteSpace@326..327 " " + TypeBound@327..333 + Path@327..333 + PathSegment@327..333 + Ident@327..333 "Trait1" + WhiteSpace@333..334 " " + Plus@334..335 "+" + WhiteSpace@335..336 " " + TypeBound@336..342 + Path@336..342 + PathSegment@336..342 + Ident@336..342 "Trait2" + Newline@342..343 "\n" + WhiteSpace@343..347 " " + WherePredicate@347..374 + PathType@347..356 + Path@347..353 + PathSegment@347..353 + Ident@347..353 "Result" + GenericArgList@353..356 + Lt@353..354 "<" + TypeGenericArg@354..355 + PathType@354..355 + Path@354..355 + PathSegment@354..355 + Ident@354..355 "U" + Gt@355..356 ">" + TypeBoundList@356..373 + Colon@356..357 ":" + WhiteSpace@357..358 " " + TypeBound@358..364 + Path@358..364 + PathSegment@358..364 + Ident@358..364 "Trait2" + WhiteSpace@364..365 " " + Plus@365..366 "+" + WhiteSpace@366..367 " " + TypeBound@367..373 + Path@367..373 + PathSegment@367..373 + Ident@367..373 "Trait3" + Newline@373..374 "\n" + RecordFieldDefList@374..404 + LBrace@374..375 "{" + Newline@375..376 "\n" + WhiteSpace@376..380 " " + RecordFieldDef@380..384 + Ident@380..381 "x" + Colon@381..382 ":" + WhiteSpace@382..383 " " + PathType@383..384 + Path@383..384 + PathSegment@383..384 + Ident@383..384 "S" + Newline@384..385 "\n" + WhiteSpace@385..389 " " + RecordFieldDef@389..393 + Ident@389..390 "y" + Colon@390..391 ":" + WhiteSpace@391..392 " " + PathType@392..393 + Path@392..393 + PathSegment@392..393 + Ident@392..393 "T" + Newline@393..394 "\n" + WhiteSpace@394..398 " " + RecordFieldDef@398..402 + 
Ident@398..399 "z" + Colon@399..400 ":" + WhiteSpace@400..401 " " + PathType@401..402 + Path@401..402 + PathSegment@401..402 + Ident@401..402 "U" + Newline@402..403 "\n" + RBrace@403..404 "}" + Newline@404..406 "\n\n" + Item@406..553 + Struct@406..553 + ItemModifier@406..409 + PubKw@406..409 "pub" + WhiteSpace@409..410 " " + StructKw@410..416 "struct" + WhiteSpace@416..417 " " + Ident@417..422 "MyArr" + GenericParamList@422..459 + Lt@422..423 "<" + TypeGenericParam@423..439 + Ident@423..424 "T" + TypeBoundList@424..439 + Colon@424..425 ":" + WhiteSpace@425..426 " " + TypeBound@426..439 + Path@426..439 + PathSegment@426..429 + Ident@426..429 "std" + Colon2@429..431 "::" + PathSegment@431..434 + Ident@431..434 "ops" + Colon2@434..436 "::" + PathSegment@436..439 + Ident@436..439 "Add" + Comma@439..440 "," + WhiteSpace@440..441 " " + TypeGenericParam@441..442 + Ident@441..442 "U" + Comma@442..443 "," + WhiteSpace@443..444 " " + ConstGenericParam@444..458 + ConstKw@444..449 "const" + WhiteSpace@449..450 " " + Ident@450..451 "N" + Colon@451..452 ":" + WhiteSpace@452..453 " " + PathType@453..458 + Path@453..458 + PathSegment@453..458 + Ident@453..458 "usize" + Gt@458..459 ">" + WhiteSpace@459..460 " " + Newline@460..461 "\n" + WhiteSpace@461..465 " " + WhereClause@465..509 + WhereKw@465..470 "where" + Newline@470..471 "\n" + WhiteSpace@471..479 " " + WherePredicate@479..509 + TupleType@479..485 + LParen@479..480 "(" + PathType@480..481 + Path@480..481 + PathSegment@480..481 + Ident@480..481 "T" + Comma@481..482 "," + WhiteSpace@482..483 " " + PathType@483..484 + Path@483..484 + PathSegment@483..484 + Ident@483..484 "U" + RParen@484..485 ")" + TypeBoundList@485..508 + Colon@485..486 ":" + WhiteSpace@486..487 " " + TypeBound@487..492 + Path@487..492 + PathSegment@487..492 + Ident@487..492 "Trait" + WhiteSpace@492..493 " " + Plus@493..494 "+" + WhiteSpace@494..495 " " + TypeBound@495..508 + Path@495..500 + PathSegment@495..500 + Ident@495..500 "Trait" + GenericArgList@500..508 + Lt@500..501 "<" + TypeGenericArg@501..504 + PathType@501..504 + Path@501..504 + PathSegment@501..504 + Ident@501..504 "i32" + Comma@504..505 "," + WhiteSpace@505..506 " " + TypeGenericArg@506..507 + PathType@506..507 + Path@506..507 + PathSegment@506..507 + Ident@506..507 "Y" + Gt@507..508 ">" + Newline@508..509 "\n" + RecordFieldDefList@509..553 + LBrace@509..510 "{" + Newline@510..511 "\n" + WhiteSpace@511..515 " " + RecordFieldDef@515..530 + Ident@515..522 "__inner" + Colon@522..523 ":" + WhiteSpace@523..524 " " + ArrayType@524..530 + LBracket@524..525 "[" + PathType@525..526 + Path@525..526 + PathSegment@525..526 + Ident@525..526 "T" + SemiColon@526..527 ";" + WhiteSpace@527..528 " " + PathExpr@528..529 + Path@528..529 + PathSegment@528..529 + Ident@528..529 "N" + RBracket@529..530 "]" + Newline@530..531 "\n" + WhiteSpace@531..535 " " + RecordFieldDef@535..551 + Ident@535..543 "__inner2" + Colon@543..544 ":" + WhiteSpace@544..545 " " + TupleType@545..551 + LParen@545..546 "(" + PathType@546..547 + Path@546..547 + PathSegment@546..547 + Ident@546..547 "T" + Comma@547..548 "," + WhiteSpace@548..549 " " + PathType@549..550 + Path@549..550 + PathSegment@549..550 + Ident@549..550 "U" + RParen@550..551 ")" + Newline@551..552 "\n" + RBrace@552..553 "}" diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap index 469d8ebdb2..48ea9f1660 100644 --- a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap +++ 
b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap @@ -1,78 +1,80 @@ --- source: crates/parser2/tests/syntax_node.rs -expression: snapshot +expression: node +input_file: crates/parser2/test_files/syntax_node/structs/tupel_field.fe --- Root@0..117 ItemList@0..117 - Struct@0..117 - StructKw@0..6 "struct" - WhiteSpace@6..7 " " - Ident@7..27 "StructWithTupleField" - WhiteSpace@27..28 " " - RecordFieldDefList@28..117 - LBrace@28..29 "{" - Newline@29..30 "\n" - WhiteSpace@30..34 " " - RecordFieldDef@34..47 - Ident@34..35 "x" - Colon@35..36 ":" - WhiteSpace@36..37 " " - TupleType@37..47 - LParen@37..38 "(" - PathType@38..41 - Path@38..41 - PathSegment@38..41 - Ident@38..41 "i32" - Comma@41..42 "," - WhiteSpace@42..43 " " - PathType@43..46 - Path@43..46 - PathSegment@43..46 - Ident@43..46 "u32" - RParen@46..47 ")" - Newline@47..48 "\n" - WhiteSpace@48..52 " " - RecordFieldDef@52..105 - Ident@52..53 "y" - Colon@53..54 ":" - WhiteSpace@54..55 " " - TupleType@55..105 - LParen@55..56 "(" - Newline@56..57 "\n" - WhiteSpace@57..65 " " - PathType@65..68 - Path@65..68 - PathSegment@65..68 - Ident@65..68 "i32" - Comma@68..69 "," - Newline@69..70 "\n" - WhiteSpace@70..78 " " - PathType@78..86 - Path@78..86 - PathSegment@78..81 - Ident@78..81 "foo" - Colon2@81..83 "::" - PathSegment@83..86 - Ident@83..86 "Bar" - Comma@86..87 "," - Newline@87..88 "\n" - WhiteSpace@88..96 " " - PathType@96..99 - Path@96..99 - PathSegment@96..99 - Ident@96..99 "u32" - Newline@99..100 "\n" - WhiteSpace@100..104 " " - RParen@104..105 ")" - Newline@105..106 "\n" - WhiteSpace@106..110 " " - RecordFieldDef@110..115 - Ident@110..111 "z" - Colon@111..112 ":" - WhiteSpace@112..113 " " - TupleType@113..115 - LParen@113..114 "(" - RParen@114..115 ")" - Newline@115..116 "\n" - RBrace@116..117 "}" + Item@0..117 + Struct@0..117 + StructKw@0..6 "struct" + WhiteSpace@6..7 " " + Ident@7..27 "StructWithTupleField" + WhiteSpace@27..28 " " + RecordFieldDefList@28..117 + LBrace@28..29 "{" + Newline@29..30 "\n" + WhiteSpace@30..34 " " + RecordFieldDef@34..47 + Ident@34..35 "x" + Colon@35..36 ":" + WhiteSpace@36..37 " " + TupleType@37..47 + LParen@37..38 "(" + PathType@38..41 + Path@38..41 + PathSegment@38..41 + Ident@38..41 "i32" + Comma@41..42 "," + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "u32" + RParen@46..47 ")" + Newline@47..48 "\n" + WhiteSpace@48..52 " " + RecordFieldDef@52..105 + Ident@52..53 "y" + Colon@53..54 ":" + WhiteSpace@54..55 " " + TupleType@55..105 + LParen@55..56 "(" + Newline@56..57 "\n" + WhiteSpace@57..65 " " + PathType@65..68 + Path@65..68 + PathSegment@65..68 + Ident@65..68 "i32" + Comma@68..69 "," + Newline@69..70 "\n" + WhiteSpace@70..78 " " + PathType@78..86 + Path@78..86 + PathSegment@78..81 + Ident@78..81 "foo" + Colon2@81..83 "::" + PathSegment@83..86 + Ident@83..86 "Bar" + Comma@86..87 "," + Newline@87..88 "\n" + WhiteSpace@88..96 " " + PathType@96..99 + Path@96..99 + PathSegment@96..99 + Ident@96..99 "u32" + Newline@99..100 "\n" + WhiteSpace@100..104 " " + RParen@104..105 ")" + Newline@105..106 "\n" + WhiteSpace@106..110 " " + RecordFieldDef@110..115 + Ident@110..111 "z" + Colon@111..112 ":" + WhiteSpace@112..113 " " + TupleType@113..115 + LParen@113..114 "(" + RParen@114..115 ")" + Newline@115..116 "\n" + RBrace@116..117 "}" From 5b9365be3e15e78928f9e08999fe6dda3a96e5f2 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 13 Jul 2023 21:39:40 +0200 Subject: [PATCH 212/678] Refactor `ScopeBuilder` --- crates/hir/src/hir_def/body.rs | 9 +- 
crates/hir/src/hir_def/item.rs | 7 +- crates/hir/src/lower/body.rs | 37 ++---- crates/hir/src/lower/expr.rs | 4 +- crates/hir/src/lower/item.rs | 177 +++++++++----------------- crates/hir/src/lower/mod.rs | 11 +- crates/hir/src/lower/scope_builder.rs | 13 +- crates/hir/src/lower/use_tree.rs | 18 +-- 8 files changed, 106 insertions(+), 170 deletions(-) diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 107727172e..538123d925 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -16,7 +16,7 @@ use super::{Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TopLevelMod, Tracke #[salsa::tracked] pub struct Body { #[id] - id: TrackedBodyId, + id: TrackedItemId, /// The expression that evaluates to the value of the body. /// In case of a function body, this is always be the block expression. @@ -42,13 +42,6 @@ impl Body { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TrackedBodyId { - ItemBody(Box), - NestedBody(Box), - NamelessBody, -} - pub type NodeStore = PrimaryMap; pub trait SourceAst: AstNode + Clone + Hash + PartialEq + Eq {} diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 3d9d8754f8..52e3423df8 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -23,8 +23,8 @@ use crate::{ use super::{ scope_graph::{ScopeGraph, ScopeId}, - AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, TypeId, - UseAlias, WhereClauseId, + AttrListId, Body, ExprId, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, + TypeId, UseAlias, WhereClauseId, }; #[derive( @@ -669,6 +669,9 @@ pub enum TrackedItemId { Const(Partial), Use(Partial), Extern, + FuncBody, + NamelessBody, + InsideBlock(Box, ExprId), Joined(Box, Box), } diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index ce2996ff3a..8463becb34 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -3,7 +3,7 @@ use parser::ast; use crate::{ hir_def::{ Body, BodySourceMap, Expr, ExprId, NodeStore, Partial, Pat, PatId, Stmt, StmtId, - TrackedBodyId, TrackedItemId, + TrackedItemId, }, span::HirOrigin, }; @@ -11,31 +11,16 @@ use crate::{ use super::FileLowerCtxt; impl Body { - pub(super) fn lower_ast( - f_ctxt: &mut FileLowerCtxt, - parent_id: TrackedItemId, - ast: ast::Expr, - ) -> Self { - let bid = TrackedBodyId::ItemBody(parent_id.into()); - let mut ctxt = BodyCtxt::new(f_ctxt, bid); - let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); - ctxt.build(&ast, body_expr) - } - - pub(super) fn lower_ast_nested( - f_ctxt: &mut FileLowerCtxt, - bid: TrackedBodyId, - ast: ast::Expr, - ) -> Self { - let bid = TrackedBodyId::NestedBody(bid.into()); - let mut ctxt = BodyCtxt::new(f_ctxt, bid); + pub(super) fn lower_ast(f_ctxt: &mut FileLowerCtxt, ast: ast::Expr) -> Self { + let id = f_ctxt.joined_id(TrackedItemId::FuncBody); + let mut ctxt = BodyCtxt::new(f_ctxt, id); let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); ctxt.build(&ast, body_expr) } pub(super) fn lower_ast_nameless(f_ctxt: &mut FileLowerCtxt<'_>, ast: ast::Expr) -> Self { - let bid = TrackedBodyId::NamelessBody; - let mut ctxt = BodyCtxt::new(f_ctxt, bid); + let id = f_ctxt.joined_id(TrackedItemId::NamelessBody); + let mut ctxt = BodyCtxt::new(f_ctxt, id); let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); ctxt.build(&ast, body_expr) } @@ -43,7 +28,7 @@ impl Body { pub(super) struct BodyCtxt<'ctxt, 'db> { pub(super) f_ctxt: &'ctxt mut FileLowerCtxt<'db>, - pub(super) 
bid: TrackedBodyId, + pub(super) id: TrackedItemId, pub(super) stmts: NodeStore>, pub(super) exprs: NodeStore>, @@ -91,11 +76,11 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { pat_id } - fn new(f_ctxt: &'ctxt mut FileLowerCtxt<'db>, bid: TrackedBodyId) -> Self { - f_ctxt.enter_scope(false); + fn new(f_ctxt: &'ctxt mut FileLowerCtxt<'db>, id: TrackedItemId) -> Self { + f_ctxt.enter_scope(id.clone(), false); Self { f_ctxt, - bid, + id, stmts: NodeStore::new(), exprs: NodeStore::new(), pats: NodeStore::new(), @@ -107,7 +92,7 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { let origin = HirOrigin::raw(ast); let body = Body::new( self.f_ctxt.db(), - self.bid, + self.id, body_expr, self.stmts, self.exprs, diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index 356da2ce48..f6518c23d9 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -22,11 +22,13 @@ impl Expr { ast::ExprKind::Block(block) => { ctxt.f_ctxt.enter_block_scope(); let mut stmts = vec![]; + for stmt in block.stmts() { let stmt = Stmt::push_to_body(ctxt, stmt); stmts.push(stmt); } let expr_id = ctxt.push_expr(Self::Block(stmts), HirOrigin::raw(&ast)); + ctxt.f_ctxt.leave_block_scope(expr_id); return expr_id; } @@ -134,7 +136,7 @@ impl Expr { let val = Self::push_to_body_opt(ctxt, array_rep.val()); let len = array_rep .len() - .map(|ast| Body::lower_ast_nested(ctxt.f_ctxt, ctxt.bid.clone(), ast)) + .map(|ast| Body::lower_ast_nameless(ctxt.f_ctxt, ast)) .into(); Self::ArrayRep(val, len) } diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index bc9965e2dd..bdb3338a6a 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -10,53 +10,56 @@ use crate::{ use super::FileLowerCtxt; -pub(crate) fn lower_module_items( - ctxt: &mut FileLowerCtxt<'_>, - id: TrackedItemId, - items: ast::ItemList, -) { +pub(crate) fn lower_module_items(ctxt: &mut FileLowerCtxt<'_>, items: ast::ItemList) { for item in items { - let Some(kind) = item.kind() else { - continue; + ItemKind::lower_ast(ctxt, item); + } +} + +impl ItemKind { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Item) { + let Some(kind) = ast.kind() else { + return; }; + match kind { ast::ItemKind::Mod(mod_) => { - Mod::lower_ast(ctxt, id.clone(), mod_); + Mod::lower_ast(ctxt, mod_); } ast::ItemKind::Func(fn_) => { - Func::lower_ast(ctxt, id.clone(), fn_, false); + Func::lower_ast(ctxt, fn_, false); } ast::ItemKind::Struct(struct_) => { - Struct::lower_ast(ctxt, id.clone(), struct_); + Struct::lower_ast(ctxt, struct_); } ast::ItemKind::Contract(contract) => { - Contract::lower_ast(ctxt, id.clone(), contract); + Contract::lower_ast(ctxt, contract); } ast::ItemKind::Enum(enum_) => { - Enum::lower_ast(ctxt, id.clone(), enum_); + Enum::lower_ast(ctxt, enum_); } ast::ItemKind::TypeAlias(alias) => { - TypeAlias::lower_ast(ctxt, id.clone(), alias); + TypeAlias::lower_ast(ctxt, alias); } ast::ItemKind::Impl(impl_) => { - Impl::lower_ast(ctxt, id.clone(), impl_); + Impl::lower_ast(ctxt, impl_); } ast::ItemKind::Trait(trait_) => { - Trait::lower_ast(ctxt, id.clone(), trait_); + Trait::lower_ast(ctxt, trait_); } ast::ItemKind::ImplTrait(impl_trait) => { - ImplTrait::lower_ast(ctxt, id.clone(), impl_trait); + ImplTrait::lower_ast(ctxt, impl_trait); } ast::ItemKind::Const(const_) => { - Const::lower_ast(ctxt, id.clone(), const_); + Const::lower_ast(ctxt, const_); } ast::ItemKind::Use(use_) => { - Use::lower_ast(ctxt, id.clone(), use_); + Use::lower_ast(ctxt, use_); } ast::ItemKind::Extern(extern_) => { if 
let Some(extern_block) = extern_.extern_block() { for fn_ in extern_block { - Func::lower_ast(ctxt, id.clone(), fn_, true); + Func::lower_ast(ctxt, fn_, true); } } } @@ -65,19 +68,15 @@ pub(crate) fn lower_module_items( } impl Mod { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Mod, - ) -> Self { - ctxt.enter_scope(true); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Mod) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Mod(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Mod(name)); + ctxt.enter_scope(id.clone(), true); + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); if let Some(items) = ast.items() { - lower_module_items(ctxt, id.clone(), items); + lower_module_items(ctxt, items); } let origin = HirOrigin::raw(&ast); @@ -87,16 +86,10 @@ impl Mod { } impl Func { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Func, - is_extern: bool, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Func, is_extern: bool) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Func(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Func(name)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); @@ -107,13 +100,9 @@ impl Func { .into(); let ret_ty = ast.ret_ty().map(|ty| TypeId::lower_ast(ctxt, ty)); let modifier = ItemModifier::lower_ast(ast.modifier()); - let body = ast.body().map(|body| { - Body::lower_ast( - ctxt, - id.clone(), - ast::Expr::cast(body.syntax().clone()).unwrap(), - ) - }); + let body = ast + .body() + .map(|body| Body::lower_ast(ctxt, ast::Expr::cast(body.syntax().clone()).unwrap())); let origin = HirOrigin::raw(&ast); let fn_ = Self::new( @@ -136,15 +125,10 @@ impl Func { } impl Struct { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Struct, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Struct) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Struct(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Struct(name)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -170,15 +154,10 @@ impl Struct { } impl Contract { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Contract, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Contract) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Contract(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Contract(name)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -200,15 +179,10 @@ impl Contract { } impl Enum { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Enum, - ) -> Self { - 
ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Enum) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Enum(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Enum(name)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -234,15 +208,10 @@ impl Enum { } impl TypeAlias { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::TypeAlias, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeAlias) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.alias()); - let id = TrackedItemId::TypeAlias(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::TypeAlias(name)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -268,15 +237,10 @@ impl TypeAlias { } impl Impl { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Impl, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Impl) -> Self { let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); - let id = TrackedItemId::Impl(ty).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Impl(ty)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); @@ -285,7 +249,7 @@ impl Impl { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Func::lower_ast(ctxt, id.clone(), impl_item, false); + Func::lower_ast(ctxt, impl_item, false); } } @@ -304,15 +268,10 @@ impl Impl { } impl Trait { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Trait, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Trait) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Trait(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Trait(name)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -322,7 +281,7 @@ impl Trait { if let Some(item_list) = ast.item_list() { for impl_item in item_list { - Func::lower_ast(ctxt, id.clone(), impl_item, false); + Func::lower_ast(ctxt, impl_item, false); } } @@ -343,16 +302,11 @@ impl Trait { } impl ImplTrait { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::ImplTrait, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::ImplTrait) -> Self { let trait_ref = TraitRef::lower_ast_partial(ctxt, ast.trait_ref()); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); - let id = TrackedItemId::ImplTrait(trait_ref, ty).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::ImplTrait(trait_ref, ty)); + ctxt.enter_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); @@ -361,7 +315,7 @@ impl ImplTrait { if let 
Some(item_list) = ast.item_list() { for impl_item in item_list { - Func::lower_ast(ctxt, id.clone(), impl_item, false); + Func::lower_ast(ctxt, impl_item, false); } } @@ -381,20 +335,13 @@ impl ImplTrait { } impl Const { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Const, - ) -> Self { - ctxt.enter_scope(false); - + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Const) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let id = TrackedItemId::Const(name).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Const(name)); + ctxt.enter_scope(id.clone(), false); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); - let body = ast - .value() - .map(|ast| Body::lower_ast(ctxt, id.clone(), ast)) - .into(); + let body = ast.value().map(|ast| Body::lower_ast(ctxt, ast)).into(); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let origin = HirOrigin::raw(&ast); diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 75df8d73b6..65528bb996 100644 --- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -62,9 +62,8 @@ pub(crate) fn scope_graph_impl(db: &dyn HirDb, top_mod: TopLevelMod) -> ScopeGra let ast = top_mod_ast(db, top_mod); let mut ctxt = FileLowerCtxt::enter_top_mod(db, top_mod); - let id = TrackedItemId::TopLevelMod(top_mod.name(db)); if let Some(items) = ast.items() { - lower_module_items(&mut ctxt, id, items); + lower_module_items(&mut ctxt, items); } ctxt.leave_item_scope(top_mod); @@ -108,9 +107,13 @@ impl<'db> FileLowerCtxt<'db> { self.builder.leave_block_scope(block); } + pub(super) fn joined_id(&self, id: TrackedItemId) -> TrackedItemId { + self.builder.joined_id(id) + } + /// Creates a new scope for an item. 
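
The `lower/item.rs` and `lower/mod.rs` hunks above drop the explicit `parent_id` parameter and instead derive each item's tracked id from the lowering context: entering an item scope pushes that item's id, and `joined_id` composes a child id with whatever is currently on top of the stack. The following is a minimal, self-contained sketch of that pattern; the names and types are simplified stand-ins, not the crate's real API.

// Simplified stand-in for `TrackedItemId`.
#[derive(Clone, Debug, PartialEq)]
enum ItemId {
    TopMod(String),
    Func(String),
    FuncBody,
    Joined(Box<ItemId>, Box<ItemId>),
}

impl ItemId {
    fn join(self, child: ItemId) -> ItemId {
        ItemId::Joined(Box::new(self), Box::new(child))
    }
}

// Simplified stand-in for the lowering context: the id of the enclosing item
// lives on a stack instead of being threaded through every `lower_ast` call.
struct LowerCtxt {
    id_stack: Vec<ItemId>,
}

impl LowerCtxt {
    fn joined_id(&self, id: ItemId) -> ItemId {
        self.id_stack.last().unwrap().clone().join(id)
    }

    fn enter_item_scope(&mut self, id: ItemId) {
        self.id_stack.push(id);
    }

    fn leave_item_scope(&mut self) {
        self.id_stack.pop();
    }
}

fn main() {
    let mut ctxt = LowerCtxt {
        id_stack: vec![ItemId::TopMod("lib".into())],
    };

    // Lowering `fn foo` no longer needs a `parent_id` argument.
    let func_id = ctxt.joined_id(ItemId::Func("foo".into()));
    ctxt.enter_item_scope(func_id.clone());

    // The body id is joined onto the enclosing function automatically.
    let body_id = ctxt.joined_id(ItemId::FuncBody);
    ctxt.leave_item_scope();

    println!("{func_id:?}");
    println!("{body_id:?}");
}
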
- fn enter_scope(&mut self, is_mod: bool) { - self.builder.enter_scope(is_mod); + fn enter_scope(&mut self, id: TrackedItemId, is_mod: bool) { + self.builder.enter_item_scope(id, is_mod); } /// Leaves the current scope, `item` should be the generated item which owns diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 7b0420b4ed..dccb718982 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -7,7 +7,7 @@ use crate::{ hir_def::{ scope_graph::{EdgeKind, Scope, ScopeEdge, ScopeGraph, ScopeId}, Body, ExprId, FieldDefListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, - TopLevelMod, Use, VariantDefListId, Visibility, + TopLevelMod, TrackedItemId, Use, VariantDefListId, Visibility, }, HirDb, }; @@ -18,6 +18,7 @@ pub(super) struct ScopeGraphBuilder<'db> { graph: IntermediateScopeGraph, scope_stack: Vec, module_stack: Vec, + id_stack: Vec, declared_blocks: FxHashMap>, } @@ -29,10 +30,11 @@ impl<'db> ScopeGraphBuilder<'db> { graph: IntermediateScopeGraph::default(), scope_stack: Default::default(), module_stack: Default::default(), + id_stack: Default::default(), declared_blocks: FxHashMap::default(), }; - builder.enter_scope(true); + builder.enter_item_scope(TrackedItemId::TopLevelMod(top_mod.name(db)), true); builder } @@ -40,7 +42,8 @@ impl<'db> ScopeGraphBuilder<'db> { self.graph.build(self.top_mod) } - pub(super) fn enter_scope(&mut self, is_mod: bool) { + pub(super) fn enter_item_scope(&mut self, id: TrackedItemId, is_mod: bool) { + self.id_stack.push(id); self.enter_scope_impl(is_mod); } @@ -243,6 +246,10 @@ impl<'db> ScopeGraphBuilder<'db> { .add_edge(parent_node, item_node, parent_to_child_edge); } + pub(super) fn joined_id(&self, id: TrackedItemId) -> TrackedItemId { + self.id_stack.last().unwrap().clone().join(id) + } + pub(super) fn enter_block_scope(&mut self) { let node = self.enter_scope_impl(false); self.declared_blocks.insert(node, None); diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index fd8e2d582c..b4b4998705 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -8,16 +8,12 @@ use crate::{ use super::FileLowerCtxt; impl Use { - pub(super) fn lower_ast( - ctxt: &mut FileLowerCtxt<'_>, - parent_id: TrackedItemId, - ast: ast::Use, - ) -> Vec { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Use) -> Vec { let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let Some(use_tree) = ast.use_tree() else { - ctxt.enter_scope(false); - let id = TrackedItemId::Use(Partial::Absent).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Use(Partial::Absent)); + ctxt.enter_scope(id.clone(), false); let path = Partial::Absent; let alias = None; let top_mod = ctxt.top_mod(); @@ -29,9 +25,9 @@ impl Use { // If the use tree has no subtree, then there is no need to decompose it. 
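
In the `use_tree.rs` hunk that continues below, every flattened `use` path gets its own `TrackedItemId::Use(path)` joined under the enclosing scope, so a tree such as `use foo::{bar, baz::*}` is first decomposed into flat paths. This is a rough sketch of that decomposition idea only, not the crate's actual `decompose_tree` implementation.

// A toy use-tree: either a plain path, or a path prefix with sub-trees.
enum UseTree {
    Path(Vec<String>),
    Nested(Vec<String>, Vec<UseTree>),
}

// Flatten a tree into the full paths it names,
// e.g. `foo::{bar, baz::*}` -> ["foo::bar", "foo::baz::*"].
fn decompose(tree: &UseTree, prefix: &[String], out: &mut Vec<String>) {
    match tree {
        UseTree::Path(segs) => {
            let mut full = prefix.to_vec();
            full.extend(segs.iter().cloned());
            out.push(full.join("::"));
        }
        UseTree::Nested(segs, subtrees) => {
            let mut full = prefix.to_vec();
            full.extend(segs.iter().cloned());
            for sub in subtrees {
                decompose(sub, &full, out);
            }
        }
    }
}

fn main() {
    let tree = UseTree::Nested(
        vec!["foo".into()],
        vec![
            UseTree::Path(vec!["bar".into()]),
            UseTree::Path(vec!["baz".into(), "*".into()]),
        ],
    );

    let mut paths = Vec::new();
    decompose(&tree, &[], &mut paths);
    assert_eq!(paths, ["foo::bar", "foo::baz::*"]);
}
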
if !use_tree.has_subtree() { - ctxt.enter_scope(false); let path = UsePathId::lower_ast_partial(ctxt, use_tree.path()); - let id = TrackedItemId::Use(path).join(parent_id); + let id = ctxt.joined_id(TrackedItemId::Use(path)); + ctxt.enter_scope(id.clone(), false); let alias = use_tree .alias() .map(|alias| UseAlias::lower_ast_partial(ctxt, alias)); @@ -46,8 +42,8 @@ impl Use { decomposed_paths .into_iter() .map(|(path, alias, origin)| { - ctxt.enter_scope(false); - let id = TrackedItemId::Use(path).join(parent_id.clone()); + let id = ctxt.joined_id(TrackedItemId::Use(path)); + ctxt.enter_scope(id.clone(), false); let top_mod = ctxt.top_mod(); let alias = alias; let origin = HirOrigin::desugared(origin); From b0e353c0840bf7b07036680317cfa9e430012d6a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 13 Jul 2023 22:50:01 +0200 Subject: [PATCH 213/678] Let `VisitorCtxt` provide scope information --- crates/driver2/src/lib.rs | 2 +- crates/hir/src/hir_def/body.rs | 70 ++++++++- crates/hir/src/hir_def/item.rs | 57 +++++++- crates/hir/src/hir_def/scope_graph.rs | 28 +++- crates/hir/src/lower/expr.rs | 8 +- crates/hir/src/visitor.rs | 197 +++++++++++++++++++++----- 6 files changed, 315 insertions(+), 47 deletions(-) diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index a359974484..dd06721bdd 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -141,7 +141,7 @@ fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { let mut pass_manager = AnalysisPassManager::new(); pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); - pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); pass_manager } diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 538123d925..1fe8e61cac 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -9,9 +9,16 @@ use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap}; use parser::ast::{self, prelude::*}; use rustc_hash::FxHashMap; -use crate::span::{item::LazyBodySpan, HirOrigin}; +use crate::{ + span::{item::LazyBodySpan, HirOrigin}, + visitor::prelude::*, + HirDb, +}; -use super::{Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TopLevelMod, TrackedItemId}; +use super::{ + scope_graph::ScopeId, Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TopLevelMod, + TrackedItemId, +}; #[salsa::tracked] pub struct Body { @@ -20,7 +27,7 @@ pub struct Body { /// The expression that evaluates to the value of the body. /// In case of a function body, this is always be the block expression. - pub body_expr: ExprId, + pub expr: ExprId, #[return_ref] pub stmts: NodeStore>, @@ -40,6 +47,20 @@ impl Body { pub fn lazy_span(self) -> LazyBodySpan { LazyBodySpan::new(self) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } + + #[doc(hidden)] + /// Returns the BFS order of the blocks in the body. + /// + /// Currently, this is only used for testing. + /// When it turns out to be generally useful, we need to consider to let + /// salsa track this method. 
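
The `block_order` helper whose doc comment ends just above (its implementation follows in the next hunk) numbers the block expressions of a body by walking it and handing out a fresh index per block. Below is a minimal sketch of the same numbering idea over a toy, arena-style expression store, assuming a plain recursive walk instead of the crate's `Visitor`/`VisitorCtxt` machinery.

use std::collections::HashMap;

// Toy arena of expressions, indexed by `ExprId` like the HIR body store.
type ExprId = usize;

enum Expr {
    Lit(i64),
    Block(Vec<ExprId>),
}

// Assign a fresh, increasing number to every block expression, in visit order.
fn block_order(exprs: &[Expr], root: ExprId) -> HashMap<ExprId, usize> {
    fn walk(exprs: &[Expr], id: ExprId, next: &mut usize, order: &mut HashMap<ExprId, usize>) {
        if let Expr::Block(children) = &exprs[id] {
            order.insert(id, *next);
            *next += 1;
            for &child in children {
                walk(exprs, child, next, order);
            }
        }
    }

    let mut order = HashMap::new();
    walk(exprs, root, &mut 0, &mut order);
    order
}

fn main() {
    // A body roughly like `{ 1 { } { } }`.
    let exprs = vec![
        Expr::Lit(1),               // 0
        Expr::Block(vec![]),        // 1: first inner block
        Expr::Block(vec![]),        // 2: second inner block
        Expr::Block(vec![0, 1, 2]), // 3: outer block, the body's root expression
    ];

    let order = block_order(&exprs, 3);
    assert_eq!(order[&3], 0);
    assert_eq!(order[&1], 1);
    assert_eq!(order[&2], 2);
}
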
+ pub fn block_order(self, db: &dyn HirDb) -> FxHashMap { + BlockOrderCalculator::new(db).calculate(self) + } } pub type NodeStore = PrimaryMap; @@ -108,3 +129,46 @@ where } } } + +struct BlockOrderCalculator<'db> { + db: &'db dyn HirDb, + order: FxHashMap, + fresh_number: usize, +} + +impl<'db> Visitor for BlockOrderCalculator<'db> { + fn visit_expr( + &mut self, + ctxt: &mut crate::visitor::VisitorCtxt<'_, crate::span::expr::LazyExprSpan>, + expr: ExprId, + expr_data: &Expr, + ) { + if matches!(expr_data, Expr::Block(..)) { + self.order.insert(expr, self.fresh_number); + self.fresh_number += 1; + } + + walk_expr(self, ctxt, expr) + } +} + +impl<'db> BlockOrderCalculator<'db> { + fn new(db: &'db dyn HirDb) -> Self { + Self { + db, + order: FxHashMap::default(), + fresh_number: 0, + } + } + + fn calculate(mut self, body: Body) -> FxHashMap { + let expr = body.expr(self.db); + let Partial::Present(expr_data) = expr.data(self.db, body) else { + return self.order; + }; + + let mut ctxt = VisitorCtxt::with_expr(self.db, body.scope(), body, expr); + self.visit_expr(&mut ctxt, expr, expr_data); + self.order + } +} diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 52e3423df8..26628aafd0 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -23,8 +23,8 @@ use crate::{ use super::{ scope_graph::{ScopeGraph, ScopeId}, - AttrListId, Body, ExprId, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, - TypeId, UseAlias, WhereClauseId, + AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, TypeId, + UseAlias, WhereClauseId, }; #[derive( @@ -62,6 +62,10 @@ impl ItemKind { LazyItemSpan::new(self) } + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self) + } + pub fn name(self, db: &dyn HirDb) -> Option { use ItemKind::*; match self { @@ -240,6 +244,10 @@ impl TopLevelMod { LazyTopModSpan::new(self) } + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } + pub fn scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { lower::scope_graph_impl(db, self) } @@ -296,6 +304,10 @@ impl Mod { LazyModSpan::new(self) } + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } + pub fn children_non_nested(self, db: &dyn HirDb) -> impl Iterator + '_ { let s_graph = self.top_mod(db).scope_graph(db); let scope = ScopeId::from_item(self.into()); @@ -327,6 +339,10 @@ impl Func { LazyFuncSpan::new(self) } + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } + pub fn vis(self, db: &dyn HirDb) -> Visibility { self.modifier(db).to_visibility() } @@ -352,6 +368,10 @@ impl Struct { pub fn lazy_span(self) -> LazyStructSpan { LazyStructSpan::new(self) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -372,6 +392,10 @@ impl Contract { pub fn lazy_span(self) -> LazyContractSpan { LazyContractSpan::new(self) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -394,6 +418,10 @@ impl Enum { pub fn lazy_span(self) -> LazyEnumSpan { LazyEnumSpan::new(self) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -416,6 +444,10 @@ impl TypeAlias { pub fn lazy_span(self) -> LazyTypeAliasSpan { LazyTypeAliasSpan::new(self) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -442,6 +474,10 @@ impl Impl { let scope = ScopeId::from_item(self.into()); s_graph.child_items(scope) } + + pub fn scope(self) -> 
ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -470,6 +506,10 @@ impl Trait { let scope = ScopeId::from_item(self.into()); s_graph.child_items(scope) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -497,6 +537,10 @@ impl ImplTrait { let scope = ScopeId::from_item(self.into()); s_graph.child_items(scope) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -517,6 +561,10 @@ impl Const { pub fn lazy_span(self) -> LazyConstSpan { LazyConstSpan::new(self) } + + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } } #[salsa::tracked] @@ -537,6 +585,10 @@ impl Use { LazyUseSpan::new(self) } + pub fn scope(self) -> ScopeId { + ScopeId::from_item(self.into()) + } + /// Returns imported name if it is present and not a glob. pub fn imported_name(&self, db: &dyn HirDb) -> Option { if let Some(alias) = self.alias(db) { @@ -671,7 +723,6 @@ pub enum TrackedItemId { Extern, FuncBody, NamelessBody, - InsideBlock(Box, ExprId), Joined(Box, Box), } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 2b172c3523..9c2751beb8 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -28,6 +28,10 @@ impl ScopeGraph { /// Returns the direct child items of the scope. pub fn child_items(&self, scope: ScopeId) -> impl Iterator + '_ { + self.children(scope).filter_map(|child| child.to_item()) + } + + pub fn children(&self, scope: ScopeId) -> impl Iterator + '_ { self.edges(scope).filter_map(|edge| match edge.kind { EdgeKind::Lex(_) | EdgeKind::Super(_) @@ -35,7 +39,7 @@ impl ScopeGraph { | EdgeKind::SelfTy(_) | EdgeKind::Self_(_) => None, - _ => edge.dest.to_item(), + _ => Some(edge.dest), }) } @@ -95,6 +99,18 @@ impl ScopeId { Self::Item(top_mod.into()) } + /// Returns the nearest enclosing item. 
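
With the `scope()` helpers added above, every HIR item can produce its `ScopeId`, and `ScopeId::item()` (continued below) walks the other way, from any scope back to its nearest enclosing item. A compact sketch of that mapping with simplified stand-in types:

// Simplified stand-ins for the HIR item and scope types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Item {
    Func(u32),
    Struct(u32),
    Body(u32),
}

// The real `ScopeId` also has `FuncParam` and `Variant` scopes; the shape is the same.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ScopeId {
    Item(Item),
    GenericParam(Item, usize),
    Field(Item, usize),
    Block(/* body */ u32, /* block expr */ usize),
}

impl ScopeId {
    // Every scope can name its nearest enclosing item; block scopes belong to a body.
    fn item(self) -> Item {
        match self {
            ScopeId::Item(item)
            | ScopeId::GenericParam(item, _)
            | ScopeId::Field(item, _) => item,
            ScopeId::Block(body, _) => Item::Body(body),
        }
    }
}

fn main() {
    assert_eq!(ScopeId::Item(Item::Func(1)).item(), Item::Func(1));
    assert_eq!(ScopeId::GenericParam(Item::Func(1), 0).item(), Item::Func(1));
    assert_eq!(ScopeId::Field(Item::Struct(7), 2).item(), Item::Struct(7));
    assert_eq!(ScopeId::Block(3, 0).item(), Item::Body(3));
}
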
+ pub fn item(self) -> ItemKind { + match self { + ScopeId::Item(item) => item, + ScopeId::GenericParam(item, _) => item, + ScopeId::FuncParam(item, _) => item, + ScopeId::Field(item, _) => item, + ScopeId::Variant(item, _) => item, + ScopeId::Block(body, _) => body.into(), + } + } + pub fn is_importable(self) -> bool { !matches!( self, @@ -283,11 +299,17 @@ impl ScopeId { } pub fn pretty_path(self, db: &dyn HirDb) -> Option { + let name = match self { + ScopeId::Block(body, expr) => format!("block_{}", body.block_order(db)[&expr]), + _ => self.name(db)?.data(db).clone(), + }; + dbg!(&name); + if let Some(parent) = self.parent(db) { let parent_path = parent.pretty_path(db)?; - Some(format!("{}::{}", parent_path, self.name(db)?.data(db))) + Some(format!("{}::{}", parent_path, name)) } else { - self.name(db).map(|name| name.data(db).clone()) + Some(name) } } } diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs index f6518c23d9..1e121e16a2 100644 --- a/crates/hir/src/lower/expr.rs +++ b/crates/hir/src/lower/expr.rs @@ -1,7 +1,9 @@ use parser::ast::{self, prelude::*}; use crate::{ - hir_def::{expr::*, Body, GenericArgListId, IdentId, IntegerId, LitKind, Pat, PathId, Stmt}, + hir_def::{ + expr::*, Body, GenericArgListId, IdentId, IntegerId, ItemKind, LitKind, Pat, PathId, Stmt, + }, span::HirOrigin, }; @@ -29,6 +31,10 @@ impl Expr { } let expr_id = ctxt.push_expr(Self::Block(stmts), HirOrigin::raw(&ast)); + for item in block.items() { + ItemKind::lower_ast(ctxt.f_ctxt, item); + } + ctxt.f_ctxt.leave_block_scope(expr_id); return expr_id; } diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 8ea32a169a..31bf8dfc67 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -2,12 +2,12 @@ use std::{marker::PhantomData, mem}; use crate::{ hir_def::{ - attr, Body, CallArg, Const, Contract, Enum, Expr, ExprId, Field, FieldDef, FieldDefListId, - FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, FuncParamName, GenericArg, - GenericArgListId, GenericParam, GenericParamListId, IdentId, Impl, ImplTrait, ItemKind, - LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, StmtId, Struct, TopLevelMod, - Trait, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, UsePathId, UsePathSegment, - VariantDef, VariantDefListId, WhereClauseId, WherePredicate, + attr, scope_graph::ScopeId, Body, CallArg, Const, Contract, Enum, Expr, ExprId, Field, + FieldDef, FieldDefListId, FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, + FuncParamName, GenericArg, GenericArgListId, GenericParam, GenericParamListId, IdentId, + Impl, ImplTrait, ItemKind, LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, + StmtId, Struct, TopLevelMod, Trait, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, + UsePathId, UsePathSegment, VariantDef, VariantDefListId, WhereClauseId, WherePredicate, }, span::{lazy_spans::*, transition::ChainRoot, SpanDowncast}, HirDb, @@ -238,20 +238,35 @@ pub trait Visitor { walk_variant_def(self, ctxt, variant) } - fn visit_stmt(&mut self, ctxt: &mut VisitorCtxt<'_, LazyStmtSpan>, stmt: &Stmt) { + fn visit_stmt( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyStmtSpan>, + stmt: StmtId, + #[allow(unused_variables)] stmt_data: &Stmt, + ) { walk_stmt(self, ctxt, stmt) } - fn visit_expr(&mut self, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) { + fn visit_expr( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, + expr: ExprId, + #[allow(unused_variables)] expr_data: &Expr, + ) { walk_expr(self, ctxt, expr) } - 
fn visit_arm(&mut self, ctxt: &mut VisitorCtxt<'_, LazyMatchArmSpan>, arm: &MatchArm) { - walk_arm(self, ctxt, arm) + fn visit_pat( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, + pat: PatId, + #[allow(unused_variables)] pat_data: &Pat, + ) { + walk_pat(self, ctxt, pat) } - fn visit_pat(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) { - walk_pat(self, ctxt, pat) + fn visit_arm(&mut self, ctxt: &mut VisitorCtxt<'_, LazyMatchArmSpan>, arm: &MatchArm) { + walk_arm(self, ctxt, arm) } fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { @@ -808,15 +823,18 @@ pub fn walk_body(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyBodySpan>, b where V: Visitor + ?Sized, { - for stmt_id in body.stmts(ctxt.db).keys() { - visit_node_in_body!(visitor, ctxt, &stmt_id, stmt); - } + let body_expr = body.expr(ctxt.db); + visit_node_in_body!(visitor, ctxt, &body_expr, expr); } -pub fn walk_stmt(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyStmtSpan>, stmt: &Stmt) +pub fn walk_stmt(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyStmtSpan>, stmt: StmtId) where V: Visitor + ?Sized, { + let Partial::Present(stmt) = stmt.data(ctxt.db, ctxt.body()) else { + return; + }; + match stmt { Stmt::Let(pat_id, ty, expr_id) => { visit_node_in_body!(visitor, ctxt, pat_id, pat); @@ -859,11 +877,15 @@ where } } -pub fn walk_expr(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) +pub fn walk_expr(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: ExprId) where V: Visitor + ?Sized, { - match expr { + let Partial::Present(data) = expr.data(ctxt.db, ctxt.body()) else { + return; + }; + + match data { Expr::Lit(lit) => ctxt.with_new_ctxt( |span| span.into_lit_expr().lit_moved(), |ctxt| { @@ -872,6 +894,13 @@ where ), Expr::Block(stmts) => { + let s_graph = ctxt.top_mod().scope_graph(ctxt.db); + let scope = ctxt.scope(); + for item in s_graph.child_items(scope) { + let mut new_ctxt = VisitorCtxt::with_item(ctxt.db, item); + visitor.visit_item(&mut new_ctxt, item); + } + for stmt_id in stmts { visit_node_in_body!(visitor, ctxt, stmt_id, stmt); } @@ -1052,11 +1081,15 @@ where visit_node_in_body!(visitor, ctxt, &arm.body, expr); } -pub fn walk_pat(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) +pub fn walk_pat(visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: PatId) where V: Visitor + ?Sized, { - match pat { + let Partial::Present(data) = pat.data(ctxt.db, ctxt.body()) else { + return; + }; + + match data { Pat::Lit(lit) => { if let Some(lit) = lit.to_opt() { ctxt.with_new_ctxt( @@ -1229,8 +1262,10 @@ pub fn walk_generic_param_list( ) where V: Visitor + ?Sized, { + let parent_item = ctxt.scope().item(); for (i, param) in params.data(ctxt.db).iter().enumerate() { - ctxt.with_new_ctxt( + ctxt.with_new_scoped_ctxt( + ScopeId::GenericParam(parent_item, i), |span| span.param_moved(i), |ctxt| { visitor.visit_generic_param(ctxt, param); @@ -1375,8 +1410,10 @@ pub fn walk_func_param_list( ) where V: Visitor + ?Sized, { + let parent_item = ctxt.scope().item(); for (idx, param) in params.data(ctxt.db).iter().enumerate() { - ctxt.with_new_ctxt( + ctxt.with_new_scoped_ctxt( + ScopeId::FuncParam(parent_item, idx), |span| span.param_moved(idx), |ctxt| { visitor.visit_func_param(ctxt, param); @@ -1449,8 +1486,10 @@ pub fn walk_field_def_list( ) where V: Visitor + ?Sized, { + let parent_item = ctxt.scope().item(); for (idx, field) in fields.data(ctxt.db).iter().enumerate() { - ctxt.with_new_ctxt( + ctxt.with_new_scoped_ctxt( + 
ScopeId::Field(parent_item, idx), |span| span.field_moved(idx), |ctxt| { visitor.visit_field_def(ctxt, field); @@ -1492,8 +1531,10 @@ pub fn walk_variant_def_list( ) where V: Visitor + ?Sized, { + let parent_item = ctxt.scope().item(); for (idx, variant) in variants.data(ctxt.db).iter().enumerate() { - ctxt.with_new_ctxt( + ctxt.with_new_scoped_ctxt( + ScopeId::Variant(parent_item, idx), |span| span.variant_moved(idx), |ctxt| { visitor.visit_variant_def(ctxt, variant); @@ -1721,8 +1762,8 @@ pub fn walk_where_predicate( use attr::{Attr, AttrListId}; -/// [`VisitorCtxt`] is used to track the span information of the current node -/// being visited. +/// [`VisitorCtxt`] is used to track the span information and the scope of the +/// current node being visited. /// The context is updated automatically when entering a new node. Thus, the /// user need to only construct the context when invoking a visitor. pub struct VisitorCtxt<'db, T> @@ -1731,6 +1772,8 @@ where { db: &'db dyn HirDb, span: DynLazySpan, + scope_stack: Vec, + _t: PhantomData, } @@ -1738,6 +1781,10 @@ impl<'db, T> VisitorCtxt<'db, T> where T: LazySpan, { + pub fn db(&self) -> &'db dyn HirDb { + self.db + } + pub fn span(&self) -> Option where T: SpanDowncast, @@ -1746,6 +1793,82 @@ where T::downcast(dyn_span) } + pub fn scope(&self) -> ScopeId { + *self.scope_stack.last().unwrap() + } + + pub fn top_mod(&self) -> TopLevelMod { + match self.span.0.as_ref().unwrap().root { + ChainRoot::ItemKind(item) => item.top_mod(self.db), + ChainRoot::TopMod(top_mod) => top_mod, + ChainRoot::Mod(mod_) => mod_.top_mod(self.db), + ChainRoot::Func(func) => func.top_mod(self.db), + ChainRoot::Struct(struct_) => struct_.top_mod(self.db), + ChainRoot::Contract(contract) => contract.top_mod(self.db), + ChainRoot::Enum(enum_) => enum_.top_mod(self.db), + ChainRoot::TypeAlias(alias) => alias.top_mod(self.db), + ChainRoot::Impl(impl_) => impl_.top_mod(self.db), + ChainRoot::Trait(trait_) => trait_.top_mod(self.db), + ChainRoot::ImplTrait(impl_trait) => impl_trait.top_mod(self.db), + ChainRoot::Const(const_) => const_.top_mod(self.db), + ChainRoot::Use(use_) => use_.top_mod(self.db), + ChainRoot::Body(body) => body.top_mod(self.db), + ChainRoot::Stmt(_) | ChainRoot::Expr(_) | ChainRoot::Pat(_) => { + self.body().top_mod(self.db) + } + } + } + + /// Create a new context for visiting a pattern. + /// `scope` is the scope that encloses the pattern. + pub fn with_pat(db: &'db dyn HirDb, scope: ScopeId, body: Body, pat: PatId) -> Self { + Self { + db, + span: LazyPatSpan::new(body, pat).into(), + scope_stack: vec![scope], + _t: PhantomData, + } + } + + /// Create a new context for visiting a statement. + /// `scope` is the scope that encloses the statement. + pub fn with_stmt(db: &'db dyn HirDb, scope: ScopeId, body: Body, stmt: StmtId) -> Self { + Self { + db, + span: LazyStmtSpan::new(body, stmt).into(), + scope_stack: vec![scope], + _t: PhantomData, + } + } + + /// Create a new context for visiting an expression. + /// `scope` is the scope that encloses the expression. 
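
The `VisitorCtxt` changes above pair the span chain with a scope stack, so a visitor can always ask which scope it is currently in. The sketch below shows only the push/pop discipline behind `with_new_scoped_ctxt`, using a toy context in place of the real span machinery.

// Toy scope identifiers; the real code uses `ScopeId`.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Scope {
    Item(&'static str),
    GenericParam(&'static str, usize),
    Block(usize),
}

struct VisitorCtxt {
    scope_stack: Vec<Scope>,
}

impl VisitorCtxt {
    fn new(root: Scope) -> Self {
        Self { scope_stack: vec![root] }
    }

    // The current scope is always the top of the stack.
    fn scope(&self) -> Scope {
        *self.scope_stack.last().unwrap()
    }

    // Run `f` with `scope` pushed, and pop it again afterwards.
    fn with_scope<R>(&mut self, scope: Scope, f: impl FnOnce(&mut Self) -> R) -> R {
        self.scope_stack.push(scope);
        let out = f(self);
        self.scope_stack.pop();
        out
    }
}

fn main() {
    let mut ctxt = VisitorCtxt::new(Scope::Item("Func(foo)"));
    assert_eq!(ctxt.scope(), Scope::Item("Func(foo)"));

    ctxt.with_scope(Scope::GenericParam("Func(foo)", 0), |ctxt| {
        // While visiting the generic parameter, the scope reflects it...
        assert_eq!(ctxt.scope(), Scope::GenericParam("Func(foo)", 0));
    });

    ctxt.with_scope(Scope::Block(0), |ctxt| {
        assert_eq!(ctxt.scope(), Scope::Block(0));
    });

    // ...and is restored once the sub-node has been visited.
    assert_eq!(ctxt.scope(), Scope::Item("Func(foo)"));
}
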
+ pub fn with_expr(db: &'db dyn HirDb, scope: ScopeId, body: Body, expr: ExprId) -> Self { + let scope_id = match expr.data(db, body) { + Partial::Present(Expr::Block(_)) => ScopeId::Block(body, expr), + _ => scope, + }; + + Self { + db, + span: LazyExprSpan::new(body, expr).into(), + scope_stack: vec![scope_id], + _t: PhantomData, + } + } + + fn with_new_scoped_ctxt(&mut self, scope_id: ScopeId, f1: F1, f2: F2) + where + T: SpanDowncast, + F1: FnOnce(T) -> U, + F2: FnOnce(&mut VisitorCtxt), + U: LazySpan + SpanDowncast + Into, + { + self.scope_stack.push(scope_id); + self.with_new_ctxt(f1, f2); + self.scope_stack.pop(); + } + fn with_new_ctxt(&mut self, f1: F1, f2: F2) where T: SpanDowncast, @@ -1769,12 +1892,14 @@ where U: LazySpan + SpanDowncast + Into, { let dyn_span = mem::replace(&mut self.span, DynLazySpan::invalid()); + let scope_stack = mem::take(&mut self.scope_stack); let span = T::downcast(dyn_span).unwrap(); let u = f(span); Self { db: self.db, span: u.into(), + scope_stack, _t: PhantomData, } .cast() @@ -1791,6 +1916,7 @@ where Self { db: self.db, span: self.span, + scope_stack: self.scope_stack, _t: PhantomData, } .cast() @@ -1800,6 +1926,7 @@ where VisitorCtxt { db: self.db, span: self.span, + scope_stack: self.scope_stack, _t: PhantomData, } } @@ -1815,16 +1942,17 @@ where } } -macro_rules! define_ctxt_ctor { +macro_rules! define_item_ctxt_ctor { ($(( $span_ty:ty, - $ctor:ident($($ctor_name:ident: $ctor_ty:ty),*)),)*) => { + $ctor:ident($ctor_name:ident: $ctor_ty:ty)),)*) => { $(impl<'db> VisitorCtxt<'db, $span_ty> { /// Create a new [`VisitorCtxt`] with the given item as the root of the span chain. - pub fn $ctor(db: &'db dyn HirDb, $($ctor_name: $ctor_ty,)*) -> Self { + pub fn $ctor(db: &'db dyn HirDb, $ctor_name: $ctor_ty) -> Self { Self { db, - span: <$span_ty>::new($($ctor_name),*).into(), + span: <$span_ty>::new($ctor_name).into(), + scope_stack: vec![$ctor_name.scope()], _t: PhantomData, } } @@ -1832,7 +1960,7 @@ macro_rules! define_ctxt_ctor { }; } -define_ctxt_ctor! { +define_item_ctxt_ctor! { (LazyItemSpan, with_item(item: ItemKind)), (LazyTopModSpan, with_top_mod(top_mod: TopLevelMod)), (LazyModSpan, with_mod(mod_: Mod)), @@ -1847,17 +1975,14 @@ define_ctxt_ctor! { (LazyConstSpan, with_const(const_: Const)), (LazyUseSpan, with_use(use_: Use)), (LazyBodySpan, with_body(body: Body)), - (LazyExprSpan, with_expr(body: Body, expr: ExprId)), - (LazyStmtSpan, with_stmt(body: Body, stmt: StmtId)), - (LazyPatSpan, with_pat(body: Body, pat: PatId)), - } macro_rules! visit_node_in_body { ($visitor:expr, $ctxt:expr, $id:expr, $inner:ident) => { if let Partial::Present(data) = $id.data($ctxt.db, $ctxt.body()) { + let scope = *$ctxt.scope_stack.last().unwrap(); paste::paste! 
{ - $visitor.[](&mut VisitorCtxt::[]($ctxt.db, $ctxt.body(), *$id), data); + $visitor.[](&mut VisitorCtxt::[]($ctxt.db, scope, $ctxt.body(), *$id), *$id, data); } } From ae94d2b460d6ce344fecc671b59335e7eb2f8159 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 15:46:18 +0200 Subject: [PATCH 214/678] Add `BodyKind` --- crates/hir/src/hir_def/body.rs | 24 +++++++++++++++++------- crates/hir/src/hir_def/scope_graph.rs | 12 ++++++++++-- crates/hir/src/lower/body.rs | 11 ++++++----- 3 files changed, 33 insertions(+), 14 deletions(-) diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index 1fe8e61cac..f41304d256 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -29,6 +29,8 @@ pub struct Body { /// In case of a function body, this is always be the block expression. pub expr: ExprId, + pub body_kind: BodyKind, + #[return_ref] pub stmts: NodeStore>, #[return_ref] @@ -59,10 +61,16 @@ impl Body { /// When it turns out to be generally useful, we need to consider to let /// salsa track this method. pub fn block_order(self, db: &dyn HirDb) -> FxHashMap { - BlockOrderCalculator::new(db).calculate(self) + BlockOrderCalculator::new(db, self).calculate() } } +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum BodyKind { + FuncBody, + Anonymous, +} + pub type NodeStore = PrimaryMap; pub trait SourceAst: AstNode + Clone + Hash + PartialEq + Eq {} @@ -133,6 +141,7 @@ where struct BlockOrderCalculator<'db> { db: &'db dyn HirDb, order: FxHashMap, + body: Body, fresh_number: usize, } @@ -143,7 +152,7 @@ impl<'db> Visitor for BlockOrderCalculator<'db> { expr: ExprId, expr_data: &Expr, ) { - if matches!(expr_data, Expr::Block(..)) { + if ctxt.body() == self.body && matches!(expr_data, Expr::Block(..)) { self.order.insert(expr, self.fresh_number); self.fresh_number += 1; } @@ -153,21 +162,22 @@ impl<'db> Visitor for BlockOrderCalculator<'db> { } impl<'db> BlockOrderCalculator<'db> { - fn new(db: &'db dyn HirDb) -> Self { + fn new(db: &'db dyn HirDb, body: Body) -> Self { Self { db, order: FxHashMap::default(), + body, fresh_number: 0, } } - fn calculate(mut self, body: Body) -> FxHashMap { - let expr = body.expr(self.db); - let Partial::Present(expr_data) = expr.data(self.db, body) else { + fn calculate(mut self) -> FxHashMap { + let expr = self.body.expr(self.db); + let Partial::Present(expr_data) = expr.data(self.db, self.body) else { return self.order; }; - let mut ctxt = VisitorCtxt::with_expr(self.db, body.scope(), body, expr); + let mut ctxt = VisitorCtxt::with_expr(self.db, self.body.scope(), self.body, expr); self.visit_expr(&mut ctxt, expr, expr_data); self.order } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 9c2751beb8..3b5f2419ed 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -2,7 +2,11 @@ use std::collections::BTreeSet; use rustc_hash::{FxHashMap, FxHashSet}; -use crate::{hir_def::GenericParamOwner, span::DynLazySpan, HirDb}; +use crate::{ + hir_def::{BodyKind, GenericParamOwner}, + span::DynLazySpan, + HirDb, +}; use super::{ Body, Enum, ExprId, Func, FuncParamLabel, IdentId, IngotId, ItemKind, TopLevelMod, Use, @@ -300,7 +304,11 @@ impl ScopeId { pub fn pretty_path(self, db: &dyn HirDb) -> Option { let name = match self { - ScopeId::Block(body, expr) => format!("block_{}", body.block_order(db)[&expr]), + ScopeId::Block(body, expr) => format!("{{block{}}}", body.block_order(db)[&expr]), + 
ScopeId::Item(ItemKind::Body(body)) => match body.body_kind(db) { + BodyKind::FuncBody => "{fn_body}".to_string().into(), + BodyKind::Anonymous => "{anonymous_body}".to_string().into(), + }, _ => self.name(db)?.data(db).clone(), }; dbg!(&name); diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs index 8463becb34..928faef5ab 100644 --- a/crates/hir/src/lower/body.rs +++ b/crates/hir/src/lower/body.rs @@ -2,7 +2,7 @@ use parser::ast; use crate::{ hir_def::{ - Body, BodySourceMap, Expr, ExprId, NodeStore, Partial, Pat, PatId, Stmt, StmtId, + Body, BodyKind, BodySourceMap, Expr, ExprId, NodeStore, Partial, Pat, PatId, Stmt, StmtId, TrackedItemId, }, span::HirOrigin, @@ -15,14 +15,14 @@ impl Body { let id = f_ctxt.joined_id(TrackedItemId::FuncBody); let mut ctxt = BodyCtxt::new(f_ctxt, id); let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); - ctxt.build(&ast, body_expr) + ctxt.build(&ast, body_expr, BodyKind::FuncBody) } pub(super) fn lower_ast_nameless(f_ctxt: &mut FileLowerCtxt<'_>, ast: ast::Expr) -> Self { let id = f_ctxt.joined_id(TrackedItemId::NamelessBody); let mut ctxt = BodyCtxt::new(f_ctxt, id); let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); - ctxt.build(&ast, body_expr) + ctxt.build(&ast, body_expr, BodyKind::Anonymous) } } @@ -77,7 +77,7 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { } fn new(f_ctxt: &'ctxt mut FileLowerCtxt<'db>, id: TrackedItemId) -> Self { - f_ctxt.enter_scope(id.clone(), false); + f_ctxt.enter_body_scope(id.clone()); Self { f_ctxt, id, @@ -88,12 +88,13 @@ impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { } } - fn build(self, ast: &ast::Expr, body_expr: ExprId) -> Body { + fn build(self, ast: &ast::Expr, body_expr: ExprId, body_kind: BodyKind) -> Body { let origin = HirOrigin::raw(ast); let body = Body::new( self.f_ctxt.db(), self.id, body_expr, + body_kind, self.stmts, self.exprs, self.pats, From 1818641121e996eb5740b0297e131650dc82c546 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 15:47:57 +0200 Subject: [PATCH 215/678] Fix a bug in constructing block scope --- crates/hir/src/lower/item.rs | 20 ++++++++++---------- crates/hir/src/lower/mod.rs | 6 +++++- crates/hir/src/lower/scope_builder.rs | 22 +++++++++++++++------- crates/hir/src/lower/use_tree.rs | 6 +++--- 4 files changed, 33 insertions(+), 21 deletions(-) diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index bdb3338a6a..c622b5bc34 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -71,7 +71,7 @@ impl Mod { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Mod) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = ctxt.joined_id(TrackedItemId::Mod(name)); - ctxt.enter_scope(id.clone(), true); + ctxt.enter_item_scope(id.clone(), true); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -89,7 +89,7 @@ impl Func { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Func, is_extern: bool) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = ctxt.joined_id(TrackedItemId::Func(name)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); @@ -128,7 +128,7 @@ impl Struct { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Struct) -> Self { let name = 
IdentId::lower_token_partial(ctxt, ast.name()); let id = ctxt.joined_id(TrackedItemId::Struct(name)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -157,7 +157,7 @@ impl Contract { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Contract) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = ctxt.joined_id(TrackedItemId::Contract(name)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -182,7 +182,7 @@ impl Enum { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Enum) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = ctxt.joined_id(TrackedItemId::Enum(name)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -211,7 +211,7 @@ impl TypeAlias { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeAlias) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.alias()); let id = ctxt.joined_id(TrackedItemId::TypeAlias(name)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -240,7 +240,7 @@ impl Impl { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Impl) -> Self { let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let id = ctxt.joined_id(TrackedItemId::Impl(ty)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); @@ -271,7 +271,7 @@ impl Trait { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Trait) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = ctxt.joined_id(TrackedItemId::Trait(name)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); @@ -306,7 +306,7 @@ impl ImplTrait { let trait_ref = TraitRef::lower_ast_partial(ctxt, ast.trait_ref()); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let id = ctxt.joined_id(TrackedItemId::ImplTrait(trait_ref, ty)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); @@ -338,7 +338,7 @@ impl Const { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::Const) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); let id = ctxt.joined_id(TrackedItemId::Const(name)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); let body = ast.value().map(|ast| Body::lower_ast(ctxt, ast)).into(); diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs index 65528bb996..b47de8e3cb 100644 
--- a/crates/hir/src/lower/mod.rs +++ b/crates/hir/src/lower/mod.rs @@ -112,10 +112,14 @@ impl<'db> FileLowerCtxt<'db> { } /// Creates a new scope for an item. - fn enter_scope(&mut self, id: TrackedItemId, is_mod: bool) { + fn enter_item_scope(&mut self, id: TrackedItemId, is_mod: bool) { self.builder.enter_item_scope(id, is_mod); } + fn enter_body_scope(&mut self, id: TrackedItemId) { + self.builder.enter_body_scope(id); + } + /// Leaves the current scope, `item` should be the generated item which owns /// the scope. fn leave_item_scope(&mut self, item: I) -> I diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index dccb718982..c687dc258c 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -1,5 +1,3 @@ -use std::mem; - use cranelift_entity::{entity_impl, PrimaryMap}; use rustc_hash::{FxHashMap, FxHashSet}; @@ -19,7 +17,7 @@ pub(super) struct ScopeGraphBuilder<'db> { scope_stack: Vec, module_stack: Vec, id_stack: Vec, - declared_blocks: FxHashMap>, + declared_blocks: Vec>>, } impl<'db> ScopeGraphBuilder<'db> { @@ -31,7 +29,7 @@ impl<'db> ScopeGraphBuilder<'db> { scope_stack: Default::default(), module_stack: Default::default(), id_stack: Default::default(), - declared_blocks: FxHashMap::default(), + declared_blocks: vec![], }; builder.enter_item_scope(TrackedItemId::TopLevelMod(top_mod.name(db)), true); @@ -47,6 +45,11 @@ impl<'db> ScopeGraphBuilder<'db> { self.enter_scope_impl(is_mod); } + pub(super) fn enter_body_scope(&mut self, id: TrackedItemId) { + self.declared_blocks.push(FxHashMap::default()); + self.enter_item_scope(id, false); + } + pub(super) fn leave_item_scope(&mut self, item: ItemKind) { use ItemKind::*; @@ -232,7 +235,7 @@ impl<'db> ScopeGraphBuilder<'db> { Body(body) => { self.graph.add_lex_edge(item_node, parent_node); - for (node, block) in mem::take(&mut self.declared_blocks) { + for (node, block) in self.declared_blocks.pop().unwrap() { let block = block.unwrap(); self.finalize_block_scope(node, body, block); } @@ -252,13 +255,18 @@ impl<'db> ScopeGraphBuilder<'db> { pub(super) fn enter_block_scope(&mut self) { let node = self.enter_scope_impl(false); - self.declared_blocks.insert(node, None); + self.declared_blocks.last_mut().unwrap().insert(node, None); } pub(super) fn leave_block_scope(&mut self, block: ExprId) { let block_node = self.scope_stack.pop().unwrap(); let parent_node = *self.scope_stack.last().unwrap(); - *self.declared_blocks.get_mut(&block_node).unwrap() = Some(block); + *self + .declared_blocks + .last_mut() + .unwrap() + .get_mut(&block_node) + .unwrap() = Some(block); self.graph.add_lex_edge(block_node, parent_node); self.graph .add_edge(parent_node, block_node, EdgeKind::anon()); diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs index b4b4998705..d54673cec2 100644 --- a/crates/hir/src/lower/use_tree.rs +++ b/crates/hir/src/lower/use_tree.rs @@ -13,7 +13,7 @@ impl Use { let Some(use_tree) = ast.use_tree() else { let id = ctxt.joined_id(TrackedItemId::Use(Partial::Absent)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let path = Partial::Absent; let alias = None; let top_mod = ctxt.top_mod(); @@ -27,7 +27,7 @@ impl Use { if !use_tree.has_subtree() { let path = UsePathId::lower_ast_partial(ctxt, use_tree.path()); let id = ctxt.joined_id(TrackedItemId::Use(path)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let alias = use_tree .alias() .map(|alias| 
UseAlias::lower_ast_partial(ctxt, alias)); @@ -43,7 +43,7 @@ impl Use { .into_iter() .map(|(path, alias, origin)| { let id = ctxt.joined_id(TrackedItemId::Use(path)); - ctxt.enter_scope(id.clone(), false); + ctxt.enter_item_scope(id.clone(), false); let top_mod = ctxt.top_mod(); let alias = alias; let origin = HirOrigin::desugared(origin); From 03d8108b78b7671617387d128ba6645723276fa3 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 15:49:15 +0200 Subject: [PATCH 216/678] Improve `Visitor` usability --- .../hir-analysis/src/name_resolution/mod.rs | 87 ++++++++++--------- .../tests/early_path_resolution.rs | 37 +++----- crates/hir/src/visitor.rs | 23 ++--- crates/parser2/src/ast/expr.rs | 5 ++ crates/parser2/src/ast/item.rs | 1 - 5 files changed, 74 insertions(+), 79 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index d6429a4194..2e4b3b151e 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -16,8 +16,8 @@ use hir::{ analysis_pass::ModuleAnalysisPass, diagnostics::DiagnosticVoucher, hir_def::{ - scope_graph::ScopeId, Expr, FieldDefListId, GenericParamListId, IdentId, IngotId, ItemKind, - Partial, Pat, PathId, TopLevelMod, TypeBound, TypeId, VariantDefListId, + scope_graph::ScopeId, Expr, ExprId, IdentId, IngotId, ItemKind, Partial, Pat, PatId, + PathId, TopLevelMod, TypeBound, TypeId, }, visitor::prelude::*, }; @@ -219,7 +219,13 @@ impl<'db, 'a> EarlyPathVisitor<'db, 'a> { } } - fn verify_path(&mut self, path: PathId, span: LazyPathSpan, bucket: NameResBucket) { + fn verify_path( + &mut self, + path: PathId, + scope: ScopeId, + span: LazyPathSpan, + bucket: NameResBucket, + ) { let path_kind = self.path_ctxt.last().unwrap(); let last_seg_idx = path.len(self.db.as_hir_db()) - 1; let last_seg_ident = *path.segments(self.db.as_hir_db())[last_seg_idx].unwrap(); @@ -252,10 +258,7 @@ impl<'db, 'a> EarlyPathVisitor<'db, 'a> { match path_kind.pick(self.db, bucket) { // The path exists and belongs to the expected kind. 
Either::Left(res) => { - if !res.is_visible( - self.db, - ScopeId::from_item(*self.item_stack.last().unwrap()), - ) { + if !res.is_visible(self.db, scope) { self.diags.push(NameResDiag::invisible( span, last_seg_ident, @@ -368,46 +371,34 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { self.path_ctxt.pop(); } - fn visit_field_def_list( + fn visit_field_def( &mut self, - ctxt: &mut VisitorCtxt<'_, LazyFieldDefListSpan>, - fields: FieldDefListId, + ctxt: &mut VisitorCtxt<'_, LazyFieldDefSpan>, + field: &hir::hir_def::FieldDef, ) { - let parent_item = *self.item_stack.last().unwrap(); - for i in 0..fields.data(self.db.as_hir_db()).len() { - let scope = ScopeId::Field(parent_item, i); - self.check_conflict(scope); - } - - walk_field_def_list(self, ctxt, fields); + let scope = ctxt.scope(); + self.check_conflict(scope); + walk_field_def(self, ctxt, field); } - fn visit_variant_def_list( + fn visit_variant_def( &mut self, - ctxt: &mut VisitorCtxt<'_, LazyVariantDefListSpan>, - variants: VariantDefListId, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefSpan>, + variant: &hir::hir_def::VariantDef, ) { - let parent_item = *self.item_stack.last().unwrap(); - for i in 0..variants.data(self.db.as_hir_db()).len() { - let scope = ScopeId::Variant(parent_item, i); - self.check_conflict(scope); - } - - walk_variant_def_list(self, ctxt, variants); + let scope = ctxt.scope(); + self.check_conflict(scope); + walk_variant_def(self, ctxt, variant); } - fn visit_generic_param_list( + fn visit_generic_param( &mut self, - ctxt: &mut VisitorCtxt<'_, hir::span::params::LazyGenericParamListSpan>, - params: GenericParamListId, + ctxt: &mut VisitorCtxt<'_, LazyGenericParamSpan>, + param: &hir::hir_def::GenericParam, ) { - let parent_item = *self.item_stack.last().unwrap(); - for i in 0..params.data(self.db.as_hir_db()).len() { - let scope = ScopeId::GenericParam(parent_item, i); - self.check_conflict(scope); - } - - walk_generic_param_list(self, ctxt, params); + let scope = ctxt.scope(); + self.check_conflict(scope); + walk_generic_param(self, ctxt, param); } fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, ty: TypeId) { @@ -418,12 +409,24 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { // We don't need to run path analysis on patterns, statements and expressions in // early path resolution. - fn visit_pat(&mut self, _: &mut VisitorCtxt<'_, LazyPatSpan>, _: &Pat) {} - fn visit_stmt(&mut self, _: &mut VisitorCtxt<'_, LazyStmtSpan>, _: &hir::hir_def::Stmt) {} - fn visit_expr(&mut self, _: &mut VisitorCtxt<'_, LazyExprSpan>, _: &Expr) {} + fn visit_pat(&mut self, _: &mut VisitorCtxt<'_, LazyPatSpan>, _: PatId, _: &Pat) {} + + fn visit_expr( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, + expr: ExprId, + expr_data: &Expr, + ) { + match expr_data { + // We need to run path analysis on block expressions because they can contain items. 
+ Expr::Block(_) => walk_expr(self, ctxt, expr), + + _ => {} + } + } fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { - let scope = ScopeId::from_item(self.item_stack.last().copied().unwrap()); + let scope = ctxt.scope(); let dummy_cache_store = ResolvedQueryCacheStore::no_cache(); let mut resolver = EarlyPathResolver::new(self.db, &mut self.inner, &dummy_cache_store); @@ -469,7 +472,7 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { let EarlyResolvedPath::Full(bucket) = resolved_path.resolved else { return; }; - self.verify_path(path, ctxt.span().unwrap(), bucket); + self.verify_path(path, scope, ctxt.span().unwrap(), bucket); } } diff --git a/crates/hir-analysis/tests/early_path_resolution.rs b/crates/hir-analysis/tests/early_path_resolution.rs index e1b52acbbb..89667ee774 100644 --- a/crates/hir-analysis/tests/early_path_resolution.rs +++ b/crates/hir-analysis/tests/early_path_resolution.rs @@ -11,7 +11,7 @@ use fe_hir_analysis::{ }; use hir::{ analysis_pass::ModuleAnalysisPass, - hir_def::{scope_graph::ScopeId, Expr, ItemKind, Pat, PathId, TopLevelMod, TypeId}, + hir_def::{Expr, ExprId, ItemKind, Pat, PatId, PathId, TopLevelMod, TypeId}, visitor::prelude::*, HirDb, SpannedHirDb, }; @@ -40,7 +40,6 @@ fn test_standalone(fixture: Fixture<&str>) { db: &db, top_mod, domain_stack: Vec::new(), - item_stack: Vec::new(), prop_formatter: &mut prop_formatter, } .visit_top_mod(&mut ctxt, top_mod); @@ -53,7 +52,6 @@ struct PathVisitor<'db, 'a> { db: &'db HirAnalysisTestDb, top_mod: TopLevelMod, domain_stack: Vec, - item_stack: Vec, prop_formatter: &'a mut HirPropertyFormatter, } @@ -64,10 +62,8 @@ impl<'db, 'a> Visitor for PathVisitor<'db, 'a> { } self.domain_stack.push(NameDomain::Type); - self.item_stack.push(item); walk_item(self, ctxt, item); self.domain_stack.pop(); - self.item_stack.pop(); } fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, ty: TypeId) { @@ -76,32 +72,21 @@ impl<'db, 'a> Visitor for PathVisitor<'db, 'a> { self.domain_stack.pop(); } - fn visit_pat(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPatSpan>, pat: &Pat) { - if pat.is_bind(self.db.as_hir_db()) { - return; - } + fn visit_pat(&mut self, _: &mut VisitorCtxt<'_, LazyPatSpan>, _: PatId, _: &Pat) {} - if matches!(pat, Pat::Record { .. }) { - self.domain_stack.push(NameDomain::Type); - } else { - self.domain_stack.push(NameDomain::Value); + fn visit_expr( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, + expr: ExprId, + expr_data: &Expr, + ) { + if matches!(expr_data, Expr::Block { .. }) { + walk_expr(self, ctxt, expr); } - walk_pat(self, ctxt, pat); - self.domain_stack.pop(); - } - - fn visit_expr(&mut self, ctxt: &mut VisitorCtxt<'_, LazyExprSpan>, expr: &Expr) { - if matches!(expr, Expr::RecordInit { .. }) { - self.domain_stack.push(NameDomain::Type); - } else { - self.domain_stack.push(NameDomain::Value); - } - walk_expr(self, ctxt, expr); - self.domain_stack.pop(); } fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { - let scope = ScopeId::from_item(self.item_stack.last().copied().unwrap()); + let scope = ctxt.scope(); let resolved_path = resolve_path_early(self.db.as_hir_analysis_db(), path, scope); match resolved_path { EarlyResolvedPath::Full(bucket) => { diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 31bf8dfc67..df303de50b 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1857,6 +1857,19 @@ where } } + /// Returns the body that encloses the current node. 
+ /// # panic + /// Panics when the current node is not enclosed by a body. + pub fn body(&self) -> Body { + match self.span.0.as_ref().unwrap().root { + ChainRoot::Body(body) => body, + ChainRoot::Expr(expr) => expr.body, + ChainRoot::Stmt(stmt) => stmt.body, + ChainRoot::Pat(pat) => pat.body, + _ => panic!(), + } + } + fn with_new_scoped_ctxt(&mut self, scope_id: ScopeId, f1: F1, f2: F2) where T: SpanDowncast, @@ -1930,16 +1943,6 @@ where _t: PhantomData, } } - - fn body(&self) -> Body { - match self.span.0.as_ref().unwrap().root { - ChainRoot::Body(body) => body, - ChainRoot::Expr(expr) => expr.body, - ChainRoot::Stmt(stmt) => stmt.body, - ChainRoot::Pat(pat) => pat.body, - _ => panic!(), - } - } } macro_rules! define_item_ctxt_ctor { diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs index 05711f35d6..53afabeb91 100644 --- a/crates/parser2/src/ast/expr.rs +++ b/crates/parser2/src/ast/expr.rs @@ -65,6 +65,11 @@ impl BlockExpr { pub fn stmts(&self) -> impl Iterator { self.iter() } + + /// Returns items declared in the block. + pub fn items(&self) -> impl Iterator { + support::children(self.syntax()) + } } ast_node! { diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index ee13bb336d..5b6d1256a1 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -29,7 +29,6 @@ ast_node! { } impl Item { pub fn kind(&self) -> Option { - dbg!(self.syntax()); support::child(self.syntax()) .map(ItemKind::Mod) .or_else(|| support::child(self.syntax()).map(ItemKind::Func)) From bbdc9c0ba782f3da8fe9abf333315f4c755155c7 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 21:59:29 +0200 Subject: [PATCH 217/678] Refactor name resolver --- .../src/name_resolution/import_resolver.rs | 69 +++++++++---------- .../src/name_resolution/name_resolver.rs | 63 ++++++++++------- .../name_resolution/import_ambiguous.fe | 4 +- .../name_resolution/import_ambiguous.snap | 18 ++--- .../name_resolution/import_invisible.fe | 1 - .../name_resolution/import_invisible.snap | 2 +- 6 files changed, 83 insertions(+), 74 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 76908f210d..39f2fdbc5e 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -11,10 +11,7 @@ use hir::{ use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; -use crate::{ - name_resolution::visibility_checker::{is_scope_visible_from, is_use_visible}, - HirAnalysisDb, -}; +use crate::{name_resolution::visibility_checker::is_use_visible, HirAnalysisDb}; use super::{ diagnostics::NameResDiag, @@ -184,6 +181,12 @@ impl<'db> ImportResolver<'db> { } /// Try to resolve the given glob `IntermediateUse`. + /// + /// The first value of the returned tuple is the updated `IntermediateUse` + /// if the resolution is not fully completed. + /// + /// The second value of the returned tuple indicates whether the resolution + /// is progressed from the passed `IntermediateUse`. 
fn resolve_glob(&mut self, i_use: IntermediateUse) -> (Option, bool) { let (base_path_resolved, changed) = { if i_use.is_base_resolved(self.db) { @@ -215,7 +218,7 @@ impl<'db> ImportResolver<'db> { let original_scope = base_path_resolved.original_scope; let use_ = base_path_resolved.use_; - // Collect all unresolved named imports in the target scope to avoid bucket a + // Collect all unresolved named imports in the target scope to avoid binding a // name to a wrong resolution being brought by a glob. let unresolved_named_imports = match self.intermediate_uses.get(&target_scope) { Some(i_uses) => i_uses @@ -243,7 +246,7 @@ impl<'db> ImportResolver<'db> { ); let is_decidable = self.is_decidable(&base_path_resolved); - let n_res = resolutions.iter().fold(0, |acc, bind| acc + bind.1.len()); + let n_res = resolutions.iter().fold(0, |acc, res| acc + res.1.len()); if *self.num_imported_res.entry(use_).or_default() == n_res { if is_decidable { return (None, true); @@ -333,6 +336,9 @@ impl<'db> ImportResolver<'db> { let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); let mut bucket = resolver.resolve_query(query); + if !i_use.is_base_resolved(self.db) { + bucket.filter_by_domain(NameDomain::Type); + } // Filter out invisible resolutions. let mut invisible_span = None; @@ -340,29 +346,16 @@ impl<'db> ImportResolver<'db> { let Ok(res) = res else { return true; }; - match res.scope() { - Some(scope) => { - if let NameDerivation::GlobImported(use_) - | NameDerivation::NamedImported(use_) = res.derivation - { - if !is_use_visible(self.db, i_use.original_scope, use_) { - invisible_span.get_or_insert_with(|| use_.lazy_span().into()); - false - } else { - true - } - } else if is_scope_visible_from(self.db, scope, i_use.original_scope) { - true - } else { - if scope.is_importable() { - if let Some(span) = res.kind.name_span(self.db) { - invisible_span.get_or_insert(span); - } - } - false - } + + if !res.is_importable() { + false + } else if res.is_visible(self.db, i_use.original_scope) { + true + } else { + if let Some(span) = res.derived_from(self.db) { + invisible_span.get_or_insert(span); } - None => true, + false } }); @@ -371,7 +364,7 @@ impl<'db> ImportResolver<'db> { bucket.filter_by_domain(NameDomain::Type); } - for err in bucket.errors() { + for (_, err) in bucket.errors() { if !matches!( err, NameResolutionError::NotFound | NameResolutionError::Invalid @@ -458,7 +451,7 @@ impl<'db> ImportResolver<'db> { self.num_imported_res.insert(i_use.use_, n_res); if let Err(err) = self .resolved_imports - .set_named_binds(self.db, &i_use, bucket) + .set_named_bucket(self.db, &i_use, bucket) { self.accumulated_errors.push(err); } @@ -883,19 +876,19 @@ impl IntermediateResolvedImports { } } - fn set_named_binds( + fn set_named_bucket( &mut self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse, - mut bind: NameResBucket, + mut bucket: NameResBucket, ) -> Result<(), NameResDiag> { let scope = i_use.original_scope; - bind.set_derivation(NameDerivation::NamedImported(i_use.use_)); + bucket.set_derivation(NameDerivation::NamedImported(i_use.use_)); let imported_name = match i_use.imported_name(db) { Some(name) => name, None => { - self.resolved_imports.unnamed_resolved.push(bind); + self.resolved_imports.unnamed_resolved.push(bucket); return Ok(()); } }; @@ -908,9 +901,9 @@ impl IntermediateResolvedImports { match imported_set.entry(imported_name) { Entry::Occupied(mut e) => { - let bucket = e.get_mut(); - bucket.merge(bind.iter()); - for err in bucket.errors() { + let old_bucket = 
e.get_mut(); + old_bucket.merge(&bucket); + for (_, err) in old_bucket.errors() { let NameResolutionError::Ambiguous(cands) = err else { continue; }; @@ -934,7 +927,7 @@ impl IntermediateResolvedImports { } Entry::Vacant(e) => { - e.insert(bind); + e.insert(bucket); Ok(()) } } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index c2b134ca91..91c0afe914 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -136,8 +136,10 @@ impl NameResBucket { self.bucket.values_mut().filter_map(|res| res.as_mut().ok()) } - pub fn errors(&self) -> impl Iterator { - self.bucket.values().filter_map(|res| res.as_ref().err()) + pub fn errors(&self) -> impl Iterator { + self.bucket + .iter() + .filter_map(|(domain, res)| res.as_ref().err().map(|err| (*domain, err))) } /// Returns the resolution of the given `domain`. @@ -151,10 +153,16 @@ impl NameResBucket { self.bucket.retain(|d, _| *d == domain); } - /// Merge the `resolutions` into the set. If name conflict happens, the old - /// resolution will be returned, otherwise `None` will be returned. - pub(super) fn merge<'a>(&mut self, resolutions: impl Iterator) { - for res in resolutions { + pub(super) fn merge<'a>(&mut self, bucket: &NameResBucket) { + for (domain, err) in bucket.errors() { + match self.pick(domain) { + Err(NameResolutionError::NotFound) => { + self.bucket.insert(domain, Err(err.clone())); + } + _ => {} + } + } + for res in bucket.iter() { self.push(res); } } @@ -350,6 +358,13 @@ impl NameRes { } } + pub(super) fn is_importable(&self) -> bool { + match self.kind { + NameResKind::Scope(scope) => scope.is_importable(), + NameResKind::Prim(_) => true, + } + } + fn new_prim(prim: PrimTy) -> Self { Self { kind: prim.into(), @@ -514,7 +529,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { .named_imports(self.db, query.scope) .and_then(|imports| imports.get(&query.name)) { - bucket.merge(imported.iter()); + bucket.merge(imported); } // 3. Look for the name in the glob imports. 
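The `NameResBucket::merge` rework above is the subtle part of this refactor: per name domain, an incoming error only fills a slot that is empty or still `NotFound`, while successful resolutions are pushed domain by domain on top of whatever was already found. A minimal, self-contained sketch of that rule follows; the `Domain`, `Error`, and `Bucket` types are illustrative stand-ins, not the crate's actual `NameDomain`/`NameResBucket` API, and the `push` semantics are an assumption.

```rust
use std::collections::BTreeMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Domain {
    Type,
    Value,
}

// The real error type has more variants (ambiguity, invisibility, ...);
// `NotFound` is the only one that matters for the merge rule sketched here.
#[derive(Clone, Debug, PartialEq, Eq)]
enum Error {
    NotFound,
}

#[derive(Clone, Debug, Default)]
struct Bucket {
    // One slot per domain, mirroring the per-domain map inside the bucket.
    slots: BTreeMap<Domain, Result<String, Error>>,
}

impl Bucket {
    // Assumption: a successful resolution fills a slot only if the slot is
    // empty or still holds an error; an existing success is never overwritten.
    fn push(&mut self, domain: Domain, res: &str) {
        if !matches!(self.slots.get(&domain), Some(Ok(_))) {
            self.slots.insert(domain, Ok(res.to_string()));
        }
    }

    fn merge(&mut self, other: &Bucket) {
        // An incoming error propagates only into a slot that is empty or
        // still `NotFound`.
        for (&domain, res) in &other.slots {
            if let Err(err) = res {
                if matches!(self.slots.get(&domain), None | Some(Err(Error::NotFound))) {
                    self.slots.insert(domain, Err(err.clone()));
                }
            }
        }
        // Successful resolutions are then pushed domain by domain.
        for (&domain, res) in &other.slots {
            if let Ok(name) = res {
                self.push(domain, name);
            }
        }
    }
}

fn main() {
    // Lexical lookup found nothing in the type domain ...
    let mut bucket = Bucket::default();
    bucket.slots.insert(Domain::Type, Err(Error::NotFound));

    // ... but a named import resolves the name, so merging fills the slot.
    let mut imported = Bucket::default();
    imported.push(Domain::Type, "foo::Bar");
    bucket.merge(&imported);

    assert!(matches!(bucket.slots.get(&Domain::Type), Some(Ok(s)) if s == "foo::Bar"));
    assert!(bucket.slots.get(&Domain::Value).is_none());
}
```

This is the property `resolve_query` relies on when it merges the named-import and parent-scope buckets into the lexical result rather than replacing it.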
@@ -534,7 +549,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { let mut resolved = self.resolve_query(query_for_parent); resolved.set_lexed_derivation(); - bucket.merge(resolved.iter()); + bucket.merge(&resolved); } if !query.directive.allow_external { @@ -614,7 +629,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { pub(super) fn collect_all_resolutions_for_glob( &mut self, target: ScopeId, - ref_scope: ScopeId, + use_scope: ScopeId, unresolved_named_imports: FxHashSet, ) -> FxHashMap> { let mut res_collection: FxHashMap> = FxHashMap::default(); @@ -636,21 +651,20 @@ impl<'db, 'a> NameResolver<'db, 'a> { let res = NameRes::new_from_scope(scope, NameDomain::from_scope(scope), NameDerivation::Def); - *found_domains.entry(name).or_default() |= res.domain as u8; - res_collection.entry(name).or_default().push(res); + if res.is_visible(self.db, use_scope) { + *found_domains.entry(name).or_default() |= res.domain as u8; + res_collection.entry(name).or_default().push(res); + } } let mut found_domains_after_named = found_domains.clone(); if let Some(named_imports) = self.importer.named_imports(self.db, target) { for (&name, import) in named_imports { let found_domain = found_domains.get(&name).copied().unwrap_or_default(); - for res in import.iter().filter(|res| { - if let NameDerivation::NamedImported(use_) = res.derivation { - is_use_visible(self.db, ref_scope, use_) - } else { - false - } - }) { + for res in import + .iter() + .filter(|res| res.is_visible(self.db, use_scope)) + { if (found_domain & res.domain as u8 != 0) || !found_kinds.insert((name, res.kind)) { @@ -664,16 +678,19 @@ impl<'db, 'a> NameResolver<'db, 'a> { } if let Some(glob_imports) = self.importer.glob_imports(self.db, target) { - for (&use_, resolutions) in glob_imports.iter() { - if !is_use_visible(self.db, ref_scope, use_) { - continue; - } + for (_, resolutions) in glob_imports.iter() { + // if !is_use_visible(self.db, ref_scope, use_) { + // continue; + // } for (&name, res_for_name) in resolutions.iter() { if unresolved_named_imports.contains(&name) { continue; } - for res in res_for_name.iter() { + for res in res_for_name + .iter() + .filter(|res| res.is_visible(self.db, use_scope)) + { let seen_domain = found_domains_after_named .get(&name) .copied() diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous.fe b/crates/uitest/fixtures/name_resolution/import_ambiguous.fe index 38b1161766..8ef6d67152 100644 --- a/crates/uitest/fixtures/name_resolution/import_ambiguous.fe +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous.fe @@ -8,9 +8,9 @@ mod foo { pub mod inner1 { - struct S {} + pub struct S {} } mod inner2 { - struct S {} + pub struct S {} } } \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous.snap b/crates/uitest/fixtures/name_resolution/import_ambiguous.snap index de87ba263d..7fdd4c1e4b 100644 --- a/crates/uitest/fixtures/name_resolution/import_ambiguous.snap +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous.snap @@ -1,5 +1,5 @@ --- -source: crates/uitest/src/lib.rs +source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/import_ambiguous.fe --- @@ -9,11 +9,11 @@ error[2-0004]: `S` is ambiguous 2 │ pub use S │ ^ `S` is ambiguous · -11 │ struct S {} - │ - candidate `#0` +11 │ pub struct S {} + │ - candidate `#0` · -14 │ struct S {} - │ - candidate `#1` +14 │ pub struct S {} + │ - candidate `#1` error[2-0004]: `S` is ambiguous ┌─ import_ambiguous.fe:7:13 @@ -21,10 +21,10 @@ error[2-0004]: `S` 
is ambiguous 7 │ pub use S │ ^ `S` is ambiguous · -11 │ struct S {} - │ - candidate `#0` +11 │ pub struct S {} + │ - candidate `#0` · -14 │ struct S {} - │ - candidate `#1` +14 │ pub struct S {} + │ - candidate `#1` diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.fe b/crates/uitest/fixtures/name_resolution/import_invisible.fe index 1d96bf0883..1b4b9a0cd5 100644 --- a/crates/uitest/fixtures/name_resolution/import_invisible.fe +++ b/crates/uitest/fixtures/name_resolution/import_invisible.fe @@ -19,4 +19,3 @@ mod foo3 { } } - diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.snap b/crates/uitest/fixtures/name_resolution/import_invisible.snap index 4a289175bf..7c426e9db7 100644 --- a/crates/uitest/fixtures/name_resolution/import_invisible.snap +++ b/crates/uitest/fixtures/name_resolution/import_invisible.snap @@ -19,7 +19,7 @@ error[2-0003]: `Bar` is not visible │ ^^^ `Bar` is not visible 7 │ mod foo2 { 8 │ use foo3::Bar - │ ------------- `Bar is defined here + │ --- `Bar is defined here error[2-0003]: `foo4` is not visible ┌─ import_invisible.fe:15:11 From 5bde4cc960ebd773d98e582afb486cc6e690ebe1 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 22:01:08 +0200 Subject: [PATCH 218/678] Add tests for scoped name resolution --- .../early_path_resolution/generic_param.fe | 15 +++++ .../early_path_resolution/generic_param.snap | 60 +++++++++++++++++++ .../early_path_resolution/nested_block.fe | 33 ++++++++++ .../early_path_resolution/nested_block.snap | 54 +++++++++++++++++ .../early_path_resolution/scoped_import.fe | 14 +++++ .../early_path_resolution/scoped_import.snap | 18 ++++++ 6 files changed, 194 insertions(+) create mode 100644 crates/hir-analysis/test_files/early_path_resolution/generic_param.fe create mode 100644 crates/hir-analysis/test_files/early_path_resolution/generic_param.snap create mode 100644 crates/hir-analysis/test_files/early_path_resolution/nested_block.fe create mode 100644 crates/hir-analysis/test_files/early_path_resolution/nested_block.snap create mode 100644 crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe create mode 100644 crates/hir-analysis/test_files/early_path_resolution/scoped_import.snap diff --git a/crates/hir-analysis/test_files/early_path_resolution/generic_param.fe b/crates/hir-analysis/test_files/early_path_resolution/generic_param.fe new file mode 100644 index 0000000000..3037fc7bc2 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/generic_param.fe @@ -0,0 +1,15 @@ +trait InnerTrait { } + +trait TraitWithGenerics +where U: InnerTrait +{ +} + +pub struct MyS +where T: TraitWithGenerics + U: InnerTrait +{ + x: T + y: U + +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/early_path_resolution/generic_param.snap b/crates/hir-analysis/test_files/early_path_resolution/generic_param.snap new file mode 100644 index 0000000000..1dca84f187 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/generic_param.snap @@ -0,0 +1,60 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/generic_param.fe +--- +note: + ┌─ test_file.fe:4:7 + │ +4 │ where U: InnerTrait + │ ^ test_file::TraitWithGenerics::U + +note: + ┌─ test_file.fe:4:10 + │ +4 │ where U: InnerTrait + │ ^^^^^^^^^^ test_file::InnerTrait + +note: + ┌─ test_file.fe:9:7 + │ +9 │ where T: TraitWithGenerics + │ ^ test_file::MyS::T + +note: + ┌─ test_file.fe:9:10 + │ +9 │ 
where T: TraitWithGenerics + │ ^^^^^^^^^^^^^^^^^ test_file::TraitWithGenerics + +note: + ┌─ test_file.fe:9:28 + │ +9 │ where T: TraitWithGenerics + │ ^ test_file::MyS::U + +note: + ┌─ test_file.fe:10:7 + │ +10 │ U: InnerTrait + │ ^ test_file::MyS::U + +note: + ┌─ test_file.fe:10:10 + │ +10 │ U: InnerTrait + │ ^^^^^^^^^^ test_file::InnerTrait + +note: + ┌─ test_file.fe:12:8 + │ +12 │ x: T + │ ^ test_file::MyS::T + +note: + ┌─ test_file.fe:13:8 + │ +13 │ y: U + │ ^ test_file::MyS::U + + diff --git a/crates/hir-analysis/test_files/early_path_resolution/nested_block.fe b/crates/hir-analysis/test_files/early_path_resolution/nested_block.fe new file mode 100644 index 0000000000..eeca7935da --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/nested_block.fe @@ -0,0 +1,33 @@ +fn foo() { + struct Foo {} + + { + struct Foo {} + let f: Foo + } + + let f: Foo +} + +fn bar() { + struct Bar {} + + let x: [i32; { + { + struct Bar {} + + impl Bar { + fn len() -> u256 { + 1 + } + } + let bar: Bar + } + + struct Bar {} + let bar: Bar + 1 + }] + + let bar: Bar +} diff --git a/crates/hir-analysis/test_files/early_path_resolution/nested_block.snap b/crates/hir-analysis/test_files/early_path_resolution/nested_block.snap new file mode 100644 index 0000000000..f62419ed87 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/nested_block.snap @@ -0,0 +1,54 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/nested_block.fe +--- +note: + ┌─ test_file.fe:6:16 + │ +6 │ let f: Foo + │ ^^^ test_file::foo::{fn_body}::{block0}::{block1}::Foo + +note: + ┌─ test_file.fe:9:12 + │ +9 │ let f: Foo + │ ^^^ test_file::foo::{fn_body}::{block0}::Foo + +note: + ┌─ test_file.fe:15:13 + │ +15 │ let x: [i32; { + │ ^^^ i32 + +note: + ┌─ test_file.fe:19:18 + │ +19 │ impl Bar { + │ ^^^ test_file::bar::{fn_body}::{block0}::{anonymous_body}::{block0}::{block1}::Bar + +note: + ┌─ test_file.fe:20:29 + │ +20 │ fn len() -> u256 { + │ ^^^^ u256 + +note: + ┌─ test_file.fe:24:22 + │ +24 │ let bar: Bar + │ ^^^ test_file::bar::{fn_body}::{block0}::{anonymous_body}::{block0}::{block1}::Bar + +note: + ┌─ test_file.fe:28:18 + │ +28 │ let bar: Bar + │ ^^^ test_file::bar::{fn_body}::{block0}::{anonymous_body}::{block0}::Bar + +note: + ┌─ test_file.fe:32:14 + │ +32 │ let bar: Bar + │ ^^^ test_file::bar::{fn_body}::{block0}::Bar + + diff --git a/crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe new file mode 100644 index 0000000000..b87b559a3e --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe @@ -0,0 +1,14 @@ +pub fn foo() { + { + use mod1::Foo + let v: Foo + } + + let v: Foo +} + +struct Foo {} + +mod mod1 { + pub struct Foo {} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/early_path_resolution/scoped_import.snap b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.snap new file mode 100644 index 0000000000..1592aa47b8 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.snap @@ -0,0 +1,18 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe +--- +note: + ┌─ test_file.fe:4:16 + │ +4 │ let v: Foo + │ ^^^ test_file::mod1::Foo + +note: + ┌─ test_file.fe:7:12 + │ +7 │ let v: Foo + │ ^^^ test_file::Foo 
+ + From 7d1077ba88d8bf14ffca9584d43f55aa9f98145d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 22:25:02 +0200 Subject: [PATCH 219/678] Add missing builtin ambiguity check --- .../src/name_resolution/import_resolver.rs | 23 ++++++++++++------- .../src/name_resolution/visibility_checker.rs | 11 +++++---- .../import_ambiguous_builtin.fe | 7 ++++++ .../import_ambiguous_builtin.snap | 12 ++++++++++ 4 files changed, 40 insertions(+), 13 deletions(-) create mode 100644 crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.snap diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 39f2fdbc5e..b843744c6c 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -5,7 +5,7 @@ use std::{ }; use hir::{ - hir_def::{scope_graph::ScopeId, IdentId, IngotId, Use}, + hir_def::{prim_ty::PrimTy, scope_graph::ScopeId, IdentId, IngotId, Use}, span::DynLazySpan, }; use itertools::Itertools; @@ -391,7 +391,7 @@ impl<'db> ImportResolver<'db> { // insert the use into the `suspicious_imports` set to verify the ambiguity // after the algorithm reaches the fixed point. for res in bucket.iter() { - if res.is_external(self.db, i_use) || res.is_derived_from_glob() { + if res.is_builtin() || res.is_external(self.db, i_use) || res.is_derived_from_glob() { self.suspicious_imports.insert(i_use.use_); break; } @@ -470,8 +470,8 @@ impl<'db> ImportResolver<'db> { let ingot = scope.ingot(self.db.as_hir_db()); // The ambiguity in the first segment possibly occurs when the segment is - // resolved to either a glob imported bucket or an external ingot in the - // `i_use` resolution. + // resolved to either a glob imported derived resolution or an external ingot in + // the `i_use` resolution. // // This is because: // 1. the resolution of the first segment changes depending on whether the @@ -494,14 +494,17 @@ impl<'db> ImportResolver<'db> { if matches!( resolved.current_res.unwrap().derivation, NameDerivation::GlobImported(_) - ) && ingot + ) && (ingot .external_ingots(self.db.as_hir_db()) .iter() - .any(|(ingot_name, _)| ingot_name == &first_segment_ident) + .any(|(ingot_name, _)| *ingot_name == first_segment_ident) + || PrimTy::all_types() + .iter() + .any(|ty| ty.name() == first_segment_ident)) { // The resolved scope is shadowed by an glob imports while originally - // the use might be resolved to an external ingot. This means there is an - // ambiguity between the external ingot and the name + // the use might be resolved to an external ingot or builtin. This means there + // is an ambiguity between the external ingot and the name // imported by the glob import. self.register_error(&i_use, NameResolutionError::Ambiguous(vec![])); } @@ -1016,6 +1019,10 @@ impl NameRes { } } + fn is_builtin(&self) -> bool { + matches!(self.kind, NameResKind::Prim(_)) + } + /// Returns true if the bucket contains a glob import. 
fn is_derived_from_glob(&self) -> bool { matches!(self.derivation, NameDerivation::GlobImported(_)) diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs index bb30e7c999..18177de351 100644 --- a/crates/hir-analysis/src/name_resolution/visibility_checker.rs +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -20,11 +20,12 @@ pub(crate) fn is_scope_visible_from( let Some(def_scope) = (if matches!(scope, ScopeId::Field(..) | ScopeId::Variant(..)) { // We treat fields as if they are defined in the parent of the parent scope so // that field can be accessible from the scope where the parent is defined. - scope.parent(db.as_hir_db()).and_then(|scope| scope.parent(db.as_hir_db())) - } else { - scope.parent(db.as_hir_db()) - }) - else { + scope + .parent(db.as_hir_db()) + .and_then(|scope| scope.parent(db.as_hir_db())) + } else { + scope.parent(db.as_hir_db()) + }) else { return false; }; diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe new file mode 100644 index 0000000000..a027553e39 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe @@ -0,0 +1,7 @@ +use foo::* + +use i32::* + +mod foo { + pub mod i32 {} +} diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.snap b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.snap new file mode 100644 index 0000000000..d53f66d5b8 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe +--- +error[2-0004]: `i32` is ambiguous + ┌─ import_ambiguous_builtin.fe:3:5 + │ +3 │ use i32::* + │ ^^^ `i32` is ambiguous + + From 0cf26cee13118587dae9d27226e7f085e650f836 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 22:29:18 +0200 Subject: [PATCH 220/678] Make clippy happy --- crates/analyzer/src/context.rs | 4 ++-- crates/hir-analysis/src/name_resolution/mod.rs | 9 +++------ .../src/name_resolution/name_resolver.rs | 9 +++------ crates/hir/src/hir_def/body.rs | 14 +++++++++++++- crates/hir/src/hir_def/scope_graph.rs | 5 ++--- 5 files changed, 23 insertions(+), 18 deletions(-) diff --git a/crates/analyzer/src/context.rs b/crates/analyzer/src/context.rs index 2aa7d57bd6..7cb202d0bf 100644 --- a/crates/analyzer/src/context.rs +++ b/crates/analyzer/src/context.rs @@ -223,7 +223,7 @@ pub trait AnalyzerContext { fn register_diag(&self, diag: Diagnostic) -> DiagnosticVoucher { self.add_diagnostic(diag); - DiagnosticVoucher(PhantomData::default()) + DiagnosticVoucher(PhantomData) } } @@ -314,7 +314,7 @@ pub struct DiagnosticVoucher(PhantomData<()>); impl DiagnosticVoucher { pub fn assume_the_parser_handled_it() -> Self { - Self(PhantomData::default()) + Self(PhantomData) } } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 2e4b3b151e..c7738d1ca2 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -56,7 +56,7 @@ pub fn resolve_segments_early( ) -> EarlyResolvedPath { // Obtain cache store for the given scope. 
let cache_store = resolve_path_early_impl(db, scope.top_mod(db.as_hir_db())); - let importer = DefaultImporter::default(); + let importer = DefaultImporter; // We use the cache store that is returned from `resolve_path_early` to get // cached results immediately. let mut name_resolver = name_resolver::NameResolver::new_no_cache(db, &importer); @@ -417,11 +417,8 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { expr: ExprId, expr_data: &Expr, ) { - match expr_data { - // We need to run path analysis on block expressions because they can contain items. - Expr::Block(_) => walk_expr(self, ctxt, expr), - - _ => {} + if matches!(expr_data, Expr::Block(_)) { + walk_expr(self, ctxt, expr) } } diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 91c0afe914..6b29690023 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -153,13 +153,10 @@ impl NameResBucket { self.bucket.retain(|d, _| *d == domain); } - pub(super) fn merge<'a>(&mut self, bucket: &NameResBucket) { + pub(super) fn merge(&mut self, bucket: &NameResBucket) { for (domain, err) in bucket.errors() { - match self.pick(domain) { - Err(NameResolutionError::NotFound) => { - self.bucket.insert(domain, Err(err.clone())); - } - _ => {} + if let Err(NameResolutionError::NotFound) = self.pick(domain) { + self.bucket.insert(domain, Err(err.clone())); } } for res in bucket.iter() { diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs index f41304d256..2ceb9c02fc 100644 --- a/crates/hir/src/hir_def/body.rs +++ b/crates/hir/src/hir_def/body.rs @@ -55,7 +55,19 @@ impl Body { } #[doc(hidden)] - /// Returns the BFS order of the blocks in the body. + /// Returns the order of the blocks in the body in lexical order. + /// e.g., + /// ```fe + /// fn foo() { // 0 + /// ... + /// { // 1 + /// ... + /// { // 2 + /// ... + /// } + /// } + /// } + /// /// /// Currently, this is only used for testing. 
/// When it turns out to be generally useful, we need to consider to let diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 3b5f2419ed..b96c131814 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -306,12 +306,11 @@ impl ScopeId { let name = match self { ScopeId::Block(body, expr) => format!("{{block{}}}", body.block_order(db)[&expr]), ScopeId::Item(ItemKind::Body(body)) => match body.body_kind(db) { - BodyKind::FuncBody => "{fn_body}".to_string().into(), - BodyKind::Anonymous => "{anonymous_body}".to_string().into(), + BodyKind::FuncBody => "{fn_body}".to_string(), + BodyKind::Anonymous => "{anonymous_body}".to_string(), }, _ => self.name(db)?.data(db).clone(), }; - dbg!(&name); if let Some(parent) = self.parent(db) { let parent_path = parent.pretty_path(db)?; From 5d594afb1ed6bb6bb80b6e6c66cfa80c253520db Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 22:31:39 +0200 Subject: [PATCH 221/678] Add simple driver for testing purpose --- crates/driver2/src/main.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 crates/driver2/src/main.rs diff --git a/crates/driver2/src/main.rs b/crates/driver2/src/main.rs new file mode 100644 index 0000000000..c764cb6fc6 --- /dev/null +++ b/crates/driver2/src/main.rs @@ -0,0 +1,10 @@ +use fe_driver2::DriverDataBase; + +pub fn main() { + let arg = std::env::args().nth(1).unwrap(); + + let mut db = DriverDataBase::default(); + let source = std::fs::read_to_string(&arg).unwrap(); + db.run_on_file(std::path::Path::new(&arg), &source); + db.emit_diags(); +} From d0bc9bb82e9e666af20fcfbf9dcf0e076222b287 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 14 Jul 2023 22:40:25 +0200 Subject: [PATCH 222/678] Remove unnecessary `parent_scope` field from `Scope` --- crates/hir/src/hir_def/scope_graph.rs | 17 ++++++++--------- crates/hir/src/lower/scope_builder.rs | 17 +++++------------ 2 files changed, 13 insertions(+), 21 deletions(-) diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index b96c131814..84fce2c28d 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -256,12 +256,13 @@ impl ScopeId { } pub fn parent_item(self, db: &dyn HirDb) -> Option { - let data = self.data(db); - match data.id { - ScopeId::Item(item) => Some(item), - _ => { - let parent = data.parent_scope?; - parent.parent_item(db) + let mut parent = self.parent(db)?; + loop { + match parent { + ScopeId::Item(item) => return Some(item), + _ => { + parent = parent.parent(db)?; + } } } } @@ -355,16 +356,14 @@ impl<'a> std::iter::Iterator for ScopeGraphItemIterDfs<'a> { pub struct Scope { pub id: ScopeId, pub edges: BTreeSet, - pub parent_scope: Option, pub vis: Visibility, } impl Scope { - pub fn new(kind: ScopeId, parent_scope: Option, vis: Visibility) -> Self { + pub fn new(kind: ScopeId, vis: Visibility) -> Self { Self { id: kind, edges: Default::default(), - parent_scope, vis, } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index c687dc258c..c0525d0f20 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -297,11 +297,9 @@ impl<'db> ScopeGraphBuilder<'db> { parent_item: ItemKind, fields: FieldDefListId, ) { - let parent_scope = ScopeId::Item(parent_item); - for (i, field) in fields.data(self.db).iter().enumerate() { let scope_id = ScopeId::Field(parent_item, i); - let scope_data 
= Scope::new(scope_id, Some(parent_scope), field.vis); + let scope_data = Scope::new(scope_id, field.vis); let field_node = self.graph.push(scope_id, scope_data); self.graph.add_lex_edge(field_node, parent_node); @@ -320,12 +318,11 @@ impl<'db> ScopeGraphBuilder<'db> { parent_item: ItemKind, variants: VariantDefListId, ) { - let parent_scope = ScopeId::Item(parent_item); let parent_vis = parent_item.vis(self.db); for (i, field) in variants.data(self.db).iter().enumerate() { let scope_id = ScopeId::Variant(parent_item, i); - let scope_data = Scope::new(scope_id, Some(parent_scope), parent_vis); + let scope_data = Scope::new(scope_id, parent_vis); let variant_node = self.graph.push(scope_id, scope_data); self.graph.add_lex_edge(variant_node, parent_node); @@ -344,11 +341,9 @@ impl<'db> ScopeGraphBuilder<'db> { parent_item: ItemKind, params: FuncParamListId, ) { - let parent_scope = ScopeId::Item(parent_item); - for (i, param) in params.data(self.db).iter().enumerate() { let scope_id = ScopeId::FuncParam(parent_item, i); - let scope = Scope::new(scope_id, Some(parent_scope), Visibility::Private); + let scope = Scope::new(scope_id, Visibility::Private); let func_param_node = self.graph.push(scope_id, scope); self.graph.add_lex_edge(func_param_node, parent_node); @@ -370,11 +365,9 @@ impl<'db> ScopeGraphBuilder<'db> { parent_item: ItemKind, params: GenericParamListId, ) { - let parent_scope = ScopeId::Item(parent_item); - for (i, param) in params.data(self.db).iter().enumerate() { let scope_id = ScopeId::GenericParam(parent_item, i); - let scope = Scope::new(scope_id, Some(parent_scope), Visibility::Private); + let scope = Scope::new(scope_id, Visibility::Private); let generic_param_node = self.graph.push(scope_id, scope); self.graph.add_lex_edge(generic_param_node, parent_node); @@ -389,7 +382,7 @@ impl<'db> ScopeGraphBuilder<'db> { fn dummy_scope(&self) -> (ScopeId, Scope) { let scope_id = ScopeId::Item(self.top_mod.into()); - (scope_id, Scope::new(scope_id, None, Visibility::Public)) + (scope_id, Scope::new(scope_id, Visibility::Public)) } } From 1890c48f5ce83ae7fa7075eee6d2d0f534fbfc62 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 16 Jul 2023 00:18:09 +0200 Subject: [PATCH 223/678] Bumpup v2 crates versions to 0.23.0 --- Cargo.lock | 14 +++++++------- crates/common2/Cargo.toml | 2 +- crates/driver2/Cargo.toml | 2 +- crates/hir-analysis/Cargo.toml | 2 +- crates/hir/Cargo.toml | 2 +- crates/macros/Cargo.toml | 2 +- crates/parser2/Cargo.toml | 2 +- crates/uitest/Cargo.toml | 2 +- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index df3384dc5a..5477ca2880 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -559,7 +559,7 @@ checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" [[package]] name = "driver2" -version = "0.20.0-alpha" +version = "0.23.0" dependencies = [ "dir-test", "fe-compiler-test-utils", @@ -828,7 +828,7 @@ dependencies = [ [[package]] name = "fe-common2" -version = "0.20.0-alpha" +version = "0.23.0" dependencies = [ "camino", "fe-parser2", @@ -916,7 +916,7 @@ dependencies = [ [[package]] name = "fe-driver2" -version = "0.20.0-alpha" +version = "0.23.0" dependencies = [ "camino", "codespan-reporting", @@ -929,7 +929,7 @@ dependencies = [ [[package]] name = "fe-hir" -version = "0.22.0" +version = "0.23.0" dependencies = [ "camino", "cranelift-entity", @@ -947,7 +947,7 @@ dependencies = [ [[package]] name = "fe-hir-analysis" -version = "0.20.0-alpha" +version = "0.23.0" dependencies = [ "codespan-reporting", 
"derive_more", @@ -972,7 +972,7 @@ dependencies = [ [[package]] name = "fe-macros" -version = "0.1.1" +version = "0.23.0" dependencies = [ "glob", "proc-macro2", @@ -1023,7 +1023,7 @@ dependencies = [ [[package]] name = "fe-parser2" -version = "0.22.0" +version = "0.23.0" dependencies = [ "derive_more", "dir-test", diff --git a/crates/common2/Cargo.toml b/crates/common2/Cargo.toml index 03b0f333db..7dd6492f04 100644 --- a/crates/common2/Cargo.toml +++ b/crates/common2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-common2" -version = "0.20.0-alpha" +version = "0.23.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" diff --git a/crates/driver2/Cargo.toml b/crates/driver2/Cargo.toml index e7c87c4c9f..d1d1d04f9c 100644 --- a/crates/driver2/Cargo.toml +++ b/crates/driver2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-driver2" -version = "0.20.0-alpha" +version = "0.23.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index b13801c9fa..96d1144ff2 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-hir-analysis" -version = "0.20.0-alpha" +version = "0.23.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index e714c6129b..cbc2f16bbb 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-hir" -version = "0.22.0" +version = "0.23.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" diff --git a/crates/macros/Cargo.toml b/crates/macros/Cargo.toml index 820f28f5e0..91edf840ab 100644 --- a/crates/macros/Cargo.toml +++ b/crates/macros/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "fe-macros" authors = ["The Fe Project Developers"] -version = "0.1.1" +version = "0.23.0" edition = "2021" license = "Apache-2.0" repository = "https://github.com/ethereum/fe" diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml index 45a51c124c..3b1581bd91 100644 --- a/crates/parser2/Cargo.toml +++ b/crates/parser2/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-parser2" -version = "0.22.0" +version = "0.23.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" diff --git a/crates/uitest/Cargo.toml b/crates/uitest/Cargo.toml index b23c5b1d2b..08323f4203 100644 --- a/crates/uitest/Cargo.toml +++ b/crates/uitest/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "driver2" -version = "0.20.0-alpha" +version = "0.23.0" authors = ["The Fe Developers "] edition = "2021" license = "Apache-2.0" From f42dafadf7f98fa2689cbff26dc9f964ebbb1dc6 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 16 Jul 2023 17:00:40 +0200 Subject: [PATCH 224/678] Doc work for name resolution --- .../src/name_resolution/import_resolver.rs | 4 +- .../src/name_resolution/name_resolver.rs | 84 +++++++------ .../src/name_resolution/path_resolver.rs | 13 +++ crates/hir/src/hir_def/scope_graph.rs | 110 ++++++++++++------ crates/hir/src/lower/mod.rs | 4 +- crates/hir/src/lower/scope_builder.rs | 18 ++- 6 files changed, 154 insertions(+), 79 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index b843744c6c..89d0d3b0a4 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -347,9 +347,7 @@ impl<'db> 
ImportResolver<'db> { return true; }; - if !res.is_importable() { - false - } else if res.is_visible(self.db, i_use.original_scope) { + if res.is_visible(self.db, i_use.original_scope) { true } else { if let Some(span) = res.derived_from(self.db) { diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 6b29690023..7889ac7393 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -111,7 +111,7 @@ impl Default for QueryDirective { } /// The struct contains the lookup result of a name query. -/// The results can contain more than one name resolution which belong to +/// The results can contain more than one name resolutions which belong to /// different name domains. #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct NameResBucket { @@ -242,41 +242,20 @@ impl From for NameResBucket { } } +/// The struct contains the lookup result of a name query. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct NameRes { + /// The kind of the resolution. pub kind: NameResKind, + /// The domain of the name resolution. pub domain: NameDomain, + /// Where the resolution is derived from. (e.g, via `use` or item definition + /// in the same scope). pub derivation: NameDerivation, } impl NameRes { - pub fn is_type(&self, db: &dyn HirAnalysisDb) -> bool { - match self.kind { - NameResKind::Prim(_) => true, - NameResKind::Scope(scope) => scope.is_type(db.as_hir_db()), - } - } - - pub fn is_trait(&self, db: &dyn HirAnalysisDb) -> bool { - match self.kind { - NameResKind::Prim(_) => false, - NameResKind::Scope(scope) => scope.is_trait(db.as_hir_db()), - } - } - - pub fn is_value(&self, db: &dyn HirAnalysisDb) -> bool { - !self.is_type(db) && !self.is_trait(db) - } - - /// Returns the scope of the name resolution if the name is not a builtin - /// type. - pub fn scope(&self) -> Option { - match self.kind { - NameResKind::Scope(scope) => Some(scope), - NameResKind::Prim(_) => None, - } - } - + /// Returns `true` if the name is visible from the given `scope`. pub fn is_visible(&self, db: &dyn HirAnalysisDb, from: ScopeId) -> bool { let scope_or_use = match self.derivation { NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { @@ -308,6 +287,35 @@ impl NameRes { } } + /// Returns `true` if the resolution is a type. + pub(crate) fn is_type(&self, db: &dyn HirAnalysisDb) -> bool { + match self.kind { + NameResKind::Prim(_) => true, + NameResKind::Scope(scope) => scope.is_type(db.as_hir_db()), + } + } + + /// Returns `true` if the resolution is a trait. + pub(crate) fn is_trait(&self, db: &dyn HirAnalysisDb) -> bool { + match self.kind { + NameResKind::Prim(_) => false, + NameResKind::Scope(scope) => scope.is_trait(db.as_hir_db()), + } + } + + pub(crate) fn is_value(&self, db: &dyn HirAnalysisDb) -> bool { + !self.is_type(db) && !self.is_trait(db) + } + + /// Returns the scope of the name resolution if the name is not a builtin + /// type. 
+ pub fn scope(&self) -> Option { + match self.kind { + NameResKind::Scope(scope) => Some(scope), + NameResKind::Prim(_) => None, + } + } + pub fn pretty_path(&self, db: &dyn HirAnalysisDb) -> Option { match self.kind { NameResKind::Scope(scope) => scope.pretty_path(db.as_hir_db()), @@ -355,13 +363,6 @@ impl NameRes { } } - pub(super) fn is_importable(&self) -> bool { - match self.kind { - NameResKind::Scope(scope) => scope.is_importable(), - NameResKind::Prim(_) => true, - } - } - fn new_prim(prim: PrimTy) -> Self { Self { kind: prim.into(), @@ -373,7 +374,9 @@ impl NameRes { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, derive_more::From)] pub enum NameResKind { + /// The name is resolved to a scope. Scope(ScopeId), + /// The name is resolved to a primitive type. Prim(PrimTy), } @@ -565,7 +568,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { // We don't care about the result of `push` because we assume ingots are // guaranteed to be unique. bucket.push(&NameRes::new_from_scope( - ScopeId::root(*root_mod), + ScopeId::from_item((*root_mod).into()), NameDomain::Type, NameDerivation::External, )) @@ -782,8 +785,8 @@ impl ResolvedQueryCacheStore { /// The multiple same names can be introduced in a same scope as long as they /// are in different domains. /// -/// E.g., A `Foo` can be introduced in a same scope as a type and variant at the -/// same time. This means the code below is valid. +/// E.g., A `Foo` in the below example can be introduced in the same scope as a +/// type and variant at the same time. /// ```fe /// struct Foo {} /// enum MyEnum { @@ -817,6 +820,7 @@ impl NameDomain { } } +/// The propagator controls how the name query is propagated to the next scope. trait QueryPropagator { fn propagate(self, query: &NameQuery) -> PropagationResult; fn propagate_glob(self) -> PropagationResult; @@ -824,8 +828,12 @@ trait QueryPropagator { #[derive(Debug, Clone, PartialEq, Eq, Hash)] enum PropagationResult { + /// The query is resolved to the next scope(edge's destination). Terminated, + /// The query resolution should be continued, i.e., the query is propagated + /// to the next scope and the next scope should be searched for the query. Continuation, + /// The query can't be propagated to the next scope. UnPropagated, } diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index 98f6efda24..fe3420a8d2 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -10,6 +10,13 @@ use super::{ NameDomain, NameQuery, }; +/// The result of early path resolution. +/// There are two kinds of early resolution results: +/// 1. Fully resolved path, which is a path that is fully resolved to concrete +/// items. +/// 2. Partially resolved path. This happens when the path is partially resolved +/// to a type, and the rest of the path depends on the type to resolve. +/// Type/Trait context is needed to resolve the rest of the path. #[derive(Debug, Clone, PartialEq, Eq)] pub enum EarlyResolvedPath { Full(NameResBucket), @@ -164,6 +171,7 @@ impl<'a> IntermediatePath<'a> { } } + /// Make a `NameQuery` to resolve the current segment. 
fn make_query(&self, db: &dyn HirAnalysisDb) -> PathResolutionResult {
         debug_assert!(self.state(db) != IntermediatePathState::TypeDependent);
         let Partial::Present(name) = self.path[self.idx] else {
@@ -189,6 +197,7 @@ impl<'a> IntermediatePath<'a> {
         Ok(NameQuery::with_directive(name, scope, directive))
     }
 
+    /// Finalizes the `IntermediatePath` as an `EarlyResolvedPath::Partial`.
     fn finalize_as_partial(self) -> EarlyResolvedPathWithTrajectory {
         let resolved = EarlyResolvedPath::Partial {
             res: self.current_res.clone(),
@@ -205,6 +214,7 @@ impl<'a> IntermediatePath<'a> {
         }
     }
 
+    /// Finalizes the `IntermediatePath` as an `EarlyResolvedPath::Full`.
     fn finalize_as_full(mut self, bucket: NameResBucket) -> EarlyResolvedPathWithTrajectory {
         let resolved = EarlyResolvedPath::Full(bucket);
         let mut trajectory = self.trajectory;
@@ -217,6 +227,9 @@ impl<'a> IntermediatePath<'a> {
         }
     }
 
+    /// Proceeds to the next segment with the given `bucket`.
+    /// If the `bucket` doesn't contain a proper resolution, then an error is
+    /// returned.
     fn proceed(&mut self, bucket: NameResBucket) -> PathResolutionResult<()> {
         let next_res = bucket
             .pick(NameDomain::Type)
diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs
index 84fce2c28d..096bab134a 100644
--- a/crates/hir/src/hir_def/scope_graph.rs
+++ b/crates/hir/src/hir_def/scope_graph.rs
@@ -1,3 +1,4 @@
+///
 use std::collections::BTreeSet;
 
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -13,14 +14,20 @@ use super::{
     Visibility,
 };
 
+/// Represents a scope relation graph in a top-level module.
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ScopeGraph {
+    /// The top-level module containing the scope graph.
     pub top_mod: TopLevelMod,
+    /// The scopes in the graph.
     pub scopes: FxHashMap,
+    /// All unresolved uses in the graph; used in name resolution.
     pub unresolved_uses: FxHashSet,
 }
 
 impl ScopeGraph {
+    /// Returns an iterator over all item scopes in a top-level module in depth-first order.
     pub fn items_dfs<'a>(&'a self, db: &'a dyn HirDb) -> impl Iterator + 'a {
         ScopeGraphItemIterDfs {
             db,
@@ -30,11 +37,12 @@ impl ScopeGraph {
         }
     }
 
-    /// Returns the direct child items of the scope.
+    /// Returns the direct child items of the given `scope`.
     pub fn child_items(&self, scope: ScopeId) -> impl Iterator + '_ {
         self.children(scope).filter_map(|child| child.to_item())
     }
 
+    /// Returns the direct child scopes of the given `scope`.
     pub fn children(&self, scope: ScopeId) -> impl Iterator + '_ {
         self.edges(scope).filter_map(|edge| match edge.kind {
             EdgeKind::Lex(_)
@@ -47,6 +55,7 @@ impl ScopeGraph {
         })
     }
 
+    /// Returns all edges outgoing from the given `scope`.
     pub fn edges(&self, scope: ScopeId) -> impl Iterator + '_ {
         self.scopes[&scope].edges.iter()
     }
@@ -56,16 +65,29 @@ impl ScopeGraph {
     }
 }
 
+/// A reference to a `[ScopeData]` in a `ScopeGraph`.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub enum ScopeId {
+    /// An item scope.
     Item(ItemKind),
+
+    /// A generic parameter scope.
     GenericParam(ItemKind, usize),
+
+    /// A function parameter scope.
     FuncParam(ItemKind, usize),
+
+    /// A field scope.
     Field(ItemKind, usize),
+
+    /// A variant scope.
     Variant(ItemKind, usize),
+
+    /// A block scope.
     Block(Body, ExprId),
 }
 
 impl ScopeId {
+    /// Returns the top level module containing this scope.
pub fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { match self { ScopeId::Item(item) => item.top_mod(db), @@ -77,21 +99,12 @@ impl ScopeId { } } - pub fn kind_name(&self) -> &'static str { - match self { - ScopeId::Item(item) => item.kind_name(), - ScopeId::GenericParam(_, _) => "type", - ScopeId::FuncParam(_, _) => "value", - ScopeId::Field(_, _) => "field", - ScopeId::Variant(_, _) => "value", - ScopeId::Block(_, _) => "block", - } - } - + /// Convert an item to a scope id. pub fn from_item(item: ItemKind) -> Self { Self::Item(item) } + /// Convert a scope id to an item if the scope is an item. pub fn to_item(self) -> Option { match self { ScopeId::Item(item) => Some(item), @@ -99,10 +112,6 @@ impl ScopeId { } } - pub fn root(top_mod: TopLevelMod) -> Self { - Self::Item(top_mod.into()) - } - /// Returns the nearest enclosing item. pub fn item(self) -> ItemKind { match self { @@ -115,13 +124,6 @@ impl ScopeId { } } - pub fn is_importable(self) -> bool { - !matches!( - self, - ScopeId::GenericParam(..) | ScopeId::FuncParam(..) | ScopeId::Field(..) - ) - } - /// Returns the scope graph containing this scope. pub fn scope_graph(self, db: &dyn HirDb) -> &ScopeGraph { self.top_mod(db).scope_graph(db) @@ -168,10 +170,15 @@ impl ScopeId { self.top_mod(db).ingot(db) } + /// Returns the `Scope` data for this scope. pub fn data(self, db: &dyn HirDb) -> &Scope { self.top_mod(db).scope_graph(db).scope_data(&self) } + /// Returns the parent scope of this scope. + /// The parent scope is + /// 1. the lexical parent if it exists + /// 2. the parent module if 1. does not exist pub fn parent(self, db: &dyn HirDb) -> Option { let mut super_dest = None; for edge in self.edges(db) { @@ -185,6 +192,7 @@ impl ScopeId { super_dest } + /// Returns the lexical parent scope of this scope. pub fn lex_parent(self, db: &dyn HirDb) -> Option { self.data(db) .edges @@ -193,6 +201,7 @@ impl ScopeId { .map(|e| e.dest) } + /// Returns the parent module of this scope. pub fn parent_module(self, db: &dyn HirDb) -> Option { let parent_item = self.parent_item(db)?; match parent_item { @@ -204,6 +213,7 @@ impl ScopeId { } } + /// Returns `true` if the scope is a type. pub fn is_type(self, db: &dyn HirDb) -> bool { match self.data(db).id { ScopeId::Item(item) => item.is_type(), @@ -212,6 +222,20 @@ impl ScopeId { } } + /// Returns the item that contains this scope. + pub fn parent_item(self, db: &dyn HirDb) -> Option { + let mut parent = self.parent(db)?; + loop { + match parent { + ScopeId::Item(item) => return Some(item), + _ => { + parent = parent.parent(db)?; + } + } + } + } + + /// Returns `true` if the scope is a trait definition. 
pub fn is_trait(self, db: &dyn HirDb) -> bool {
         match self.data(db).id {
             ScopeId::Item(item) => item.is_trait(),
             _ => false,
         }
     }
@@ -255,18 +279,6 @@ impl ScopeId {
         }
     }
 
-    pub fn parent_item(self, db: &dyn HirDb) -> Option {
-        let mut parent = self.parent(db)?;
-        loop {
-            match parent {
-                ScopeId::Item(item) => return Some(item),
-                _ => {
-                    parent = parent.parent(db)?;
-                }
-            }
-        }
-    }
-
     pub fn name_span(self, db: &dyn HirDb) -> Option {
         match self.data(db).id {
             ScopeId::Item(item) => item.name_span(),
@@ -303,6 +315,16 @@ impl ScopeId {
         }
     }
 
+    pub fn kind_name(&self) -> &'static str {
+        match self {
+            ScopeId::Item(item) => item.kind_name(),
+            ScopeId::GenericParam(_, _) => "type",
+            ScopeId::FuncParam(_, _) => "value",
+            ScopeId::Field(_, _) => "field",
+            ScopeId::Variant(_, _) => "value",
+            ScopeId::Block(_, _) => "block",
+        }
+    }
     pub fn pretty_path(self, db: &dyn HirDb) -> Option {
         let name = match self {
             ScopeId::Block(body, expr) => format!("{{block{}}}", body.block_order(db)[&expr]),
@@ -369,6 +391,10 @@ impl Scope {
     }
 }
 
+/// An edge of the scope graph.
+/// The edge contains the destination of the edge and the kind of the edge.
+/// [`EdgeKind`] contains supplementary information about the destination
+/// scope, which is used for name resolution.
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct ScopeEdge {
     pub dest: ScopeId,
@@ -377,18 +403,32 @@ pub struct ScopeEdge {
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)]
 pub enum EdgeKind {
+    /// An edge to a lexical parent scope.
     Lex(LexEdge),
+    /// An edge to a module.
     Mod(ModEdge),
+    /// An edge to a type.
     Type(TypeEdge),
+    /// An edge to a trait.
     Trait(TraitEdge),
+    /// An edge from a scope to a generic parameter.
     GenericParam(GenericParamEdge),
+    /// An edge to a value. The value is either a function or a
+    /// constant.
     Value(ValueEdge),
+    /// An edge to a field definition scope.
     Field(FieldEdge),
+    /// An edge to an enum variant definition scope.
     Variant(VariantEdge),
+    /// An edge to a module that is referenced by a `super` keyword.
     Super(SuperEdge),
+    /// An edge to an ingot that is referenced by an `ingot` keyword.
     Ingot(IngotEdge),
+    /// An edge to a scope that is referenced by a `self` keyword.
     Self_(SelfEdge),
+    /// An edge to a scope that is referenced by a `Self` keyword.
     SelfTy(SelfTyEdge),
+    /// An edge to an anonymous scope, e.g., `impl` or function body.
     Anon(AnonEdge),
 }
 
diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs
index b47de8e3cb..25a9b0227c 100644
--- a/crates/hir/src/lower/mod.rs
+++ b/crates/hir/src/lower/mod.rs
@@ -39,7 +39,7 @@ pub fn map_file_to_mod(db: &dyn LowerHirDb, file: InputFile) -> TopLevelMod {
     map_file_to_mod_impl(db.as_hir_db(), ingot, file)
 }
 
-/// Returns the item tree of the given top-level module.
+/// Returns the scope graph of the given top-level module.
pub fn scope_graph(db: &dyn LowerHirDb, top_mod: TopLevelMod) -> &ScopeGraph { scope_graph_impl(db.as_hir_db(), top_mod) } @@ -76,7 +76,7 @@ pub(crate) fn top_mod_ast(db: &dyn HirDb, top_mod: TopLevelMod) -> ast::Root { ast::Root::cast(node).unwrap() } -pub struct FileLowerCtxt<'db> { +pub(super) struct FileLowerCtxt<'db> { builder: ScopeGraphBuilder<'db>, } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index c0525d0f20..6b36a027bf 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -10,6 +10,22 @@ use crate::{ HirDb, }; +/// An [`ScopeGraph`] builder that is used to construct the scope in the hir +/// lowering phase. +// +// The difficulty in constructing a scope graph lies in that the ScopeId must +// hold the corresponding HIR node to represent the scope. However, because HIR +// nodes tracked by salsa are immutable, it is only possible to create HIR nodes +// once the lowering of the item is completely finished. This means that a +// ScopeId can only be constructed after the completion of lowering, or at the +// end point of the scope. +// +// Therefore, the builder's `enter_*_scope` method group does not take any +// concrete item information as arguments. When the `enter_*_scope` method group +// is called, the builder constructs a dummy scope and sets up the relationship +// between this dummy scope and other scopes. Then, when the `leave_*_scope` +// method group is called, the builder substitutes the dummy scope with the real +// scope while keeping the relationship between scopes intact. pub(super) struct ScopeGraphBuilder<'db> { pub(super) db: &'db dyn HirDb, pub(super) top_mod: TopLevelMod, @@ -273,7 +289,7 @@ impl<'db> ScopeGraphBuilder<'db> { } fn enter_scope_impl(&mut self, is_mod: bool) -> NodeId { - // Create dummy scope, the scope kind is initialized in `leave_scope`. + // Create dummy scope, the scope kind is initialized when leaving the scope. 
let (dummy_scope_id, dummy_scope) = self.dummy_scope(); let id = self.graph.push(dummy_scope_id, dummy_scope); self.scope_stack.push(id); From a7208ec2495f3ee3920fde3def901883266ebbcd Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 17 Jul 2023 00:44:34 +0200 Subject: [PATCH 225/678] Implement `ScopeGraphFormatter` to dump dot representation of `ScopeGraph` --- Cargo.lock | 266 ++++++++++++++++++++-- crates/driver2/Cargo.toml | 1 + crates/driver2/src/lib.rs | 31 ++- crates/driver2/src/main.rs | 37 ++- crates/hir/Cargo.toml | 1 + crates/hir/src/hir_def/item.rs | 16 +- crates/hir/src/hir_def/mod.rs | 2 + crates/hir/src/hir_def/scope_graph.rs | 17 +- crates/hir/src/hir_def/scope_graph_viz.rs | 221 ++++++++++++++++++ crates/hir/src/hir_def/use_tree.rs | 18 ++ crates/mir/Cargo.toml | 8 +- crates/uitest/tests/name_resolution.rs | 6 +- 12 files changed, 576 insertions(+), 48 deletions(-) create mode 100644 crates/hir/src/hir_def/scope_graph_viz.rs diff --git a/Cargo.lock b/Cargo.lock index 5477ca2880..fa17d3bb37 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,6 +33,55 @@ dependencies = [ "memchr", ] +[[package]] +name = "anstream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is-terminal", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" + +[[package]] +name = "anstyle-parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "anstyle-wincon" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +dependencies = [ + "anstyle", + "windows-sys 0.48.0", +] + [[package]] name = "arc-swap" version = "1.6.0" @@ -195,8 +244,8 @@ checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" dependencies = [ "atty", "bitflags", - "clap_derive", - "clap_lex", + "clap_derive 3.2.18", + "clap_lex 0.2.4", "indexmap", "once_cell", "strsim", @@ -204,6 +253,29 @@ dependencies = [ "textwrap 0.16.0", ] +[[package]] +name = "clap" +version = "4.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3eab9e8ceb9afdade1ab3f0fd8dbce5b1b2f468ad653baf10e771781b2b67b73" +dependencies = [ + "clap_builder", + "clap_derive 4.3.12", + "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f2763db829349bf00cfc06251268865ed4363b93a943174f638daf3ecdba2cd" +dependencies = [ + "anstream", + "anstyle", + "clap_lex 0.5.0", + "strsim", +] + [[package]] name = "clap_derive" version = "3.2.18" @@ -217,6 +289,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "clap_derive" +version = "4.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.15", +] + [[package]] name = "clap_lex" version = "0.2.4" @@ -226,6 +310,12 @@ dependencies = [ "os_str_bytes", ] +[[package]] +name = "clap_lex" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" + [[package]] name = "cloudabi" version = "0.1.0" @@ -254,6 +344,12 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + [[package]] name = "colored" version = "2.0.0" @@ -553,9 +649,9 @@ dependencies = [ [[package]] name = "dot2" -version = "0.1.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" +checksum = "855423f2158bcc73798b3b9a666ec4204597a72370dc91dbdb8e7f9519de8cc3" [[package]] name = "driver2" @@ -615,6 +711,27 @@ dependencies = [ "syn 2.0.15", ] +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "ethabi" version = "18.0.0" @@ -919,6 +1036,7 @@ name = "fe-driver2" version = "0.23.0" dependencies = [ "camino", + "clap 4.3.12", "codespan-reporting", "fe-common2", "fe-hir", @@ -934,6 +1052,7 @@ dependencies = [ "camino", "cranelift-entity", "derive_more", + "dot2", "fe-common2", "fe-macros", "fe-parser2", @@ -1241,6 +1360,12 @@ dependencies = [ "libc", ] +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + [[package]] name = "hex" version = "0.4.3" @@ -1368,6 +1493,29 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi 0.3.2", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "is-terminal" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +dependencies = [ + "hermit-abi 0.3.2", + "io-lifetimes", + "rustix", + "windows-sys 0.48.0", +] + [[package]] name = "itertools" version = "0.10.5" @@ -1440,6 +1588,12 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + [[package]] name = "lock_api" version = "0.4.9" @@ -1683,7 +1837,7 @@ dependencies = [ "libc", "redox_syscall 0.2.16", 
"smallvec", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -2132,6 +2286,20 @@ dependencies = [ "semver 1.0.17", ] +[[package]] +name = "rustix" +version = "0.37.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.48.0", +] + [[package]] name = "rustversion" version = "1.0.12" @@ -2651,6 +2819,12 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + [[package]] name = "vec1" version = "1.10.1" @@ -2828,7 +3002,16 @@ version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" dependencies = [ - "windows-targets", + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.1", ] [[package]] @@ -2837,13 +3020,28 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", ] [[package]] @@ -2852,42 +3050,84 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + [[package]] name = "windows_i686_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +[[package]] +name = 
"windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + [[package]] name = "windows_i686_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + [[package]] name = "winnow" version = "0.4.1" diff --git a/crates/driver2/Cargo.toml b/crates/driver2/Cargo.toml index d1d1d04f9c..9d4e155a91 100644 --- a/crates/driver2/Cargo.toml +++ b/crates/driver2/Cargo.toml @@ -18,3 +18,4 @@ common = { path = "../common2", package = "fe-common2" } macros = { path = "../macros", package = "fe-macros" } hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } camino = "1.1.4" +clap = { version = "4.3", features = ["derive"] } diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index dd06721bdd..82cc962a53 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -12,8 +12,8 @@ use common::{ InputDb, InputFile, InputIngot, }; use hir::{ - analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, lower::map_file_to_mod, - HirDb, LowerHirDb, ParsingPass, SpannedHirDb, + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, + lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, @@ -43,21 +43,24 @@ pub struct DriverDataBase { impl DriverDataBase { // TODO: An temporary implementation for ui testing. 
- pub fn run_on_file(&mut self, file_path: &path::Path, source: &str) { - self.run_on_file_with_pass_manager(file_path, source, initialize_analysis_pass); + pub fn run_on_top_mod(&mut self, top_mod: TopLevelMod) { + self.run_on_file_with_pass_manager(top_mod, initialize_analysis_pass); } - pub fn run_on_file_with_pass_manager( - &mut self, - file_path: &path::Path, - source: &str, - pm_builder: F, - ) where + pub fn run_on_file_with_pass_manager(&mut self, top_mod: TopLevelMod, pm_builder: F) + where F: FnOnce(&DriverDataBase) -> AnalysisPassManager<'_>, { self.diags.clear(); + self.diags = { + let mut pass_manager = pm_builder(self); + pass_manager.run_on_module(top_mod) + }; + } + pub fn top_mod_from_file(&mut self, file_path: &path::Path, source: &str) -> TopLevelMod { let kind = IngotKind::StandAlone; + // We set the ingot version to 0.0.0 for stand-alone file. let version = Version::new(0, 0, 0); let root_file = file_path; @@ -71,16 +74,10 @@ impl DriverDataBase { let file_name = root_file.file_name().unwrap().to_str().unwrap(); let file = InputFile::new(self, ingot, file_name.into(), source.to_string()); - ingot.set_root_file(self, file); ingot.set_files(self, [file].into()); - let top_mod = map_file_to_mod(self, file); - - self.diags = { - let mut pass_manager = pm_builder(self); - pass_manager.run_on_module(top_mod) - }; + map_file_to_mod(self, file) } /// Prints accumulated diagnostics to stderr. diff --git a/crates/driver2/src/main.rs b/crates/driver2/src/main.rs index c764cb6fc6..0533efd52b 100644 --- a/crates/driver2/src/main.rs +++ b/crates/driver2/src/main.rs @@ -1,10 +1,41 @@ use fe_driver2::DriverDataBase; +use clap::Parser; +use hir::hir_def::TopLevelMod; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + /// The file to compile. + #[arg()] + file_path: String, + + /// Dump a graphviz dot file of the scope graph for the given file. 
+ #[arg(long = "dump-scope-graph", default_value_t = false)] + dump_scope_graph: bool, +} + pub fn main() { - let arg = std::env::args().nth(1).unwrap(); + let args = Args::parse(); + let path = std::path::Path::new(&args.file_path); + if !path.exists() { + eprintln!("file '{}' does not exist", args.file_path); + std::process::exit(2); + } + let source = std::fs::read_to_string(&args.file_path).unwrap(); let mut db = DriverDataBase::default(); - let source = std::fs::read_to_string(&arg).unwrap(); - db.run_on_file(std::path::Path::new(&arg), &source); + let top_mod = db.top_mod_from_file(path, &source); + db.run_on_top_mod(top_mod); db.emit_diags(); + + if args.dump_scope_graph { + println!("{}", dump_scope_graph(&db, top_mod)); + } +} + +fn dump_scope_graph(db: &DriverDataBase, top_mod: TopLevelMod) -> String { + let mut s = vec![]; + top_mod.scope_graph(db).write_as_dot(db, &mut s).unwrap(); + String::from_utf8(s).unwrap() } diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index cbc2f16bbb..6906d217ec 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -19,6 +19,7 @@ camino = "1.1.4" rustc-hash = "1.1.0" smallvec = "1.10.0" paste = "1.0" +dot2 = "1.0" common = { path = "../common2", package = "fe-common2" } parser = { path = "../parser2", package = "fe-parser2" } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 26628aafd0..05c0370eb4 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -85,19 +85,19 @@ impl ItemKind { pub fn kind_name(self) -> &'static str { use ItemKind::*; match self { - TopMod(_) => "module", - Mod(_) => "module", - Func(_) => "function", + TopMod(_) => "mod", + Mod(_) => "mod", + Func(_) => "fn", Struct(_) => "struct", Contract(_) => "contract", Enum(_) => "enum", - TypeAlias(_) => "type alias", + TypeAlias(_) => "type", Trait(_) => "trait", Impl(_) => "impl", ImplTrait(_) => "impl trait", Const(_) => "const", Use(_) => "use", - Body(_) => "expression body", + Body(_) => "body", } } @@ -640,6 +640,12 @@ impl Use { false } } + + pub(crate) fn pretty_path(&self, db: &dyn HirDb) -> String { + self.path(db) + .to_opt() + .map_or_else(|| "{invalid}".to_string(), |path| path.pretty_path(db)) + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 9cd42e3f0c..633f8dbb9b 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -12,6 +12,8 @@ pub mod stmt; pub mod types; pub mod use_tree; +mod scope_graph_viz; + pub(crate) mod module_tree; pub use attr::*; diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 096bab134a..c8cd27f178 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -1,5 +1,4 @@ -/// -use std::collections::BTreeSet; +use std::{collections::BTreeSet, io}; use rustc_hash::{FxHashMap, FxHashSet}; @@ -10,8 +9,8 @@ use crate::{ }; use super::{ - Body, Enum, ExprId, Func, FuncParamLabel, IdentId, IngotId, ItemKind, TopLevelMod, Use, - Visibility, + scope_graph_viz::ScopeGraphFormatter, Body, Enum, ExprId, Func, FuncParamLabel, IdentId, + IngotId, ItemKind, TopLevelMod, Use, Visibility, }; /// Represents a scope relation graph in a top-level module. @@ -60,6 +59,11 @@ impl ScopeGraph { self.scopes[&scope].edges.iter() } + /// Write a scope graph as a dot file format to given `w`. 
+ pub fn write_as_dot(&self, db: &dyn HirDb, w: &mut impl io::Write) -> io::Result<()> { + ScopeGraphFormatter::new(db, self).render(w) + } + pub fn scope_data(&self, scope: &ScopeId) -> &Scope { &self.scopes[scope] } @@ -401,6 +405,11 @@ pub struct ScopeEdge { pub kind: EdgeKind, } +/// A specific edge property definitions. +/// +/// NOTE: The internal types of each variants contains very small amount of +/// information, the reason why we need to prepare each internal types is to +/// allow us to implement traits to each edges directly. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] pub enum EdgeKind { /// An edge to a lexical parent scope. diff --git a/crates/hir/src/hir_def/scope_graph_viz.rs b/crates/hir/src/hir_def/scope_graph_viz.rs new file mode 100644 index 0000000000..62b777ffd6 --- /dev/null +++ b/crates/hir/src/hir_def/scope_graph_viz.rs @@ -0,0 +1,221 @@ +use std::{ + collections::{hash_map::Entry, VecDeque}, + io, +}; + +use cranelift_entity::{entity_impl, PrimaryMap}; +use dot2::label::Text; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::{hir_def::ItemKind, HirDb}; + +use super::scope_graph::{EdgeKind, ScopeGraph, ScopeId}; + +type NodeId = usize; + +pub(super) struct ScopeGraphFormatter<'db> { + db: &'db dyn HirDb, + edges: PrimaryMap, + nodes: Vec, +} + +impl<'db> ScopeGraphFormatter<'db> { + fn build_formatter(&mut self, s_graph: &ScopeGraph) { + let mut visited = FxHashSet::default(); + let mut nodes_map = FxHashMap::default(); + + let mut worklist = VecDeque::new(); + let root = s_graph.top_mod.scope(); + worklist.push_back(root); + while let Some(scope) = worklist.pop_front() { + if !visited.insert(scope) { + continue; + } + let source = self.node_id(scope, &mut nodes_map); + + for edge in s_graph.edges(scope) { + let target = self.node_id(edge.dest, &mut nodes_map); + + self.edges.push(Edge { + kind: edge.kind, + target, + source, + }); + + if !visited.contains(&edge.dest) { + worklist.push_back(edge.dest); + } + } + } + } + + fn node_id(&mut self, scope: ScopeId, nodes_map: &mut FxHashMap) -> NodeId { + match nodes_map.entry(scope) { + Entry::Occupied(entry) => *entry.get(), + Entry::Vacant(entry) => { + let id = self.nodes.len(); + self.nodes.push(scope); + entry.insert(id); + id + } + } + } +} + +impl<'db> ScopeGraphFormatter<'db> { + pub(super) fn new(db: &'db dyn HirDb, s_graph: &ScopeGraph) -> Self { + let nodes = Vec::new(); + let edges = PrimaryMap::new(); + let mut formatter = Self { db, edges, nodes }; + + formatter.build_formatter(s_graph); + formatter + } + + pub(super) fn render(&self, w: &mut impl io::Write) -> io::Result<()> { + dot2::render(self, w).map_err(|err| match err { + dot2::Error::Io(err) => err, + dot2::Error::InvalidId => unreachable!(), + }) + } +} + +impl<'db, 'a> dot2::Labeller<'a> for ScopeGraphFormatter<'db> { + type Node = NodeId; + type Edge = EdgeId; + type Subgraph = (); + + fn graph_id(&'a self) -> dot2::Result> { + dot2::Id::new("example1") + } + + fn node_id(&'a self, n: &Self::Node) -> dot2::Result> { + dot2::Id::new(format!("N{n}")) + } + + fn node_label<'b>(&'a self, node: &Self::Node) -> dot2::Result> { + let label = match &self.nodes[*node] { + ScopeId::Item(item) => { + let item_name = match item { + ItemKind::Use(use_) => use_.pretty_path(self.db), + _ => item + .name(self.db) + .map_or(" ", |name| name.data(self.db)) + .to_string(), + }; + + format!( + r#" {kind_name} {item_name} "#, + kw_color = "#7B2D80", + kind_name = item.kind_name(), + item_color = "#1B458D", + ) + } + 
+ ScopeId::Block(body, expr) => { + let idx = body.block_order(self.db)[expr]; + format!( + r#" {{block{block_number}}} "#, + block_color = "#383A42", + block_number = idx + ) + } + + scope => { + format!( + r#" {name} "#, + param_color = "#3A793A", + name = scope + .name(self.db) + .map_or(String::new(), |name| name.data(self.db).to_string()), + ) + } + }; + Ok(Text::HtmlStr(label.into())) + } + + fn edge_label(&'a self, e: &Self::Edge) -> Text<'a> { + let edge = &self.edges[*e]; + + let label = match edge.kind { + EdgeKind::Lex(_) => "lex", + EdgeKind::Mod(_) => "mod", + EdgeKind::Type(_) => "type", + EdgeKind::Trait(_) => "trait", + EdgeKind::GenericParam(_) => "generic_param", + EdgeKind::Value(_) => "value", + EdgeKind::Field(_) => "field", + EdgeKind::Variant(_) => "variant", + EdgeKind::Super(_) => "super", + EdgeKind::Ingot(_) => "ingot", + EdgeKind::Self_(_) => "self", + EdgeKind::SelfTy(_) => "self_ty", + EdgeKind::Anon(_) => "anon", + }; + let color = edge.color(); + let colored_label = format!(r#" {} "#, color, label); + Text::HtmlStr(colored_label.into()) + } + + fn edge_color(&'a self, e: &Self::Edge) -> Option> { + let edge = &self.edges[*e]; + Some(Text::LabelStr(edge.color().into())) + } + + fn node_shape(&self, _n: &Self::Node) -> Option> { + Some(Text::LabelStr("box".into())) + } +} + +impl<'db, 'a> dot2::GraphWalk<'a> for ScopeGraphFormatter<'db> { + type Node = NodeId; + type Edge = EdgeId; + type Subgraph = (); + + fn nodes(&'a self) -> dot2::Nodes<'a, Self::Node> { + (0..self.nodes.len()).collect() + } + + fn edges(&'a self) -> dot2::Edges<'a, Self::Edge> { + self.edges.keys().collect() + } + + fn source(&self, e: &Self::Edge) -> Self::Node { + self.edges[*e].source + } + + fn target(&self, e: &Self::Edge) -> Self::Node { + self.edges[*e].target + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub(super) struct EdgeId(u32); +entity_impl!(EdgeId); + +#[derive(Debug)] +struct Edge { + kind: EdgeKind, + target: NodeId, + source: NodeId, +} + +impl Edge { + fn color(&self) -> &'static str { + match self.kind { + EdgeKind::Lex(_) => "#F94144", + EdgeKind::Mod(_) => "#F3722C", + EdgeKind::Type(_) => "#F8961E", + EdgeKind::Trait(_) => "#F9C74F", + EdgeKind::GenericParam(_) => "#90BE6D", + EdgeKind::Value(_) => "#43AA8B", + EdgeKind::Field(_) => "#577590", + EdgeKind::Variant(_) => "#6D597A", + EdgeKind::Super(_) => "#B56576", + EdgeKind::Ingot(_) => "#E56B6F", + EdgeKind::Self_(_) => "#FFBA49", + EdgeKind::SelfTy(_) => "#3A6351", + EdgeKind::Anon(_) => "#788475", + } + } +} diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs index 18e2e0453e..70bdfa8ef8 100644 --- a/crates/hir/src/hir_def/use_tree.rs +++ b/crates/hir/src/hir_def/use_tree.rs @@ -26,6 +26,24 @@ impl UsePathId { pub fn segment_len(&self, db: &dyn HirDb) -> usize { self.data(db).len() } + + pub fn pretty_path(&self, db: &dyn HirDb) -> String { + let mut path = String::new(); + + for (i, seg) in self.data(db).iter().enumerate() { + if i != 0 { + path.push_str("::"); + } + match seg { + Partial::Absent => path.push_str("{invalid}"), + Partial::Present(seg) => match seg { + UsePathSegment::Ident(ident) => path.push_str(ident.data(db)), + UsePathSegment::Glob => path.push('*'), + }, + } + } + path + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/mir/Cargo.toml b/crates/mir/Cargo.toml index 601549ff6c..7de3ba531e 100644 --- a/crates/mir/Cargo.toml +++ b/crates/mir/Cargo.toml @@ -7,9 +7,9 @@ license = "Apache-2.0" repository = 
"https://github.com/ethereum/fe" [dependencies] -fe-common = { path = "../common", version = "^0.23.0"} -fe-parser = { path = "../parser", version = "^0.23.0"} -fe-analyzer = { path = "../analyzer", version = "^0.23.0"} +fe-common = { path = "../common", version = "^0.23.0" } +fe-parser = { path = "../parser", version = "^0.23.0" } +fe-analyzer = { path = "../analyzer", version = "^0.23.0" } salsa = "0.16.1" smol_str = "0.1.21" num-bigint = "0.4.3" @@ -17,7 +17,7 @@ num-traits = "0.2.14" num-integer = "0.1.45" id-arena = "2.2.1" fxhash = "0.2.1" -dot2 = "0.1.0" +dot2 = "1.0.0" indexmap = "1.6.2" [dev-dependencies] diff --git a/crates/uitest/tests/name_resolution.rs b/crates/uitest/tests/name_resolution.rs index 19d1ff0dce..b4260da226 100644 --- a/crates/uitest/tests/name_resolution.rs +++ b/crates/uitest/tests/name_resolution.rs @@ -11,7 +11,8 @@ use fe_compiler_test_utils::snap_test; fn run_name_resolution(fixture: Fixture<&str>) { let mut driver = DriverDataBase::default(); let path = Path::new(fixture.path()); - driver.run_on_file(path, fixture.content()); + let top_mod = driver.top_mod_from_file(path, fixture.content()); + driver.run_on_top_mod(top_mod); let diags = driver.format_diags(); snap_test!(diags, fixture.path()); } @@ -32,6 +33,7 @@ mod wasm { fn run_name_resolution(fixture: Fixture<&str>) { let mut driver = DriverDataBase::default(); let path = Path::new(fixture.path()); - driver.run_on_file(path, fixture.content()); + let top_mod = driver.top_mod_from_file(path, fixture.content()); + driver.run_on_top_mod(top_mod); } } From 0081d897dcfa41334f549a7e2efbe6108406db97 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 17 Jul 2023 16:50:34 +0200 Subject: [PATCH 226/678] Add check for unimportable scopes --- .../src/name_resolution/import_resolver.rs | 3 +++ .../src/name_resolution/name_resolver.rs | 7 ++++++ .../name_resolution/import_unimpotable.fe | 11 +++++++++ .../name_resolution/import_unimpotable.snap | 24 +++++++++++++++++++ 4 files changed, 45 insertions(+) create mode 100644 crates/uitest/fixtures/name_resolution/import_unimpotable.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_unimpotable.snap diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 89d0d3b0a4..cb631d33c0 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -347,6 +347,9 @@ impl<'db> ImportResolver<'db> { return true; }; + if !res.is_importable() { + return false; + } if res.is_visible(self.db, i_use.original_scope) { true } else { diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 7889ac7393..ee98624c06 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -363,6 +363,13 @@ impl NameRes { } } + pub(super) fn is_importable(&self) -> bool { + match self.kind { + NameResKind::Scope(scope) => matches!(scope, ScopeId::Item(_) | ScopeId::Variant(..)), + NameResKind::Prim(_) => true, + } + } + fn new_prim(prim: PrimTy) -> Self { Self { kind: prim.into(), diff --git a/crates/uitest/fixtures/name_resolution/import_unimpotable.fe b/crates/uitest/fixtures/name_resolution/import_unimpotable.fe new file mode 100644 index 0000000000..b842a96238 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_unimpotable.fe @@ -0,0 +1,11 @@ +use S::{t, T} 
+use E::T + + +struct S { + t: T +} + +enum E { + Bar(T) +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_unimpotable.snap b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap new file mode 100644 index 0000000000..bf0f97e2ee --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_unimpotable.fe +--- +error[2-0002]: `t` is not found + ┌─ import_unimpotable.fe:1:9 + │ +1 │ use S::{t, T} + │ ^ `t` is not found + +error[2-0002]: `T` is not found + ┌─ import_unimpotable.fe:1:12 + │ +1 │ use S::{t, T} + │ ^ `T` is not found + +error[2-0002]: `T` is not found + ┌─ import_unimpotable.fe:2:8 + │ +2 │ use E::T + │ ^ `T` is not found + + From 469e836aba312b816fa8c55a89085099397fe89c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 18 Jul 2023 17:42:27 +0200 Subject: [PATCH 227/678] Improve error message when a middle segment of use path is resolved to inappropriate scope --- .../src/name_resolution/diagnostics.rs | 47 +++++++++++++- .../src/name_resolution/import_resolver.rs | 61 +++++++++++++------ .../hir-analysis/src/name_resolution/mod.rs | 19 ++++-- .../src/name_resolution/name_resolver.rs | 33 ++++++++-- .../src/name_resolution/path_resolver.rs | 2 +- crates/hir/src/hir_def/scope_graph.rs | 25 +++++--- .../name_resolution/import_unimpotable.snap | 28 +++++---- .../name_resolution/path_invalid_domain.snap | 8 +-- .../fixtures/name_resolution/path_shadow.snap | 2 +- 9 files changed, 166 insertions(+), 59 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 0e5d6051f4..beeb21270e 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -32,6 +32,10 @@ pub enum NameResDiag { /// The resolved name is ambiguous. Ambiguous(DynLazySpan, IdentId, Vec), + /// The name is found, but it can't be used as a middle segment of a use + /// path. + InvalidUsePathSegment(DynLazySpan, IdentId, Option), + /// The name is found but belongs to a different name domain other than the /// Type. ExpectedType(DynLazySpan, IdentId, NameRes), @@ -57,6 +61,7 @@ impl NameResDiag { Self::NotFound(span, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::Invisible(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::Ambiguous(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::InvalidUsePathSegment(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::ExpectedType(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::ExpectedTrait(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::ExpectedValue(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), @@ -92,15 +97,26 @@ impl NameResDiag { Self::Ambiguous(span, ident, cands) } + pub(super) fn invalid_use_path_segment( + db: &dyn HirAnalysisDb, + span: DynLazySpan, + ident: IdentId, + found: NameRes, + ) -> Self { + let found = found.kind.name_span(db); + Self::InvalidUsePathSegment(span, ident, found) + } + fn local_code(&self) -> u16 { match self { Self::Conflict(..) => 1, Self::NotFound(..) => 2, Self::Invisible(..) => 3, Self::Ambiguous(..) => 4, - Self::ExpectedType(..) => 5, - Self::ExpectedTrait(..) => 6, - Self::ExpectedValue(..) => 7, + Self::InvalidUsePathSegment(..) => 5, + Self::ExpectedType(..) => 6, + Self::ExpectedTrait(..) 
=> 7, + Self::ExpectedValue(..) => 8, } } @@ -118,6 +134,12 @@ impl NameResDiag { format!("`{}` is not visible", name.data(db),) } Self::Ambiguous(_, name, _) => format!("`{}` is ambiguous", name.data(db)), + Self::InvalidUsePathSegment(_, name, _) => { + format!( + "`{}` can't be used as a middle segment of a use path", + name.data(db) + ) + } Self::ExpectedType(_, _, _) => "expected type item here".to_string(), Self::ExpectedTrait(_, _, _) => "expected trait item here".to_string(), Self::ExpectedValue(_, _, _) => "expected value here".to_string(), @@ -202,6 +224,25 @@ impl NameResDiag { diags } + Self::InvalidUsePathSegment(prim_span, name, res_span) => { + let name = name.data(db.as_hir_db()); + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{}` can't be used as a middle segment of a use path", name,), + prim_span.resolve(db), + )]; + + if let Some(span) = res_span { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("`{}` is defined here", name), + span.resolve(db), + )); + } + + diag + } + Self::ExpectedType(prim_span, name, res) => { let res_kind_name = res.kind_name(); let name = name.data(db.as_hir_db()); diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index cb631d33c0..5d3c5c08c1 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -336,10 +336,6 @@ impl<'db> ImportResolver<'db> { let mut resolver = NameResolver::new_no_cache(self.db, &self.resolved_imports); let mut bucket = resolver.resolve_query(query); - if !i_use.is_base_resolved(self.db) { - bucket.filter_by_domain(NameDomain::Type); - } - // Filter out invisible resolutions. let mut invisible_span = None; bucket.bucket.retain(|_, res| { @@ -360,11 +356,6 @@ impl<'db> ImportResolver<'db> { } }); - // Filter out irrelevant resolutions if the segment is not the last one. - if !i_use.is_base_resolved(self.db) { - bucket.filter_by_domain(NameDomain::Type); - } - for (_, err) in bucket.errors() { if !matches!( err, @@ -401,8 +392,14 @@ impl<'db> ImportResolver<'db> { if i_use.is_base_resolved(self.db) { Some(IUseResolution::Full(bucket)) } else { - let res = bucket.pick(NameDomain::Type).clone().unwrap(); - let next_i_use = i_use.proceed(res); + let next_i_use = match i_use.proceed(self.db, bucket) { + Ok(next_i_use) => next_i_use, + Err(err) => { + self.register_error(i_use, err); + return None; + } + }; + if next_i_use.is_base_resolved(self.db) { Some(IUseResolution::BasePath(next_i_use)) } else { @@ -555,6 +552,16 @@ impl<'db> ImportResolver<'db> { )); } + NameResolutionError::InvalidUsePathSegment(res) => { + self.accumulated_errors + .push(NameResDiag::invalid_use_path_segment( + self.db, + i_use.current_segment_span(), + i_use.current_segment_ident(self.db).unwrap(), + res, + )) + } + NameResolutionError::Invisible(invisible_span) => { self.accumulated_errors.push(NameResDiag::invisible( i_use.current_segment_span(), @@ -792,13 +799,31 @@ impl IntermediateUse { } /// Proceed the resolution of the use path to the next segment. - /// The bucket must contain exactly one resolution. - fn proceed(&self, next_res: NameRes) -> Self { - Self { - use_: self.use_, - current_res: next_res.into(), - original_scope: self.original_scope, - unresolved_from: self.unresolved_from + 1, + /// Returns an error if the bucket doesn't contain appropriate resolution + /// for use path segment. 
# Panics
+    /// - Panics if the base path is already resolved.
+    /// - Panics if the bucket is empty.
+    fn proceed(&self, db: &dyn HirAnalysisDb, bucket: NameResBucket) -> NameResolutionResult {
+        debug_assert!(!bucket.is_empty());
+        debug_assert!(!self.is_base_resolved(db));
+
+        let next_res = match bucket.pick(NameDomain::Type) {
+            Ok(res) => res.clone(),
+            Err(_) => {
+                let res = bucket.iter().next().unwrap();
+                return Err(NameResolutionError::InvalidUsePathSegment(res.clone()));
+            }
+        };
+
+        if next_res.is_mod() || next_res.is_enum() {
+            Ok(Self {
+                use_: self.use_,
+                current_res: next_res.into(),
+                original_scope: self.original_scope,
+                unresolved_from: self.unresolved_from + 1,
+            })
+        } else {
+            Err(NameResolutionError::InvalidUsePathSegment(next_res))
         }
     }
 
diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs
index c7738d1ca2..d240af50d2 100644
--- a/crates/hir-analysis/src/name_resolution/mod.rs
+++ b/crates/hir-analysis/src/name_resolution/mod.rs
@@ -255,7 +255,7 @@ impl<'db, 'a> EarlyPathVisitor<'db, 'a> {
             return;
         }
 
-        match path_kind.pick(self.db, bucket) {
+        match path_kind.pick(bucket) {
             // The path exists and belongs to the expected kind.
             Either::Left(res) => {
                 if !res.is_visible(self.db, scope) {
@@ -451,6 +451,15 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> {
                         NameResolutionError::Ambiguous(cands) => {
                             NameResDiag::ambiguous(self.db, span.into(), *ident.unwrap(), cands)
                         }
+
+                        NameResolutionError::InvalidUsePathSegment(res) => {
+                            NameResDiag::invalid_use_path_segment(
+                                self.db,
+                                span.into(),
+                                *ident.unwrap(),
+                                res,
+                            )
+                        }
                     };
 
                     self.diags.push(diag);
@@ -489,7 +498,7 @@ impl ExpectedPathKind {
         }
     }
 
-    fn pick(self, db: &dyn HirAnalysisDb, bucket: NameResBucket) -> Either {
+    fn pick(self, bucket: NameResBucket) -> Either {
         debug_assert!(!bucket.is_empty());
 
         let res = match bucket.pick(self.domain()).as_ref().ok() {
@@ -500,9 +509,9 @@ impl ExpectedPathKind {
         };
 
         match self {
-            Self::Type if !res.is_type(db) => Either::Right(res),
-            Self::Trait if !res.is_trait(db) => Either::Right(res),
-            Self::Value if !res.is_value(db) => Either::Right(res),
+            Self::Type if !res.is_type() => Either::Right(res),
+            Self::Trait if !res.is_trait() => Either::Right(res),
+            Self::Value if !res.is_value() => Either::Right(res),
             _ => Either::Left(res),
         }
     }
diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs
index ee98624c06..c1905dd03b 100644
--- a/crates/hir-analysis/src/name_resolution/name_resolver.rs
+++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs
@@ -288,23 +288,37 @@ impl NameRes {
     }
 
     /// Returns `true` if the resolution is a type.
-    pub(crate) fn is_type(&self, db: &dyn HirAnalysisDb) -> bool {
+    pub(crate) fn is_type(&self) -> bool {
         match self.kind {
             NameResKind::Prim(_) => true,
-            NameResKind::Scope(scope) => scope.is_type(db.as_hir_db()),
+            NameResKind::Scope(scope) => scope.is_type(),
         }
     }
 
     /// Returns `true` if the resolution is a trait.
- pub(crate) fn is_trait(&self, db: &dyn HirAnalysisDb) -> bool { + pub(crate) fn is_trait(&self) -> bool { match self.kind { NameResKind::Prim(_) => false, - NameResKind::Scope(scope) => scope.is_trait(db.as_hir_db()), + NameResKind::Scope(scope) => scope.is_trait(), } } - pub(crate) fn is_value(&self, db: &dyn HirAnalysisDb) -> bool { - !self.is_type(db) && !self.is_trait(db) + pub(crate) fn is_enum(&self) -> bool { + match self.kind { + NameResKind::Prim(_) => false, + NameResKind::Scope(scope) => scope.is_enum(), + } + } + + pub(crate) fn is_mod(&self) -> bool { + match self.kind { + NameResKind::Prim(_) => false, + NameResKind::Scope(scope) => scope.is_mod(), + } + } + + pub(crate) fn is_value(&self) -> bool { + !self.is_type() && !self.is_trait() } /// Returns the scope of the name resolution if the name is not a builtin @@ -744,6 +758,9 @@ pub enum NameResolutionError { /// The name is found, but it's ambiguous. Ambiguous(Vec), + + /// The name is found ,but it can't be used in the middle of a use path. + InvalidUsePathSegment(NameRes), } pub type NameResolutionResult = Result; @@ -755,6 +772,10 @@ impl fmt::Display for NameResolutionError { NameResolutionError::Invalid => write!(f, "invalid name"), NameResolutionError::Invisible(_) => write!(f, "name is not visible"), NameResolutionError::Ambiguous(_) => write!(f, "name is ambiguous"), + NameResolutionError::InvalidUsePathSegment(_) => write!( + f, + "the found resolution can't be used in the middle of a use path" + ), } } } diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index fe3420a8d2..c935f4cf8d 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -246,7 +246,7 @@ impl<'a> IntermediatePath<'a> { debug_assert!(self.idx < self.path.len()); let is_type_dependent = - (self.current_res.is_type(db) || self.current_res.is_trait(db)) && self.idx != 0; + (self.current_res.is_type() || self.current_res.is_trait()) && self.idx != 0; if (self.idx == self.path.len() - 1) && !is_type_dependent { IntermediatePathState::ReadyToFinalize diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index c8cd27f178..ac5c44d0e8 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -218,14 +218,27 @@ impl ScopeId { } /// Returns `true` if the scope is a type. - pub fn is_type(self, db: &dyn HirDb) -> bool { - match self.data(db).id { + pub fn is_type(self) -> bool { + match self { ScopeId::Item(item) => item.is_type(), ScopeId::GenericParam(..) => true, _ => false, } } + pub fn is_enum(self) -> bool { + matches!(self, ScopeId::Item(ItemKind::Enum(_))) + } + + pub fn is_mod(self) -> bool { + matches!(self, ScopeId::Item(ItemKind::Mod(_) | ItemKind::TopMod(_))) + } + + /// Returns `true` if the scope is a trait definition. + pub fn is_trait(self) -> bool { + matches!(self, ScopeId::Item(ItemKind::Trait(_))) + } + /// Returns the item that contains this scope. pub fn parent_item(self, db: &dyn HirDb) -> Option { let mut parent = self.parent(db)?; @@ -239,14 +252,6 @@ impl ScopeId { } } - /// Returns `true` if the scope is a trait definition. 
- pub fn is_trait(self, db: &dyn HirDb) -> bool { - match self.data(db).id { - ScopeId::Item(item) => item.is_trait(), - _ => false, - } - } - pub fn name(self, db: &dyn HirDb) -> Option { match self.data(db).id { ScopeId::Item(item) => item.name(db), diff --git a/crates/uitest/fixtures/name_resolution/import_unimpotable.snap b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap index bf0f97e2ee..8452583d7f 100644 --- a/crates/uitest/fixtures/name_resolution/import_unimpotable.snap +++ b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap @@ -3,22 +3,28 @@ source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/import_unimpotable.fe --- -error[2-0002]: `t` is not found - ┌─ import_unimpotable.fe:1:9 +error[2-0002]: `T` is not found + ┌─ import_unimpotable.fe:2:8 │ -1 │ use S::{t, T} - │ ^ `t` is not found +2 │ use E::T + │ ^ `T` is not found -error[2-0002]: `T` is not found - ┌─ import_unimpotable.fe:1:12 +error[2-0005]: `S` can't be used as a middle segment of a use path + ┌─ import_unimpotable.fe:1:5 │ 1 │ use S::{t, T} - │ ^ `T` is not found + │ ^ `S` can't be used as a middle segment of a use path + · +5 │ struct S { + │ - `S` is defined here -error[2-0002]: `T` is not found - ┌─ import_unimpotable.fe:2:8 +error[2-0005]: `S` can't be used as a middle segment of a use path + ┌─ import_unimpotable.fe:1:5 │ -2 │ use E::T - │ ^ `T` is not found +1 │ use S::{t, T} + │ ^ `S` can't be used as a middle segment of a use path + · +5 │ struct S { + │ - `S` is defined here diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap index c0bc639bc6..a0b03eda8c 100644 --- a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap @@ -3,25 +3,25 @@ source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/path_invalid_domain.fe --- -error[2-0005]: expected type item here +error[2-0006]: expected type item here ┌─ path_invalid_domain.fe:14:26 │ 14 │ U: MyTWithGenerics │ ^^^ expected type here, but found trait `MyT` -error[2-0005]: expected type item here +error[2-0006]: expected type item here ┌─ path_invalid_domain.fe:16:13 │ 16 │ Variant(MyC) │ ^^^ expected type here, but found const `MyC` -error[2-0005]: expected type item here +error[2-0006]: expected type item here ┌─ path_invalid_domain.fe:17:14 │ 17 │ Variant2(Var) │ ^^^ expected type here, but found value `Var` -error[2-0006]: expected trait item here +error[2-0007]: expected trait item here ┌─ path_invalid_domain.fe:13:10 │ 13 │ where T: MyE diff --git a/crates/uitest/fixtures/name_resolution/path_shadow.snap b/crates/uitest/fixtures/name_resolution/path_shadow.snap index 8c664920ff..035bba2db1 100644 --- a/crates/uitest/fixtures/name_resolution/path_shadow.snap +++ b/crates/uitest/fixtures/name_resolution/path_shadow.snap @@ -3,7 +3,7 @@ source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/path_shadow.fe --- -error[2-0006]: expected trait item here +error[2-0007]: expected trait item here ┌─ path_shadow.fe:3:14 │ 3 │ where U: T From 1171489ce388ddacfe84d4d996443dcfcdb82b4d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 18 Jul 2023 17:56:56 +0200 Subject: [PATCH 228/678] Improve error message when a middle path egment is resolved to inappropriate scope --- 
.../src/name_resolution/diagnostics.rs | 19 ++++++++--------- .../src/name_resolution/import_resolver.rs | 6 +++--- .../hir-analysis/src/name_resolution/mod.rs | 2 +- .../src/name_resolution/name_resolver.rs | 11 ++++------ .../src/name_resolution/path_resolver.rs | 21 +++++++++++++++---- .../name_resolution/import_unimpotable.snap | 15 +++++++------ .../name_resolution/path_invalid_domain.fe | 6 ++++++ .../name_resolution/path_invalid_domain.snap | 9 ++++++++ 8 files changed, 58 insertions(+), 31 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index beeb21270e..5ca03b10d7 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -32,9 +32,8 @@ pub enum NameResDiag { /// The resolved name is ambiguous. Ambiguous(DynLazySpan, IdentId, Vec), - /// The name is found, but it can't be used as a middle segment of a use - /// path. - InvalidUsePathSegment(DynLazySpan, IdentId, Option), + /// The name is found, but it can't be used as a middle segment of a path. + InvalidPathSegment(DynLazySpan, IdentId, Option), /// The name is found but belongs to a different name domain other than the /// Type. @@ -61,7 +60,7 @@ impl NameResDiag { Self::NotFound(span, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::Invisible(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::Ambiguous(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), - Self::InvalidUsePathSegment(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::InvalidPathSegment(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::ExpectedType(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::ExpectedTrait(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), Self::ExpectedValue(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), @@ -104,7 +103,7 @@ impl NameResDiag { found: NameRes, ) -> Self { let found = found.kind.name_span(db); - Self::InvalidUsePathSegment(span, ident, found) + Self::InvalidPathSegment(span, ident, found) } fn local_code(&self) -> u16 { @@ -113,7 +112,7 @@ impl NameResDiag { Self::NotFound(..) => 2, Self::Invisible(..) => 3, Self::Ambiguous(..) => 4, - Self::InvalidUsePathSegment(..) => 5, + Self::InvalidPathSegment(..) => 5, Self::ExpectedType(..) => 6, Self::ExpectedTrait(..) => 7, Self::ExpectedValue(..) 
=> 8, @@ -134,9 +133,9 @@ impl NameResDiag { format!("`{}` is not visible", name.data(db),) } Self::Ambiguous(_, name, _) => format!("`{}` is ambiguous", name.data(db)), - Self::InvalidUsePathSegment(_, name, _) => { + Self::InvalidPathSegment(_, name, _) => { format!( - "`{}` can't be used as a middle segment of a use path", + "`{}` can't be used as a middle segment of a path", name.data(db) ) } @@ -224,11 +223,11 @@ impl NameResDiag { diags } - Self::InvalidUsePathSegment(prim_span, name, res_span) => { + Self::InvalidPathSegment(prim_span, name, res_span) => { let name = name.data(db.as_hir_db()); let mut diag = vec![SubDiagnostic::new( LabelStyle::Primary, - format!("`{}` can't be used as a middle segment of a use path", name,), + format!("`{}` can't be used as a middle segment of a path", name,), prim_span.resolve(db), )]; diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 5d3c5c08c1..417183a6c4 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -552,7 +552,7 @@ impl<'db> ImportResolver<'db> { )); } - NameResolutionError::InvalidUsePathSegment(res) => { + NameResolutionError::InvalidPathSegment(res) => { self.accumulated_errors .push(NameResDiag::invalid_use_path_segment( self.db, @@ -811,7 +811,7 @@ impl IntermediateUse { Ok(res) => res.clone(), Err(_) => { let res = bucket.iter().next().unwrap(); - return Err(NameResolutionError::InvalidUsePathSegment(res.clone())); + return Err(NameResolutionError::InvalidPathSegment(res.clone())); } }; @@ -823,7 +823,7 @@ impl IntermediateUse { unresolved_from: self.unresolved_from + 1, }) } else { - Err(NameResolutionError::InvalidUsePathSegment(next_res)) + Err(NameResolutionError::InvalidPathSegment(next_res)) } } diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index d240af50d2..da2587b4a2 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -452,7 +452,7 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { NameResDiag::ambiguous(self.db, span.into(), *ident.unwrap(), cands) } - NameResolutionError::InvalidUsePathSegment(res) => { + NameResolutionError::InvalidPathSegment(res) => { NameResDiag::invalid_use_path_segment( self.db, span.into(), diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index c1905dd03b..9164f80fb7 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -378,10 +378,7 @@ impl NameRes { } pub(super) fn is_importable(&self) -> bool { - match self.kind { - NameResKind::Scope(scope) => matches!(scope, ScopeId::Item(_) | ScopeId::Variant(..)), - NameResKind::Prim(_) => true, - } + matches!(self.domain, NameDomain::Type | NameDomain::Value) } fn new_prim(prim: PrimTy) -> Self { @@ -760,7 +757,7 @@ pub enum NameResolutionError { Ambiguous(Vec), /// The name is found ,but it can't be used in the middle of a use path. 
- InvalidUsePathSegment(NameRes), + InvalidPathSegment(NameRes), } pub type NameResolutionResult = Result; @@ -772,9 +769,9 @@ impl fmt::Display for NameResolutionError { NameResolutionError::Invalid => write!(f, "invalid name"), NameResolutionError::Invisible(_) => write!(f, "name is not visible"), NameResolutionError::Ambiguous(_) => write!(f, "name is ambiguous"), - NameResolutionError::InvalidUsePathSegment(_) => write!( + NameResolutionError::InvalidPathSegment(_) => write!( f, - "the found resolution can't be used in the middle of a use path" + "the found resolution can't be used in the middle of a path" ), } } diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs index c935f4cf8d..e6f49686ee 100644 --- a/crates/hir-analysis/src/name_resolution/path_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -231,10 +231,23 @@ impl<'a> IntermediatePath<'a> { /// If the `bucket` doesn't contain proper resolution, then an error is /// returned. fn proceed(&mut self, bucket: NameResBucket) -> PathResolutionResult<()> { - let next_res = bucket - .pick(NameDomain::Type) - .clone() - .map_err(|err| PathResolutionError::new(err, self.idx))?; + let next_res = match bucket.pick(NameDomain::Type) { + Ok(res) => Ok(res.clone()), + Err(NameResolutionError::NotFound) => { + if let Some(res) = bucket.iter().next() { + Err(PathResolutionError::new( + NameResolutionError::InvalidPathSegment(res.clone()), + self.idx, + )) + } else { + Err(PathResolutionError::new( + NameResolutionError::NotFound, + self.idx, + )) + } + } + Err(err) => Err(PathResolutionError::new(err.clone(), self.idx)), + }?; let old_res = std::mem::replace(&mut self.current_res, next_res); self.idx += 1; diff --git a/crates/uitest/fixtures/name_resolution/import_unimpotable.snap b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap index 8452583d7f..e3c32bfdce 100644 --- a/crates/uitest/fixtures/name_resolution/import_unimpotable.snap +++ b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap @@ -3,26 +3,29 @@ source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/import_unimpotable.fe --- -error[2-0002]: `T` is not found +error[2-0003]: `T` is not visible ┌─ import_unimpotable.fe:2:8 │ 2 │ use E::T - │ ^ `T` is not found + │ ^ `T` is not visible + · +9 │ enum E { + │ - `T is defined here -error[2-0005]: `S` can't be used as a middle segment of a use path +error[2-0005]: `S` can't be used as a middle segment of a path ┌─ import_unimpotable.fe:1:5 │ 1 │ use S::{t, T} - │ ^ `S` can't be used as a middle segment of a use path + │ ^ `S` can't be used as a middle segment of a path · 5 │ struct S { │ - `S` is defined here -error[2-0005]: `S` can't be used as a middle segment of a use path +error[2-0005]: `S` can't be used as a middle segment of a path ┌─ import_unimpotable.fe:1:5 │ 1 │ use S::{t, T} - │ ^ `S` can't be used as a middle segment of a use path + │ ^ `S` can't be used as a middle segment of a path · 5 │ struct S { │ - `S` is defined here diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe index d4af57fc20..007d7a11d3 100644 --- a/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe @@ -16,3 +16,9 @@ where T: MyE Variant(MyC) Variant2(Var) } + +fn foo(t: T) {} + +pub struct S { + s: foo::T +} 
diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap index a0b03eda8c..0638523a15 100644 --- a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap @@ -3,6 +3,15 @@ source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/path_invalid_domain.fe --- +error[2-0005]: `foo` can't be used as a middle segment of a path + ┌─ path_invalid_domain.fe:23:8 + │ +20 │ fn foo(t: T) {} + │ --- `foo` is defined here + · +23 │ s: foo::T + │ ^^^ `foo` can't be used as a middle segment of a path + error[2-0006]: expected type item here ┌─ path_invalid_domain.fe:14:26 │ From 973665d675980a684411a0d855888a0300ca14b3 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 21 Jul 2023 18:42:22 -0500 Subject: [PATCH 229/678] copy driver2 salsa db to language-server --- crates/language-server/Cargo.toml | 2 +- crates/language-server/src/db.rs | 141 +++++++++++++++++++++++++++++- 2 files changed, 141 insertions(+), 2 deletions(-) diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 367750f31a..e9d35fc524 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -11,7 +11,7 @@ description = "An LSP language server for Fe lang" [dependencies] fe-analyzer = {path = "../analyzer", version = "^0.22.0"} -fe-common = {path = "../common", version = "^0.22.0"} +common = { path = "../common2", package = "fe-common2" } anyhow = "1.0.71" clap = "4.2.7" crossbeam-channel = "0.5.8" diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 934a11a9cf..46d3166ce0 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -1 +1,140 @@ -// to-do: implement a salsa database for the language server \ No newline at end of file +use std::{collections::BTreeSet, path}; + +use codespan_reporting::term::{ + self, + termcolor::{BufferWriter, ColorChoice}, +}; +use common::{ + diagnostics::CompleteDiagnostic, + input::{IngotKind, Version}, + InputDb, InputFile, InputIngot, +}; +use hir::{ + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, + lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, +}; +use hir_analysis::{ + name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, + HirAnalysisDb, +}; + +#[salsa::jar(db = LanguageServerDb)] +pub struct Jar(diagnostics::file_line_starts); + +pub trait LanguageServerDb: + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +impl LanguageServerDb for DB where + DB: Sized + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +#[salsa::db(common::Jar, hir::Jar, hir_analysis::Jar, Jar)] +pub struct LanguageServerDataBase { + storage: salsa::Storage, + diags: Vec>, +} + +impl LanguageServerDataBase { + pub fn run_on_top_mod(&mut self, top_mod: TopLevelMod) { + self.run_on_file_with_pass_manager(top_mod, initialize_analysis_pass); + } + + pub fn run_on_file_with_pass_manager(&mut self, top_mod: TopLevelMod, pm_builder: F) + where + F: FnOnce(&LanguageServerDataBase) -> AnalysisPassManager<'_>, + { + self.diags.clear(); + self.diags = { + let mut pass_manager = pm_builder(self); + pass_manager.run_on_module(top_mod) + }; + } + + pub fn top_mod_from_file(&mut self, file_path: &path::Path, source: &str) -> 
TopLevelMod { + let kind = IngotKind::StandAlone; + + // We set the ingot version to 0.0.0 for stand-alone file. + let version = Version::new(0, 0, 0); + let root_file = file_path; + let ingot = InputIngot::new( + self, + file_path.parent().unwrap().as_os_str().to_str().unwrap(), + kind, + version, + BTreeSet::new(), + ); + + let file_name = root_file.file_name().unwrap().to_str().unwrap(); + let file = InputFile::new(self, ingot, file_name.into(), source.to_string()); + ingot.set_root_file(self, file); + ingot.set_files(self, [file].into()); + + map_file_to_mod(self, file) + } + + + /// Prints accumulated diagnostics to stderr. + pub fn emit_diags(&self) { + let writer = BufferWriter::stderr(ColorChoice::Auto); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for diag in self.finalize_diags() { + term::emit(&mut buffer, &config, self, &diag.to_cs(self)).unwrap(); + } + + eprintln!("{}", std::str::from_utf8(buffer.as_slice()).unwrap()); + } + + /// Format the accumulated diagnostics to a string. + pub fn format_diags(&self) -> String { + let writer = BufferWriter::stderr(ColorChoice::Never); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for diag in self.finalize_diags() { + term::emit(&mut buffer, &config, self, &diag.to_cs(self)).unwrap(); + } + + std::str::from_utf8(buffer.as_slice()).unwrap().to_string() + } + + fn finalize_diags(&self) -> Vec { + let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); + diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { + std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), + ord => ord, + }); + diags + } +} + +impl HirDb for LanguageServerDataBase {} +impl SpannedHirDb for LanguageServerDataBase {} +impl LowerHirDb for LanguageServerDataBase {} +impl salsa::Database for LanguageServerDataBase { + fn salsa_event(&self, _: salsa::Event) {} +} + +impl Default for LanguageServerDataBase { + fn default() -> Self { + let db = Self { + storage: Default::default(), + diags: Vec::new(), + }; + db.prefill(); + db + } +} + +fn initialize_analysis_pass(db: &LanguageServerDataBase) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + pass_manager +} From b24bd32c261fd93ca0e0211ef21516aa8c699dd5 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 24 Jul 2023 13:10:45 -0500 Subject: [PATCH 230/678] switching to new diagnostics types --- Cargo.lock | 161 ++++++++++++++++-- crates/language-server/Cargo.toml | 14 +- .../src/handlers/notifications.rs | 77 ++++----- crates/language-server/src/util.rs | 26 +-- 4 files changed, 208 insertions(+), 70 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fdfe363ab3..b68ccb46d2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -82,6 +82,12 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "anyhow" +version = "1.0.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" + [[package]] name = "arc-swap" version = "1.6.0" @@ -298,7 +304,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.27", ] [[package]] @@ -718,7 +724,7 @@ checksum = 
"48016319042fb7c87b78d2993084a831793a897a5cd1a2a67cab9d1eeb4b7d76" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.27", ] [[package]] @@ -1096,6 +1102,28 @@ dependencies = [ "smallvec", ] +[[package]] +name = "fe-language-server" +version = "0.23.0" +dependencies = [ + "anyhow", + "camino", + "clap 4.3.12", + "codespan-reporting", + "crossbeam-channel", + "fe-analyzer", + "fe-common2", + "fe-hir", + "fe-hir-analysis", + "fe-macros", + "indexmap", + "lsp-server", + "lsp-types", + "salsa-2022", + "serde", + "serde_json", +] + [[package]] name = "fe-library" version = "0.23.0" @@ -1110,7 +1138,7 @@ dependencies = [ "glob", "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.27", ] [[package]] @@ -1231,6 +1259,15 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "form_urlencoded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +dependencies = [ + "percent-encoding", +] + [[package]] name = "fs_extra" version = "1.3.0" @@ -1407,6 +1444,16 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "if_chain" version = "1.0.2" @@ -1650,6 +1697,31 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "lsp-server" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37ea9ae5a5082ca3b6ae824fc7666cd206b99168a4d4c769ad8fe9cc740df6a6" +dependencies = [ + "crossbeam-channel", + "log", + "serde", + "serde_json", +] + +[[package]] +name = "lsp-types" +version = "0.94.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" +dependencies = [ + "bitflags", + "serde", + "serde_json", + "serde_repr", + "url", +] + [[package]] name = "memchr" version = "2.6.4" @@ -1860,6 +1932,12 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" +[[package]] +name = "percent-encoding" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" + [[package]] name = "petgraph" version = "0.6.3" @@ -1966,9 +2044,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.56" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] @@ -1999,9 +2077,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quote" -version = "1.0.26" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +checksum = 
"50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" dependencies = [ "proc-macro2", ] @@ -2496,9 +2574,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.160" +version = "1.0.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" +checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc" dependencies = [ "serde_derive", ] @@ -2515,13 +2593,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.160" +version = "1.0.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" +checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.27", ] [[package]] @@ -2535,6 +2613,17 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_repr" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e168eaaf71e8f9bd6037feb05190485708e019f4fd87d161b3c0a0d37daf85e5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + [[package]] name = "serde_test" version = "1.0.160" @@ -2677,9 +2766,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.15" +version = "2.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" dependencies = [ "proc-macro2", "quote", @@ -2739,7 +2828,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.27", ] [[package]] @@ -2761,6 +2850,21 @@ dependencies = [ "serde_json", ] +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "toml" version = "0.5.11" @@ -2827,12 +2931,27 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccb97dac3243214f8d8507998906ca3e2e0b900bf9bf4870477f125b82e68f6e" +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + [[package]] name = "unicode-ident" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + [[package]] name = "unicode-segmentation" version = "1.10.1" @@ -2845,6 +2964,18 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +[[package]] +name = "url" +version = "2.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + [[package]] name = "utf8parse" version = "0.2.1" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index e9d35fc524..1f6a6693fd 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "fe-language-server" -version = "0.22.0" +version = "0.23.0" edition = "2021" authors = ["The Fe Developers "] license = "Apache-2.0" @@ -10,16 +10,20 @@ description = "An LSP language server for Fe lang" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -fe-analyzer = {path = "../analyzer", version = "^0.22.0"} +salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } +codespan-reporting = "0.11" +hir = { path = "../hir", package = "fe-hir" } +macros = { path = "../macros", package = "fe-macros" } +hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } +camino = "1.1.4" +clap = { version = "4.3", features = ["derive"] } +fe-analyzer = {path = "../analyzer", version = "^0.23.0"} common = { path = "../common2", package = "fe-common2" } anyhow = "1.0.71" -clap = "4.2.7" crossbeam-channel = "0.5.8" lsp-server = "0.7.0" lsp-types = "0.94.0" serde = "1.0.162" serde_json = "1.0.96" -# salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } -salsa = "0.16.1" indexmap = "1.6.2" diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index d7c83e8ddb..6ffe28384c 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -1,52 +1,53 @@ -use anyhow::Result; -use fe_analyzer::{namespace::items::ModuleId, TestDb}; -use serde::Deserialize; +// use anyhow::Result; +// use fe_analyzer::{namespace::items::ModuleId, TestDb}; +// use serde::Deserialize; -use crate::{state::ServerState, util::diag_to_lsp}; +// use crate::{state::ServerState, util::diag_to_lsp}; -fn string_diagnostics(path: &str, src: &str) -> Vec { - let mut db = TestDb::default(); - let module = ModuleId::new_standalone(&mut db, path, src); +// fn string_diagnostics(path: &str, src: &str) -> Vec { +// let mut db = TestDb::default(); +// let module = ModuleId::new_standalone(&mut db, path, src); - module.diagnostics(&db) -} +// module.diagnostics(&db) +// } // pub(crate) fn handle_document_did_change(state: &mut ServerState, req: lsp_server::Request) -> Result<(), Error> { // let params = lsp_types::DidChangeTextDocumentParams::deserialize(req.params)?; // let text = params.text_document.text; // } -pub(crate) fn handle_document_did_open( - state: &mut ServerState, - note: lsp_server::Notification, -) -> Result<(), anyhow::Error> { - let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; - let text = params.text_document.text; - - let diags = string_diagnostics( - params.text_document.uri.to_file_path().unwrap().to_str().unwrap(), - text.as_str(), - ); + +// pub(crate) fn handle_document_did_open( +// state: &mut ServerState, +// note: lsp_server::Notification, +// ) -> Result<(), anyhow::Error> { +// let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; +// let text = params.text_document.text; + +// let diags = string_diagnostics( +// 
params.text_document.uri.to_file_path().unwrap().to_str().unwrap(), +// text.as_str(), +// ); - state.log_info(format!("diagnostics: {:?}", diags))?; +// state.log_info(format!("diagnostics: {:?}", diags))?; - // send diagnostics using `state.send_response` for each diagnostic +// // send diagnostics using `state.send_response` for each diagnostic - let diagnostics = diags.into_iter().flat_map(|diag| { - diag_to_lsp(diag, text.as_str()).iter().map(|x| x.clone()).collect::>() - }); +// let diagnostics = diags.into_iter().flat_map(|diag| { +// diag_to_lsp(diag, text.as_str()).iter().map(|x| x.clone()).collect::>() +// }); - let result = lsp_types::PublishDiagnosticsParams { - uri: params.text_document.uri.clone(), - diagnostics: diagnostics.collect(), - version: None, - }; - let response = lsp_server::Message::Notification(lsp_server::Notification { - method: String::from("textDocument/publishDiagnostics"), - params: serde_json::to_value(result).unwrap(), - }); - - state.sender.send(response)?; +// let result = lsp_types::PublishDiagnosticsParams { +// uri: params.text_document.uri.clone(), +// diagnostics: diagnostics.collect(), +// version: None, +// }; +// let response = lsp_server::Message::Notification(lsp_server::Notification { +// method: String::from("textDocument/publishDiagnostics"), +// params: serde_json::to_value(result).unwrap(), +// }); + +// state.sender.send(response)?; - Ok(()) -} +// Ok(()) +// } diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index 4e6cc6903c..0c331ff674 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,5 +1,4 @@ -use fe_common::diagnostics::{Severity, Diagnostic}; -use fe_common::Span; +use common::diagnostics::{Severity, CompleteDiagnostic, Span}; use lsp_types::Position; // TODO: these could potentially be moved into the common crate @@ -19,15 +18,18 @@ pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { // now we get the line and character offsets let start_line = line_offsets - .binary_search(&span.start) + .binary_search(&span.range.start().into()) .unwrap_or_else(|x| x - 1); let end_line = line_offsets - .binary_search(&span.end) + .binary_search(&span.range.end().into()) .unwrap_or_else(|x| x - 1); - let start_character = span.start - line_offsets[start_line]; - let end_character = span.end - line_offsets[end_line]; + + + // except that we need a fully qualified path to use `into`... 
+ let start_character: usize = span.range.start().into(); + let end_character: usize = span.range.end().into(); lsp_types::Range { start: Position::new(start_line as u32, start_character as u32), @@ -36,19 +38,19 @@ pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { } pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { match severity { - Severity::Bug => lsp_types::DiagnosticSeverity::ERROR, + // Severity::Bug => lsp_types::DiagnosticSeverity::ERROR, Severity::Error => lsp_types::DiagnosticSeverity::ERROR, Severity::Warning => lsp_types::DiagnosticSeverity::WARNING, Severity::Note => lsp_types::DiagnosticSeverity::HINT, - Severity::Help => lsp_types::DiagnosticSeverity::INFORMATION, + // Severity::Help => lsp_types::DiagnosticSeverity::INFORMATION, } } -pub(crate) fn diag_to_lsp(diag: Diagnostic, text: &str) -> Vec { - diag.labels +pub(crate) fn diag_to_lsp(diag: CompleteDiagnostic, text: &str) -> Vec { + diag.sub_diagnostics .into_iter() - .map(|label| { - let range = span_to_range(label.span, text); + .map(|sub| { + let range = span_to_range(sub.span.unwrap(), text); lsp_types::Diagnostic { range, severity: Some(severity_to_lsp(diag.severity)), From 347e0084df6168f9ba57997f7995fa216f82b421 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 28 Jul 2023 15:02:28 -0500 Subject: [PATCH 231/678] sketch diagnostics using name-resolution overhaul --- crates/language-server/Cargo.toml | 1 + crates/language-server/src/db.rs | 33 +---- crates/language-server/src/diagnostics.rs | 123 ++++++++++++++++++ .../src/handlers/notifications.rs | 82 ++++++------ crates/language-server/src/main.rs | 2 + crates/language-server/src/state.rs | 19 +-- 6 files changed, 181 insertions(+), 79 deletions(-) create mode 100644 crates/language-server/src/diagnostics.rs diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 1f6a6693fd..915b2310a4 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -18,6 +18,7 @@ hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } camino = "1.1.4" clap = { version = "4.3", features = ["derive"] } fe-analyzer = {path = "../analyzer", version = "^0.23.0"} +driver = { path = "../driver2", package = "fe-driver2" } common = { path = "../common2", package = "fe-common2" } anyhow = "1.0.71" crossbeam-channel = "0.5.8" diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 46d3166ce0..30ff70369b 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -18,8 +18,10 @@ use hir_analysis::{ HirAnalysisDb, }; +use crate::diagnostics::ToCsDiag; + #[salsa::jar(db = LanguageServerDb)] -pub struct Jar(diagnostics::file_line_starts); +pub struct Jar(crate::diagnostics::file_line_starts); pub trait LanguageServerDb: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb @@ -74,35 +76,8 @@ impl LanguageServerDataBase { map_file_to_mod(self, file) } - - - /// Prints accumulated diagnostics to stderr. - pub fn emit_diags(&self) { - let writer = BufferWriter::stderr(ColorChoice::Auto); - let mut buffer = writer.buffer(); - let config = term::Config::default(); - - for diag in self.finalize_diags() { - term::emit(&mut buffer, &config, self, &diag.to_cs(self)).unwrap(); - } - - eprintln!("{}", std::str::from_utf8(buffer.as_slice()).unwrap()); - } - - /// Format the accumulated diagnostics to a string. 
- pub fn format_diags(&self) -> String { - let writer = BufferWriter::stderr(ColorChoice::Never); - let mut buffer = writer.buffer(); - let config = term::Config::default(); - - for diag in self.finalize_diags() { - term::emit(&mut buffer, &config, self, &diag.to_cs(self)).unwrap(); - } - - std::str::from_utf8(buffer.as_slice()).unwrap().to_string() - } - fn finalize_diags(&self) -> Vec { + pub fn finalize_diags(&self) -> Vec { let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs new file mode 100644 index 0000000000..3dcfbbf6c8 --- /dev/null +++ b/crates/language-server/src/diagnostics.rs @@ -0,0 +1,123 @@ +use std::ops::Range; + +use camino::Utf8Path; +use codespan_reporting as cs; +use cs::{diagnostic as cs_diag, files as cs_files}; + +use common::{ + diagnostics::{LabelStyle, Severity}, + InputFile, +}; +use hir::diagnostics::DiagnosticVoucher; + +use crate::db::{LanguageServerDataBase, LanguageServerDb}; + + +pub trait ToCsDiag { + fn to_cs(&self, db: &LanguageServerDataBase) -> cs_diag::Diagnostic; +} + +impl ToCsDiag for T +where + T: DiagnosticVoucher, +{ + fn to_cs(&self, db: &LanguageServerDataBase) -> cs_diag::Diagnostic { + let complete = self.to_complete(db); + + let severity = convert_severity(complete.severity); + let code = Some(complete.error_code.to_string()); + let message = complete.message; + + let labels = complete + .sub_diagnostics + .into_iter() + .filter_map(|sub_diag| { + let span = sub_diag.span?; + match sub_diag.style { + LabelStyle::Primary => { + cs_diag::Label::new(cs_diag::LabelStyle::Primary, span.file, span.range) + } + LabelStyle::Secondary => { + cs_diag::Label::new(cs_diag::LabelStyle::Secondary, span.file, span.range) + } + } + .with_message(sub_diag.message) + .into() + }) + .collect(); + + cs_diag::Diagnostic { + severity, + code, + message, + labels, + notes: vec![], + } + } +} + +fn convert_severity(severity: Severity) -> cs_diag::Severity { + match severity { + Severity::Error => cs_diag::Severity::Error, + Severity::Warning => cs_diag::Severity::Warning, + Severity::Note => cs_diag::Severity::Note, + } +} + +#[salsa::tracked(return_ref)] +pub fn file_line_starts(db: &dyn LanguageServerDb, file: InputFile) -> Vec { + cs::files::line_starts(file.text(db.as_input_db())).collect() +} + +impl<'a> cs_files::Files<'a> for LanguageServerDataBase { + type FileId = InputFile; + type Name = &'a Utf8Path; + type Source = &'a str; + + fn name(&'a self, file_id: Self::FileId) -> Result { + Ok(file_id.path(self).as_path()) + } + + fn source(&'a self, file_id: Self::FileId) -> Result { + Ok(file_id.text(self)) + } + + fn line_index( + &'a self, + file_id: Self::FileId, + byte_index: usize, + ) -> Result { + let starts = file_line_starts(self, file_id); + Ok(starts + .binary_search(&byte_index) + .unwrap_or_else(|next_line| next_line - 1)) + } + + fn line_range( + &'a self, + file_id: Self::FileId, + line_index: usize, + ) -> Result, cs_files::Error> { + let line_starts = file_line_starts(self, file_id); + + let start = *line_starts + .get(line_index) + .ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })?; + + let end = if line_index == line_starts.len() - 1 { + file_id.text(self).len() + } else { + *line_starts + .get(line_index + 1) + 
.ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })? + }; + + Ok(Range { start, end }) + } +} diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 6ffe28384c..43afceec72 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -1,53 +1,51 @@ -// use anyhow::Result; -// use fe_analyzer::{namespace::items::ModuleId, TestDb}; -// use serde::Deserialize; +use anyhow::{Result, Error}; +use serde::Deserialize; -// use crate::{state::ServerState, util::diag_to_lsp}; +use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDataBase}; -// fn string_diagnostics(path: &str, src: &str) -> Vec { -// let mut db = TestDb::default(); -// let module = ModuleId::new_standalone(&mut db, path, src); - -// module.diagnostics(&db) -// } +fn string_diagnostics(mut db: &mut LanguageServerDataBase, path: &str, src: &str) -> Vec { + let file_path = std::path::Path::new(path); + let top_mod = db.top_mod_from_file(file_path, src); + db.run_on_top_mod(top_mod); + db.finalize_diags() +} // pub(crate) fn handle_document_did_change(state: &mut ServerState, req: lsp_server::Request) -> Result<(), Error> { -// let params = lsp_types::DidChangeTextDocumentParams::deserialize(req.params)?; -// let text = params.text_document.text; - +// todo: incremental parsing and diagnostics // } -// pub(crate) fn handle_document_did_open( -// state: &mut ServerState, -// note: lsp_server::Notification, -// ) -> Result<(), anyhow::Error> { -// let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; -// let text = params.text_document.text; - -// let diags = string_diagnostics( -// params.text_document.uri.to_file_path().unwrap().to_str().unwrap(), -// text.as_str(), -// ); +pub(crate) fn handle_document_did_open( + state: &mut ServerState, + note: lsp_server::Notification, +) -> Result<(), Error> { + let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; + let text = params.text_document.text; + + let diags = string_diagnostics( + &mut state.db, + params.text_document.uri.to_file_path().unwrap().to_str().unwrap(), + text.as_str(), + ); -// state.log_info(format!("diagnostics: {:?}", diags))?; + state.log_info(format!("diagnostics: {:?}", diags))?; -// // send diagnostics using `state.send_response` for each diagnostic + // send diagnostics using `state.send_response` for each diagnostic -// let diagnostics = diags.into_iter().flat_map(|diag| { -// diag_to_lsp(diag, text.as_str()).iter().map(|x| x.clone()).collect::>() -// }); + let diagnostics = diags.into_iter().flat_map(|diag| { + diag_to_lsp(diag, text.as_str()).iter().map(|x| x.clone()).collect::>() + }); -// let result = lsp_types::PublishDiagnosticsParams { -// uri: params.text_document.uri.clone(), -// diagnostics: diagnostics.collect(), -// version: None, -// }; -// let response = lsp_server::Message::Notification(lsp_server::Notification { -// method: String::from("textDocument/publishDiagnostics"), -// params: serde_json::to_value(result).unwrap(), -// }); - -// state.sender.send(response)?; + let result = lsp_types::PublishDiagnosticsParams { + uri: params.text_document.uri.clone(), + diagnostics: diagnostics.collect(), + version: None, + }; + let response = lsp_server::Message::Notification(lsp_server::Notification { + method: String::from("textDocument/publishDiagnostics"), + params: serde_json::to_value(result).unwrap(), + }); + + 
state.sender.send(response)?; -// Ok(()) -// } + Ok(()) +} diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index af37ada9c6..83df9914c5 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -2,7 +2,9 @@ mod server; mod state; mod db; mod util; +mod diagnostics; +use db::Jar; mod handlers { pub(crate) mod notifications; pub(crate) mod request; diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 64ae0d177f..08a5c3a872 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,20 +1,23 @@ -use fe_analyzer::db::TestDb; use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; -use lsp_server::{Message}; -use lsp_types::{notification::Notification, request::Request}; -// use super::db::LanguageServerDb; +use lsp_server::Message; +use lsp_types::notification::Notification; +use lsp_types::request::Request; +use crate::db::LanguageServerDataBase; -use crate::handlers::{request::handle_hover, notifications::handle_document_did_open}; +use crate::handlers::{ + request::handle_hover, + notifications::handle_document_did_open +}; pub struct ServerState { pub sender: Sender, - pub analyzer_db: TestDb, + pub db: LanguageServerDataBase, } impl ServerState { pub fn new(sender: Sender) -> Self { - ServerState { sender, analyzer_db: TestDb::default() } + ServerState { sender, db: LanguageServerDataBase::default() } } pub fn run(&mut self, receiver: Receiver) -> Result<()> { @@ -42,7 +45,7 @@ impl ServerState { match req.method.as_str() { // TODO: implement actually useful hover handler - // lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, + lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, _ => {} } From 5d30d3c6fbcdaa48a883d386a3e1674e67c23ab7 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 28 Jul 2023 23:14:08 +0200 Subject: [PATCH 232/678] Fix a bug that import resolver falls into infinite loop --- .../src/name_resolution/import_resolver.rs | 14 +++++------ .../test_files/imports/cycle_glob.fe | 17 +++++++++++++ .../test_files/imports/cycle_glob.snap | 24 +++++++++++++++++++ .../name_resolution/import_alias_cycle.fe | 7 ++++++ .../name_resolution/import_alias_cycle.snap | 18 ++++++++++++++ .../fixtures/name_resolution/import_cycle.fe | 7 ++++++ .../name_resolution/import_cycle.snap | 14 ++++++++++- 7 files changed, 93 insertions(+), 8 deletions(-) create mode 100644 crates/hir-analysis/test_files/imports/cycle_glob.fe create mode 100644 crates/hir-analysis/test_files/imports/cycle_glob.snap create mode 100644 crates/uitest/fixtures/name_resolution/import_alias_cycle.fe create mode 100644 crates/uitest/fixtures/name_resolution/import_alias_cycle.snap diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 417183a6c4..28ec5108d1 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -88,16 +88,16 @@ impl<'db> ImportResolver<'db> { .unwrap(); match self.resolve_i_use(i_use) { - (Some(updated_i_use), resolved) => { - changed |= resolved; + (Some(updated_i_use), i_use_changed) => { + changed |= i_use_changed; self.intermediate_uses .get_mut(&scope) .unwrap() .push_back(updated_i_use); } - (None, resolved) => { - changed |= resolved; + (None, i_use_changed) => { + changed |= i_use_changed; } } } @@ -166,11 +166,11 @@ impl<'db> 
ImportResolver<'db> { IUseResolution::Full(_) => unreachable!(), IUseResolution::BasePath(base_path_resolved) => { - if self.try_finalize_named_use(base_path_resolved) { + if self.try_finalize_named_use(base_path_resolved.clone()) { (None, true) } else { let changed = !i_use.is_base_resolved(self.db); - (Some(i_use), changed) + (Some(base_path_resolved), changed) } } @@ -280,7 +280,7 @@ impl<'db> ImportResolver<'db> { /// - `None` if the error happens during the resolution, the error is /// accumulated in the function. fn resolve_base_path(&mut self, mut i_use: IntermediateUse) -> Option { - let mut changed = true; + let mut changed = false; if i_use.is_base_resolved(self.db) { return Some(IUseResolution::BasePath(i_use)); } diff --git a/crates/hir-analysis/test_files/imports/cycle_glob.fe b/crates/hir-analysis/test_files/imports/cycle_glob.fe new file mode 100644 index 0000000000..6f2c26e358 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/cycle_glob.fe @@ -0,0 +1,17 @@ +pub mod mod1 { + // `Foo`, `Bar`, and `BarImported` are visible in this scope. + pub use super::mod2::Bar as BarImported + pub use super::mod2::* + + pub struct Foo {} + +} + +pub mod mod2 { + // `Foo`, `Bar`, `BarImported`, and `BarPriv` are visible in this scope. + pub use super::mod1::* + + pub struct Bar {} + + struct BarPriv {} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/cycle_glob.snap b/crates/hir-analysis/test_files/imports/cycle_glob.snap new file mode 100644 index 0000000000..d550d51692 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/cycle_glob.snap @@ -0,0 +1,24 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/cycle_glob.fe +--- +note: + ┌─ test_file.fe:3:5 + │ +3 │ pub use super::mod2::Bar as BarImported + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ test_file::mod2::Bar + +note: + ┌─ test_file.fe:4:5 + │ +4 │ pub use super::mod2::* + │ ^^^^^^^^^^^^^^^^^^^^^^ test_file::mod1::Foo | test_file::mod2::Bar | test_file::mod2::Bar + +note: + ┌─ test_file.fe:12:5 + │ +12 │ pub use super::mod1::* + │ ^^^^^^^^^^^^^^^^^^^^^^ test_file::mod1::Foo | test_file::mod2::Bar | test_file::mod2::Bar + + diff --git a/crates/uitest/fixtures/name_resolution/import_alias_cycle.fe b/crates/uitest/fixtures/name_resolution/import_alias_cycle.fe new file mode 100644 index 0000000000..c46e8d2049 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_alias_cycle.fe @@ -0,0 +1,7 @@ +pub mod mod1 { + pub use super::mod2::Foo as Bar // Error +} + +pub mod mod2 { + pub use super::mod1::Bar as Foo // Error +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_alias_cycle.snap b/crates/uitest/fixtures/name_resolution/import_alias_cycle.snap new file mode 100644 index 0000000000..6efe635fd3 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_alias_cycle.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_alias_cycle.fe +--- +error[2-0002]: `Foo` is not found + ┌─ import_alias_cycle.fe:2:26 + │ +2 │ pub use super::mod2::Foo as Bar // Error + │ ^^^ `Foo` is not found + +error[2-0002]: `Bar` is not found + ┌─ import_alias_cycle.fe:6:26 + │ +6 │ pub use super::mod1::Bar as Foo // Error + │ ^^^ `Bar` is not found + + diff --git a/crates/uitest/fixtures/name_resolution/import_cycle.fe b/crates/uitest/fixtures/name_resolution/import_cycle.fe index 
c1f0a861bc..ab4a9ef004 100644 --- a/crates/uitest/fixtures/name_resolution/import_cycle.fe +++ b/crates/uitest/fixtures/name_resolution/import_cycle.fe @@ -2,4 +2,11 @@ use Foo as Bar use Bar as Baz use Baz as Foo +pub mod mod1 { + pub use super::mod2::Foo + +} +pub mod mod2 { + pub use super::mod1::Foo +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_cycle.snap b/crates/uitest/fixtures/name_resolution/import_cycle.snap index 9f0ae1e7c3..4a9390c2c5 100644 --- a/crates/uitest/fixtures/name_resolution/import_cycle.snap +++ b/crates/uitest/fixtures/name_resolution/import_cycle.snap @@ -1,5 +1,5 @@ --- -source: crates/uitest/src/lib.rs +source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/import_cycle.fe --- @@ -21,4 +21,16 @@ error[2-0002]: `Baz` is not found 3 │ use Baz as Foo │ ^^^ `Baz` is not found +error[2-0002]: `Foo` is not found + ┌─ import_cycle.fe:6:26 + │ +6 │ pub use super::mod2::Foo + │ ^^^ `Foo` is not found + +error[2-0002]: `Foo` is not found + ┌─ import_cycle.fe:11:26 + │ +11 │ pub use super::mod1::Foo + │ ^^^ `Foo` is not found + From bab2113af6a7dcb5c14dbefbe23afbf8d026f038 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 28 Jul 2023 23:37:23 +0200 Subject: [PATCH 233/678] Improve readability of snap files for import test --- .../test_files/imports/cycle_glob.snap | 4 ++-- .../test_files/imports/glob_chain.snap | 4 ++-- .../test_files/imports/glob_mutual_dep.snap | 6 +++--- .../test_files/imports/glob_shadow.snap | 4 ++-- .../test_files/imports/use_depends_glob.snap | 2 +- crates/hir-analysis/tests/import.rs | 14 ++++++++------ 6 files changed, 18 insertions(+), 16 deletions(-) diff --git a/crates/hir-analysis/test_files/imports/cycle_glob.snap b/crates/hir-analysis/test_files/imports/cycle_glob.snap index d550d51692..c3a4573ec3 100644 --- a/crates/hir-analysis/test_files/imports/cycle_glob.snap +++ b/crates/hir-analysis/test_files/imports/cycle_glob.snap @@ -13,12 +13,12 @@ note: ┌─ test_file.fe:4:5 │ 4 │ pub use super::mod2::* - │ ^^^^^^^^^^^^^^^^^^^^^^ test_file::mod1::Foo | test_file::mod2::Bar | test_file::mod2::Bar + │ ^^^^^^^^^^^^^^^^^^^^^^ test_file::mod1::Foo as Foo | test_file::mod2::Bar as Bar | test_file::mod2::Bar as BarImported note: ┌─ test_file.fe:12:5 │ 12 │ pub use super::mod1::* - │ ^^^^^^^^^^^^^^^^^^^^^^ test_file::mod1::Foo | test_file::mod2::Bar | test_file::mod2::Bar + │ ^^^^^^^^^^^^^^^^^^^^^^ test_file::mod1::Foo as Foo | test_file::mod2::Bar as Bar | test_file::mod2::Bar as BarImported diff --git a/crates/hir-analysis/test_files/imports/glob_chain.snap b/crates/hir-analysis/test_files/imports/glob_chain.snap index b13fe3c74f..c41f252539 100644 --- a/crates/hir-analysis/test_files/imports/glob_chain.snap +++ b/crates/hir-analysis/test_files/imports/glob_chain.snap @@ -7,12 +7,12 @@ note: ┌─ test_file.fe:1:1 │ 1 │ use foo::* - │ ^^^^^^^^^^ test_file::foo::MyEnum | test_file::foo::MyEnum::Variant | test_file::foo::MyEnum::Variant2 | test_file::foo::Variant + │ ^^^^^^^^^^ test_file::foo::MyEnum as MyEnum | test_file::foo::MyEnum::Variant as Variant | test_file::foo::MyEnum::Variant2 as Variant2 | test_file::foo::Variant as Variant note: ┌─ test_file.fe:4:5 │ 4 │ pub use MyEnum::* - │ ^^^^^^^^^^^^^^^^^ test_file::foo::MyEnum::Variant | test_file::foo::MyEnum::Variant2 + │ ^^^^^^^^^^^^^^^^^ test_file::foo::MyEnum::Variant as Variant | test_file::foo::MyEnum::Variant2 as Variant2 diff --git 
a/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap b/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap index e351d68f38..db880547ce 100644 --- a/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap +++ b/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap @@ -7,18 +7,18 @@ note: ┌─ test_file.fe:1:1 │ 1 │ use foo::* - │ ^^^^^^^^^^ test_file::bar::Bar | test_file::foo::Foo + │ ^^^^^^^^^^ test_file::bar::Bar as Bar | test_file::foo::Foo as Foo note: ┌─ test_file.fe:4:5 │ 4 │ pub use super::bar::* - │ ^^^^^^^^^^^^^^^^^^^^^ test_file::bar::Bar | test_file::foo::Foo + │ ^^^^^^^^^^^^^^^^^^^^^ test_file::bar::Bar as Bar | test_file::foo::Foo as Foo note: ┌─ test_file.fe:10:5 │ 10 │ pub use super::foo::* - │ ^^^^^^^^^^^^^^^^^^^^^ test_file::bar::Bar | test_file::foo::Foo + │ ^^^^^^^^^^^^^^^^^^^^^ test_file::bar::Bar as Bar | test_file::foo::Foo as Foo diff --git a/crates/hir-analysis/test_files/imports/glob_shadow.snap b/crates/hir-analysis/test_files/imports/glob_shadow.snap index 588425ab8a..2fcd9ec749 100644 --- a/crates/hir-analysis/test_files/imports/glob_shadow.snap +++ b/crates/hir-analysis/test_files/imports/glob_shadow.snap @@ -7,12 +7,12 @@ note: ┌─ test_file.fe:1:1 │ 1 │ use foo::* - │ ^^^^^^^^^^ test_file::foo::MyEnum | test_file::foo::MyEnum::Variant2 | test_file::foo::Variant + │ ^^^^^^^^^^ test_file::foo::MyEnum as MyEnum | test_file::foo::MyEnum::Variant2 as Variant2 | test_file::foo::Variant as Variant note: ┌─ test_file.fe:4:5 │ 4 │ pub use MyEnum::* - │ ^^^^^^^^^^^^^^^^^ test_file::foo::MyEnum::Variant | test_file::foo::MyEnum::Variant2 + │ ^^^^^^^^^^^^^^^^^ test_file::foo::MyEnum::Variant as Variant | test_file::foo::MyEnum::Variant2 as Variant2 diff --git a/crates/hir-analysis/test_files/imports/use_depends_glob.snap b/crates/hir-analysis/test_files/imports/use_depends_glob.snap index 5c16f75413..6eb36850a4 100644 --- a/crates/hir-analysis/test_files/imports/use_depends_glob.snap +++ b/crates/hir-analysis/test_files/imports/use_depends_glob.snap @@ -13,6 +13,6 @@ note: ┌─ test_file.fe:2:1 │ 2 │ use foo::* - │ ^^^^^^^^^^ test_file::foo::bar + │ ^^^^^^^^^^ test_file::foo::bar as bar diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs index 696c221bff..24e9becd99 100644 --- a/crates/hir-analysis/tests/import.rs +++ b/crates/hir-analysis/tests/import.rs @@ -50,12 +50,14 @@ fn format_imports( } for (_, glob_set) in imports.glob_resolved.iter() { - for (&use_, res_set) in glob_set.iter() { - for res in res_set.values().flatten() { - use_res_map - .entry(use_) - .or_default() - .push(res.pretty_path(db).unwrap()) + for (&use_, res_set_with_ident) in glob_set.iter() { + for (ident, res_set) in res_set_with_ident.iter() { + let ident = ident.data(db); + for res in res_set { + let def_path = res.pretty_path(db).unwrap(); + let resolved = format!("{} as {}", def_path, ident); + use_res_map.entry(use_).or_default().push(resolved) + } } } } From 010884bee682a4ecf6f8027074a7e8ec5b93be34 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 29 Jul 2023 00:55:46 +0200 Subject: [PATCH 234/678] Remove duplicated error messages by cleaning suspicious import set --- .../src/name_resolution/import_resolver.rs | 14 +++++++++++--- crates/hir-analysis/src/name_resolution/mod.rs | 4 ++++ .../src/name_resolution/name_resolver.rs | 4 ++++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index 
28ec5108d1..d8a958581d 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -451,7 +451,8 @@ impl<'db> ImportResolver<'db> { .resolved_imports .set_named_bucket(self.db, &i_use, bucket) { - self.accumulated_errors.push(err); + self.register_error(&i_use, err); + return true; } is_decidable @@ -530,6 +531,8 @@ impl<'db> ImportResolver<'db> { } fn register_error(&mut self, i_use: &IntermediateUse, err: NameResolutionError) { + self.suspicious_imports.remove(&i_use.use_); + match err { NameResolutionError::NotFound => { self.accumulated_errors.push(NameResDiag::not_found( @@ -569,6 +572,11 @@ impl<'db> ImportResolver<'db> { invisible_span, )); } + + NameResolutionError::Conflict(ident, spans) => { + self.accumulated_errors + .push(NameResDiag::Conflict(ident, spans)); + } } } @@ -910,7 +918,7 @@ impl IntermediateResolvedImports { db: &dyn HirAnalysisDb, i_use: &IntermediateUse, mut bucket: NameResBucket, - ) -> Result<(), NameResDiag> { + ) -> NameResolutionResult<()> { let scope = i_use.original_scope; bucket.set_derivation(NameDerivation::NamedImported(i_use.use_)); @@ -942,7 +950,7 @@ impl IntermediateResolvedImports { }; if i_use.use_ != use_ { - return Err(NameResDiag::conflict( + return Err(NameResolutionError::Conflict( imported_name, vec![ i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index da2587b4a2..b963fce2b6 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -460,6 +460,10 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { res, ) } + + NameResolutionError::Conflict(name, spans) => { + NameResDiag::Conflict(name, spans) + } }; self.diags.push(diag); diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 9164f80fb7..043ceaa709 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -758,6 +758,9 @@ pub enum NameResolutionError { /// The name is found ,but it can't be used in the middle of a use path. InvalidPathSegment(NameRes), + + /// The definition conflicts with other definitions. + Conflict(IdentId, Vec), } pub type NameResolutionResult = Result; @@ -773,6 +776,7 @@ impl fmt::Display for NameResolutionError { f, "the found resolution can't be used in the middle of a path" ), + NameResolutionError::Conflict(_, _) => write!(f, "name conflicts with other names"), } } } From 47ae0f88ee7b7f80cafd253849e8e9cff228356e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 29 Jul 2023 17:44:07 +0200 Subject: [PATCH 235/678] Refactor import ambiguity verification function --- .../src/name_resolution/import_resolver.rs | 121 ++++++++---------- 1 file changed, 51 insertions(+), 70 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs index d8a958581d..343fde1206 100644 --- a/crates/hir-analysis/src/name_resolution/import_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -382,10 +382,15 @@ impl<'db> ImportResolver<'db> { // If the resolution is derived from glob import or external crate, we have to // insert the use into the `suspicious_imports` set to verify the ambiguity // after the algorithm reaches the fixed point. 
- for res in bucket.iter() { - if res.is_builtin() || res.is_external(self.db, i_use) || res.is_derived_from_glob() { - self.suspicious_imports.insert(i_use.use_); - break; + if i_use.is_first_segment() { + for res in bucket.iter() { + if res.is_builtin() + || res.is_external(self.db, self.ingot) + || res.is_derived_from_glob() + { + self.suspicious_imports.insert(i_use.use_); + break; + } } } @@ -462,72 +467,55 @@ impl<'db> ImportResolver<'db> { /// an error if it is ambiguous. /// An additional ambiguity check should be performed after the import /// resolution reaches a fixed point. + // + // The ambiguity in the first segment possibly occurs when the segment is + // resolved to either a glob imported derived resolution or an external ingot in + // the `i_use` resolution. + // + // This is because: + // 1. the resolution of the first segment changes depending on whether the + // dependent glob is resolved or not at the time of `i_use` resolution, + // 2. the order in which uses are resolved is nondeterministic. + // + // In normal name resolution rules, the name brought in by a glob always shadows + // the external ingot, so this ambiguity is inherent in import resolution. + // As a result, we need to add additional verification to check this kind of + // ambiguity. fn verify_ambiguity(&mut self, use_: Use) { let i_use = IntermediateUse::new(self.db, use_); let first_segment_ident = i_use.current_segment_ident(self.db).unwrap(); - let scope = i_use.original_scope; - let ingot = scope.ingot(self.db.as_hir_db()); - - // The ambiguity in the first segment possibly occurs when the segment is - // resolved to either a glob imported derived resolution or an external ingot in - // the `i_use` resolution. - // - // This is because: - // 1. the resolution of the first segment changes depending on whether the - // dependent glob is resolved or not at the time of `i_use` resolution, - // 2. the order in which uses are resolved is nondeterministic. - // - // In normal name resolution rules, the name brought in by a glob always shadows - // the external ingot, so this ambiguity is inherent in import resolution. - // As a result, we need to add additional verification to check this kind of - // ambiguity. - match self.resolve_segment(&i_use) { - Some(IUseResolution::Full(_)) => { - // The ambiguity about the final segment of the path is already verified during - // the fixed point calculation, so verification is not - // necessary. - return; - } - Some(IUseResolution::BasePath(resolved) | IUseResolution::Partial(resolved)) => { - if matches!( - resolved.current_res.unwrap().derivation, - NameDerivation::GlobImported(_) - ) && (ingot - .external_ingots(self.db.as_hir_db()) - .iter() - .any(|(ingot_name, _)| *ingot_name == first_segment_ident) - || PrimTy::all_types() - .iter() - .any(|ty| ty.name() == first_segment_ident)) - { - // The resolved scope is shadowed by an glob imports while originally - // the use might be resolved to an external ingot or builtin. This means there - // is an ambiguity between the external ingot and the name - // imported by the glob import. 
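To make concrete the situation this refactored check guards against, here is a minimal sketch of the decision it makes, using hypothetical simplified stand-ins (a `Derivation` enum, a `FirstSegment` struct, plain string lists) rather than the real `NameRes`/`IntermediateUse` machinery: once imports have reached a fixed point, a first path segment whose current resolution was brought in by a glob import is flagged as ambiguous if the same name would also have resolved to an external ingot or a primitive type.

// Hedged sketch only: simplified, hypothetical types standing in for the
// name-resolution data structures; it illustrates the shape of the check,
// not the actual import_resolver implementation.
#[derive(PartialEq, Eq)]
enum Derivation {
    GlobImported,
    NamedImported,
}

struct FirstSegment<'a> {
    name: &'a str,
    derivation: Derivation,
}

fn is_ambiguous_first_segment(
    seg: &FirstSegment,
    external_ingots: &[&str],
    prim_types: &[&str],
) -> bool {
    // Ambiguity only arises when the name currently comes from a glob import:
    // depending on whether that glob was already resolved when this use was
    // processed, the same segment could instead have resolved to an external
    // ingot or a builtin type.
    seg.derivation == Derivation::GlobImported
        && (external_ingots.contains(&seg.name) || prim_types.contains(&seg.name))
}

fn main() {
    let glob = FirstSegment { name: "std", derivation: Derivation::GlobImported };
    // If an external ingot named `std` also exists, the segment is ambiguous.
    assert!(is_ambiguous_first_segment(&glob, &["std"], &["u256", "bool"]));

    let named = FirstSegment { name: "std", derivation: Derivation::NamedImported };
    // A name brought in by an explicit (named) import is not ambiguous here.
    assert!(!is_ambiguous_first_segment(&named, &["std"], &["u256", "bool"]));
}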
- self.register_error(&i_use, NameResolutionError::Ambiguous(vec![])); + let res = match self.resolve_segment(&i_use) { + Some(IUseResolution::Full(bucket)) => match bucket.pick(NameDomain::Type) { + Ok(res) => res.clone(), + _ => { + return; } + }, + + Some(IUseResolution::BasePath(i_use) | IUseResolution::Partial(i_use)) => { + i_use.current_res.unwrap() } - Some(IUseResolution::Unchanged(_)) => {} + Some(IUseResolution::Unchanged(_)) | None => return, + }; - None => { - return; - } + // The resolved scope is shadowed by an glob imports while originally + // the use might be resolved to an external ingot or builtin. This means there + // is an ambiguity between the external ingot and the name + // imported by the glob import. + if !res.is_external(self.db, self.ingot) + && (self + .ingot + .external_ingots(self.db.as_hir_db()) + .iter() + .any(|(ingot_name, _)| *ingot_name == first_segment_ident) + || PrimTy::all_types() + .iter() + .any(|ty| ty.name() == first_segment_ident)) + { + self.register_error(&i_use, NameResolutionError::Ambiguous(vec![])); } - - // The ambiguity in the base path arises when multiple items of the same name - // are glob imported into the same scope. It is necessary to verify this - // after the fixed point is reached, since it cannot be assumed that all - // globs in that scope have been resolved at the time of `i_use` name - // resolution. - // - // This ambiguity can be detected by the normal shadowing rules , so it can be - // verified by calling `resolve_base_path`. - // - // The ambiguity about the final segment of the path can be verified during the - // fixed point calculation, so verification is not necessary. - self.resolve_base_path(i_use); } fn register_error(&mut self, i_use: &IntermediateUse, err: NameResolutionError) { @@ -1039,16 +1027,9 @@ fn resolved_imports_for_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> &Resolv impl NameRes { /// Returns true if the bucket contains an resolution that is not in the /// same ingot as the current resolution of the `i_use`. 
- fn is_external(&self, db: &dyn HirAnalysisDb, i_use: &IntermediateUse) -> bool { - let Some(current_ingot) = i_use - .current_scope() - .map(|scope| scope.ingot(db.as_hir_db())) - else { - return false; - }; - + fn is_external(&self, db: &dyn HirAnalysisDb, ingot: IngotId) -> bool { match self.kind { - NameResKind::Scope(scope) => scope.ingot(db.as_hir_db()) != current_ingot, + NameResKind::Scope(scope) => scope.ingot(db.as_hir_db()) != ingot, NameResKind::Prim(_) => true, } } From a61a20e9417991a1b80e1ef40095d2f1b561740e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 29 Jul 2023 17:45:20 +0200 Subject: [PATCH 236/678] Improve parsing recovery policy in function parsing --- crates/parser2/src/parser/func.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 3ddda04f4b..253a3b32aa 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -50,12 +50,12 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { None, ) }, - &[SyntaxKind::Lt, SyntaxKind::LParen], + &[SyntaxKind::Lt, SyntaxKind::LParen, SyntaxKind::LBrace], ); parser.with_next_expected_tokens( |parser| parse_generic_params_opt(parser), - &[SyntaxKind::LParen], + &[SyntaxKind::LParen, SyntaxKind::LBrace], ); parser.with_next_expected_tokens( From 79baecdc6256e7f389b0208db8460fa184e8b27e Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 1 Aug 2023 19:47:00 -0500 Subject: [PATCH 237/678] fix LSP spans --- crates/language-server/src/db.rs | 6 ------ .../language-server/src/handlers/notifications.rs | 2 +- crates/language-server/src/util.rs | 14 +++----------- 3 files changed, 4 insertions(+), 18 deletions(-) diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 30ff70369b..b136d4c5a8 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -1,9 +1,5 @@ use std::{collections::BTreeSet, path}; -use codespan_reporting::term::{ - self, - termcolor::{BufferWriter, ColorChoice}, -}; use common::{ diagnostics::CompleteDiagnostic, input::{IngotKind, Version}, @@ -18,8 +14,6 @@ use hir_analysis::{ HirAnalysisDb, }; -use crate::diagnostics::ToCsDiag; - #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::diagnostics::file_line_starts); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 43afceec72..f77dd6f95a 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -3,7 +3,7 @@ use serde::Deserialize; use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDataBase}; -fn string_diagnostics(mut db: &mut LanguageServerDataBase, path: &str, src: &str) -> Vec { +fn string_diagnostics(db: &mut LanguageServerDataBase, path: &str, src: &str) -> Vec { let file_path = std::path::Path::new(path); let top_mod = db.top_mod_from_file(file_path, src); db.run_on_top_mod(top_mod); diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index 0c331ff674..148828662f 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,12 +1,7 @@ use common::diagnostics::{Severity, CompleteDiagnostic, Span}; use lsp_types::Position; -// TODO: these could potentially be moved into the common crate -// for more idiomatic use in the analyzer and the language server - pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { - // we 
need to get line and character offsets from the text, - // first we get the line offsets let line_offsets: Vec = text .lines() .scan(0, |state, line| { @@ -16,7 +11,6 @@ pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { }) .collect(); - // now we get the line and character offsets let start_line = line_offsets .binary_search(&span.range.start().into()) .unwrap_or_else(|x| x - 1); @@ -25,17 +19,15 @@ pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { .binary_search(&span.range.end().into()) .unwrap_or_else(|x| x - 1); - - - // except that we need a fully qualified path to use `into`... - let start_character: usize = span.range.start().into(); - let end_character: usize = span.range.end().into(); + let start_character: usize = usize::from(span.range.start()) - line_offsets[start_line]; + let end_character: usize = usize::from(span.range.end()) - line_offsets[end_line]; lsp_types::Range { start: Position::new(start_line as u32, start_character as u32), end: Position::new(end_line as u32, end_character as u32), } } + pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { match severity { // Severity::Bug => lsp_types::DiagnosticSeverity::ERROR, From 12cc3964d0b1ceca67d9c940733945a3d6f6c8cc Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 1 Aug 2023 19:52:40 -0500 Subject: [PATCH 238/678] add textmate grammar to vscode extension --- .../editors/vscode/fe.tmLanguage.json | 378 ++++++++++++++++++ .../editors/vscode/package.json | 7 + 2 files changed, 385 insertions(+) create mode 100644 crates/language-server/editors/vscode/fe.tmLanguage.json diff --git a/crates/language-server/editors/vscode/fe.tmLanguage.json b/crates/language-server/editors/vscode/fe.tmLanguage.json new file mode 100644 index 0000000000..020eafc1f8 --- /dev/null +++ b/crates/language-server/editors/vscode/fe.tmLanguage.json @@ -0,0 +1,378 @@ +{ + "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", + "name": "Fe", + "scopeName": "source.fe", + "comment": "Included patterns are listed in descending order of matching precedence. In general, each element of the grammar is given one or two names: if necessary, a 'meta' name that describes the element in terms of this grammar file, and a non-'meta', conventional TextMate grammar name, which informs tools such as VS Code on how to color the element. If the conventional TextMate name describes the matched token perfectly, the meta name is obviated.", + "patterns": [ + { "include": "#line-comment" }, + { "include": "#block-comment" }, + { "include": "#has-abilities" }, + { "include": "#type-address" }, + { "include": "#keyword" }, + { "include": "#type" }, + { "include": "#function" }, + { "include": "#value" }, + { "include": "#identifier" }, + { "include": "#punctuation" } + ], + "repository": { + "line-comment": { + "comment": "Single-line comments such as `// ...` and `/// ...`.", + "patterns": [ + { + "name": "meta.comment.line.documentation.fe comment.block.documentation.fe", + "comment": "A single-line comment of the form `/// ...`. fe considers this to be a documentation comment. 
TextMate's naming conventions don't include single-line documentation comments, so this is named 'comment.block.documentation' instead.", + "begin": "(///)", + "beginCaptures": { "1": { "name": "meta.punctuation.slash-slash-slash.fe" } }, + "end": "(\u000a)$", + "endCaptures": { "1": { "name": "meta.punctuation.line-feed.fe" } }, + "patterns": [{ "include": "#comment-ambiguous-character" }] + }, + { + "name": "comment.line.fe", + "comment": "A single-line comment of the form `// ...`.", + "begin": "(//)", + "beginCaptures": { "1": { "name": "meta.punctuation.slash-slash.fe" } }, + "end": "(\u000a)$", + "endCaptures": { "1": { "name": "meta.punctuation.line-feed.fe" } }, + "patterns": [{ "include": "#comment-ambiguous-character" }] + } + ] + }, + "block-comment": { + "comment": "Block comments such as `/* ... */` and `/** ... */`. These need special handling because they can be nested, creating a stack of block of comments that are closed by `*/` in LIFO order.", + "patterns": [ + { + "name": "comment.block.documentation.fe", + "comment": "A block comment of the form `/** ... */`. fe considers this to be a documentation comment.", + "begin": "/\\*\\*(?!/)", + "end": "\\*/", + "patterns": [ + { "include": "#comment-ambiguous-character" }, + { "include": "#block-comment" } + ] + }, + { + "name": "comment.block.fe", + "comment": "A block comment of the form `/* ... */`.", + "begin": "/\\*", + "end": "\\*/", + "patterns": [ + { "include": "#comment-ambiguous-character" }, + { "include": "#block-comment" } + ] + } + ] + }, + "comment-ambiguous-character": { + "comment": "Characters that may be considered confusing when used in comments.", + "patterns": [ + { + "name": "meta.comment.ambiguous-character.isolated-carriage-return.fe invalid.illegal.fe", + "match": "[\r][^\n]" + }, + { + "name": "meta.comment.ambiguous-character.whitespace.fe invalid.illegal.fe", + "comment": "Unicode code points that represent esoteric white space characters. Most of these come from the 'White_Space' property list from https://www.unicode.org/Public/UCD/latest/ucd/PropList.txt.", + "match": "(?:\u000b|\u000c|\u0085|\u00a0|\u1680|\u180e|[\u2000-\u200d]|\u2028|\u2029|\u202f|\u205f|\u2060|\u3000|\ufeff)" + }, + { + "name": "meta.comment.ambiguous-character.asterisk-lookalike.fe invalid.illegal.fe", + "comment": "Unicode code points that represent characters that appear similar to an asterisk '*', which fe uses to delimit comments.", + "match": "(?:\u066d|\u204e|\u2217|\u26b9|\u2731)" + }, + { + "name": "meta.comment.ambiguous-character.slash-lookalike.fe invalid.illegal.fe", + "comment": "Unicode code points that represent characters that appear similar to a forward slash '/', which fe uses to delimit comments.", + "match": "(?:\u01c0|\u0338|\u2044|\u2215|\u2e4a)" + } + ] + }, + "has-abilities": { + "comment": "`copy` is an operator in fe, as well as a struct ability. To disambiguate, preferentially match abilities that appear after the `has` keyword.", + "begin": "\\b(has)\\b", + "beginCaptures": { "1": { "name": "keyword.other.fe" } }, + "end": "(?:(\\{)|(;))", + "endCaptures": { + "1": { "name": "meta.punctuation.left-brace.fe" }, + "2": { "name": "meta.punctuation.semicolon.fe" } + }, + "patterns": [ + { "include": "#line-comment" }, + { "include": "#block-comment" }, + { "include": "#ability" } + ] + }, + "type-address": { + "name": "meta.type-address.fe", + "comment": "`address` is both a keyword (`address 0x1 { ... }`) and a type (`a: &address`). 
Make a simple attempt to disambiguate, by matching the most common usages of the type. This is a best-effort and is easily foiled with block comments such as `a: /**/ address`.", + "match": "(\\:)\\s*(&)?(address)\\b", + "captures": { + "1": { "name": "meta.punctuation.colon.fe" }, + "2": { "name": "meta.punctuation.ampersand.fe" }, + "3": { "name": "meta.type.builtin.address.fe entity.name.type.fe" } + } + }, + "keyword": { + "comment": "Keywords such as 'if' or 'let', operator keywords such as 'fe' or 'copy', contextual keywords such as 'invariant' or 'phantom', or builtin functions such as 'borrow_global_mut'.", + "patterns": [ + { + "name": "keyword.control.fe", + "match": "\\b(?:abort|break|continue|else|if|loop|return|while)\\b" + }, + { + "name": "keyword.operator.fe", + "match": "\\b(?:copy|fe)\\b" + }, + { + "name": "keyword.other.fe", + "match": "\\b(?:acquires|as|friend|has|invariant|Self|spec|use)\\b" + }, + { + "name": "keyword.other.fe.specification", + "match": "\\b(?:aborts_if|aborts_with|apply|assume|axiom|choose|decreases|emits|ensures|except|forall|global|include|internal|local|min|modifies|pragma|requires|schema|succeeds_if|to|update|with|where)\\b" + }, + { + "name": "storage.type.fe", + "match": "\\b(?:address|const|fun|let|module|post|script|struct|phantom)\\b" + }, + { + "name": "storage.modifier.fe", + "match": "\\b(?:mut|native|public)\\b" + }, + { + "name": "support.function.fe", + "match": "\\b(?:assert|borrow_global|borrow_global_mut|exists|freeze|fe_from|fe_to|old)\\b" + } + ] + }, + "type": { + "comment": "A type name, such as `u128` or `&mut 0x2::M::S`.", + "patterns": [ + { + "name": "meta.type.builtin.number.fe entity.name.type.fe", + "match": "\\bu(?:8|64|128)\\b" + }, + { + "name": "meta.type.builtin.bool.fe entity.name.type.fe", + "match": "\\bbool\\b" + }, + { + "name": "meta.type.builtin.address.fe entity.name.type.fe", + "match": "\\baddress\\b" + }, + { + "name": "meta.type.builtin.signer.fe entity.name.type.fe", + "match": "\\bsigner\\b" + }, + { + "name": "meta.type.builtin.vector.fe", + "begin": "(vector)(<)", + "beginCaptures": { + "1": { "name": "entity.name.type.fe" }, + "2": { "name": "meta.punctuation.left-angle-bracket.fe" } + }, + "end": "(>)", + "endCaptures": { + "1": { "name": "meta.punctuation.right-angle-bracket.fe" } + }, + "patterns": [ + { "include": "#line-comment" }, + { "include": "#block-comment" }, + { "include": "#type" }, + { "include": "#identifier" } + ] + } + ] + }, + "function": { + "comment": "We assume an identifier that does not begin with a capital letter, eventually followed by an open parenthesis `(`, is a function (no distinction is made here between function call expressions and function declarations). 
This means that parameterized attributes such as `#[expected_failure(abort_code = 1)]` are classified as functions, but that doesn't seem so bad.", + "patterns": [ + { + "comment": "A function followed by a type list, for example `spec_none()`.", + "begin": "([_a-z][_a-zA-Z0-9]*)(<)", + "beginCaptures": { + "1": { "name": "meta.function.fe entity.name.function.fe" }, + "2": { "name": "meta.punctuation.less.fe" } + }, + "end": "(>)(\\()", + "endCaptures": { + "1": { "name": "meta.punctuation.greater.fe" }, + "2": { "name": "meta.punctuation.left-parenthesis.fe" } + }, + "patterns": [ + { "include": "#line-comment" }, + { "include": "#block-comment" }, + { "include": "#type" }, + { "include": "#ability" }, + { "include": "#identifier" } + ] + }, + { + "comment": "A function immediately followed by an open parenthesis, for example `len(vec)`.", + "match": "([_a-z][_a-zA-Z0-9]*)(\\()", + "captures": { + "1": { "name": "meta.function.fe entity.name.function.fe" }, + "2": { "name": "meta.punctuation.left-parenthesis.fe" } + } + } + ] + }, + "value": { + "name": "meta.value.fe", + "patterns": [ + { + "comment": "An address value with a hexadecimal constant, for example `@0x1bEeF`.", + "name": "meta.value.address.hexadecimal.fe variable.other.fe", + "match": "@0x[a-fA-F0-9]+(u(8|64|128))?" + }, + { + "comment": "An address value with a non-hexadecimal numeric constant, for example `@42u8`.", + "name": "meta.value.address.decimal.fe variable.other.fe", + "match": "@[0-9]+(u(8|64|128))?" + }, + { + "comment": "An address value with an identifier, for example `@addr`.", + "name": "meta.value.address.identifier.fe variable.other.fe", + "match": "@[_a-zA-Z][_a-zA-Z0-9]*" + }, + { + "comment": "A boolean constant: `true` or `false`.", + "name": "meta.value.boolean.fe constant.language.boolean.fe", + "match": "\\b(true|false)\\b" + }, + { + "comment": "A hexadecimal constant number, for example `0x11Eeu64`.", + "name": "meta.value.number.hexadecimal.fe constant.numeric.fe", + "match": "\\b0x[a-fA-F0-9]+(u(8|64|128))?\\b" + }, + { + "comment": "A non-hexadecimal constant number, for example `42u128`.", + "name": "meta.value.number.decimal.fe constant.numeric.fe", + "match": "\\b[0-9]+(u(8|64|128))?\\b" + }, + { + "comment": "A byte string, for example b\"hel\tlo\".", + "name": "meta.value.bytestring.fe string.quoted.double.fe", + "begin": "(b\")", + "beginCaptures": { "1": { "name": "meta.value.bytestring.prefix.fe" } }, + "end": "(\")", + "endCaptures": { "1": { "name": "meta.value.bytestring.suffix.fe" } }, + "patterns": [{ + "comment": "An escaped character within a string.", + "name": "constant.character.escape.fe", + "match": "\\\\." 
+ }] + }, + { + "comment": "A hexadecimal string, for example x\"01dF\".", + "name": "meta.value.hexadecimal-string.fe string.quoted.double.fe", + "begin": "(x\")", + "beginCaptures": { "1": { "name": "meta.value.hexadecimal-string.prefix.fe" } }, + "end": "(\")", + "endCaptures": { "1": { "name": "meta.value.hexadecimal-string.suffix.fe" } }, + "patterns": [{ + "comment": "Invalid hexadecimal character within a string.", + "name": "meta.value.hexadecimal-string.invalid.fe invalid.illegal.fe", + "match": "[^a-fA-F0-9]" + }] + } + ] + }, + "identifier": { + "patterns": [ + { + "name": "meta.identifier.all-capitals.fe constant.other.fe", + "comment": "We assume any identifier composed of two or more capital letters is a constant.", + "match": "\\b[A-Z][_A-Z0-9]+\\b" + }, + { + "name": "meta.identifier.camel-case-type-list.fe", + "comment": "We assume any identifier beginning with a capital letter is a type. This pattern matches such a type followed by angle brackets `<...>`.", + "begin": "\\b([A-Z][_a-zA-Z0-9]*)(<)", + "beginCaptures": { + "1": { "name": "entity.name.type.fe" }, + "2": { "name": "meta.punctuation.less.fe" } + }, + "end": "(>)", + "endCaptures": { "1": { "name": "meta.punctuation.greater.fe" } }, + "patterns": [ + { "include": "#type" }, + { "include": "#ability" }, + { "include": "#identifier" } + ] + }, + { + "name": "meta.identifier.camel-case.fe entity.name.type.fe", + "comment": "We assume any identifier beginning with a capital letter is a type.", + "match": "\\b[A-Z][_a-zA-Z0-9]*\\b" + }, + { + "name": "meta.identifier.type-list.fe", + "comment": "This pattern matches an identifier followed by angle brackets `<...>`.", + "begin": "\\b([_a-zA-Z][_a-zA-Z0-9]*)(<)", + "beginCaptures": { + "1": { "name": "variable.other.fe" }, + "2": { "name": "meta.punctuation.less.fe" } + }, + "end": "(>)", + "endCaptures": { "1": { "name": "meta.punctuation.greater.fe" } }, + "patterns": [ + { "include": "#type" }, + { "include": "#ability" }, + { "include": "#identifier" } + ] + }, + { + "name": "meta.identifier.fe variable.other.fe", + "match": "\\b[_a-zA-Z][_a-zA-Z0-9]*\\b" + } + ] + }, + "ability": { + "name": "meta.ability.fe entity.name.type.fe", + "comment": "This pattern matches any of the built-in type abilities.", + "match": "\\b(copy|drop|key|store)\\b" + }, + "punctuation": { + "comment": "Patterns used to capture punctuators such as `{` or `::` as individual tokens.", + "patterns": [ + { "name": "meta.punctuation.exclaim-equal.fe", "match": "!=" }, + { "name": "meta.punctuation.exclaim.fe", "match": "!" }, + { "name": "meta.punctuation.percent.fe", "match": "%" }, + { "name": "meta.punctuation.ampersand-ampersand.fe", "match": "&&" }, + { "name": "meta.punctuation.ampersand.fe", "match": "&" }, + { "name": "meta.punctuation.left-parenthesis.fe", "match": "\\(" }, + { "name": "meta.punctuation.right-parenthesis.fe", "match": "\\)" }, + { "name": "meta.punctuation.hash-left-bracket.fe", "match": "#\\[" }, + { "name": "meta.punctuation.left-bracket.fe", "match": "\\[" }, + { "name": "meta.punctuation.right-bracket.fe", "match": "\\]" }, + { "name": "meta.punctuation.asterisk.fe", "match": "\\*" }, + { "name": "meta.punctuation.plus.fe", "match": "\\+" }, + { "name": "meta.punctuation.comma.fe", "match": "," }, + { "name": "meta.punctuation.minus.fe", "match": "-" }, + { "name": "meta.punctuation.period-period.fe", "match": "\\.\\." }, + { "name": "meta.punctuation.period.fe", "match": "\\." 
}, + { "name": "meta.punctuation.slash.fe", "match": "/" }, + { "name": "meta.punctuation.colon-colon.fe", "match": "::" }, + { "name": "meta.punctuation.colon.fe", "match": ":" }, + { "name": "meta.punctuation.semicolon.fe", "match": ";" }, + { "name": "meta.punctuation.less-equal-equal-greater.fe", "match": "<==>" }, + { "name": "meta.punctuation.equal-equal-greater.fe", "match": "==>" }, + { "name": "meta.punctuation.equal-equal.fe", "match": "==" }, + { "name": "meta.punctuation.less-less.fe", "match": "<<" }, + { "name": "meta.punctuation.less-equal.fe", "match": "<=" }, + { "name": "meta.punctuation.greater-greater.fe", "match": ">>" }, + { "name": "meta.punctuation.greater-equal.fe", "match": ">=" }, + { "name": "meta.punctuation.equal.fe", "match": "=" }, + { "name": "meta.punctuation.less.fe", "match": "<" }, + { "name": "meta.punctuation.greater.fe", "match": ">" }, + { "name": "meta.punctuation.caret.fe", "match": "\\^" }, + { "name": "meta.punctuation.pipe-pipe.fe", "match": "\\|\\|" }, + { "name": "meta.punctuation.pipe.fe", "match": "\\|" }, + { "name": "meta.punctuation.left-brace.fe", "match": "\\{" }, + { "name": "meta.punctuation.right-brace.fe", "match": "\\}" } + ] + } + } +} diff --git a/crates/language-server/editors/vscode/package.json b/crates/language-server/editors/vscode/package.json index aea5cee424..eaff771874 100644 --- a/crates/language-server/editors/vscode/package.json +++ b/crates/language-server/editors/vscode/package.json @@ -27,6 +27,13 @@ ".fe" ] } + ], + "grammars": [ + { + "language": "fe", + "scopeName": "source.fe", + "path": "./fe.tmLanguage.json" + } ] }, "scripts": { From 432d109b3c396809176d42f4713c34492e8414ec Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 4 Aug 2023 08:35:40 -0500 Subject: [PATCH 239/678] 'Database' spelling convention --- Cargo.lock | 1 + crates/language-server/src/db.rs | 18 +++++++++--------- crates/language-server/src/diagnostics.rs | 8 ++++---- .../src/handlers/notifications.rs | 4 ++-- crates/language-server/src/state.rs | 6 +++--- 5 files changed, 19 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b68ccb46d2..3489ac14ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1113,6 +1113,7 @@ dependencies = [ "crossbeam-channel", "fe-analyzer", "fe-common2", + "fe-driver2", "fe-hir", "fe-hir-analysis", "fe-macros", diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index b136d4c5a8..8ad820abc1 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -28,19 +28,19 @@ impl LanguageServerDb for DB where } #[salsa::db(common::Jar, hir::Jar, hir_analysis::Jar, Jar)] -pub struct LanguageServerDataBase { +pub struct LanguageServerDatabase { storage: salsa::Storage, diags: Vec>, } -impl LanguageServerDataBase { +impl LanguageServerDatabase { pub fn run_on_top_mod(&mut self, top_mod: TopLevelMod) { self.run_on_file_with_pass_manager(top_mod, initialize_analysis_pass); } pub fn run_on_file_with_pass_manager(&mut self, top_mod: TopLevelMod, pm_builder: F) where - F: FnOnce(&LanguageServerDataBase) -> AnalysisPassManager<'_>, + F: FnOnce(&LanguageServerDatabase) -> AnalysisPassManager<'_>, { self.diags.clear(); self.diags = { @@ -81,14 +81,14 @@ impl LanguageServerDataBase { } } -impl HirDb for LanguageServerDataBase {} -impl SpannedHirDb for LanguageServerDataBase {} -impl LowerHirDb for LanguageServerDataBase {} -impl salsa::Database for LanguageServerDataBase { +impl HirDb for LanguageServerDatabase {} +impl SpannedHirDb for LanguageServerDatabase {} 
+impl LowerHirDb for LanguageServerDatabase {} +impl salsa::Database for LanguageServerDatabase { fn salsa_event(&self, _: salsa::Event) {} } -impl Default for LanguageServerDataBase { +impl Default for LanguageServerDatabase { fn default() -> Self { let db = Self { storage: Default::default(), @@ -99,7 +99,7 @@ impl Default for LanguageServerDataBase { } } -fn initialize_analysis_pass(db: &LanguageServerDataBase) -> AnalysisPassManager<'_> { +fn initialize_analysis_pass(db: &LanguageServerDatabase) -> AnalysisPassManager<'_> { let mut pass_manager = AnalysisPassManager::new(); pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 3dcfbbf6c8..93ee94c983 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -10,18 +10,18 @@ use common::{ }; use hir::diagnostics::DiagnosticVoucher; -use crate::db::{LanguageServerDataBase, LanguageServerDb}; +use crate::db::{LanguageServerDatabase, LanguageServerDb}; pub trait ToCsDiag { - fn to_cs(&self, db: &LanguageServerDataBase) -> cs_diag::Diagnostic; + fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic; } impl ToCsDiag for T where T: DiagnosticVoucher, { - fn to_cs(&self, db: &LanguageServerDataBase) -> cs_diag::Diagnostic { + fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic { let complete = self.to_complete(db); let severity = convert_severity(complete.severity); @@ -69,7 +69,7 @@ pub fn file_line_starts(db: &dyn LanguageServerDb, file: InputFile) -> Vec cs_files::Files<'a> for LanguageServerDataBase { +impl<'a> cs_files::Files<'a> for LanguageServerDatabase { type FileId = InputFile; type Name = &'a Utf8Path; type Source = &'a str; diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index f77dd6f95a..23699a8dbe 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -1,9 +1,9 @@ use anyhow::{Result, Error}; use serde::Deserialize; -use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDataBase}; +use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDatabase}; -fn string_diagnostics(db: &mut LanguageServerDataBase, path: &str, src: &str) -> Vec { +fn string_diagnostics(db: &mut LanguageServerDatabase, path: &str, src: &str) -> Vec { let file_path = std::path::Path::new(path); let top_mod = db.top_mod_from_file(file_path, src); db.run_on_top_mod(top_mod); diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 08a5c3a872..ba040e9ad4 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -3,7 +3,7 @@ use crossbeam_channel::{Receiver, Sender}; use lsp_server::Message; use lsp_types::notification::Notification; use lsp_types::request::Request; -use crate::db::LanguageServerDataBase; +use crate::db::LanguageServerDatabase; use crate::handlers::{ request::handle_hover, @@ -12,12 +12,12 @@ use crate::handlers::{ pub struct ServerState { pub sender: Sender, - pub db: LanguageServerDataBase, + pub db: LanguageServerDatabase, } impl ServerState { pub fn new(sender: Sender) -> Self { - ServerState { sender, db: LanguageServerDataBase::default() } + ServerState { sender, db: LanguageServerDatabase::default() } } pub fn run(&mut self, receiver: 
Receiver) -> Result<()> { From afccde8c287e52a807636e625ef3c559fb909f0c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 2 Aug 2023 13:37:48 +0200 Subject: [PATCH 240/678] Bump trivias when emitting an error --- crates/parser2/src/parser/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 706ed5dfb9..d6b3bccb95 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -446,6 +446,7 @@ impl Parser { /// Add the `msg` to the error list. fn error(&mut self, msg: &str) -> ErrorScope { + self.bump_trivias(); self.is_err = true; let start = self.current_pos; let end = if let Some(current_token) = self.current_token() { From 4b095f93711e58e9647229e6f304262f699b1795 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 2 Aug 2023 15:34:37 +0200 Subject: [PATCH 241/678] Add an ad-hoc parser recovery to avoid confusing error messages when a function is defined in `struct/contract` definition --- crates/parser2/src/parser/struct_.rs | 17 +++++ .../error_recovery/items/struct_.fe | 8 +++ .../error_recovery/items/struct_.snap | 66 ++++++++++++++++++- 3 files changed, 88 insertions(+), 3 deletions(-) diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index e2fdf3cc3d..232c21f342 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -3,6 +3,7 @@ use crate::SyntaxKind; use super::{ attr::parse_attr_list, define_scope, + func::FuncScope, param::{parse_generic_params_opt, parse_where_clause_opt}, token_stream::TokenStream, type_::parse_type, @@ -90,6 +91,22 @@ impl super::Parse for RecordFieldDefScope { parse_attr_list(parser); parser.bump_if(SyntaxKind::PubKw); + // Since the Fe-V2 doesn't support method definition in a struct, we add an + // ad-hoc check for the method definition in a struct to avoid the confusing + // error message. + // The reason that justifies this ad-hoc check is + // 1. This error is difficult to recover properly with the current parser + // design, and the emitted error message is confusing. + // 2. We anticipate that this error would happen often in the transition period + // to Fe-V2. 
+ if parser.current_kind() == Some(SyntaxKind::FnKw) { + let err_scope = parser.error("function definition in struct is not allowed"); + let checkpoint = parser.enter(err_scope, None); + parser.parse(FuncScope::default(), None); + parser.leave(checkpoint); + return; + } + parser.with_next_expected_tokens( |parser| { if !parser.bump_if(SyntaxKind::Ident) { diff --git a/crates/parser2/test_files/error_recovery/items/struct_.fe b/crates/parser2/test_files/error_recovery/items/struct_.fe index f9b312c988..50f0c13817 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.fe +++ b/crates/parser2/test_files/error_recovery/items/struct_.fe @@ -5,4 +5,12 @@ where T { foo bar: i32::foo +} + +pub struct Foo { + pub fn foo() -> i32 { + return 1 + } + + x: i32 } \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index eaeab2cf22..a5a2033dad 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -3,9 +3,9 @@ source: crates/parser2/tests/error_recovery.rs expression: node input_file: crates/parser2/test_files/error_recovery/items/struct_.fe --- -Root@0..74 - ItemList@0..74 - Item@0..74 +Root@0..160 + ItemList@0..160 + Item@0..76 Struct@0..74 ItemModifier@0..3 PubKw@0..3 "pub" @@ -73,4 +73,64 @@ Root@0..74 Ident@69..72 "foo" Newline@72..73 "\n" RBrace@73..74 "}" + Newline@74..76 "\n\n" + Item@76..160 + Struct@76..160 + ItemModifier@76..79 + PubKw@76..79 "pub" + WhiteSpace@79..80 " " + StructKw@80..86 "struct" + WhiteSpace@86..87 " " + Ident@87..90 "Foo" + WhiteSpace@90..91 " " + RecordFieldDefList@91..160 + LBrace@91..92 "{" + Newline@92..93 "\n" + WhiteSpace@93..97 " " + RecordFieldDef@97..142 + PubKw@97..100 "pub" + WhiteSpace@100..101 " " + Error@101..142 + Func@101..142 + FnKw@101..103 "fn" + WhiteSpace@103..104 " " + Ident@104..107 "foo" + FuncParamList@107..109 + LParen@107..108 "(" + RParen@108..109 ")" + WhiteSpace@109..111 " " + Arrow@111..113 "->" + WhiteSpace@113..114 " " + PathType@114..117 + Path@114..117 + PathSegment@114..117 + Ident@114..117 "i32" + WhiteSpace@117..118 " " + BlockExpr@118..142 + LBrace@118..119 "{" + Newline@119..120 "\n" + WhiteSpace@120..128 " " + ReturnStmt@128..136 + ReturnKw@128..134 "return" + WhiteSpace@134..135 " " + LitExpr@135..136 + Lit@135..136 + Int@135..136 "1" + Newline@136..137 "\n" + WhiteSpace@137..141 " " + RBrace@141..142 "}" + Newline@142..143 "\n" + WhiteSpace@143..147 " " + Newline@147..148 "\n" + WhiteSpace@148..152 " " + RecordFieldDef@152..158 + Ident@152..153 "x" + Colon@153..154 ":" + WhiteSpace@154..155 " " + PathType@155..158 + Path@155..158 + PathSegment@155..158 + Ident@155..158 "i32" + Newline@158..159 "\n" + RBrace@159..160 "}" From a0f24b1c00271c4f128caa5818641a4d01bbc50c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 4 Aug 2023 17:50:05 +0200 Subject: [PATCH 242/678] Improve marker traits usability --- crates/driver2/src/diagnostics.rs | 28 +++++++++++------------ crates/driver2/src/lib.rs | 14 +++++++----- crates/hir-analysis/tests/test_db.rs | 14 +++++++----- crates/hir/src/lib.rs | 33 ++++++++++++++-------------- 4 files changed, 47 insertions(+), 42 deletions(-) diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs index 66f3e0e812..cabede5314 100644 --- a/crates/driver2/src/diagnostics.rs +++ b/crates/driver2/src/diagnostics.rs @@ -6,22 +6,22 @@ use cs::{diagnostic as cs_diag, 
files as cs_files}; use common::{ diagnostics::{LabelStyle, Severity}, - InputFile, + InputDb, InputFile, }; use hir::diagnostics::DiagnosticVoucher; use crate::{DriverDataBase, DriverDb}; pub trait ToCsDiag { - fn to_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic; + fn to_cs(&self, db: &dyn DriverDb) -> cs_diag::Diagnostic; } impl ToCsDiag for T where T: DiagnosticVoucher, { - fn to_cs(&self, db: &DriverDataBase) -> cs_diag::Diagnostic { - let complete = self.to_complete(db); + fn to_cs(&self, db: &dyn DriverDb) -> cs_diag::Diagnostic { + let complete = self.to_complete(db.as_spanned_hir_db()); let severity = convert_severity(complete.severity); let code = Some(complete.error_code.to_string()); @@ -68,21 +68,21 @@ pub fn file_line_starts(db: &dyn DriverDb, file: InputFile) -> Vec { cs::files::line_starts(file.text(db.as_input_db())).collect() } -impl<'a> cs_files::Files<'a> for DriverDataBase { +impl<'db> cs_files::Files<'db> for DriverDataBase { type FileId = InputFile; - type Name = &'a Utf8Path; - type Source = &'a str; + type Name = &'db Utf8Path; + type Source = &'db str; - fn name(&'a self, file_id: Self::FileId) -> Result { - Ok(file_id.path(self).as_path()) + fn name(&'db self, file_id: Self::FileId) -> Result { + Ok(file_id.path(self.as_input_db()).as_path()) } - fn source(&'a self, file_id: Self::FileId) -> Result { - Ok(file_id.text(self)) + fn source(&'db self, file_id: Self::FileId) -> Result { + Ok(file_id.text(self.as_input_db())) } fn line_index( - &'a self, + &'db self, file_id: Self::FileId, byte_index: usize, ) -> Result { @@ -93,7 +93,7 @@ impl<'a> cs_files::Files<'a> for DriverDataBase { } fn line_range( - &'a self, + &'db self, file_id: Self::FileId, line_index: usize, ) -> Result, cs_files::Error> { @@ -107,7 +107,7 @@ impl<'a> cs_files::Files<'a> for DriverDataBase { })?; let end = if line_index == line_starts.len() - 1 { - file_id.text(self).len() + file_id.text(self.as_input_db()).len() } else { *line_starts .get(line_index + 1) diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index 82cc962a53..39b58d96e9 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -31,11 +31,18 @@ pub trait DriverDb: } impl DriverDb for DB where - DB: Sized + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb + DB: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb { } -#[salsa::db(common::Jar, hir::Jar, hir_analysis::Jar, Jar)] +#[salsa::db( + common::Jar, + hir::Jar, + hir::LowerJar, + hir::SpannedJar, + hir_analysis::Jar, + Jar +)] pub struct DriverDataBase { storage: salsa::Storage, diags: Vec>, @@ -116,9 +123,6 @@ impl DriverDataBase { } } -impl HirDb for DriverDataBase {} -impl SpannedHirDb for DriverDataBase {} -impl LowerHirDb for DriverDataBase {} impl salsa::Database for DriverDataBase { fn salsa_event(&self, _: salsa::Event) {} } diff --git a/crates/hir-analysis/tests/test_db.rs b/crates/hir-analysis/tests/test_db.rs index 92d866d1ad..31186f9e1e 100644 --- a/crates/hir-analysis/tests/test_db.rs +++ b/crates/hir-analysis/tests/test_db.rs @@ -17,13 +17,19 @@ use hir::{ hir_def::TopLevelMod, lower, span::{DynLazySpan, LazySpan}, - HirDb, LowerHirDb, SpannedHirDb, + HirDb, SpannedHirDb, }; use rustc_hash::FxHashMap; type CodeSpanFileId = usize; -#[salsa::db(common::Jar, hir::Jar, fe_hir_analysis::Jar)] +#[salsa::db( + common::Jar, + hir::Jar, + hir::SpannedJar, + hir::LowerJar, + fe_hir_analysis::Jar +)] pub struct HirAnalysisTestDb { storage: salsa::Storage, } @@ -139,10 +145,6 
@@ impl Default for HirPropertyFormatter { } } -impl HirDb for HirAnalysisTestDb {} -impl SpannedHirDb for HirAnalysisTestDb {} -impl LowerHirDb for HirAnalysisTestDb {} - impl salsa::Database for HirAnalysisTestDb { fn salsa_event(&self, _: salsa::Event) {} } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index b31f28067f..9277898e81 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -60,6 +60,12 @@ pub struct Jar( external_ingots_impl, ); +#[salsa::jar(db = SpannedHirDb)] +pub struct SpannedJar(); + +#[salsa::jar(db = LowerHirDb)] +pub struct LowerJar(); + #[derive(Clone, Copy)] pub struct ParsingPass<'db> { db: &'db dyn HirDb, @@ -119,20 +125,19 @@ pub trait HirDb: salsa::DbWithJar + InputDb { >::as_jar_db::<'_>(self) } } +impl HirDb for DB where DB: salsa::DbWithJar + InputDb {} /// `LowerHirDb` is a marker trait for lowering AST to HIR items. /// All code that requires [`LowerHirDb`] is considered have a possibility to /// invalidate the cache in salsa when a revision is updated. Therefore, /// implementations relying on `LowerHirDb` are prohibited in all /// Analysis phases. -pub trait LowerHirDb: HirDb { - fn as_lower_hir_db(&self) -> &dyn LowerHirDb - where - Self: Sized, - { - self +pub trait LowerHirDb: salsa::DbWithJar + HirDb { + fn as_lower_hir_db(&self) -> &dyn LowerHirDb { + >::as_jar_db::<'_>(self) } } +impl LowerHirDb for DB where DB: salsa::DbWithJar + HirDb {} /// `SpannedHirDb` is a marker trait for extracting span-dependent information /// from HIR Items. @@ -145,14 +150,12 @@ pub trait LowerHirDb: HirDb { /// generate [CompleteDiagnostic](common::diagnostics::CompleteDiagnostic) from /// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). /// See also `[LazySpan]`[`crate::span::LazySpan`] for more details. -pub trait SpannedHirDb: HirDb { - fn as_spanned_hir_db(&self) -> &dyn SpannedHirDb - where - Self: Sized, - { - self +pub trait SpannedHirDb: salsa::DbWithJar + HirDb { + fn as_spanned_hir_db(&self) -> &dyn SpannedHirDb { + >::as_jar_db::<'_>(self) } } +impl SpannedHirDb for DB where DB: salsa::DbWithJar + HirDb {} #[cfg(test)] mod test_db { @@ -168,10 +171,9 @@ mod test_db { hir_def::{scope_graph::ScopeGraph, ItemKind, TopLevelMod}, lower::{map_file_to_mod, scope_graph}, span::LazySpan, - LowerHirDb, SpannedHirDb, }; - #[salsa::db(common::Jar, crate::Jar)] + #[salsa::db(common::Jar, crate::Jar, crate::LowerJar, crate::SpannedJar)] pub(crate) struct TestDb { storage: salsa::Storage, } @@ -185,9 +187,6 @@ mod test_db { db } } - impl HirDb for TestDb {} - impl SpannedHirDb for TestDb {} - impl LowerHirDb for TestDb {} impl salsa::Database for TestDb { fn salsa_event(&self, _: salsa::Event) {} } From dd5ad3c537004275ee80f26f09a87db0130df317 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 2 Aug 2023 13:37:48 +0200 Subject: [PATCH 243/678] Bump trivias when emitting an error --- crates/parser2/src/parser/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index 706ed5dfb9..d6b3bccb95 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -446,6 +446,7 @@ impl Parser { /// Add the `msg` to the error list. 
fn error(&mut self, msg: &str) -> ErrorScope { + self.bump_trivias(); self.is_err = true; let start = self.current_pos; let end = if let Some(current_token) = self.current_token() { From 07fa25b399f282d9a628a8049f098a2ef40c41e8 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 2 Aug 2023 15:34:37 +0200 Subject: [PATCH 244/678] Add an ad-hoc parser recovery to avoid confusing error messages when a function is defined in `struct/contract` definition --- crates/parser2/src/parser/struct_.rs | 17 +++++ .../error_recovery/items/struct_.fe | 8 +++ .../error_recovery/items/struct_.snap | 66 ++++++++++++++++++- 3 files changed, 88 insertions(+), 3 deletions(-) diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index e2fdf3cc3d..232c21f342 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -3,6 +3,7 @@ use crate::SyntaxKind; use super::{ attr::parse_attr_list, define_scope, + func::FuncScope, param::{parse_generic_params_opt, parse_where_clause_opt}, token_stream::TokenStream, type_::parse_type, @@ -90,6 +91,22 @@ impl super::Parse for RecordFieldDefScope { parse_attr_list(parser); parser.bump_if(SyntaxKind::PubKw); + // Since the Fe-V2 doesn't support method definition in a struct, we add an + // ad-hoc check for the method definition in a struct to avoid the confusing + // error message. + // The reason that justifies this ad-hoc check is + // 1. This error is difficult to recover properly with the current parser + // design, and the emitted error message is confusing. + // 2. We anticipate that this error would happen often in the transition period + // to Fe-V2. + if parser.current_kind() == Some(SyntaxKind::FnKw) { + let err_scope = parser.error("function definition in struct is not allowed"); + let checkpoint = parser.enter(err_scope, None); + parser.parse(FuncScope::default(), None); + parser.leave(checkpoint); + return; + } + parser.with_next_expected_tokens( |parser| { if !parser.bump_if(SyntaxKind::Ident) { diff --git a/crates/parser2/test_files/error_recovery/items/struct_.fe b/crates/parser2/test_files/error_recovery/items/struct_.fe index f9b312c988..50f0c13817 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.fe +++ b/crates/parser2/test_files/error_recovery/items/struct_.fe @@ -5,4 +5,12 @@ where T { foo bar: i32::foo +} + +pub struct Foo { + pub fn foo() -> i32 { + return 1 + } + + x: i32 } \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index eaeab2cf22..a5a2033dad 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -3,9 +3,9 @@ source: crates/parser2/tests/error_recovery.rs expression: node input_file: crates/parser2/test_files/error_recovery/items/struct_.fe --- -Root@0..74 - ItemList@0..74 - Item@0..74 +Root@0..160 + ItemList@0..160 + Item@0..76 Struct@0..74 ItemModifier@0..3 PubKw@0..3 "pub" @@ -73,4 +73,64 @@ Root@0..74 Ident@69..72 "foo" Newline@72..73 "\n" RBrace@73..74 "}" + Newline@74..76 "\n\n" + Item@76..160 + Struct@76..160 + ItemModifier@76..79 + PubKw@76..79 "pub" + WhiteSpace@79..80 " " + StructKw@80..86 "struct" + WhiteSpace@86..87 " " + Ident@87..90 "Foo" + WhiteSpace@90..91 " " + RecordFieldDefList@91..160 + LBrace@91..92 "{" + Newline@92..93 "\n" + WhiteSpace@93..97 " " + RecordFieldDef@97..142 + PubKw@97..100 "pub" + WhiteSpace@100..101 " " + 
Error@101..142 + Func@101..142 + FnKw@101..103 "fn" + WhiteSpace@103..104 " " + Ident@104..107 "foo" + FuncParamList@107..109 + LParen@107..108 "(" + RParen@108..109 ")" + WhiteSpace@109..111 " " + Arrow@111..113 "->" + WhiteSpace@113..114 " " + PathType@114..117 + Path@114..117 + PathSegment@114..117 + Ident@114..117 "i32" + WhiteSpace@117..118 " " + BlockExpr@118..142 + LBrace@118..119 "{" + Newline@119..120 "\n" + WhiteSpace@120..128 " " + ReturnStmt@128..136 + ReturnKw@128..134 "return" + WhiteSpace@134..135 " " + LitExpr@135..136 + Lit@135..136 + Int@135..136 "1" + Newline@136..137 "\n" + WhiteSpace@137..141 " " + RBrace@141..142 "}" + Newline@142..143 "\n" + WhiteSpace@143..147 " " + Newline@147..148 "\n" + WhiteSpace@148..152 " " + RecordFieldDef@152..158 + Ident@152..153 "x" + Colon@153..154 ":" + WhiteSpace@154..155 " " + PathType@155..158 + Path@155..158 + PathSegment@155..158 + Ident@155..158 "i32" + Newline@158..159 "\n" + RBrace@159..160 "}" From cb418174a6c661a324fc3c1744f595b10a7fecd6 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 16 Aug 2023 15:03:05 -0500 Subject: [PATCH 245/678] simpler VSCode extension debugging --- .vscode/launch.json | 21 ++++++++++++++++++ .vscode/settings.json | 6 +++++ .vscode/tasks.json | 14 ++++++++++++ .../language-server/editors/vscode/README.md | 13 +++-------- .../editors/vscode/package-lock.json | 22 +------------------ 5 files changed, 45 insertions(+), 31 deletions(-) create mode 100644 .vscode/launch.json create mode 100644 .vscode/settings.json create mode 100644 .vscode/tasks.json diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000000..a3b8f2b3a5 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,21 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "args": [ + "--extensionDevelopmentPath=${workspaceFolder}/crates/language-server/editors/vscode", + "${workspaceFolder}/crates/test-files/fixtures/" + ], + "name": "Launch Fe VSCode Extension", + "outFiles": [ + "${workspaceFolder}/crates/language-server/editors/vscode/out/**/*.js" + ], + "preLaunchTask": "compile-vscode-extension", + "request": "launch", + "type": "extensionHost" + }, + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000..e1f31fd4b7 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,6 @@ +{ + "editor.tabSize": 4, + "rust-analyzer.linkedProjects": [ + "./crates/language-server/Cargo.toml" + ], +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000000..471a8d4df9 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,14 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "compile-vscode-extension", + "type": "shell", + "command": "npm install && npm run compile", + "options": { + "cwd": "${workspaceFolder}/crates/language-server/editors/vscode" + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/crates/language-server/editors/vscode/README.md b/crates/language-server/editors/vscode/README.md index c3cc1bd79d..26f06e25f8 100644 --- a/crates/language-server/editors/vscode/README.md +++ b/crates/language-server/editors/vscode/README.md @@ -1,18 +1,11 @@ # Fe LSP client VSCode extension -This needs a better name. 
## Development/Debugging -Before running the VSCode extension, ensure the language server is built by following the instructions in [the `language-server` crate's README.md](../../README.md). +Build the language server by following [the `language-server` crate's README.md](../../README.md). -Once you've built the language server binary, run: -```bash -npm install -npm run build -``` +Then, open the Fe codebase root workspace in VSCode and press `F5` to debug the extension using the "Launch Fe VSCode Extension" configuration. -Then open this directory in VSCode and press `F5` to run the extension and start the debugger. - -A new VSCode window will open with the Fe test fixtures directory and this extension loaded. +The VSCode extension will be compiled and a new VSCode window will open with the Fe test fixtures directory and this extension loaded. ## Building releases ### TODO \ No newline at end of file diff --git a/crates/language-server/editors/vscode/package-lock.json b/crates/language-server/editors/vscode/package-lock.json index 3a8d23e9a0..30579e385e 100644 --- a/crates/language-server/editors/vscode/package-lock.json +++ b/crates/language-server/editors/vscode/package-lock.json @@ -8,8 +8,7 @@ "name": "fe-analyzer", "version": "0.0.1", "dependencies": { - "vscode-languageclient": "^8.1.0", - "vscode-languageserver": "^8.1.0" + "vscode-languageclient": "^8.1.0" }, "devDependencies": { "@types/glob": "^8.1.0", @@ -2257,17 +2256,6 @@ "node": ">=10" } }, - "node_modules/vscode-languageserver": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-8.1.0.tgz", - "integrity": "sha512-eUt8f1z2N2IEUDBsKaNapkz7jl5QpskN2Y0G01T/ItMxBxw1fJwvtySGB9QMecatne8jFIWJGWI61dWjyTLQsw==", - "dependencies": { - "vscode-languageserver-protocol": "3.17.3" - }, - "bin": { - "installServerIntoExtension": "bin/installServerIntoExtension" - } - }, "node_modules/vscode-languageserver-protocol": { "version": "3.17.3", "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.3.tgz", @@ -4027,14 +4015,6 @@ } } }, - "vscode-languageserver": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-8.1.0.tgz", - "integrity": "sha512-eUt8f1z2N2IEUDBsKaNapkz7jl5QpskN2Y0G01T/ItMxBxw1fJwvtySGB9QMecatne8jFIWJGWI61dWjyTLQsw==", - "requires": { - "vscode-languageserver-protocol": "3.17.3" - } - }, "vscode-languageserver-protocol": { "version": "3.17.3", "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.3.tgz", From 678f68288565856716a26cceb331180894bef321 Mon Sep 17 00:00:00 2001 From: Micah Date: Sun, 20 Aug 2023 15:18:53 -0500 Subject: [PATCH 246/678] Preliminary LSP go-to util + tests --- .vscode/launch.json | 2 +- Cargo.lock | 32 ++- crates/hir/Cargo.toml | 4 +- crates/hir/src/hir_def/scope_graph.rs | 2 +- crates/language-server/Cargo.toml | 5 +- crates/language-server/src/cursor.rs | 0 crates/language-server/src/db.rs | 44 +++- crates/language-server/src/goto.rs | 200 ++++++++++++++++++ .../language-server/src/handlers/request.rs | 38 +++- crates/language-server/src/main.rs | 1 + crates/language-server/src/server.rs | 3 + crates/language-server/src/util.rs | 17 ++ crates/language-server/test_files/goto.fe | 9 + crates/language-server/test_files/goto.snap | 22 ++ .../test_files/smallest_enclosing.fe | 7 + .../test_files/smallest_enclosing.snap | 18 ++ 16 files changed, 381 insertions(+), 23 
deletions(-) create mode 100644 crates/language-server/src/cursor.rs create mode 100644 crates/language-server/src/goto.rs create mode 100644 crates/language-server/test_files/goto.fe create mode 100644 crates/language-server/test_files/goto.snap create mode 100644 crates/language-server/test_files/smallest_enclosing.fe create mode 100644 crates/language-server/test_files/smallest_enclosing.snap diff --git a/.vscode/launch.json b/.vscode/launch.json index a3b8f2b3a5..940181cf5d 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -7,7 +7,7 @@ { "args": [ "--extensionDevelopmentPath=${workspaceFolder}/crates/language-server/editors/vscode", - "${workspaceFolder}/crates/test-files/fixtures/" + "${workspaceFolder}/crates/" ], "name": "Launch Fe VSCode Extension", "outFiles": [ diff --git a/Cargo.lock b/Cargo.lock index 3489ac14ff..63605c7abe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -918,7 +918,7 @@ dependencies = [ "semver 1.0.17", "smallvec", "smol_str", - "strum", + "strum 0.23.0", "wasm-bindgen-test", ] @@ -1078,6 +1078,8 @@ dependencies = [ "rustc-hash", "salsa-2022", "smallvec", + "strum 0.25.0", + "strum_macros 0.25.2", ] [[package]] @@ -1111,15 +1113,19 @@ dependencies = [ "clap 4.3.12", "codespan-reporting", "crossbeam-channel", + "dir-test", "fe-analyzer", "fe-common2", + "fe-compiler-test-utils", "fe-driver2", "fe-hir", "fe-hir-analysis", "fe-macros", + "fxhash", "indexmap", "lsp-server", "lsp-types", + "rowan", "salsa-2022", "serde", "serde_json", @@ -2719,7 +2725,16 @@ version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cae14b91c7d11c9a851d3fbc80a963198998c2a64eec840477fa92d8ce9b70bb" dependencies = [ - "strum_macros", + "strum_macros 0.23.1", +] + +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" +dependencies = [ + "strum_macros 0.25.2", ] [[package]] @@ -2735,6 +2750,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.27", +] + [[package]] name = "substrate-bn" version = "0.6.0" diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 3de03053b0..62b00492f2 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -20,7 +20,9 @@ rustc-hash = "1.1.0" smallvec = "1.10.0" paste = "1.0" dot2 = "1.0" +strum = { version = "0.25", features = ["derive"] } +strum_macros = "0.25" common = { path = "../common2", package = "fe-common2" } parser = { path = "../parser2", package = "fe-parser2" } -macros = { path = "../macros", package = "fe-macros" } +macros = { path = "../macros", package = "fe-macros" } \ No newline at end of file diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 6f07b569b6..12bdd09b8e 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -70,7 +70,7 @@ impl ScopeGraph { } /// An reference to a `[ScopeData]` in a `ScopeGraph`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, strum::Display)] pub enum ScopeId { /// An item scope. 
Item(ItemKind), diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 915b2310a4..1295e03bc7 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -27,4 +27,7 @@ lsp-types = "0.94.0" serde = "1.0.162" serde_json = "1.0.96" indexmap = "1.6.2" - +rowan = "0.15.10" +fxhash = "0.2.1" +dir-test = "0.1" +fe-compiler-test-utils = { path = "../test-utils" } \ No newline at end of file diff --git a/crates/language-server/src/cursor.rs b/crates/language-server/src/cursor.rs new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 8ad820abc1..012471be2d 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -6,28 +6,30 @@ use common::{ InputDb, InputFile, InputIngot, }; use hir::{ - analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, - lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::{TopLevelMod, scope_graph::ScopeId, ItemKind}, + lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, span::{DynLazySpan, LazySpan}, }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, HirAnalysisDb, }; +use rowan::cursor; + +use crate::goto::{Cursor, GotoPathMap, GotoEnclosingPath}; #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::diagnostics::file_line_starts); pub trait LanguageServerDb: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb -{ -} +{ } impl LanguageServerDb for DB where DB: Sized + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb -{ -} +{ } -#[salsa::db(common::Jar, hir::Jar, hir_analysis::Jar, Jar)] + +#[salsa::db(common::Jar, hir::Jar, hir::LowerJar, hir::SpannedJar, hir_analysis::Jar, Jar)] pub struct LanguageServerDatabase { storage: salsa::Storage, diags: Vec>, @@ -71,6 +73,28 @@ impl LanguageServerDatabase { map_file_to_mod(self, file) } + pub fn find_enclosing_item(&mut self, top_mod: TopLevelMod, cursor: Cursor) -> Option { + let items = top_mod.scope_graph(self.as_hir_db()).items_dfs(self.as_hir_db()); + + let mut smallest_enclosing_item = None; + let mut smallest_range_size = None; + + for item in items { + let lazy_item_span = DynLazySpan::from(item.lazy_span()); + let item_span = lazy_item_span.resolve(SpannedHirDb::as_spanned_hir_db(self)).unwrap(); + + if item_span.range.contains(cursor) { + let range_size = item_span.range.end() - item_span.range.start(); + if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { + smallest_enclosing_item = Some(item); + smallest_range_size = Some(range_size); + } + } + } + + return smallest_enclosing_item; + } + pub fn finalize_diags(&self) -> Vec { let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { @@ -81,9 +105,9 @@ impl LanguageServerDatabase { } } -impl HirDb for LanguageServerDatabase {} -impl SpannedHirDb for LanguageServerDatabase {} -impl LowerHirDb for LanguageServerDatabase {} +// impl HirDb for LanguageServerDatabase {} +// impl SpannedHirDb for LanguageServerDatabase {} +// impl LowerHirDb for LanguageServerDatabase {} impl salsa::Database for LanguageServerDatabase { fn salsa_event(&self, _: salsa::Event) {} } diff --git a/crates/language-server/src/goto.rs 
b/crates/language-server/src/goto.rs new file mode 100644 index 0000000000..443e9a557f --- /dev/null +++ b/crates/language-server/src/goto.rs @@ -0,0 +1,200 @@ +use fxhash::FxHashMap; +use hir::{ + hir_def::{scope_graph::ScopeId, PathId, TopLevelMod, ItemKind}, + visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, + HirDb, +}; +use hir_analysis::name_resolution::EarlyResolvedPath; + +use crate::db::{LanguageServerDatabase, LanguageServerDb}; +use common::diagnostics::Span; +use hir::span::LazySpan; + +pub(crate) type GotoEnclosingPath = (PathId, ScopeId); +pub(crate) type GotoPathMap = FxHashMap; + +pub struct PathSpanCollector<'db> { + // You don't need to collect scope id basically. + path_map: GotoPathMap, + db: &'db dyn LanguageServerDb, +} + +impl<'db> PathSpanCollector<'db> { + pub fn new(db: &'db LanguageServerDatabase) -> Self { + Self { + path_map: FxHashMap::default(), + db, + } + } +} + +pub(crate) type Cursor = rowan::TextSize; + +impl<'db> Visitor for PathSpanCollector<'db> { + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { + let Some(span) = ctxt + .span() + .map(|lazy_span| lazy_span.resolve( + self.db.as_spanned_hir_db() + )) + .flatten() + else { + return; + }; + + let scope = ctxt.scope(); + self.path_map.insert(span, (path, scope)); + } +} + +fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option{ + let mut smallest_enclosing_path = None; + let mut smallest_range_size = None; + + for (span, enclosing_path) in path_map { + if span.range.contains(cursor) { + let range_size = span.range.end() - span.range.start(); + if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { + smallest_enclosing_path = Some(*enclosing_path); + smallest_range_size = Some(range_size); + } + } + } + + return smallest_enclosing_path; +} + +pub fn goto_enclosing_path(db: &mut LanguageServerDatabase, top_mod: TopLevelMod, cursor: Cursor) -> Option { + // Find the innermost item enclosing the cursor. + let item: ItemKind = db.find_enclosing_item(top_mod, cursor)?; + + let mut visitor_ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); + let mut path_collector = PathSpanCollector::new(&db); + path_collector.visit_item(&mut visitor_ctxt, item); + + let path_map = path_collector.path_map; + + // Find the path that encloses the cursor. + let goto_path = smallest_enclosing_path(cursor, &path_map)?; + + let (path_id, scope_id) = goto_path; + + // Resolve path. 
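+    // `resolve_path_early` returns an `EarlyResolvedPath`: either `Full`, carrying a
+    // bucket of every resolution found for the whole path, or `Partial`, carrying the
+    // resolution reached so far (`res`) and where resolution stopped (`unresolved_from`).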
+ let resolved_path = hir_analysis::name_resolution::resolve_path_early(db, path_id, scope_id); + + Some(resolved_path) +} + +#[cfg(test)] +mod tests { + use super::*; + use fe_compiler_test_utils::snap_test; + use dir_test::{dir_test, Fixture}; + use std::path::Path; + + fn extract_multiple_cursor_positions_from_spans(db: &mut LanguageServerDatabase, top_mod: TopLevelMod) -> Vec { + let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); + let mut path_collector = PathSpanCollector::new(&db); + path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); + + let path_map = path_collector.path_map; + + let mut cursors = Vec::new(); + for (span, _) in path_map { + let cursor = span.range.start(); + // println!("cursor from span: {:?}, {:?}", span, cursor); + cursors.push(cursor); + } + + cursors + } + + + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files", + glob: "goto*.fe" + )] + fn test_goto_enclosing_path(fixture: Fixture<&str>) { + let mut db = LanguageServerDatabase::default(); + let path = Path::new(fixture.path()); + let top_mod = db.top_mod_from_file(path, fixture.content()); + + let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); + + let mut cursor_path_map: FxHashMap = FxHashMap::default(); + + cursors.iter().for_each(|cursor| { + let resolved_path = goto_enclosing_path(&mut db, top_mod, *cursor); + + match resolved_path { + Some(path) => match path { + EarlyResolvedPath::Full(bucket) => { + let path = bucket.iter().map(|x| x.pretty_path(&db).unwrap()).collect::>() + .join("\n"); + cursor_path_map.insert(*cursor, path); + }, + EarlyResolvedPath::Partial { res, unresolved_from } => { + let path = res.pretty_path(&db).unwrap(); + cursor_path_map.insert(*cursor, path); + }, + }, + None => {}, + }; + }); + + let result = format!( + "{}\n---\n{}", + fixture.content(), + cursor_path_map.iter().map(|(cursor, path)| { + format!("cursor position: {:?}, path: {}", cursor, path) + }).collect::>().join("\n") + ); + snap_test!(result, fixture.path()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files", + glob: "smallest_enclosing*.fe" + )] + fn test_smallest_enclosing_path(fixture: Fixture<&str>) { + let mut db = LanguageServerDatabase::default(); + let path = Path::new(fixture.path()); + let top_mod = db.top_mod_from_file(path, fixture.content()); + + let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); + + let mut cursor_path_map: FxHashMap = FxHashMap::default(); + + cursors.iter().for_each(|cursor| { + let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); + let mut path_collector = PathSpanCollector::new(&db); + path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); + + let path_map = path_collector.path_map; + let enclosing_path = smallest_enclosing_path(*cursor, &path_map); + + let resolved_enclosing_path = hir_analysis::name_resolution::resolve_path_early(&mut db, enclosing_path.unwrap().0, enclosing_path.unwrap().1); + + let res = match resolved_enclosing_path { + EarlyResolvedPath::Full(bucket) => { + bucket.iter().map(|x| x.pretty_path(&db).unwrap()).collect::>() + .join("\n") + }, + EarlyResolvedPath::Partial { res, unresolved_from } => { + res.pretty_path(&db).unwrap() + }, + }; + cursor_path_map.insert(*cursor, res); + }); + + let result = format!( + "{}\n---\n{}", + fixture.content(), + cursor_path_map.iter().map(|(cursor, path)| { + format!("cursor position: {:?}, path: {}", cursor, path) + }).collect::>().join("\n") + ); + snap_test!(result, fixture.path()); + } +} diff --git 
a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 6374dd791c..10821291f8 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,9 +1,10 @@ use std::io::BufRead; +use hir_analysis::name_resolution::EarlyResolvedPath; use lsp_server::Response; use serde::Deserialize; -use crate::state::ServerState; +use crate::{state::ServerState, goto::{goto_enclosing_path, Cursor}, util::position_to_offset}; pub(crate) fn handle_hover( state: &mut ServerState, @@ -11,27 +12,50 @@ pub(crate) fn handle_hover( ) -> Result<(), anyhow::Error> { // TODO: get more relevant information for the hover let params = lsp_types::HoverParams::deserialize(req.params)?; - let file = std::fs::File::open( - ¶ms + let file_path = ¶ms .text_document_position_params .text_document .uri - .path(), - )?; + .path(); + let file = std::fs::File::open(file_path)?; let reader = std::io::BufReader::new(file); let line = reader .lines() .nth(params.text_document_position_params.position.line as usize) .unwrap() .unwrap(); + + let file_text = std::fs::read_to_string(file_path)?; + + // let cursor: Cursor = params.text_document_position_params.position.into(); + let cursor: Cursor = position_to_offset(params.text_document_position_params.position, file_text.as_str()); + let file_path = std::path::Path::new(file_path); + let top_mod = state.db.top_mod_from_file(file_path, file_text.as_str()); + let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); + + let goto_info = match goto_info { + Some(EarlyResolvedPath::Full(bucket)) => { + bucket.iter().map(|x| x.pretty_path(&state.db).unwrap()).collect::>() + .join("\n") + + }, + Some(EarlyResolvedPath::Partial { res, unresolved_from }) => { + res.pretty_path(&state.db).unwrap() + }, + None => { + String::from("No goto info available") + } + }; + let result = lsp_types::Hover { contents: lsp_types::HoverContents::Markup(lsp_types::MarkupContent::from( lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value: format!( - "### Hovering over:\n```{}```\n\n{}", + "### Hovering over:\n```{}```\n\n{}\n\n### Goto Info: \n\n{}", &line, - serde_json::to_string_pretty(¶ms).unwrap() + serde_json::to_string_pretty(¶ms).unwrap(), + goto_info, ), }, )), diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 83df9914c5..d49f4ac04e 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -3,6 +3,7 @@ mod state; mod db; mod util; mod diagnostics; +mod goto; use db::Jar; mod handlers { diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 76068732cd..2eeb7d298d 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -55,6 +55,9 @@ pub fn run_server() -> Result<()> { }) )?; + // print a message to the console + eprintln!("Fe language server started"); + let result = ServerState::new(connection.sender).run(connection.receiver)?; io_threads.join().unwrap(); diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index 148828662f..042140d514 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,6 +1,23 @@ use common::diagnostics::{Severity, CompleteDiagnostic, Span}; use lsp_types::Position; +pub(crate) fn position_to_offset(position: Position, text: &str) -> rowan::TextSize { + let line_offsets: Vec = text + .lines() + .scan(0, |state, line| { + let offset 
= *state; + *state += line.len() + 1; + Some(offset) + }) + .collect(); + + let line_offset = line_offsets[position.line as usize]; + let character_offset = position.character as usize; + + rowan::TextSize::from((line_offset + character_offset) as u32) +} + + pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { let line_offsets: Vec = text .lines() diff --git a/crates/language-server/test_files/goto.fe b/crates/language-server/test_files/goto.fe new file mode 100644 index 0000000000..eb5bbc42af --- /dev/null +++ b/crates/language-server/test_files/goto.fe @@ -0,0 +1,9 @@ +struct Foo {} +struct Bar {} + +fn main() { + struct Baz {} + let x: Foo + let y: Bar + let z: Baz +} \ No newline at end of file diff --git a/crates/language-server/test_files/goto.snap b/crates/language-server/test_files/goto.snap new file mode 100644 index 0000000000..b51cfdcd55 --- /dev/null +++ b/crates/language-server/test_files/goto.snap @@ -0,0 +1,22 @@ +--- +source: crates/language-server/src/goto.rs +assertion_line: 153 +expression: result +input_file: crates/language-server/test_files/goto.fe +--- +struct Foo {} +struct Bar {} + +fn main() { + struct Baz {} + let x: Foo + let y: Bar + let z: Baz +} +--- +cursor position: 85, path: goto::Bar +cursor position: 82, path: +cursor position: 100, path: goto::main::{fn_body}::{block0}::Baz +cursor position: 97, path: +cursor position: 70, path: goto::Foo +cursor position: 67, path: diff --git a/crates/language-server/test_files/smallest_enclosing.fe b/crates/language-server/test_files/smallest_enclosing.fe new file mode 100644 index 0000000000..fa1ae4c2ff --- /dev/null +++ b/crates/language-server/test_files/smallest_enclosing.fe @@ -0,0 +1,7 @@ +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Bar +} \ No newline at end of file diff --git a/crates/language-server/test_files/smallest_enclosing.snap b/crates/language-server/test_files/smallest_enclosing.snap new file mode 100644 index 0000000000..88e36f1a69 --- /dev/null +++ b/crates/language-server/test_files/smallest_enclosing.snap @@ -0,0 +1,18 @@ +--- +source: crates/language-server/src/goto.rs +assertion_line: 198 +expression: result +input_file: crates/language-server/test_files/smallest_enclosing.fe +--- +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Bar +} +--- +cursor position: 64, path: +cursor position: 52, path: smallest_enclosing::Foo +cursor position: 49, path: +cursor position: 67, path: smallest_enclosing::Bar From 7ce8f9f16765d57db220b07d57d1fbbee8bd4d39 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 21 Aug 2023 13:31:31 -0500 Subject: [PATCH 247/678] Basic working LSP goto definition --- crates/language-server/src/db.rs | 19 ++++--- crates/language-server/src/goto.rs | 4 +- .../src/handlers/notifications.rs | 2 +- .../language-server/src/handlers/request.rs | 47 +++++++++++++++- crates/language-server/src/server.rs | 4 +- crates/language-server/src/state.rs | 6 +++ crates/language-server/src/util.rs | 53 +++++++++++-------- 7 files changed, 102 insertions(+), 33 deletions(-) diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 012471be2d..ae4f266789 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -6,16 +6,16 @@ use common::{ InputDb, InputFile, InputIngot, }; use hir::{ - analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::{TopLevelMod, scope_graph::ScopeId, ItemKind}, + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, 
hir_def::{TopLevelMod, ItemKind, PathId, scope_graph::ScopeId}, lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, span::{DynLazySpan, LazySpan}, }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, HirAnalysisDb, }; -use rowan::cursor; +use salsa::DbWithJar; -use crate::goto::{Cursor, GotoPathMap, GotoEnclosingPath}; +use crate::{goto::Cursor, util::span_to_lsp_range}; #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::diagnostics::file_line_starts); @@ -94,6 +94,16 @@ impl LanguageServerDatabase { return smallest_enclosing_item; } + + // pub fn scope_to_location(&mut self, scope_id: ScopeId) -> Option { + // scope_id.name_span(self).unwrap().resolve(SpannedHirDb::as_spanned_hir_db(self)).map(|span| { + // // let span = lazy_span.resolve(SpannedHirDb::as_spanned_hir_db(self)).unwrap(); + // let file = span.file; + // let uri = lsp_types::Url::from_file_path(file.path(self)).unwrap(); + // let location = lsp_types::Location::new(uri, span_to_lsp_range(span, self)); + // location + // }) + // } pub fn finalize_diags(&self) -> Vec { let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); @@ -105,9 +115,6 @@ impl LanguageServerDatabase { } } -// impl HirDb for LanguageServerDatabase {} -// impl SpannedHirDb for LanguageServerDatabase {} -// impl LowerHirDb for LanguageServerDatabase {} impl salsa::Database for LanguageServerDatabase { fn salsa_event(&self, _: salsa::Event) {} } diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 443e9a557f..4a6e3421a7 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -75,9 +75,9 @@ pub fn goto_enclosing_path(db: &mut LanguageServerDatabase, top_mod: TopLevelMod let path_map = path_collector.path_map; // Find the path that encloses the cursor. - let goto_path = smallest_enclosing_path(cursor, &path_map)?; + let goto_starting_path = smallest_enclosing_path(cursor, &path_map)?; - let (path_id, scope_id) = goto_path; + let (path_id, scope_id) = goto_starting_path; // Resolve path. 
let resolved_path = hir_analysis::name_resolution::resolve_path_early(db, path_id, scope_id); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 23699a8dbe..5a534bdba9 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -32,7 +32,7 @@ pub(crate) fn handle_document_did_open( // send diagnostics using `state.send_response` for each diagnostic let diagnostics = diags.into_iter().flat_map(|diag| { - diag_to_lsp(diag, text.as_str()).iter().map(|x| x.clone()).collect::>() + diag_to_lsp(diag, &state.db).iter().map(|x| x.clone()).collect::>() }); let result = lsp_types::PublishDiagnosticsParams { diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 10821291f8..f4f8d88d4c 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,10 +1,10 @@ use std::io::BufRead; -use hir_analysis::name_resolution::EarlyResolvedPath; +use hir_analysis::name_resolution::{EarlyResolvedPath, NameDerivation}; use lsp_server::Response; use serde::Deserialize; -use crate::{state::ServerState, goto::{goto_enclosing_path, Cursor}, util::position_to_offset}; +use crate::{state::ServerState, goto::{goto_enclosing_path, Cursor}, util::{position_to_offset, scope_to_lsp_location}}; pub(crate) fn handle_hover( state: &mut ServerState, @@ -70,3 +70,46 @@ pub(crate) fn handle_hover( state.send_response(response_message)?; Ok(()) } + +use lsp_types::{request::GotoDefinition, TextDocumentPositionParams, Location}; + +pub(crate) fn handle_goto_definition( + state: &mut ServerState, + req: lsp_server::Request, +) -> Result<(), anyhow::Error> { + let params = TextDocumentPositionParams::deserialize(req.params)?; + + // Convert the position to an offset in the file + let file_text = std::fs::read_to_string(params.text_document.uri.path())?; + let cursor: Cursor = position_to_offset(params.position, file_text.as_str()); + + // Get the module and the goto info + let file_path = std::path::Path::new(params.text_document.uri.path()); + let top_mod = state.db.top_mod_from_file(file_path, file_text.as_str()); + let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); + + // Convert the goto info to a Location + let scope = match goto_info { + Some(EarlyResolvedPath::Full(bucket)) => { + bucket.iter().map(|x| x.scope()).last().unwrap() + }, + Some(EarlyResolvedPath::Partial { res, unresolved_from }) => { + res.scope() + }, + None => { + return Ok(()) + } + }; + + let location = scope_to_lsp_location(scope.unwrap(), &state.db); + + // Send the response + let response_message = Response { + id: req.id, + result: Some(serde_json::to_value(lsp_types::GotoDefinitionResponse::Scalar(location))?), + error: None, + }; + + state.send_response(response_message)?; + Ok(()) +} \ No newline at end of file diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 2eeb7d298d..52e6962158 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,7 +1,7 @@ use super::state::ServerState; use anyhow::Result; use lsp_server::{Connection, Notification}; -use lsp_types::{ServerCapabilities, HoverProviderCapability}; +use lsp_types::{ServerCapabilities, HoverProviderCapability, GotoCapability}; fn server_capabilities() -> ServerCapabilities { ServerCapabilities { @@ -10,6 +10,8 @@ fn server_capabilities() -> 
ServerCapabilities { text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Kind( lsp_types::TextDocumentSyncKind::FULL, )), + // goto definition + definition_provider: Some(lsp_types::OneOf::Left(true)), ..Default::default() } } diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index ba040e9ad4..105fa86774 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -5,6 +5,7 @@ use lsp_types::notification::Notification; use lsp_types::request::Request; use crate::db::LanguageServerDatabase; +use crate::handlers::request::handle_goto_definition; use crate::handlers::{ request::handle_hover, notifications::handle_document_did_open @@ -46,6 +47,11 @@ impl ServerState { match req.method.as_str() { // TODO: implement actually useful hover handler lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, + // goto definition + lsp_types::request::GotoDefinition::METHOD => handle_goto_definition(self, req)?, + lsp_types::request::GotoTypeDefinition::METHOD => handle_goto_definition(self, req)?, + lsp_types::request::GotoImplementation::METHOD => handle_goto_definition(self, req)?, + lsp_types::request::GotoDeclaration::METHOD => handle_goto_definition(self, req)?, _ => {} } diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index 042140d514..f0499367ab 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,5 +1,6 @@ -use common::diagnostics::{Severity, CompleteDiagnostic, Span}; -use lsp_types::Position; +use common::{diagnostics::{Severity, CompleteDiagnostic, Span}, InputDb}; +use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; +use lsp_types::{Position, Url}; pub(crate) fn position_to_offset(position: Position, text: &str) -> rowan::TextSize { let line_offsets: Vec = text @@ -18,7 +19,8 @@ pub(crate) fn position_to_offset(position: Position, text: &str) -> rowan::TextS } -pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { +pub(crate) fn span_to_lsp_range(span: Span, db: &dyn InputDb) -> lsp_types::Range { + let text = span.file.text(db); let line_offsets: Vec = text .lines() .scan(0, |state, line| { @@ -45,6 +47,15 @@ pub(crate) fn span_to_range(span: Span, text: &str) -> lsp_types::Range { } } +pub(crate) fn scope_to_lsp_location(scope: ScopeId, db: &dyn SpannedHirDb) -> lsp_types::Location { + let lazy_span = scope.name_span(db.as_hir_db()).unwrap(); + let span = lazy_span.resolve(db.as_spanned_hir_db()).unwrap(); + let uri = span.file.abs_path(db.as_input_db()); + let range = span_to_lsp_range(span, db.as_input_db()); + let uri = lsp_types::Url::from_file_path(uri).unwrap(); + lsp_types::Location { uri, range } +} + pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { match severity { // Severity::Bug => lsp_types::DiagnosticSeverity::ERROR, @@ -55,22 +66,22 @@ pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeveri } } -pub(crate) fn diag_to_lsp(diag: CompleteDiagnostic, text: &str) -> Vec { - diag.sub_diagnostics - .into_iter() - .map(|sub| { - let range = span_to_range(sub.span.unwrap(), text); - lsp_types::Diagnostic { - range, - severity: Some(severity_to_lsp(diag.severity)), - code: None, - source: None, - message: diag.message.clone(), - related_information: None, - tags: None, - code_description: None, - data: None - } - }) - .collect() +pub(crate) fn diag_to_lsp(diag: CompleteDiagnostic, db: &dyn InputDb) -> Vec { + 
diag.sub_diagnostics + .into_iter() + .map(|sub| { + let range = span_to_lsp_range(sub.span.unwrap(), db); + lsp_types::Diagnostic { + range, + severity: Some(severity_to_lsp(diag.severity)), + code: None, + source: None, + message: diag.message.clone(), + related_information: None, + tags: None, + code_description: None, + data: None + } + }) + .collect() } \ No newline at end of file From 0fcdb8c911b2d632fb705290f7e37c3781cdba9f Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 21 Aug 2023 16:00:57 -0500 Subject: [PATCH 248/678] LSP: handle document/didChange correctly --- crates/language-server/src/db.rs | 15 +--- .../src/handlers/notifications.rs | 52 ++++++++---- .../language-server/src/handlers/request.rs | 79 +++++++++++-------- crates/language-server/src/state.rs | 33 +++++--- 4 files changed, 103 insertions(+), 76 deletions(-) diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index ae4f266789..dc9a96bb26 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -6,16 +6,15 @@ use common::{ InputDb, InputFile, InputIngot, }; use hir::{ - analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::{TopLevelMod, ItemKind, PathId, scope_graph::ScopeId}, + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::{TopLevelMod, ItemKind}, lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, span::{DynLazySpan, LazySpan}, }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, HirAnalysisDb, }; -use salsa::DbWithJar; -use crate::{goto::Cursor, util::span_to_lsp_range}; +use crate::goto::Cursor; #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::diagnostics::file_line_starts); @@ -95,16 +94,6 @@ impl LanguageServerDatabase { return smallest_enclosing_item; } - // pub fn scope_to_location(&mut self, scope_id: ScopeId) -> Option { - // scope_id.name_span(self).unwrap().resolve(SpannedHirDb::as_spanned_hir_db(self)).map(|span| { - // // let span = lazy_span.resolve(SpannedHirDb::as_spanned_hir_db(self)).unwrap(); - // let file = span.file; - // let uri = lsp_types::Url::from_file_path(file.path(self)).unwrap(); - // let location = lsp_types::Location::new(uri, span_to_lsp_range(span, self)); - // location - // }) - // } - pub fn finalize_diags(&self) -> Vec { let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 5a534bdba9..0641c475ad 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -10,34 +10,52 @@ fn string_diagnostics(db: &mut LanguageServerDatabase, path: &str, src: &str) -> db.finalize_diags() } -// pub(crate) fn handle_document_did_change(state: &mut ServerState, req: lsp_server::Request) -> Result<(), Error> { -// todo: incremental parsing and diagnostics -// } - -pub(crate) fn handle_document_did_open( +pub(crate) fn get_diagnostics( state: &mut ServerState, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; - let text = params.text_document.text; - + text: String, + uri: lsp_types::Url, +) -> Result, Error> { let diags = string_diagnostics( &mut state.db, - params.text_document.uri.to_file_path().unwrap().to_str().unwrap(), + 
uri.to_file_path().unwrap().to_str().unwrap(), text.as_str(), ); - state.log_info(format!("diagnostics: {:?}", diags))?; - - // send diagnostics using `state.send_response` for each diagnostic - let diagnostics = diags.into_iter().flat_map(|diag| { diag_to_lsp(diag, &state.db).iter().map(|x| x.clone()).collect::>() }); + Ok(diagnostics.collect()) +} + +pub(crate) fn handle_document_did_open( + state: &mut ServerState, + note: lsp_server::Notification, +) -> Result<(), Error> { + let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; + let text = params.text_document.text; + let diagnostics = get_diagnostics(state, text, params.text_document.uri.clone())?; + send_diagnostics(state, diagnostics, params.text_document.uri.clone()) +} + +pub(crate) fn handle_document_did_change( + state: &mut ServerState, + note: lsp_server::Notification, +) -> Result<(), Error> { + let params = lsp_types::DidChangeTextDocumentParams::deserialize(note.params)?; + let text = params.content_changes[0].text.clone(); + let diagnostics = get_diagnostics(state, text, params.text_document.uri.clone())?; + send_diagnostics(state, diagnostics, params.text_document.uri.clone()) +} + +fn send_diagnostics( + state: &mut ServerState, + diagnostics: Vec, + uri: lsp_types::Url, +) -> Result<(), Error> { let result = lsp_types::PublishDiagnosticsParams { - uri: params.text_document.uri.clone(), - diagnostics: diagnostics.collect(), + uri: uri, + diagnostics: diagnostics, version: None, }; let response = lsp_server::Message::Notification(lsp_server::Notification { diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index f4f8d88d4c..5a5792f270 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,10 +1,14 @@ use std::io::BufRead; -use hir_analysis::name_resolution::{EarlyResolvedPath, NameDerivation}; +use hir_analysis::name_resolution::EarlyResolvedPath; use lsp_server::Response; use serde::Deserialize; -use crate::{state::ServerState, goto::{goto_enclosing_path, Cursor}, util::{position_to_offset, scope_to_lsp_location}}; +use crate::{ + goto::{goto_enclosing_path, Cursor}, + state::ServerState, + util::{position_to_offset, scope_to_lsp_location}, +}; pub(crate) fn handle_hover( state: &mut ServerState, @@ -13,10 +17,10 @@ pub(crate) fn handle_hover( // TODO: get more relevant information for the hover let params = lsp_types::HoverParams::deserialize(req.params)?; let file_path = ¶ms - .text_document_position_params - .text_document - .uri - .path(); + .text_document_position_params + .text_document + .uri + .path(); let file = std::fs::File::open(file_path)?; let reader = std::io::BufReader::new(file); let line = reader @@ -28,23 +32,25 @@ pub(crate) fn handle_hover( let file_text = std::fs::read_to_string(file_path)?; // let cursor: Cursor = params.text_document_position_params.position.into(); - let cursor: Cursor = position_to_offset(params.text_document_position_params.position, file_text.as_str()); + let cursor: Cursor = position_to_offset( + params.text_document_position_params.position, + file_text.as_str(), + ); let file_path = std::path::Path::new(file_path); let top_mod = state.db.top_mod_from_file(file_path, file_text.as_str()); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); - + let goto_info = match goto_info { - Some(EarlyResolvedPath::Full(bucket)) => { - bucket.iter().map(|x| x.pretty_path(&state.db).unwrap()).collect::>() - .join("\n") - - }, - 
Some(EarlyResolvedPath::Partial { res, unresolved_from }) => { - res.pretty_path(&state.db).unwrap() - }, - None => { - String::from("No goto info available") - } + Some(EarlyResolvedPath::Full(bucket)) => bucket + .iter() + .map(|x| x.pretty_path(&state.db).unwrap()) + .collect::>() + .join("\n"), + Some(EarlyResolvedPath::Partial { + res, + unresolved_from: _, + }) => res.pretty_path(&state.db).unwrap(), + None => String::from("No goto info available"), }; let result = lsp_types::Hover { @@ -71,7 +77,7 @@ pub(crate) fn handle_hover( Ok(()) } -use lsp_types::{request::GotoDefinition, TextDocumentPositionParams, Location}; +use lsp_types::TextDocumentPositionParams; pub(crate) fn handle_goto_definition( state: &mut ServerState, @@ -87,29 +93,36 @@ pub(crate) fn handle_goto_definition( let file_path = std::path::Path::new(params.text_document.uri.path()); let top_mod = state.db.top_mod_from_file(file_path, file_text.as_str()); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); - + // Convert the goto info to a Location - let scope = match goto_info { + let scopes = match goto_info { Some(EarlyResolvedPath::Full(bucket)) => { - bucket.iter().map(|x| x.scope()).last().unwrap() - }, - Some(EarlyResolvedPath::Partial { res, unresolved_from }) => { - res.scope() - }, - None => { - return Ok(()) + bucket.iter().map(|x| x.scope()).collect::>() } + Some(EarlyResolvedPath::Partial { + res, + unresolved_from: _, + }) => { + vec![res.scope()] + } + None => return Ok(()), }; - - let location = scope_to_lsp_location(scope.unwrap(), &state.db); + + let locations = scopes + .into_iter() + .filter_map(|scope| scope) + .map(|scope| scope_to_lsp_location(scope, &state.db)) + .collect::>(); // Send the response let response_message = Response { id: req.id, - result: Some(serde_json::to_value(lsp_types::GotoDefinitionResponse::Scalar(location))?), + result: Some(serde_json::to_value( + lsp_types::GotoDefinitionResponse::Array(locations), + )?), error: None, }; state.send_response(response_message)?; Ok(()) -} \ No newline at end of file +} diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 105fa86774..d8e94ec59b 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,15 +1,13 @@ +use crate::db::LanguageServerDatabase; use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; use lsp_server::Message; use lsp_types::notification::Notification; use lsp_types::request::Request; -use crate::db::LanguageServerDatabase; +use crate::handlers::notifications::handle_document_did_change; use crate::handlers::request::handle_goto_definition; -use crate::handlers::{ - request::handle_hover, - notifications::handle_document_did_open -}; +use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; pub struct ServerState { pub sender: Sender, @@ -18,7 +16,10 @@ pub struct ServerState { impl ServerState { pub fn new(sender: Sender) -> Self { - ServerState { sender, db: LanguageServerDatabase::default() } + ServerState { + sender, + db: LanguageServerDatabase::default(), + } } pub fn run(&mut self, receiver: Receiver) -> Result<()> { @@ -49,20 +50,26 @@ impl ServerState { lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, // goto definition lsp_types::request::GotoDefinition::METHOD => handle_goto_definition(self, req)?, - lsp_types::request::GotoTypeDefinition::METHOD => handle_goto_definition(self, req)?, - lsp_types::request::GotoImplementation::METHOD => 
handle_goto_definition(self, req)?, + lsp_types::request::GotoTypeDefinition::METHOD => { + handle_goto_definition(self, req)? + } + lsp_types::request::GotoImplementation::METHOD => { + handle_goto_definition(self, req)? + } lsp_types::request::GotoDeclaration::METHOD => handle_goto_definition(self, req)?, _ => {} } - - } else if let lsp_server::Message::Notification(note) = msg { // log the notification to the console self.log_info(format!("NOTIFICATION: {:?}", note))?; - + match note.method.as_str() { - lsp_types::notification::DidOpenTextDocument::METHOD => handle_document_did_open(self, note)?, - lsp_types::notification::DidChangeTextDocument::METHOD => handle_document_did_open(self, note)?, + lsp_types::notification::DidOpenTextDocument::METHOD => { + handle_document_did_open(self, note)? + } + lsp_types::notification::DidChangeTextDocument::METHOD => { + handle_document_did_change(self, note)? + } _ => {} } } From 0ff6349d92ee1f027f5fae3fb4035e3093eae044 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 21 Aug 2023 16:02:03 -0500 Subject: [PATCH 249/678] LSP: more complex goto test --- crates/language-server/test_files/goto.fe | 7 +++++-- crates/language-server/test_files/goto.snap | 19 +++++++++++-------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/crates/language-server/test_files/goto.fe b/crates/language-server/test_files/goto.fe index eb5bbc42af..e3d2bd70fc 100644 --- a/crates/language-server/test_files/goto.fe +++ b/crates/language-server/test_files/goto.fe @@ -2,8 +2,11 @@ struct Foo {} struct Bar {} fn main() { - struct Baz {} let x: Foo let y: Bar - let z: Baz + let z: baz::Baz +} + +mod baz { + pub struct Baz {} } \ No newline at end of file diff --git a/crates/language-server/test_files/goto.snap b/crates/language-server/test_files/goto.snap index b51cfdcd55..a3956e2580 100644 --- a/crates/language-server/test_files/goto.snap +++ b/crates/language-server/test_files/goto.snap @@ -8,15 +8,18 @@ struct Foo {} struct Bar {} fn main() { - struct Baz {} let x: Foo let y: Bar - let z: Baz + let z: baz::Baz +} + +mod baz { + pub struct Baz {} } --- -cursor position: 85, path: goto::Bar -cursor position: 82, path: -cursor position: 100, path: goto::main::{fn_body}::{block0}::Baz -cursor position: 97, path: -cursor position: 70, path: goto::Foo -cursor position: 67, path: +cursor position: 64, path: +cursor position: 82, path: goto::baz::Baz +cursor position: 79, path: +cursor position: 52, path: goto::Foo +cursor position: 49, path: +cursor position: 67, path: goto::Bar From 0276cc9de2c859e72a9b06c2f23976dfbd4c498b Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 21 Aug 2023 18:45:36 -0500 Subject: [PATCH 250/678] nice LSP logger --- Cargo.lock | 1 + crates/language-server/Cargo.toml | 3 +- .../src/handlers/notifications.rs | 3 +- crates/language-server/src/server.rs | 32 +++----- crates/language-server/src/state.rs | 81 +++++++++++++++---- crates/language-server/src/util.rs | 2 +- 6 files changed, 83 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 63605c7abe..cd8bd4eeaf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1123,6 +1123,7 @@ dependencies = [ "fe-macros", "fxhash", "indexmap", + "log", "lsp-server", "lsp-types", "rowan", diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 1295e03bc7..d1d36366d1 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -30,4 +30,5 @@ indexmap = "1.6.2" rowan = "0.15.10" fxhash = "0.2.1" dir-test = "0.1" -fe-compiler-test-utils = { 
path = "../test-utils" } \ No newline at end of file +fe-compiler-test-utils = { path = "../test-utils" } +log = "0.4" diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 0641c475ad..0bd37e8188 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -63,7 +63,8 @@ fn send_diagnostics( params: serde_json::to_value(result).unwrap(), }); - state.sender.send(response)?; + let sender = state.sender.lock().unwrap(); + sender.send(response)?; Ok(()) } diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 52e6962158..333a0663aa 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,7 +1,7 @@ use super::state::ServerState; use anyhow::Result; use lsp_server::{Connection, Notification}; -use lsp_types::{ServerCapabilities, HoverProviderCapability, GotoCapability}; +use lsp_types::{HoverProviderCapability, ServerCapabilities}; fn server_capabilities() -> ServerCapabilities { ServerCapabilities { @@ -36,31 +36,21 @@ pub fn run_server() -> Result<()> { connection.initialize_finish(request_id, initialize_result)?; // send a "hello" message to the client - connection.sender.send( - lsp_server::Message::Notification(Notification { + connection + .sender + .send(lsp_server::Message::Notification(Notification { method: String::from("window/showMessage"), params: serde_json::to_value(lsp_types::ShowMessageParams { typ: lsp_types::MessageType::INFO, message: String::from("hello from the Fe language server"), - }).unwrap() - }) - )?; - - // log a startup message - connection.sender.send( - lsp_server::Message::Notification(Notification { - method: String::from("window/logMessage"), - params: serde_json::to_value(lsp_types::LogMessageParams { - typ: lsp_types::MessageType::INFO, - message: String::from("Fe language server started"), - }).unwrap() - }) - )?; + }) + .unwrap(), + }))?; - // print a message to the console - eprintln!("Fe language server started"); - - let result = ServerState::new(connection.sender).run(connection.receiver)?; + let mut state = ServerState::new(connection.sender); + let _ = state.init_logger(log::Level::Info); + let result = state.run(connection.receiver)?; + io_threads.join().unwrap(); Ok(result) diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index d8e94ec59b..ca06d1686b 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,6 +1,10 @@ +use std::sync::{Arc, Mutex}; + use crate::db::LanguageServerDatabase; +use log::{ Record, Level, Metadata, info }; use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; +use log::{LevelFilter, SetLoggerError}; use lsp_server::Message; use lsp_types::notification::Notification; use lsp_types::request::Request; @@ -10,19 +14,27 @@ use crate::handlers::request::handle_goto_definition; use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; pub struct ServerState { - pub sender: Sender, + pub sender: Arc>>, pub db: LanguageServerDatabase, } impl ServerState { pub fn new(sender: Sender) -> Self { + let sender = Arc::new(Mutex::new(sender)); ServerState { sender, db: LanguageServerDatabase::default(), } } + + fn send (&mut self, msg: Message) -> Result<()> { + let sender = self.sender.lock().unwrap(); + sender.send(msg)?; + Ok(()) + } pub fn run(&mut self, receiver: Receiver) -> Result<()> { + info!("Fe 
Language Server listening..."); while let Some(msg) = self.next_message(&receiver) { if let lsp_server::Message::Notification(notification) = &msg { if notification.method == lsp_types::notification::Exit::METHOD { @@ -43,7 +55,7 @@ impl ServerState { fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { if let lsp_server::Message::Request(req) = msg { - self.log_info(format!("REQUEST: {:?}", req))?; + info!("REQUEST: {:?}", req); match req.method.as_str() { // TODO: implement actually useful hover handler @@ -61,7 +73,7 @@ impl ServerState { } } else if let lsp_server::Message::Notification(note) = msg { // log the notification to the console - self.log_info(format!("NOTIFICATION: {:?}", note))?; + info!("NOTIFICATION: {:?}", note); match note.method.as_str() { lsp_types::notification::DidOpenTextDocument::METHOD => { @@ -78,21 +90,60 @@ impl ServerState { } pub(crate) fn send_response(&mut self, response: lsp_server::Response) -> Result<()> { - self.sender.send(lsp_server::Message::Response(response))?; + self.send(lsp_server::Message::Response(response))?; + Ok(()) + } + + pub fn init_logger(&self, level:Level) -> Result<(), SetLoggerError> { + let logger = LspLogger { level, sender: self.sender.clone() }; + let static_logger = Box::leak(Box::new(logger)); + log::set_logger(static_logger)?; + log::set_max_level(LevelFilter::Debug); Ok(()) } +} + - pub(crate) fn log_info(&mut self, message: String) -> Result<()> { - self.sender.send(lsp_server::Message::Notification( - lsp_server::Notification { - method: String::from("window/logMessage"), - params: serde_json::to_value(lsp_types::LogMessageParams { - typ: lsp_types::MessageType::INFO, - message: message, - }) - .unwrap(), - }, - ))?; +pub(crate) struct LspLogger { + level: Level, + sender: Arc>>, +} + +impl LspLogger { + fn send (&self, msg: Message) -> Result<()> { + let sender = self.sender.lock().unwrap(); + sender.send(msg)?; Ok(()) } } + +impl log::Log for LspLogger { + fn enabled(&self, metadata: &Metadata) -> bool { + let logger = self; + metadata.level() <= logger.level + } + + fn log(&self, record: &Record) { + if self.enabled(record.metadata()) { + let message = format!("{} - {}", record.level(), record.args()); + let _ = self.send(lsp_server::Message::Notification( + lsp_server::Notification { + method: String::from("window/logMessage"), + params: serde_json::to_value(lsp_types::LogMessageParams { + typ: match record.level() { + Level::Error => lsp_types::MessageType::ERROR, + Level::Warn => lsp_types::MessageType::WARNING, + Level::Info => lsp_types::MessageType::INFO, + Level::Debug => lsp_types::MessageType::LOG, + Level::Trace => lsp_types::MessageType::LOG, + }, + message: message, + }) + .unwrap(), + }, + )); + } + } + + fn flush(&self) {} +} \ No newline at end of file diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index f0499367ab..ac1b1ec859 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,6 +1,6 @@ use common::{diagnostics::{Severity, CompleteDiagnostic, Span}, InputDb}; use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; -use lsp_types::{Position, Url}; +use lsp_types::Position; pub(crate) fn position_to_offset(position: Position, text: &str) -> rowan::TextSize { let line_offsets: Vec = text From 52126c3a39c80e90bf41555433f3a4f126739b45 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 23 Aug 2023 15:21:44 -0500 Subject: [PATCH 251/678] error handling in lsp diagnostics --- 
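The position/span helpers this patch reworks are all built on a cumulative line-offset table. Below is a minimal, self-contained sketch of that idea, assuming `\n` line endings; the names `line_offsets` and `to_offset` are illustrative and are not the helpers defined in `util.rs`.

```rust
// Sketch only: made-up helper names, not the crate's API.
// Each line contributes `line.len() + 1` bytes (the +1 is the '\n' terminator),
// so `line_offsets[i]` is the byte offset at which line `i` starts.
fn line_offsets(text: &str) -> Vec<usize> {
    text.lines()
        .scan(0usize, |state, line| {
            let offset = *state;
            *state += line.len() + 1;
            Some(offset)
        })
        .collect()
}

/// LSP-style (line, character) -> byte offset into `text`.
fn to_offset(line: usize, character: usize, text: &str) -> usize {
    line_offsets(text)[line] + character
}

fn main() {
    let src = "struct Foo {}\nfn main() {}\n";
    // Line 1, character 3 is the `m` of `main`.
    assert_eq!(to_offset(1, 3, src), 17);
    println!("byte offset: {}", to_offset(1, 3, src));
}
```

Going the other way, `to_lsp_range_from_span` below binary-searches the same table to turn a span's byte offsets back into line/character positions.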
crates/language-server/src/goto.rs | 1 - .../language-server/src/handlers/request.rs | 9 ++- crates/language-server/src/util.rs | 77 ++++++++++--------- 3 files changed, 45 insertions(+), 42 deletions(-) diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 4a6e3421a7..02996cad1a 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -14,7 +14,6 @@ pub(crate) type GotoEnclosingPath = (PathId, ScopeId); pub(crate) type GotoPathMap = FxHashMap; pub struct PathSpanCollector<'db> { - // You don't need to collect scope id basically. path_map: GotoPathMap, db: &'db dyn LanguageServerDb, } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 5a5792f270..f268fa4d6f 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -7,7 +7,7 @@ use serde::Deserialize; use crate::{ goto::{goto_enclosing_path, Cursor}, state::ServerState, - util::{position_to_offset, scope_to_lsp_location}, + util::{to_offset_from_position, to_lsp_location_from_scope}, }; pub(crate) fn handle_hover( @@ -32,7 +32,7 @@ pub(crate) fn handle_hover( let file_text = std::fs::read_to_string(file_path)?; // let cursor: Cursor = params.text_document_position_params.position.into(); - let cursor: Cursor = position_to_offset( + let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, file_text.as_str(), ); @@ -87,7 +87,7 @@ pub(crate) fn handle_goto_definition( // Convert the position to an offset in the file let file_text = std::fs::read_to_string(params.text_document.uri.path())?; - let cursor: Cursor = position_to_offset(params.position, file_text.as_str()); + let cursor: Cursor = to_offset_from_position(params.position, file_text.as_str()); // Get the module and the goto info let file_path = std::path::Path::new(params.text_document.uri.path()); @@ -111,7 +111,8 @@ pub(crate) fn handle_goto_definition( let locations = scopes .into_iter() .filter_map(|scope| scope) - .map(|scope| scope_to_lsp_location(scope, &state.db)) + .map(|scope| to_lsp_location_from_scope(scope, &state.db)) + .filter_map(|location| location.ok()) .collect::>(); // Send the response diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index ac1b1ec859..5ceb5ddf82 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,68 +1,62 @@ use common::{diagnostics::{Severity, CompleteDiagnostic, Span}, InputDb}; use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; +use log::error; use lsp_types::Position; -pub(crate) fn position_to_offset(position: Position, text: &str) -> rowan::TextSize { - let line_offsets: Vec = text - .lines() + +pub(crate) fn calculate_line_offsets(text: &str) -> Vec { + text.lines() .scan(0, |state, line| { let offset = *state; *state += line.len() + 1; Some(offset) }) - .collect(); + .collect() +} +pub(crate) fn to_offset_from_position(position: Position, text: &str) -> rowan::TextSize { + let line_offsets: Vec = calculate_line_offsets(text); let line_offset = line_offsets[position.line as usize]; let character_offset = position.character as usize; rowan::TextSize::from((line_offset + character_offset) as u32) } - -pub(crate) fn span_to_lsp_range(span: Span, db: &dyn InputDb) -> lsp_types::Range { +pub(crate) fn to_lsp_range_from_span(span: Span, db: &dyn InputDb) -> Result> { let text = span.file.text(db); - let line_offsets: Vec = 
text - .lines() - .scan(0, |state, line| { - let offset = *state; - *state += line.len() + 1; - Some(offset) - }) - .collect(); + let line_offsets = calculate_line_offsets(text); let start_line = line_offsets .binary_search(&span.range.start().into()) - .unwrap_or_else(|x| x - 1); + .map_err(|_| "Failed to find start line")?; let end_line = line_offsets .binary_search(&span.range.end().into()) - .unwrap_or_else(|x| x - 1); + .map_err(|_| "Failed to find end line")?; let start_character: usize = usize::from(span.range.start()) - line_offsets[start_line]; let end_character: usize = usize::from(span.range.end()) - line_offsets[end_line]; - lsp_types::Range { + Ok(lsp_types::Range { start: Position::new(start_line as u32, start_character as u32), end: Position::new(end_line as u32, end_character as u32), - } + }) } -pub(crate) fn scope_to_lsp_location(scope: ScopeId, db: &dyn SpannedHirDb) -> lsp_types::Location { - let lazy_span = scope.name_span(db.as_hir_db()).unwrap(); - let span = lazy_span.resolve(db.as_spanned_hir_db()).unwrap(); +pub(crate) fn to_lsp_location_from_scope(scope: ScopeId, db: &dyn SpannedHirDb) -> Result> { + let lazy_span = scope.name_span(db.as_hir_db()).ok_or("Failed to get name span")?; + let span = lazy_span.resolve(db.as_spanned_hir_db()).ok_or("Failed to resolve span")?; let uri = span.file.abs_path(db.as_input_db()); - let range = span_to_lsp_range(span, db.as_input_db()); - let uri = lsp_types::Url::from_file_path(uri).unwrap(); - lsp_types::Location { uri, range } + let range = to_lsp_range_from_span(span, db.as_input_db())?; + let uri = lsp_types::Url::from_file_path(uri).map_err(|_| "Failed to convert path to URL")?; + Ok(lsp_types::Location { uri, range }) } pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { match severity { - // Severity::Bug => lsp_types::DiagnosticSeverity::ERROR, Severity::Error => lsp_types::DiagnosticSeverity::ERROR, Severity::Warning => lsp_types::DiagnosticSeverity::WARNING, Severity::Note => lsp_types::DiagnosticSeverity::HINT, - // Severity::Help => lsp_types::DiagnosticSeverity::INFORMATION, } } @@ -70,18 +64,27 @@ pub(crate) fn diag_to_lsp(diag: CompleteDiagnostic, db: &dyn InputDb) -> Vec + Some(lsp_types::Diagnostic { + range, + severity: Some(severity_to_lsp(diag.severity)), + code: None, + source: None, + message: sub.message.clone(), + related_information: None, + tags: None, + code_description: None, + data: None // for code actions + }), + Err(_) => { + error!("Failed to convert span to range"); + None + } } }) + .filter_map(|x| x) .collect() } \ No newline at end of file From a273aac60c45de5c28b01a6ea3606e2cce24661f Mon Sep 17 00:00:00 2001 From: Sean Billig Date: Mon, 21 Aug 2023 18:32:37 -0700 Subject: [PATCH 252/678] Support record-type enum variants --- crates/hir/src/hir_def/item.rs | 13 +- crates/hir/src/hir_def/types.rs | 8 +- crates/hir/src/lib.rs | 1 + crates/hir/src/lower/item.rs | 13 +- crates/hir/src/lower/types.rs | 22 +- crates/hir/src/span/item.rs | 18 +- crates/hir/src/visitor.rs | 61 +-- crates/parser2/src/ast/item.rs | 43 ++- crates/parser2/src/parser/item.rs | 2 + .../test_files/syntax_node/items/enums.fe | 12 +- .../test_files/syntax_node/items/enums.snap | 347 ++++++++++-------- 11 files changed, 339 insertions(+), 201 deletions(-) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 05c0370eb4..34292dac04 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -23,8 +23,8 @@ use crate::{ use super::{ 
scope_graph::{ScopeGraph, ScopeId}, - AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, TypeId, - UseAlias, WhereClauseId, + AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, TupleTypeId, + TypeId, UseAlias, WhereClauseId, }; #[derive( @@ -687,7 +687,14 @@ pub struct VariantDefListId { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct VariantDef { pub name: Partial<IdentId>, - pub ty: Option<TypeId>, + pub kind: VariantKind, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum VariantKind { + Unit, + Tuple(TupleTypeId), + Record(FieldDefListId), } #[salsa::interned] diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index 468adaefc5..e46c880295 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -13,7 +13,7 @@ pub enum TypeKind { Path(Partial<PathId>, GenericArgListId), SelfType, /// The `Vec` contains the types of the tuple elements. - Tuple(Vec<Partial<TypeId>>), + Tuple(TupleTypeId), /// The first `TypeId` is the element type, the second `Body` is the length. Array(Partial<TypeId>, Partial<Body>), } @@ -23,3 +23,9 @@ pub struct TraitRef { pub path: Partial<PathId>, pub generic_args: GenericArgListId, } + +#[salsa::interned] +pub struct TupleTypeId { + #[return_ref] + pub data: Vec<Partial<TypeId>>, +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 9277898e81..7a343eaaec 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -47,6 +47,7 @@ pub struct Jar( hir_def::VariantDefListId, hir_def::ImplItemListId, hir_def::TypeId, + hir_def::TupleTypeId, hir_def::UsePathId, /// Accumulated diagnostics. ParseErrorAccumulator, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index c622b5bc34..84a869e3bd 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -2,8 +2,8 @@ use parser::ast::{self, prelude::*}; use crate::{ hir_def::{ - item::*, AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, TraitRef, TypeId, - WhereClauseId, + item::*, AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, TraitRef, + TupleTypeId, TypeId, WhereClauseId, }, span::HirOrigin, }; @@ -412,8 +412,11 @@ impl VariantDefListId { impl VariantDef { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::VariantDef) -> Self { let name = IdentId::lower_token_partial(ctxt, ast.name()); - let ty = ast.ty().map(|ty| TypeId::lower_ast(ctxt, ty)); - - Self { name, ty } + let kind = match ast.kind() { + ast::VariantKind::Unit => VariantKind::Unit, + ast::VariantKind::Tuple(t) => VariantKind::Tuple(TupleTypeId::lower_ast(ctxt, t)), + ast::VariantKind::Record(r) => VariantKind::Record(FieldDefListId::lower_ast(ctxt, r)), + }; + Self { name, kind } } } diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 3d85c25733..4699b27dda 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -1,6 +1,8 @@ use parser::ast::{self, prelude::*}; -use crate::hir_def::{Body, GenericArgListId, Partial, PathId, TraitRef, TypeId, TypeKind}; +use crate::hir_def::{ + Body, GenericArgListId, Partial, PathId, TraitRef, TupleTypeId, TypeId, TypeKind, +}; use super::FileLowerCtxt; @@ -20,13 +22,7 @@ impl TypeId { ast::TypeKind::SelfType(_) => TypeKind::SelfType, - ast::TypeKind::Tuple(ty) => { - let mut elem_tys = Vec::new(); - for elem in ty { - elem_tys.push(Some(TypeId::lower_ast(ctxt, elem)).into()); - } - TypeKind::Tuple(elem_tys) - } + ast::TypeKind::Tuple(ty) => TypeKind::Tuple(TupleTypeId::lower_ast(ctxt, ty)), ast::TypeKind::Array(ty) => { let
elem_ty = Self::lower_ast_partial(ctxt, ty.elem_ty()); @@ -49,6 +45,16 @@ impl TypeId { } } +impl TupleTypeId { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TupleType) -> Self { + let mut elem_tys = Vec::new(); + for elem in ast { + elem_tys.push(Some(TypeId::lower_ast(ctxt, elem)).into()); + } + TupleTypeId::new(ctxt.db(), elem_tys) + } +} + impl TraitRef { pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::PathType) -> Self { let path = PathId::lower_ast_partial(ctxt, ast.path()); diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 7f7b9cfdf6..88bf660239 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -17,7 +17,7 @@ use super::{ define_lazy_span_node, params::{LazyFuncParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, transition::SpanTransitionChain, - types::{LazyPathTypeSpan, LazyTySpan}, + types::{LazyPathTypeSpan, LazyTupleTypeSpan, LazyTySpan}, use_tree::LazyUseAliasSpan, }; @@ -286,7 +286,8 @@ define_lazy_span_node!( (name, name), } @node { - (ty, ty, LazyTySpan), + (fields, fields, LazyFieldDefListSpan), + (tuple_type, tuple_type, LazyTupleTypeSpan), } ); @@ -315,7 +316,7 @@ mod tests { mod foo { fn bar() {} } - + mod baz { fn qux() {} } @@ -332,7 +333,7 @@ mod tests { let mut db = TestDb::default(); let text = r#" - + mod foo { fn bar() {} } @@ -435,6 +436,10 @@ mod tests { enum Foo { Bar Baz(u32, i32) + Bux { + x: i8 + y: u8 + } }"#; let enum_ = db.expect_item::(text); @@ -445,10 +450,13 @@ mod tests { let variants = enum_span.variants(); let variant_1 = variants.variant(0); let variant_2 = variants.variant(1); + let variant_3 = variants.variant(2); assert_eq!("Bar", db.text_at(top_mod, &variant_1.name())); assert_eq!("Baz", db.text_at(top_mod, &variant_2.name())); - assert_eq!("(u32, i32)", db.text_at(top_mod, &variant_2.ty())); + assert_eq!("(u32, i32)", db.text_at(top_mod, &variant_2.tuple_type())); + assert_eq!("Bux", db.text_at(top_mod, &variant_3.name())); + assert!(db.text_at(top_mod, &variant_3.fields()).contains("x: i8")); } #[test] diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index df303de50b..75669ea388 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -6,8 +6,9 @@ use crate::{ FieldDef, FieldDefListId, FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, FuncParamName, GenericArg, GenericArgListId, GenericParam, GenericParamListId, IdentId, Impl, ImplTrait, ItemKind, LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, - StmtId, Struct, TopLevelMod, Trait, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, - UsePathId, UsePathSegment, VariantDef, VariantDefListId, WhereClauseId, WherePredicate, + StmtId, Struct, TopLevelMod, Trait, TupleTypeId, TypeAlias, TypeBound, TypeId, TypeKind, + Use, UseAlias, UsePathId, UsePathSegment, VariantDef, VariantDefListId, VariantKind, + WhereClauseId, WherePredicate, }, span::{lazy_spans::*, transition::ChainRoot, SpanDowncast}, HirDb, @@ -281,6 +282,10 @@ pub trait Visitor { walk_ty(self, ctxt, ty) } + fn visit_tuple_type(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTupleTypeSpan>, ty: TupleTypeId) { + walk_tuple_type(self, ctxt, ty) + } + #[allow(unused_variables)] fn visit_lit(&mut self, ctxt: &mut VisitorCtxt<'_, LazyLitSpan>, lit: LitKind) {} @@ -1559,13 +1564,15 @@ pub fn walk_variant_def( ) } - if let Some(ty) = variant.ty { - ctxt.with_new_ctxt( - |span| span.ty_moved(), - |ctxt| { - visitor.visit_ty(ctxt, ty); - }, - ) + match variant.kind { + VariantKind::Unit => 
{} + VariantKind::Tuple(t) => ctxt.with_new_ctxt( + |span| span.tuple_type_moved(), + |ctxt| visitor.visit_tuple_type(ctxt, t), + ), + VariantKind::Record(_) => { + todo!() + } } } @@ -1638,21 +1645,9 @@ where }, ), - TypeKind::Tuple(elems) => ctxt.with_new_ctxt( + TypeKind::Tuple(t) => ctxt.with_new_ctxt( |span| span.into_tuple_type(), - |ctxt| { - for (i, elem) in elems.iter().enumerate() { - let Some(elem) = elem.to_opt() else { - continue; - }; - ctxt.with_new_ctxt( - |span| span.elem_ty_moved(i), - |ctxt| { - visitor.visit_ty(ctxt, elem); - }, - ) - } - }, + |ctxt| walk_tuple_type(visitor, ctxt, t), ), TypeKind::Array(elem, body) => ctxt.with_new_ctxt( @@ -1676,6 +1671,26 @@ where } } +pub fn walk_tuple_type<V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyTupleTypeSpan>, + ty: TupleTypeId, +) where + V: Visitor + ?Sized, +{ + for (i, elem) in ty.data(ctxt.db()).iter().enumerate() { + let Some(elem) = elem.to_opt() else { + continue; + }; + ctxt.with_new_ctxt( + |span| span.elem_ty_moved(i), + |ctxt| { + visitor.visit_ty(ctxt, elem); + }, + ) + } +} + pub fn walk_type_bound_list<V>( visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyTypeBoundListSpan>, diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 5b6d1256a1..b163beaa38 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -1,4 +1,4 @@ -use super::ast_node; +use super::{ast_node, TupleType}; use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; use rowan::ast::{support, AstNode}; @@ -355,12 +355,29 @@ impl VariantDef { support::token(self.syntax(), SK::Ident) } - /// Returns the type of the variant. - /// `(i32, u32)` in `Foo(i32, u32)` - /// Currently only tuple variants are supported. - pub fn ty(&self) -> Option { + /// Returns the kind of the variant. + pub fn kind(&self) -> VariantKind { support::child(self.syntax()) + .map(VariantKind::Tuple) + .or_else(|| support::child(self.syntax()).map(VariantKind::Record)) + .unwrap_or(VariantKind::Unit) } + + /// Returns the variant's field def list. + pub fn fields(&self) -> Option<RecordFieldDefList> { + support::child(self.syntax()) + } + + pub fn tuple_type(&self) -> Option<TupleType> { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum VariantKind { + Unit, + Tuple(TupleType), + Record(RecordFieldDefList), } ast_node!
{ @@ -486,7 +503,7 @@ mod tests { #[test] #[wasm_bindgen_test] fn func() { - let source = r#" + let source = r#" /// This is doc comment #evm pub unsafe fn foo(_ x: T, from u: U) -> (T, U) where T: Trait2 { return } @@ -573,6 +590,10 @@ mod tests { pub enum Foo where T: Trait2 { Bar Baz(T, U) + Bux { + x: i8 + y: i8 + } } "#; let e: Enum = parse_item(source); @@ -583,17 +604,21 @@ mod tests { match count { 0 => { assert_eq!(variant.name().unwrap().text(), "Bar"); - assert!(variant.ty().is_none()); + assert_eq!(variant.kind(), VariantKind::Unit); } 1 => { assert_eq!(variant.name().unwrap().text(), "Baz"); - assert!(matches!(variant.ty().unwrap().kind(), TypeKind::Tuple(_))); + assert!(matches!(variant.kind(), VariantKind::Tuple(_))); + } + 2 => { + assert_eq!(variant.name().unwrap().text(), "Bux"); + assert!(matches!(variant.kind(), VariantKind::Record(_))); } _ => unreachable!(), } count += 1; } - assert_eq!(count, 2); + assert_eq!(count, 3); } #[test] diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 25f11e2a55..2a53527c25 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -300,6 +300,8 @@ impl super::Parse for VariantDefScope { if parser.current_kind() == Some(SyntaxKind::LParen) { parser.parse(TupleTypeScope::default(), None); + } else if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(RecordFieldDefListScope::default(), None); } } } diff --git a/crates/parser2/test_files/syntax_node/items/enums.fe b/crates/parser2/test_files/syntax_node/items/enums.fe index 32e563010c..dce3c33309 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.fe +++ b/crates/parser2/test_files/syntax_node/items/enums.fe @@ -5,14 +5,22 @@ enum Basic { Tup(i32, u32) } -enum Option +enum RecordVariants { + Rectangle { + w: u32 + h: u32 + } + Circle { r: u32 } +} + +enum Option where T: Clone { Some(T) None } -enum BoundEnum +enum BoundEnum where Foo::Bar: Trait { AddMul(T) diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index ff5101ebf2..24071d5db0 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/enums.fe --- -Root@0..220 - ItemList@0..220 +Root@0..322 + ItemList@0..322 Item@0..15 Enum@0..13 EnumKw@0..4 "enum" @@ -47,151 +47,208 @@ Root@0..220 Newline@54..55 "\n" RBrace@55..56 "}" Newline@56..58 "\n\n" - Item@58..119 - Enum@58..117 + Item@58..162 + Enum@58..160 EnumKw@58..62 "enum" WhiteSpace@62..63 " " - Ident@63..69 "Option" - GenericParamList@69..72 - Lt@69..70 "<" - TypeGenericParam@70..71 - Ident@70..71 "T" - Gt@71..72 ">" - WhiteSpace@72..73 " " - Newline@73..74 "\n" - WhiteSpace@74..78 " " - WhereClause@78..93 - WhereKw@78..83 "where" - WhiteSpace@83..84 " " - WherePredicate@84..93 - PathType@84..85 - Path@84..85 - PathSegment@84..85 - Ident@84..85 "T" - TypeBoundList@85..92 - Colon@85..86 ":" - WhiteSpace@86..87 " " - TypeBound@87..92 - Path@87..92 - PathSegment@87..92 - Ident@87..92 "Clone" - Newline@92..93 "\n" - VariantDefList@93..117 - LBrace@93..94 "{" - Newline@94..95 "\n" - WhiteSpace@95..99 " " - VariantDef@99..106 - Ident@99..103 "Some" - TupleType@103..106 - LParen@103..104 "(" - PathType@104..105 - Path@104..105 - PathSegment@104..105 - Ident@104..105 "T" - RParen@105..106 ")" - Newline@106..107 "\n" - 
WhiteSpace@107..111 " " - VariantDef@111..115 - Ident@111..115 "None" - Newline@115..116 "\n" - RBrace@116..117 "}" - Newline@117..119 "\n\n" - Item@119..220 - Enum@119..220 - EnumKw@119..123 "enum" - WhiteSpace@123..124 " " - Ident@124..133 "BoundEnum" - GenericParamList@133..162 - Lt@133..134 "<" - TypeGenericParam@134..146 - Ident@134..135 "T" - TypeBoundList@135..146 - Colon@135..136 ":" - WhiteSpace@136..137 " " - TypeBound@137..140 - Path@137..140 - PathSegment@137..140 - Ident@137..140 "Add" - WhiteSpace@140..141 " " - Plus@141..142 "+" - WhiteSpace@142..143 " " - TypeBound@143..146 - Path@143..146 - PathSegment@143..146 - Ident@143..146 "Mul" - WhiteSpace@146..147 " " - Comma@147..148 "," - WhiteSpace@148..149 " " - TypeGenericParam@149..161 - Ident@149..150 "U" - TypeBoundList@150..161 - Colon@150..151 ":" - WhiteSpace@151..152 " " - TypeBound@152..155 - Path@152..155 - PathSegment@152..155 - Ident@152..155 "Sub" - WhiteSpace@155..156 " " - Plus@156..157 "+" - WhiteSpace@157..158 " " - TypeBound@158..161 - Path@158..161 - PathSegment@158..161 - Ident@158..161 "Div" - Gt@161..162 ">" - WhiteSpace@162..163 " " - Newline@163..164 "\n" - WhereClause@164..189 - WhereKw@164..169 "where" - WhiteSpace@169..170 " " - WherePredicate@170..189 - PathType@170..181 - Path@170..178 - PathSegment@170..173 - Ident@170..173 "Foo" - Colon2@173..175 "::" - PathSegment@175..178 - Ident@175..178 "Bar" - GenericArgList@178..181 - Lt@178..179 "<" - TypeGenericArg@179..180 - PathType@179..180 - Path@179..180 - PathSegment@179..180 - Ident@179..180 "T" - Gt@180..181 ">" - TypeBoundList@181..188 - Colon@181..182 ":" - WhiteSpace@182..183 " " - TypeBound@183..188 - Path@183..188 - PathSegment@183..188 - Ident@183..188 "Trait" - Newline@188..189 "\n" - VariantDefList@189..220 - LBrace@189..190 "{" - Newline@190..191 "\n" - WhiteSpace@191..195 " " - VariantDef@195..204 - Ident@195..201 "AddMul" - TupleType@201..204 - LParen@201..202 "(" - PathType@202..203 - Path@202..203 - PathSegment@202..203 - Ident@202..203 "T" - RParen@203..204 ")" - Newline@204..205 "\n" - WhiteSpace@205..209 " " - VariantDef@209..218 - Ident@209..215 "SubDiv" - TupleType@215..218 - LParen@215..216 "(" - PathType@216..217 - Path@216..217 - PathSegment@216..217 - Ident@216..217 "U" - RParen@217..218 ")" + Ident@63..77 "RecordVariants" + WhiteSpace@77..78 " " + VariantDefList@78..160 + LBrace@78..79 "{" + Newline@79..80 "\n" + WhiteSpace@80..85 " " + VariantDef@85..135 + Ident@85..94 "Rectangle" + WhiteSpace@94..95 " " + RecordFieldDefList@95..135 + LBrace@95..96 "{" + Newline@96..97 "\n" + WhiteSpace@97..106 " " + RecordFieldDef@106..112 + Ident@106..107 "w" + Colon@107..108 ":" + WhiteSpace@108..109 " " + PathType@109..112 + Path@109..112 + PathSegment@109..112 + Ident@109..112 "u32" + Newline@112..113 "\n" + WhiteSpace@113..122 " " + RecordFieldDef@122..128 + Ident@122..123 "h" + Colon@123..124 ":" + WhiteSpace@124..125 " " + PathType@125..128 + Path@125..128 + PathSegment@125..128 + Ident@125..128 "u32" + Newline@128..129 "\n" + WhiteSpace@129..134 " " + RBrace@134..135 "}" + Newline@135..136 "\n" + WhiteSpace@136..141 " " + VariantDef@141..158 + Ident@141..147 "Circle" + WhiteSpace@147..148 " " + RecordFieldDefList@148..158 + LBrace@148..149 "{" + WhiteSpace@149..150 " " + RecordFieldDef@150..156 + Ident@150..151 "r" + Colon@151..152 ":" + WhiteSpace@152..153 " " + PathType@153..156 + Path@153..156 + PathSegment@153..156 + Ident@153..156 "u32" + WhiteSpace@156..157 " " + RBrace@157..158 "}" + Newline@158..159 "\n" + RBrace@159..160 
"}" + Newline@160..162 "\n\n" + Item@162..222 + Enum@162..220 + EnumKw@162..166 "enum" + WhiteSpace@166..167 " " + Ident@167..173 "Option" + GenericParamList@173..176 + Lt@173..174 "<" + TypeGenericParam@174..175 + Ident@174..175 "T" + Gt@175..176 ">" + Newline@176..177 "\n" + WhiteSpace@177..181 " " + WhereClause@181..196 + WhereKw@181..186 "where" + WhiteSpace@186..187 " " + WherePredicate@187..196 + PathType@187..188 + Path@187..188 + PathSegment@187..188 + Ident@187..188 "T" + TypeBoundList@188..195 + Colon@188..189 ":" + WhiteSpace@189..190 " " + TypeBound@190..195 + Path@190..195 + PathSegment@190..195 + Ident@190..195 "Clone" + Newline@195..196 "\n" + VariantDefList@196..220 + LBrace@196..197 "{" + Newline@197..198 "\n" + WhiteSpace@198..202 " " + VariantDef@202..209 + Ident@202..206 "Some" + TupleType@206..209 + LParen@206..207 "(" + PathType@207..208 + Path@207..208 + PathSegment@207..208 + Ident@207..208 "T" + RParen@208..209 ")" + Newline@209..210 "\n" + WhiteSpace@210..214 " " + VariantDef@214..218 + Ident@214..218 "None" Newline@218..219 "\n" RBrace@219..220 "}" + Newline@220..222 "\n\n" + Item@222..322 + Enum@222..322 + EnumKw@222..226 "enum" + WhiteSpace@226..227 " " + Ident@227..236 "BoundEnum" + GenericParamList@236..265 + Lt@236..237 "<" + TypeGenericParam@237..249 + Ident@237..238 "T" + TypeBoundList@238..249 + Colon@238..239 ":" + WhiteSpace@239..240 " " + TypeBound@240..243 + Path@240..243 + PathSegment@240..243 + Ident@240..243 "Add" + WhiteSpace@243..244 " " + Plus@244..245 "+" + WhiteSpace@245..246 " " + TypeBound@246..249 + Path@246..249 + PathSegment@246..249 + Ident@246..249 "Mul" + WhiteSpace@249..250 " " + Comma@250..251 "," + WhiteSpace@251..252 " " + TypeGenericParam@252..264 + Ident@252..253 "U" + TypeBoundList@253..264 + Colon@253..254 ":" + WhiteSpace@254..255 " " + TypeBound@255..258 + Path@255..258 + PathSegment@255..258 + Ident@255..258 "Sub" + WhiteSpace@258..259 " " + Plus@259..260 "+" + WhiteSpace@260..261 " " + TypeBound@261..264 + Path@261..264 + PathSegment@261..264 + Ident@261..264 "Div" + Gt@264..265 ">" + Newline@265..266 "\n" + WhereClause@266..291 + WhereKw@266..271 "where" + WhiteSpace@271..272 " " + WherePredicate@272..291 + PathType@272..283 + Path@272..280 + PathSegment@272..275 + Ident@272..275 "Foo" + Colon2@275..277 "::" + PathSegment@277..280 + Ident@277..280 "Bar" + GenericArgList@280..283 + Lt@280..281 "<" + TypeGenericArg@281..282 + PathType@281..282 + Path@281..282 + PathSegment@281..282 + Ident@281..282 "T" + Gt@282..283 ">" + TypeBoundList@283..290 + Colon@283..284 ":" + WhiteSpace@284..285 " " + TypeBound@285..290 + Path@285..290 + PathSegment@285..290 + Ident@285..290 "Trait" + Newline@290..291 "\n" + VariantDefList@291..322 + LBrace@291..292 "{" + Newline@292..293 "\n" + WhiteSpace@293..297 " " + VariantDef@297..306 + Ident@297..303 "AddMul" + TupleType@303..306 + LParen@303..304 "(" + PathType@304..305 + Path@304..305 + PathSegment@304..305 + Ident@304..305 "T" + RParen@305..306 ")" + Newline@306..307 "\n" + WhiteSpace@307..311 " " + VariantDef@311..320 + Ident@311..317 "SubDiv" + TupleType@317..320 + LParen@317..318 "(" + PathType@318..319 + Path@318..319 + PathSegment@318..319 + Ident@318..319 "U" + RParen@319..320 ")" + Newline@320..321 "\n" + RBrace@321..322 "}" From 06b5dda7b574cc3879ba22c7d337716312033158 Mon Sep 17 00:00:00 2001 From: Sean Billig Date: Sat, 26 Aug 2023 22:00:06 -0700 Subject: [PATCH 253/678] Require comma field separators in field list --- crates/parser2/src/ast/item.rs | 7 +- 
crates/parser2/src/parser/expr.rs | 2 +- crates/parser2/src/parser/mod.rs | 10 + crates/parser2/src/parser/struct_.rs | 11 +- .../test_files/syntax_node/items/contract.fe | 6 +- .../syntax_node/items/contract.snap | 63 +- .../test_files/syntax_node/items/enums.fe | 5 +- .../test_files/syntax_node/items/enums.snap | 388 ++++---- .../test_files/syntax_node/structs/attr.fe | 2 +- .../test_files/syntax_node/structs/attr.snap | 71 +- .../syntax_node/structs/generics.fe | 26 +- .../syntax_node/structs/generics.snap | 856 +++++++++--------- .../syntax_node/structs/tupel_field.fe | 4 +- .../syntax_node/structs/tupel_field.snap | 102 ++- .../name_resolution/conflict_field.fe | 4 +- .../name_resolution/conflict_field.snap | 6 +- .../name_resolution/conflict_generics.fe | 2 +- .../name_resolution/conflict_generics.snap | 2 +- .../name_resolution/path_missing_generics.fe | 8 +- .../fixtures/name_resolution/path_shadow.fe | 2 +- 20 files changed, 800 insertions(+), 777 deletions(-) diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index b163beaa38..dee9b4df37 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -462,7 +462,11 @@ mod tests { let mut parser = Parser::new(lexer); parser.parse(ItemListScope::default(), None); - let item_list = ItemList::cast(parser.finish_to_node().0).unwrap(); + let (node, errs) = parser.finish_to_node(); + for e in errs { + eprintln!("{:?}", e); + } + let item_list = ItemList::cast(node).unwrap(); let mut items = item_list.into_iter().collect::>(); assert_eq!(items.len(), 1); items.pop().unwrap().kind().unwrap().try_into().unwrap() @@ -531,6 +535,7 @@ mod tests { } "#; let s: Struct = parse_item(source); + dbg!(&s); assert_eq!(s.name().unwrap().text(), "Foo"); let mut count = 0; for field in s.fields().unwrap() { diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs index 0065b2815c..fab0086259 100644 --- a/crates/parser2/src/parser/expr.rs +++ b/crates/parser2/src/parser/expr.rs @@ -35,7 +35,7 @@ fn parse_expr_with_min_bp( loop { let Some(kind) = parser.current_kind() else { - break + break; }; // Parse postfix operators. diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs index d6b3bccb95..f81a75aa4d 100644 --- a/crates/parser2/src/parser/mod.rs +++ b/crates/parser2/src/parser/mod.rs @@ -463,6 +463,16 @@ impl Parser { ErrorScope::default() } + fn error_at_current_pos(&mut self, msg: &str) -> ErrorScope { + let pos = self.current_pos; + let range = TextRange::new(pos, pos); + self.errors.push(ParseError { + range, + msg: msg.to_string(), + }); + ErrorScope::default() + } + /// Returns `true` if the parser is in the dry run mode. fn is_dry_run(&self) -> bool { !self.dry_run_states.is_empty() diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 232c21f342..254771fc98 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -54,19 +54,20 @@ define_scope! 
{ impl super::Parse for RecordFieldDefListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::LBrace); + parser.set_newline_as_trivia(true); loop { - parser.set_newline_as_trivia(true); if parser.current_kind() == Some(SyntaxKind::RBrace) || parser.current_kind().is_none() { break; } + parser.parse(RecordFieldDefScope::default(), None); - parser.set_newline_as_trivia(false); - if !parser.bump_if(SyntaxKind::Newline) + + if !parser.bump_if(SyntaxKind::Comma) && parser.current_kind() != Some(SyntaxKind::RBrace) { - parser.error_and_recover("expected newline after field definition", None); + parser.error_at_current_pos("expected comma after field definition"); } } @@ -118,7 +119,7 @@ impl super::Parse for RecordFieldDefScope { if parser.bump_if(SyntaxKind::Colon) { parser.with_next_expected_tokens( |parser| parse_type(parser, None), - &[SyntaxKind::Newline, SyntaxKind::RBrace], + &[SyntaxKind::Comma, SyntaxKind::Newline, SyntaxKind::RBrace], ); } else { parser.error_and_recover("expected `name: type` for the field definition", None); diff --git a/crates/parser2/test_files/syntax_node/items/contract.fe b/crates/parser2/test_files/syntax_node/items/contract.fe index ef29266b0a..3b65d7f965 100644 --- a/crates/parser2/test_files/syntax_node/items/contract.fe +++ b/crates/parser2/test_files/syntax_node/items/contract.fe @@ -1,7 +1,7 @@ contract Empty {} pub contract C { - x: i32 - y: u256 - z: MyStruct::Encodable + x: i32, + y: u256, + z: MyStruct::Encodable, } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/contract.snap b/crates/parser2/test_files/syntax_node/items/contract.snap index 61421f5eb6..2119b58bd2 100644 --- a/crates/parser2/test_files/syntax_node/items/contract.snap +++ b/crates/parser2/test_files/syntax_node/items/contract.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/contract.fe --- -Root@0..87 - ItemList@0..87 +Root@0..90 + ItemList@0..90 Item@0..19 Contract@0..17 ContractKw@0..8 "contract" @@ -15,8 +15,8 @@ Root@0..87 LBrace@15..16 "{" RBrace@16..17 "}" Newline@17..19 "\n\n" - Item@19..87 - Contract@19..87 + Item@19..90 + Contract@19..90 ItemModifier@19..22 PubKw@19..22 "pub" WhiteSpace@22..23 " " @@ -24,7 +24,7 @@ Root@0..87 WhiteSpace@31..32 " " Ident@32..33 "C" WhiteSpace@33..34 " " - RecordFieldDefList@34..87 + RecordFieldDefList@34..90 LBrace@34..35 "{" Newline@35..36 "\n" WhiteSpace@36..40 " " @@ -36,29 +36,32 @@ Root@0..87 Path@43..46 PathSegment@43..46 Ident@43..46 "i32" - Newline@46..47 "\n" - WhiteSpace@47..51 " " - RecordFieldDef@51..58 - Ident@51..52 "y" - Colon@52..53 ":" - WhiteSpace@53..54 " " - PathType@54..58 - Path@54..58 - PathSegment@54..58 - Ident@54..58 "u256" - Newline@58..59 "\n" - WhiteSpace@59..63 " " - RecordFieldDef@63..85 - Ident@63..64 "z" - Colon@64..65 ":" - WhiteSpace@65..66 " " - PathType@66..85 - Path@66..85 - PathSegment@66..74 - Ident@66..74 "MyStruct" - Colon2@74..76 "::" - PathSegment@76..85 - Ident@76..85 "Encodable" - Newline@85..86 "\n" - RBrace@86..87 "}" + Comma@46..47 "," + Newline@47..48 "\n" + WhiteSpace@48..52 " " + RecordFieldDef@52..59 + Ident@52..53 "y" + Colon@53..54 ":" + WhiteSpace@54..55 " " + PathType@55..59 + Path@55..59 + PathSegment@55..59 + Ident@55..59 "u256" + Comma@59..60 "," + Newline@60..61 "\n" + WhiteSpace@61..65 " " + RecordFieldDef@65..87 + Ident@65..66 "z" + Colon@66..67 ":" + WhiteSpace@67..68 " " + PathType@68..87 + Path@68..87 + PathSegment@68..76 + 
Ident@68..76 "MyStruct" + Colon2@76..78 "::" + PathSegment@78..87 + Ident@78..87 "Encodable" + Comma@87..88 "," + Newline@88..89 "\n" + RBrace@89..90 "}" diff --git a/crates/parser2/test_files/syntax_node/items/enums.fe b/crates/parser2/test_files/syntax_node/items/enums.fe index dce3c33309..e57ded1cb1 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.fe +++ b/crates/parser2/test_files/syntax_node/items/enums.fe @@ -6,10 +6,7 @@ enum Basic { } enum RecordVariants { - Rectangle { - w: u32 - h: u32 - } + Rectangle { w: u32, h: u32 } Circle { r: u32 } } diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 24071d5db0..87b42882dc 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/enums.fe --- -Root@0..322 - ItemList@0..322 +Root@0..300 + ItemList@0..300 Item@0..15 Enum@0..13 EnumKw@0..4 "enum" @@ -47,208 +47,206 @@ Root@0..322 Newline@54..55 "\n" RBrace@55..56 "}" Newline@56..58 "\n\n" - Item@58..162 - Enum@58..160 + Item@58..140 + Enum@58..138 EnumKw@58..62 "enum" WhiteSpace@62..63 " " Ident@63..77 "RecordVariants" WhiteSpace@77..78 " " - VariantDefList@78..160 + VariantDefList@78..138 LBrace@78..79 "{" Newline@79..80 "\n" WhiteSpace@80..85 " " - VariantDef@85..135 + VariantDef@85..113 Ident@85..94 "Rectangle" WhiteSpace@94..95 " " - RecordFieldDefList@95..135 + RecordFieldDefList@95..113 LBrace@95..96 "{" - Newline@96..97 "\n" - WhiteSpace@97..106 " " - RecordFieldDef@106..112 - Ident@106..107 "w" - Colon@107..108 ":" - WhiteSpace@108..109 " " - PathType@109..112 - Path@109..112 - PathSegment@109..112 - Ident@109..112 "u32" - Newline@112..113 "\n" - WhiteSpace@113..122 " " - RecordFieldDef@122..128 - Ident@122..123 "h" - Colon@123..124 ":" - WhiteSpace@124..125 " " - PathType@125..128 - Path@125..128 - PathSegment@125..128 - Ident@125..128 "u32" - Newline@128..129 "\n" - WhiteSpace@129..134 " " - RBrace@134..135 "}" - Newline@135..136 "\n" - WhiteSpace@136..141 " " - VariantDef@141..158 - Ident@141..147 "Circle" - WhiteSpace@147..148 " " - RecordFieldDefList@148..158 - LBrace@148..149 "{" - WhiteSpace@149..150 " " - RecordFieldDef@150..156 - Ident@150..151 "r" - Colon@151..152 ":" - WhiteSpace@152..153 " " - PathType@153..156 - Path@153..156 - PathSegment@153..156 - Ident@153..156 "u32" - WhiteSpace@156..157 " " - RBrace@157..158 "}" - Newline@158..159 "\n" - RBrace@159..160 "}" - Newline@160..162 "\n\n" - Item@162..222 - Enum@162..220 - EnumKw@162..166 "enum" - WhiteSpace@166..167 " " - Ident@167..173 "Option" - GenericParamList@173..176 - Lt@173..174 "<" - TypeGenericParam@174..175 - Ident@174..175 "T" - Gt@175..176 ">" - Newline@176..177 "\n" - WhiteSpace@177..181 " " - WhereClause@181..196 - WhereKw@181..186 "where" - WhiteSpace@186..187 " " - WherePredicate@187..196 - PathType@187..188 - Path@187..188 - PathSegment@187..188 - Ident@187..188 "T" - TypeBoundList@188..195 - Colon@188..189 ":" - WhiteSpace@189..190 " " - TypeBound@190..195 - Path@190..195 - PathSegment@190..195 - Ident@190..195 "Clone" - Newline@195..196 "\n" - VariantDefList@196..220 - LBrace@196..197 "{" - Newline@197..198 "\n" - WhiteSpace@198..202 " " - VariantDef@202..209 - Ident@202..206 "Some" - TupleType@206..209 - LParen@206..207 "(" - PathType@207..208 - Path@207..208 - PathSegment@207..208 - Ident@207..208 "T" - 
RParen@208..209 ")" - Newline@209..210 "\n" - WhiteSpace@210..214 " " - VariantDef@214..218 - Ident@214..218 "None" - Newline@218..219 "\n" - RBrace@219..220 "}" - Newline@220..222 "\n\n" - Item@222..322 - Enum@222..322 - EnumKw@222..226 "enum" - WhiteSpace@226..227 " " - Ident@227..236 "BoundEnum" - GenericParamList@236..265 - Lt@236..237 "<" - TypeGenericParam@237..249 - Ident@237..238 "T" - TypeBoundList@238..249 - Colon@238..239 ":" - WhiteSpace@239..240 " " - TypeBound@240..243 - Path@240..243 - PathSegment@240..243 - Ident@240..243 "Add" - WhiteSpace@243..244 " " - Plus@244..245 "+" - WhiteSpace@245..246 " " - TypeBound@246..249 - Path@246..249 - PathSegment@246..249 - Ident@246..249 "Mul" + WhiteSpace@96..97 " " + RecordFieldDef@97..103 + Ident@97..98 "w" + Colon@98..99 ":" + WhiteSpace@99..100 " " + PathType@100..103 + Path@100..103 + PathSegment@100..103 + Ident@100..103 "u32" + Comma@103..104 "," + WhiteSpace@104..105 " " + RecordFieldDef@105..111 + Ident@105..106 "h" + Colon@106..107 ":" + WhiteSpace@107..108 " " + PathType@108..111 + Path@108..111 + PathSegment@108..111 + Ident@108..111 "u32" + WhiteSpace@111..112 " " + RBrace@112..113 "}" + Newline@113..114 "\n" + WhiteSpace@114..119 " " + VariantDef@119..136 + Ident@119..125 "Circle" + WhiteSpace@125..126 " " + RecordFieldDefList@126..136 + LBrace@126..127 "{" + WhiteSpace@127..128 " " + RecordFieldDef@128..134 + Ident@128..129 "r" + Colon@129..130 ":" + WhiteSpace@130..131 " " + PathType@131..134 + Path@131..134 + PathSegment@131..134 + Ident@131..134 "u32" + WhiteSpace@134..135 " " + RBrace@135..136 "}" + Newline@136..137 "\n" + RBrace@137..138 "}" + Newline@138..140 "\n\n" + Item@140..200 + Enum@140..198 + EnumKw@140..144 "enum" + WhiteSpace@144..145 " " + Ident@145..151 "Option" + GenericParamList@151..154 + Lt@151..152 "<" + TypeGenericParam@152..153 + Ident@152..153 "T" + Gt@153..154 ">" + Newline@154..155 "\n" + WhiteSpace@155..159 " " + WhereClause@159..174 + WhereKw@159..164 "where" + WhiteSpace@164..165 " " + WherePredicate@165..174 + PathType@165..166 + Path@165..166 + PathSegment@165..166 + Ident@165..166 "T" + TypeBoundList@166..173 + Colon@166..167 ":" + WhiteSpace@167..168 " " + TypeBound@168..173 + Path@168..173 + PathSegment@168..173 + Ident@168..173 "Clone" + Newline@173..174 "\n" + VariantDefList@174..198 + LBrace@174..175 "{" + Newline@175..176 "\n" + WhiteSpace@176..180 " " + VariantDef@180..187 + Ident@180..184 "Some" + TupleType@184..187 + LParen@184..185 "(" + PathType@185..186 + Path@185..186 + PathSegment@185..186 + Ident@185..186 "T" + RParen@186..187 ")" + Newline@187..188 "\n" + WhiteSpace@188..192 " " + VariantDef@192..196 + Ident@192..196 "None" + Newline@196..197 "\n" + RBrace@197..198 "}" + Newline@198..200 "\n\n" + Item@200..300 + Enum@200..300 + EnumKw@200..204 "enum" + WhiteSpace@204..205 " " + Ident@205..214 "BoundEnum" + GenericParamList@214..243 + Lt@214..215 "<" + TypeGenericParam@215..227 + Ident@215..216 "T" + TypeBoundList@216..227 + Colon@216..217 ":" + WhiteSpace@217..218 " " + TypeBound@218..221 + Path@218..221 + PathSegment@218..221 + Ident@218..221 "Add" + WhiteSpace@221..222 " " + Plus@222..223 "+" + WhiteSpace@223..224 " " + TypeBound@224..227 + Path@224..227 + PathSegment@224..227 + Ident@224..227 "Mul" + WhiteSpace@227..228 " " + Comma@228..229 "," + WhiteSpace@229..230 " " + TypeGenericParam@230..242 + Ident@230..231 "U" + TypeBoundList@231..242 + Colon@231..232 ":" + WhiteSpace@232..233 " " + TypeBound@233..236 + Path@233..236 + PathSegment@233..236 + Ident@233..236 "Sub" 
+ WhiteSpace@236..237 " " + Plus@237..238 "+" + WhiteSpace@238..239 " " + TypeBound@239..242 + Path@239..242 + PathSegment@239..242 + Ident@239..242 "Div" + Gt@242..243 ">" + Newline@243..244 "\n" + WhereClause@244..269 + WhereKw@244..249 "where" WhiteSpace@249..250 " " - Comma@250..251 "," - WhiteSpace@251..252 " " - TypeGenericParam@252..264 - Ident@252..253 "U" - TypeBoundList@253..264 - Colon@253..254 ":" - WhiteSpace@254..255 " " - TypeBound@255..258 - Path@255..258 - PathSegment@255..258 - Ident@255..258 "Sub" - WhiteSpace@258..259 " " - Plus@259..260 "+" - WhiteSpace@260..261 " " - TypeBound@261..264 - Path@261..264 - PathSegment@261..264 - Ident@261..264 "Div" - Gt@264..265 ">" - Newline@265..266 "\n" - WhereClause@266..291 - WhereKw@266..271 "where" - WhiteSpace@271..272 " " - WherePredicate@272..291 - PathType@272..283 - Path@272..280 - PathSegment@272..275 - Ident@272..275 "Foo" - Colon2@275..277 "::" - PathSegment@277..280 - Ident@277..280 "Bar" - GenericArgList@280..283 - Lt@280..281 "<" - TypeGenericArg@281..282 - PathType@281..282 - Path@281..282 - PathSegment@281..282 - Ident@281..282 "T" - Gt@282..283 ">" - TypeBoundList@283..290 - Colon@283..284 ":" - WhiteSpace@284..285 " " - TypeBound@285..290 - Path@285..290 - PathSegment@285..290 - Ident@285..290 "Trait" - Newline@290..291 "\n" - VariantDefList@291..322 - LBrace@291..292 "{" - Newline@292..293 "\n" - WhiteSpace@293..297 " " - VariantDef@297..306 - Ident@297..303 "AddMul" - TupleType@303..306 - LParen@303..304 "(" - PathType@304..305 - Path@304..305 - PathSegment@304..305 - Ident@304..305 "T" - RParen@305..306 ")" - Newline@306..307 "\n" - WhiteSpace@307..311 " " - VariantDef@311..320 - Ident@311..317 "SubDiv" - TupleType@317..320 - LParen@317..318 "(" - PathType@318..319 - Path@318..319 - PathSegment@318..319 - Ident@318..319 "U" - RParen@319..320 ")" - Newline@320..321 "\n" - RBrace@321..322 "}" + WherePredicate@250..269 + PathType@250..261 + Path@250..258 + PathSegment@250..253 + Ident@250..253 "Foo" + Colon2@253..255 "::" + PathSegment@255..258 + Ident@255..258 "Bar" + GenericArgList@258..261 + Lt@258..259 "<" + TypeGenericArg@259..260 + PathType@259..260 + Path@259..260 + PathSegment@259..260 + Ident@259..260 "T" + Gt@260..261 ">" + TypeBoundList@261..268 + Colon@261..262 ":" + WhiteSpace@262..263 " " + TypeBound@263..268 + Path@263..268 + PathSegment@263..268 + Ident@263..268 "Trait" + Newline@268..269 "\n" + VariantDefList@269..300 + LBrace@269..270 "{" + Newline@270..271 "\n" + WhiteSpace@271..275 " " + VariantDef@275..284 + Ident@275..281 "AddMul" + TupleType@281..284 + LParen@281..282 "(" + PathType@282..283 + Path@282..283 + PathSegment@282..283 + Ident@282..283 "T" + RParen@283..284 ")" + Newline@284..285 "\n" + WhiteSpace@285..289 " " + VariantDef@289..298 + Ident@289..295 "SubDiv" + TupleType@295..298 + LParen@295..296 "(" + PathType@296..297 + Path@296..297 + PathSegment@296..297 + Ident@296..297 "U" + RParen@297..298 ")" + Newline@298..299 "\n" + RBrace@299..300 "}" diff --git a/crates/parser2/test_files/syntax_node/structs/attr.fe b/crates/parser2/test_files/syntax_node/structs/attr.fe index b0af646fe5..e1f1dc750c 100644 --- a/crates/parser2/test_files/syntax_node/structs/attr.fe +++ b/crates/parser2/test_files/syntax_node/structs/attr.fe @@ -4,7 +4,7 @@ /// DocComment2 pub struct StructAttr { /// This is `x` - x: foo::Bar + x: foo::Bar, /// This is `y` #cfg(target: evm) y: i32 diff --git a/crates/parser2/test_files/syntax_node/structs/attr.snap b/crates/parser2/test_files/syntax_node/structs/attr.snap 
index 9b8e1c8aa7..f65a60c566 100644 --- a/crates/parser2/test_files/syntax_node/structs/attr.snap +++ b/crates/parser2/test_files/syntax_node/structs/attr.snap @@ -3,10 +3,10 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/structs/attr.fe --- -Root@0..170 - ItemList@0..170 - Item@0..170 - Struct@0..170 +Root@0..171 + ItemList@0..171 + Item@0..171 + Struct@0..171 AttrList@0..56 DocCommentAttr@0..15 DocComment@0..15 "/// DocComment1" @@ -27,7 +27,7 @@ Root@0..170 WhiteSpace@66..67 " " Ident@67..77 "StructAttr" WhiteSpace@77..78 " " - RecordFieldDefList@78..170 + RecordFieldDefList@78..171 LBrace@78..79 "{" Newline@79..80 "\n" WhiteSpace@80..84 " " @@ -47,34 +47,35 @@ Root@0..170 Colon2@110..112 "::" PathSegment@112..115 Ident@112..115 "Bar" - Newline@115..116 "\n" - WhiteSpace@116..120 " " - RecordFieldDef@120..168 - AttrList@120..158 - DocCommentAttr@120..135 - DocComment@120..135 "/// This is `y`" - Newline@135..136 "\n" - WhiteSpace@136..140 " " - Attr@140..157 - Pound@140..141 "#" - Ident@141..144 "cfg" - AttrArgList@144..157 - LParen@144..145 "(" - AttrArg@145..156 - Ident@145..151 "target" - Colon@151..152 ":" - WhiteSpace@152..153 " " - Ident@153..156 "evm" - RParen@156..157 ")" - Newline@157..158 "\n" - WhiteSpace@158..162 " " - Ident@162..163 "y" - Colon@163..164 ":" - WhiteSpace@164..165 " " - PathType@165..168 - Path@165..168 - PathSegment@165..168 - Ident@165..168 "i32" - Newline@168..169 "\n" - RBrace@169..170 "}" + Comma@115..116 "," + Newline@116..117 "\n" + WhiteSpace@117..121 " " + RecordFieldDef@121..169 + AttrList@121..159 + DocCommentAttr@121..136 + DocComment@121..136 "/// This is `y`" + Newline@136..137 "\n" + WhiteSpace@137..141 " " + Attr@141..158 + Pound@141..142 "#" + Ident@142..145 "cfg" + AttrArgList@145..158 + LParen@145..146 "(" + AttrArg@146..157 + Ident@146..152 "target" + Colon@152..153 ":" + WhiteSpace@153..154 " " + Ident@154..157 "evm" + RParen@157..158 ")" + Newline@158..159 "\n" + WhiteSpace@159..163 " " + Ident@163..164 "y" + Colon@164..165 ":" + WhiteSpace@165..166 " " + PathType@166..169 + Path@166..169 + PathSegment@166..169 + Ident@166..169 "i32" + Newline@169..170 "\n" + RBrace@170..171 "}" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.fe b/crates/parser2/test_files/syntax_node/structs/generics.fe index c808d82bf3..af3c862084 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.fe +++ b/crates/parser2/test_files/syntax_node/structs/generics.fe @@ -1,18 +1,18 @@ -pub struct StructWithGenericParam +pub struct StructWithGenericParam { - x: S - y: T - z: U + x: S, + y: T, + z: U, } - + pub struct StructWithGenericParam2< S, T: foo::Trait, U > { - x: *(S, *i32) - y: T - z: U + x: *(S, *i32), + y: T, + z: U, } pub struct StructWithGenericParam3< @@ -24,15 +24,15 @@ pub struct StructWithGenericParam3< Option: Trait1 + Trait2 Result: Trait2 + Trait3 { - x: S - y: T - z: U + x: S, + y: T, + z: U, } -pub struct MyArr +pub struct MyArr where (T, U): Trait + Trait { - __inner: [T; N] + __inner: [T; N], __inner2: (T, U) } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index 100902ec57..c8b58976b4 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -3,10 +3,10 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: 
crates/parser2/test_files/syntax_node/structs/generics.fe --- -Root@0..553 - ItemList@0..553 - Item@0..75 - Struct@0..74 +Root@0..560 + ItemList@0..560 + Item@0..78 + Struct@0..76 ItemModifier@0..3 PubKw@0..3 "pub" WhiteSpace@3..4 " " @@ -26,20 +26,20 @@ Root@0..553 TypeGenericParam@40..41 Ident@40..41 "U" Gt@41..42 ">" - WhiteSpace@42..43 " " - Newline@43..44 "\n" - RecordFieldDefList@44..74 - LBrace@44..45 "{" - Newline@45..46 "\n" - WhiteSpace@46..50 " " - RecordFieldDef@50..54 - Ident@50..51 "x" - Colon@51..52 ":" - WhiteSpace@52..53 " " - PathType@53..54 - Path@53..54 - PathSegment@53..54 - Ident@53..54 "S" + Newline@42..43 "\n" + RecordFieldDefList@43..76 + LBrace@43..44 "{" + Newline@44..45 "\n" + WhiteSpace@45..49 " " + RecordFieldDef@49..53 + Ident@49..50 "x" + Colon@50..51 ":" + WhiteSpace@51..52 " " + PathType@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "S" + Comma@53..54 "," Newline@54..55 "\n" WhiteSpace@55..59 " " RecordFieldDef@59..63 @@ -50,413 +50,419 @@ Root@0..553 Path@62..63 PathSegment@62..63 Ident@62..63 "T" - Newline@63..64 "\n" - WhiteSpace@64..68 " " - RecordFieldDef@68..72 - Ident@68..69 "z" - Colon@69..70 ":" - WhiteSpace@70..71 " " - PathType@71..72 - Path@71..72 - PathSegment@71..72 - Ident@71..72 "U" - Newline@72..73 "\n" - RBrace@73..74 "}" - Newline@74..75 "\n" - WhiteSpace@75..76 " " - Newline@76..77 "\n" - Item@77..188 - Struct@77..186 - ItemModifier@77..80 - PubKw@77..80 "pub" - WhiteSpace@80..81 " " - StructKw@81..87 "struct" - WhiteSpace@87..88 " " - Ident@88..111 "StructWithGenericParam2" - GenericParamList@111..146 - Lt@111..112 "<" - Newline@112..113 "\n" - WhiteSpace@113..117 " " - TypeGenericParam@117..118 - Ident@117..118 "S" - Comma@118..119 "," - Newline@119..120 "\n" - WhiteSpace@120..124 " " - TypeGenericParam@124..137 - Ident@124..125 "T" - TypeBoundList@125..137 - Colon@125..126 ":" - WhiteSpace@126..127 " " - TypeBound@127..137 - Path@127..137 - PathSegment@127..130 - Ident@127..130 "foo" - Colon2@130..132 "::" - PathSegment@132..137 - Ident@132..137 "Trait" - Comma@137..138 "," - Newline@138..139 "\n" - WhiteSpace@139..143 " " - TypeGenericParam@143..144 - Ident@143..144 "U" - Newline@144..145 "\n" - Gt@145..146 ">" - WhiteSpace@146..147 " " - RecordFieldDefList@147..186 - LBrace@147..148 "{" - Newline@148..149 "\n" - WhiteSpace@149..153 " " - RecordFieldDef@153..166 - Ident@153..154 "x" - Colon@154..155 ":" - WhiteSpace@155..156 " " - PtrType@156..166 - Star@156..157 "*" - TupleType@157..166 - LParen@157..158 "(" - PathType@158..159 - Path@158..159 - PathSegment@158..159 - Ident@158..159 "S" - Comma@159..160 "," - WhiteSpace@160..161 " " - PtrType@161..165 - Star@161..162 "*" - PathType@162..165 - Path@162..165 - PathSegment@162..165 - Ident@162..165 "i32" - RParen@165..166 ")" - Newline@166..167 "\n" - WhiteSpace@167..171 " " - RecordFieldDef@171..175 - Ident@171..172 "y" - Colon@172..173 ":" - WhiteSpace@173..174 " " - PathType@174..175 - Path@174..175 - PathSegment@174..175 - Ident@174..175 "T" - Newline@175..176 "\n" - WhiteSpace@176..180 " " - RecordFieldDef@180..184 - Ident@180..181 "z" - Colon@181..182 ":" - WhiteSpace@182..183 " " - PathType@183..184 - Path@183..184 - PathSegment@183..184 - Ident@183..184 "U" - Newline@184..185 "\n" - RBrace@185..186 "}" - Newline@186..188 "\n\n" - Item@188..406 - Struct@188..404 - ItemModifier@188..191 - PubKw@188..191 "pub" - WhiteSpace@191..192 " " - StructKw@192..198 "struct" - WhiteSpace@198..199 " " - Ident@199..222 "StructWithGenericParam3" - GenericParamList@222..282 - 
Lt@222..223 "<" - Newline@223..224 "\n" - WhiteSpace@224..228 " " - TypeGenericParam@228..254 - Ident@228..229 "S" - TypeBoundList@229..254 - Colon@229..230 ":" - WhiteSpace@230..231 " " - TypeBound@231..241 - Path@231..241 - PathSegment@231..234 - Ident@231..234 "foo" - Colon2@234..236 "::" - PathSegment@236..241 - Ident@236..241 "Trait" - WhiteSpace@241..242 " " - Plus@242..243 "+" - WhiteSpace@243..244 " " - TypeBound@244..254 - Path@244..254 - PathSegment@244..247 - Ident@244..247 "bar" - Colon2@247..249 "::" - PathSegment@249..254 - Ident@249..254 "Trait" - Comma@254..255 "," - Newline@255..256 "\n" - WhiteSpace@256..260 " " - TypeGenericParam@260..261 - Ident@260..261 "T" - Comma@261..262 "," - Newline@262..263 "\n" - WhiteSpace@263..267 " " - TypeGenericParam@267..280 - Ident@267..268 "U" - TypeBoundList@268..280 - Colon@268..269 ":" - WhiteSpace@269..270 " " - TypeBound@270..280 - Path@270..280 - PathSegment@270..273 - Ident@270..273 "bar" - Colon2@273..275 "::" - PathSegment@275..280 - Ident@275..280 "Trait" - Newline@280..281 "\n" - Gt@281..282 ">" - WhiteSpace@282..283 " " - WhereClause@283..374 - WhereKw@283..288 "where" - Newline@288..289 "\n" - WhiteSpace@289..293 " " - WherePredicate@293..312 - PathType@293..294 - Path@293..294 - PathSegment@293..294 - Ident@293..294 "T" - TypeBoundList@294..311 - Colon@294..295 ":" - WhiteSpace@295..296 " " - TypeBound@296..302 - Path@296..302 - PathSegment@296..302 - Ident@296..302 "Trait1" - WhiteSpace@302..303 " " - Plus@303..304 "+" - WhiteSpace@304..305 " " - TypeBound@305..311 - Path@305..311 - PathSegment@305..311 - Ident@305..311 "Trait2" - Newline@311..312 "\n" - WhiteSpace@312..316 " " - WherePredicate@316..343 - PathType@316..325 - Path@316..322 - PathSegment@316..322 - Ident@316..322 "Option" - GenericArgList@322..325 - Lt@322..323 "<" - TypeGenericArg@323..324 - PathType@323..324 - Path@323..324 - PathSegment@323..324 - Ident@323..324 "T" - Gt@324..325 ">" - TypeBoundList@325..342 - Colon@325..326 ":" - WhiteSpace@326..327 " " - TypeBound@327..333 - Path@327..333 - PathSegment@327..333 - Ident@327..333 "Trait1" - WhiteSpace@333..334 " " - Plus@334..335 "+" - WhiteSpace@335..336 " " - TypeBound@336..342 - Path@336..342 - PathSegment@336..342 - Ident@336..342 "Trait2" - Newline@342..343 "\n" - WhiteSpace@343..347 " " - WherePredicate@347..374 - PathType@347..356 - Path@347..353 - PathSegment@347..353 - Ident@347..353 "Result" - GenericArgList@353..356 - Lt@353..354 "<" - TypeGenericArg@354..355 - PathType@354..355 - Path@354..355 - PathSegment@354..355 - Ident@354..355 "U" - Gt@355..356 ">" - TypeBoundList@356..373 - Colon@356..357 ":" - WhiteSpace@357..358 " " - TypeBound@358..364 - Path@358..364 - PathSegment@358..364 - Ident@358..364 "Trait2" - WhiteSpace@364..365 " " - Plus@365..366 "+" - WhiteSpace@366..367 " " - TypeBound@367..373 - Path@367..373 - PathSegment@367..373 - Ident@367..373 "Trait3" - Newline@373..374 "\n" - RecordFieldDefList@374..404 - LBrace@374..375 "{" - Newline@375..376 "\n" - WhiteSpace@376..380 " " - RecordFieldDef@380..384 - Ident@380..381 "x" - Colon@381..382 ":" - WhiteSpace@382..383 " " - PathType@383..384 - Path@383..384 - PathSegment@383..384 - Ident@383..384 "S" - Newline@384..385 "\n" - WhiteSpace@385..389 " " - RecordFieldDef@389..393 - Ident@389..390 "y" - Colon@390..391 ":" - WhiteSpace@391..392 " " - PathType@392..393 - Path@392..393 - PathSegment@392..393 - Ident@392..393 "T" - Newline@393..394 "\n" - WhiteSpace@394..398 " " - RecordFieldDef@398..402 - Ident@398..399 "z" - Colon@399..400 ":" 
- WhiteSpace@400..401 " " - PathType@401..402 - Path@401..402 - PathSegment@401..402 - Ident@401..402 "U" - Newline@402..403 "\n" - RBrace@403..404 "}" - Newline@404..406 "\n\n" - Item@406..553 - Struct@406..553 - ItemModifier@406..409 - PubKw@406..409 "pub" - WhiteSpace@409..410 " " - StructKw@410..416 "struct" + Comma@63..64 "," + Newline@64..65 "\n" + WhiteSpace@65..69 " " + RecordFieldDef@69..73 + Ident@69..70 "z" + Colon@70..71 ":" + WhiteSpace@71..72 " " + PathType@72..73 + Path@72..73 + PathSegment@72..73 + Ident@72..73 "U" + Comma@73..74 "," + Newline@74..75 "\n" + RBrace@75..76 "}" + Newline@76..78 "\n\n" + Item@78..192 + Struct@78..190 + ItemModifier@78..81 + PubKw@78..81 "pub" + WhiteSpace@81..82 " " + StructKw@82..88 "struct" + WhiteSpace@88..89 " " + Ident@89..112 "StructWithGenericParam2" + GenericParamList@112..147 + Lt@112..113 "<" + Newline@113..114 "\n" + WhiteSpace@114..118 " " + TypeGenericParam@118..119 + Ident@118..119 "S" + Comma@119..120 "," + Newline@120..121 "\n" + WhiteSpace@121..125 " " + TypeGenericParam@125..138 + Ident@125..126 "T" + TypeBoundList@126..138 + Colon@126..127 ":" + WhiteSpace@127..128 " " + TypeBound@128..138 + Path@128..138 + PathSegment@128..131 + Ident@128..131 "foo" + Colon2@131..133 "::" + PathSegment@133..138 + Ident@133..138 "Trait" + Comma@138..139 "," + Newline@139..140 "\n" + WhiteSpace@140..144 " " + TypeGenericParam@144..145 + Ident@144..145 "U" + Newline@145..146 "\n" + Gt@146..147 ">" + WhiteSpace@147..148 " " + RecordFieldDefList@148..190 + LBrace@148..149 "{" + Newline@149..150 "\n" + WhiteSpace@150..154 " " + RecordFieldDef@154..167 + Ident@154..155 "x" + Colon@155..156 ":" + WhiteSpace@156..157 " " + PtrType@157..167 + Star@157..158 "*" + TupleType@158..167 + LParen@158..159 "(" + PathType@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "S" + Comma@160..161 "," + WhiteSpace@161..162 " " + PtrType@162..166 + Star@162..163 "*" + PathType@163..166 + Path@163..166 + PathSegment@163..166 + Ident@163..166 "i32" + RParen@166..167 ")" + Comma@167..168 "," + Newline@168..169 "\n" + WhiteSpace@169..173 " " + RecordFieldDef@173..177 + Ident@173..174 "y" + Colon@174..175 ":" + WhiteSpace@175..176 " " + PathType@176..177 + Path@176..177 + PathSegment@176..177 + Ident@176..177 "T" + Comma@177..178 "," + Newline@178..179 "\n" + WhiteSpace@179..183 " " + RecordFieldDef@183..187 + Ident@183..184 "z" + Colon@184..185 ":" + WhiteSpace@185..186 " " + PathType@186..187 + Path@186..187 + PathSegment@186..187 + Ident@186..187 "U" + Comma@187..188 "," + Newline@188..189 "\n" + RBrace@189..190 "}" + Newline@190..192 "\n\n" + Item@192..413 + Struct@192..411 + ItemModifier@192..195 + PubKw@192..195 "pub" + WhiteSpace@195..196 " " + StructKw@196..202 "struct" + WhiteSpace@202..203 " " + Ident@203..226 "StructWithGenericParam3" + GenericParamList@226..286 + Lt@226..227 "<" + Newline@227..228 "\n" + WhiteSpace@228..232 " " + TypeGenericParam@232..258 + Ident@232..233 "S" + TypeBoundList@233..258 + Colon@233..234 ":" + WhiteSpace@234..235 " " + TypeBound@235..245 + Path@235..245 + PathSegment@235..238 + Ident@235..238 "foo" + Colon2@238..240 "::" + PathSegment@240..245 + Ident@240..245 "Trait" + WhiteSpace@245..246 " " + Plus@246..247 "+" + WhiteSpace@247..248 " " + TypeBound@248..258 + Path@248..258 + PathSegment@248..251 + Ident@248..251 "bar" + Colon2@251..253 "::" + PathSegment@253..258 + Ident@253..258 "Trait" + Comma@258..259 "," + Newline@259..260 "\n" + WhiteSpace@260..264 " " + TypeGenericParam@264..265 + Ident@264..265 "T" + 
Comma@265..266 "," + Newline@266..267 "\n" + WhiteSpace@267..271 " " + TypeGenericParam@271..284 + Ident@271..272 "U" + TypeBoundList@272..284 + Colon@272..273 ":" + WhiteSpace@273..274 " " + TypeBound@274..284 + Path@274..284 + PathSegment@274..277 + Ident@274..277 "bar" + Colon2@277..279 "::" + PathSegment@279..284 + Ident@279..284 "Trait" + Newline@284..285 "\n" + Gt@285..286 ">" + WhiteSpace@286..287 " " + WhereClause@287..378 + WhereKw@287..292 "where" + Newline@292..293 "\n" + WhiteSpace@293..297 " " + WherePredicate@297..316 + PathType@297..298 + Path@297..298 + PathSegment@297..298 + Ident@297..298 "T" + TypeBoundList@298..315 + Colon@298..299 ":" + WhiteSpace@299..300 " " + TypeBound@300..306 + Path@300..306 + PathSegment@300..306 + Ident@300..306 "Trait1" + WhiteSpace@306..307 " " + Plus@307..308 "+" + WhiteSpace@308..309 " " + TypeBound@309..315 + Path@309..315 + PathSegment@309..315 + Ident@309..315 "Trait2" + Newline@315..316 "\n" + WhiteSpace@316..320 " " + WherePredicate@320..347 + PathType@320..329 + Path@320..326 + PathSegment@320..326 + Ident@320..326 "Option" + GenericArgList@326..329 + Lt@326..327 "<" + TypeGenericArg@327..328 + PathType@327..328 + Path@327..328 + PathSegment@327..328 + Ident@327..328 "T" + Gt@328..329 ">" + TypeBoundList@329..346 + Colon@329..330 ":" + WhiteSpace@330..331 " " + TypeBound@331..337 + Path@331..337 + PathSegment@331..337 + Ident@331..337 "Trait1" + WhiteSpace@337..338 " " + Plus@338..339 "+" + WhiteSpace@339..340 " " + TypeBound@340..346 + Path@340..346 + PathSegment@340..346 + Ident@340..346 "Trait2" + Newline@346..347 "\n" + WhiteSpace@347..351 " " + WherePredicate@351..378 + PathType@351..360 + Path@351..357 + PathSegment@351..357 + Ident@351..357 "Result" + GenericArgList@357..360 + Lt@357..358 "<" + TypeGenericArg@358..359 + PathType@358..359 + Path@358..359 + PathSegment@358..359 + Ident@358..359 "U" + Gt@359..360 ">" + TypeBoundList@360..377 + Colon@360..361 ":" + WhiteSpace@361..362 " " + TypeBound@362..368 + Path@362..368 + PathSegment@362..368 + Ident@362..368 "Trait2" + WhiteSpace@368..369 " " + Plus@369..370 "+" + WhiteSpace@370..371 " " + TypeBound@371..377 + Path@371..377 + PathSegment@371..377 + Ident@371..377 "Trait3" + Newline@377..378 "\n" + RecordFieldDefList@378..411 + LBrace@378..379 "{" + Newline@379..380 "\n" + WhiteSpace@380..384 " " + RecordFieldDef@384..388 + Ident@384..385 "x" + Colon@385..386 ":" + WhiteSpace@386..387 " " + PathType@387..388 + Path@387..388 + PathSegment@387..388 + Ident@387..388 "S" + Comma@388..389 "," + Newline@389..390 "\n" + WhiteSpace@390..394 " " + RecordFieldDef@394..398 + Ident@394..395 "y" + Colon@395..396 ":" + WhiteSpace@396..397 " " + PathType@397..398 + Path@397..398 + PathSegment@397..398 + Ident@397..398 "T" + Comma@398..399 "," + Newline@399..400 "\n" + WhiteSpace@400..404 " " + RecordFieldDef@404..408 + Ident@404..405 "z" + Colon@405..406 ":" + WhiteSpace@406..407 " " + PathType@407..408 + Path@407..408 + PathSegment@407..408 + Ident@407..408 "U" + Comma@408..409 "," + Newline@409..410 "\n" + RBrace@410..411 "}" + Newline@411..413 "\n\n" + Item@413..560 + Struct@413..560 + ItemModifier@413..416 + PubKw@413..416 "pub" WhiteSpace@416..417 " " - Ident@417..422 "MyArr" - GenericParamList@422..459 - Lt@422..423 "<" - TypeGenericParam@423..439 - Ident@423..424 "T" - TypeBoundList@424..439 - Colon@424..425 ":" - WhiteSpace@425..426 " " - TypeBound@426..439 - Path@426..439 - PathSegment@426..429 - Ident@426..429 "std" - Colon2@429..431 "::" - PathSegment@431..434 - Ident@431..434 
"ops" - Colon2@434..436 "::" - PathSegment@436..439 - Ident@436..439 "Add" - Comma@439..440 "," - WhiteSpace@440..441 " " - TypeGenericParam@441..442 - Ident@441..442 "U" - Comma@442..443 "," - WhiteSpace@443..444 " " - ConstGenericParam@444..458 - ConstKw@444..449 "const" - WhiteSpace@449..450 " " - Ident@450..451 "N" - Colon@451..452 ":" - WhiteSpace@452..453 " " - PathType@453..458 - Path@453..458 - PathSegment@453..458 - Ident@453..458 "usize" - Gt@458..459 ">" - WhiteSpace@459..460 " " - Newline@460..461 "\n" - WhiteSpace@461..465 " " - WhereClause@465..509 - WhereKw@465..470 "where" - Newline@470..471 "\n" - WhiteSpace@471..479 " " - WherePredicate@479..509 - TupleType@479..485 - LParen@479..480 "(" - PathType@480..481 - Path@480..481 - PathSegment@480..481 - Ident@480..481 "T" - Comma@481..482 "," - WhiteSpace@482..483 " " - PathType@483..484 - Path@483..484 - PathSegment@483..484 - Ident@483..484 "U" - RParen@484..485 ")" - TypeBoundList@485..508 - Colon@485..486 ":" - WhiteSpace@486..487 " " - TypeBound@487..492 - Path@487..492 - PathSegment@487..492 - Ident@487..492 "Trait" + StructKw@417..423 "struct" + WhiteSpace@423..424 " " + Ident@424..429 "MyArr" + GenericParamList@429..466 + Lt@429..430 "<" + TypeGenericParam@430..446 + Ident@430..431 "T" + TypeBoundList@431..446 + Colon@431..432 ":" + WhiteSpace@432..433 " " + TypeBound@433..446 + Path@433..446 + PathSegment@433..436 + Ident@433..436 "std" + Colon2@436..438 "::" + PathSegment@438..441 + Ident@438..441 "ops" + Colon2@441..443 "::" + PathSegment@443..446 + Ident@443..446 "Add" + Comma@446..447 "," + WhiteSpace@447..448 " " + TypeGenericParam@448..449 + Ident@448..449 "U" + Comma@449..450 "," + WhiteSpace@450..451 " " + ConstGenericParam@451..465 + ConstKw@451..456 "const" + WhiteSpace@456..457 " " + Ident@457..458 "N" + Colon@458..459 ":" + WhiteSpace@459..460 " " + PathType@460..465 + Path@460..465 + PathSegment@460..465 + Ident@460..465 "usize" + Gt@465..466 ">" + Newline@466..467 "\n" + WhiteSpace@467..471 " " + WhereClause@471..515 + WhereKw@471..476 "where" + Newline@476..477 "\n" + WhiteSpace@477..485 " " + WherePredicate@485..515 + TupleType@485..491 + LParen@485..486 "(" + PathType@486..487 + Path@486..487 + PathSegment@486..487 + Ident@486..487 "T" + Comma@487..488 "," + WhiteSpace@488..489 " " + PathType@489..490 + Path@489..490 + PathSegment@489..490 + Ident@489..490 "U" + RParen@490..491 ")" + TypeBoundList@491..514 + Colon@491..492 ":" WhiteSpace@492..493 " " - Plus@493..494 "+" - WhiteSpace@494..495 " " - TypeBound@495..508 - Path@495..500 - PathSegment@495..500 - Ident@495..500 "Trait" - GenericArgList@500..508 - Lt@500..501 "<" - TypeGenericArg@501..504 - PathType@501..504 - Path@501..504 - PathSegment@501..504 - Ident@501..504 "i32" - Comma@504..505 "," - WhiteSpace@505..506 " " - TypeGenericArg@506..507 - PathType@506..507 - Path@506..507 - PathSegment@506..507 - Ident@506..507 "Y" - Gt@507..508 ">" - Newline@508..509 "\n" - RecordFieldDefList@509..553 - LBrace@509..510 "{" - Newline@510..511 "\n" - WhiteSpace@511..515 " " - RecordFieldDef@515..530 - Ident@515..522 "__inner" - Colon@522..523 ":" - WhiteSpace@523..524 " " - ArrayType@524..530 - LBracket@524..525 "[" - PathType@525..526 - Path@525..526 - PathSegment@525..526 - Ident@525..526 "T" - SemiColon@526..527 ";" - WhiteSpace@527..528 " " - PathExpr@528..529 - Path@528..529 - PathSegment@528..529 - Ident@528..529 "N" - RBracket@529..530 "]" - Newline@530..531 "\n" - WhiteSpace@531..535 " " - RecordFieldDef@535..551 - Ident@535..543 "__inner2" - 
Colon@543..544 ":" - WhiteSpace@544..545 " " - TupleType@545..551 - LParen@545..546 "(" - PathType@546..547 - Path@546..547 - PathSegment@546..547 - Ident@546..547 "T" - Comma@547..548 "," - WhiteSpace@548..549 " " - PathType@549..550 - Path@549..550 - PathSegment@549..550 - Ident@549..550 "U" - RParen@550..551 ")" - Newline@551..552 "\n" - RBrace@552..553 "}" + TypeBound@493..498 + Path@493..498 + PathSegment@493..498 + Ident@493..498 "Trait" + WhiteSpace@498..499 " " + Plus@499..500 "+" + WhiteSpace@500..501 " " + TypeBound@501..514 + Path@501..506 + PathSegment@501..506 + Ident@501..506 "Trait" + GenericArgList@506..514 + Lt@506..507 "<" + TypeGenericArg@507..510 + PathType@507..510 + Path@507..510 + PathSegment@507..510 + Ident@507..510 "i32" + Comma@510..511 "," + WhiteSpace@511..512 " " + TypeGenericArg@512..513 + PathType@512..513 + Path@512..513 + PathSegment@512..513 + Ident@512..513 "Y" + Gt@513..514 ">" + Newline@514..515 "\n" + RecordFieldDefList@515..560 + LBrace@515..516 "{" + Newline@516..517 "\n" + WhiteSpace@517..521 " " + RecordFieldDef@521..536 + Ident@521..528 "__inner" + Colon@528..529 ":" + WhiteSpace@529..530 " " + ArrayType@530..536 + LBracket@530..531 "[" + PathType@531..532 + Path@531..532 + PathSegment@531..532 + Ident@531..532 "T" + SemiColon@532..533 ";" + WhiteSpace@533..534 " " + PathExpr@534..535 + Path@534..535 + PathSegment@534..535 + Ident@534..535 "N" + RBracket@535..536 "]" + Comma@536..537 "," + Newline@537..538 "\n" + WhiteSpace@538..542 " " + RecordFieldDef@542..558 + Ident@542..550 "__inner2" + Colon@550..551 ":" + WhiteSpace@551..552 " " + TupleType@552..558 + LParen@552..553 "(" + PathType@553..554 + Path@553..554 + PathSegment@553..554 + Ident@553..554 "T" + Comma@554..555 "," + WhiteSpace@555..556 " " + PathType@556..557 + Path@556..557 + PathSegment@556..557 + Ident@556..557 "U" + RParen@557..558 ")" + Newline@558..559 "\n" + RBrace@559..560 "}" diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.fe b/crates/parser2/test_files/syntax_node/structs/tupel_field.fe index bd5ea442d3..fbfd4abe89 100644 --- a/crates/parser2/test_files/syntax_node/structs/tupel_field.fe +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.fe @@ -1,9 +1,9 @@ struct StructWithTupleField { - x: (i32, u32) + x: (i32, u32), y: ( i32, foo::Bar, u32 - ) + ), z: () } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap index 48ea9f1660..5f2eb756a1 100644 --- a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap @@ -3,15 +3,15 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/structs/tupel_field.fe --- -Root@0..117 - ItemList@0..117 - Item@0..117 - Struct@0..117 +Root@0..119 + ItemList@0..119 + Item@0..119 + Struct@0..119 StructKw@0..6 "struct" WhiteSpace@6..7 " " Ident@7..27 "StructWithTupleField" WhiteSpace@27..28 " " - RecordFieldDefList@28..117 + RecordFieldDefList@28..119 LBrace@28..29 "{" Newline@29..30 "\n" WhiteSpace@30..34 " " @@ -32,49 +32,51 @@ Root@0..117 PathSegment@43..46 Ident@43..46 "u32" RParen@46..47 ")" - Newline@47..48 "\n" - WhiteSpace@48..52 " " - RecordFieldDef@52..105 - Ident@52..53 "y" - Colon@53..54 ":" - WhiteSpace@54..55 " " - TupleType@55..105 - LParen@55..56 "(" - Newline@56..57 "\n" - WhiteSpace@57..65 " " - PathType@65..68 - Path@65..68 - PathSegment@65..68 - 
Ident@65..68 "i32" - Comma@68..69 "," - Newline@69..70 "\n" - WhiteSpace@70..78 " " - PathType@78..86 - Path@78..86 - PathSegment@78..81 - Ident@78..81 "foo" - Colon2@81..83 "::" - PathSegment@83..86 - Ident@83..86 "Bar" - Comma@86..87 "," - Newline@87..88 "\n" - WhiteSpace@88..96 " " - PathType@96..99 - Path@96..99 - PathSegment@96..99 - Ident@96..99 "u32" - Newline@99..100 "\n" - WhiteSpace@100..104 " " - RParen@104..105 ")" - Newline@105..106 "\n" - WhiteSpace@106..110 " " - RecordFieldDef@110..115 - Ident@110..111 "z" - Colon@111..112 ":" - WhiteSpace@112..113 " " - TupleType@113..115 - LParen@113..114 "(" - RParen@114..115 ")" - Newline@115..116 "\n" - RBrace@116..117 "}" + Comma@47..48 "," + Newline@48..49 "\n" + WhiteSpace@49..53 " " + RecordFieldDef@53..106 + Ident@53..54 "y" + Colon@54..55 ":" + WhiteSpace@55..56 " " + TupleType@56..106 + LParen@56..57 "(" + Newline@57..58 "\n" + WhiteSpace@58..66 " " + PathType@66..69 + Path@66..69 + PathSegment@66..69 + Ident@66..69 "i32" + Comma@69..70 "," + Newline@70..71 "\n" + WhiteSpace@71..79 " " + PathType@79..87 + Path@79..87 + PathSegment@79..82 + Ident@79..82 "foo" + Colon2@82..84 "::" + PathSegment@84..87 + Ident@84..87 "Bar" + Comma@87..88 "," + Newline@88..89 "\n" + WhiteSpace@89..97 " " + PathType@97..100 + Path@97..100 + PathSegment@97..100 + Ident@97..100 "u32" + Newline@100..101 "\n" + WhiteSpace@101..105 " " + RParen@105..106 ")" + Comma@106..107 "," + Newline@107..108 "\n" + WhiteSpace@108..112 " " + RecordFieldDef@112..117 + Ident@112..113 "z" + Colon@113..114 ":" + WhiteSpace@114..115 " " + TupleType@115..117 + LParen@115..116 "(" + RParen@116..117 ")" + Newline@117..118 "\n" + RBrace@118..119 "}" diff --git a/crates/uitest/fixtures/name_resolution/conflict_field.fe b/crates/uitest/fixtures/name_resolution/conflict_field.fe index d51f1da9d0..6fcc98c509 100644 --- a/crates/uitest/fixtures/name_resolution/conflict_field.fe +++ b/crates/uitest/fixtures/name_resolution/conflict_field.fe @@ -1,4 +1,4 @@ pub struct MyS { - x: i32 - x: u32 + x: i32, + x: u32, } \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/conflict_field.snap b/crates/uitest/fixtures/name_resolution/conflict_field.snap index ef92860faf..9b73251e60 100644 --- a/crates/uitest/fixtures/name_resolution/conflict_field.snap +++ b/crates/uitest/fixtures/name_resolution/conflict_field.snap @@ -1,14 +1,14 @@ --- -source: crates/uitest/src/lib.rs +source: crates/uitest/tests/name_resolution.rs expression: diags input_file: crates/uitest/fixtures/name_resolution/conflict_field.fe --- error[2-0001]: `x` conflicts with other definitions ┌─ conflict_field.fe:2:5 │ -2 │ x: i32 +2 │ x: i32, │ ^ `x` is defined here -3 │ x: u32 +3 │ x: u32, │ - `x` is redefined here diff --git a/crates/uitest/fixtures/name_resolution/conflict_generics.fe b/crates/uitest/fixtures/name_resolution/conflict_generics.fe index 218dc861ac..2422e2b0a7 100644 --- a/crates/uitest/fixtures/name_resolution/conflict_generics.fe +++ b/crates/uitest/fixtures/name_resolution/conflict_generics.fe @@ -1,4 +1,4 @@ pub struct MyS { - x: T + x: T, y: U } \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/conflict_generics.snap b/crates/uitest/fixtures/name_resolution/conflict_generics.snap index c2949fd85c..e989d213b5 100644 --- a/crates/uitest/fixtures/name_resolution/conflict_generics.snap +++ b/crates/uitest/fixtures/name_resolution/conflict_generics.snap @@ -18,7 +18,7 @@ error[2-0004]: `T` is ambiguous │ - - candidate `#1` │ │ │ candidate `#0` -2 │ x: T 
+2 │ x: T, │ ^ `T` is ambiguous diff --git a/crates/uitest/fixtures/name_resolution/path_missing_generics.fe b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe index 59d31614bd..25d6d0e534 100644 --- a/crates/uitest/fixtures/name_resolution/path_missing_generics.fe +++ b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe @@ -1,12 +1,12 @@ pub trait Trait {} -pub struct MyS +pub struct MyS where T: Trait U: Trait Z: Trait -{ - t: T - u: U +{ + t: T, + u: U, z: Z } diff --git a/crates/uitest/fixtures/name_resolution/path_shadow.fe b/crates/uitest/fixtures/name_resolution/path_shadow.fe index 70d9c6124a..d7c47dd7d0 100644 --- a/crates/uitest/fixtures/name_resolution/path_shadow.fe +++ b/crates/uitest/fixtures/name_resolution/path_shadow.fe @@ -2,6 +2,6 @@ pub trait T {} pub struct MyS where U: T { - t: T + t: T, u: U } \ No newline at end of file From 01f1b65fd536857719c5c0af5bd79b539372be44 Mon Sep 17 00:00:00 2001 From: Sean Billig Date: Sun, 27 Aug 2023 19:25:21 -0700 Subject: [PATCH 254/678] Add parser error uitest for missing struct field separator --- Cargo.lock | 1 + crates/uitest/Cargo.toml | 1 + .../parser/struct_field_missing_comma.fe | 5 ++ .../parser/struct_field_missing_comma.snap | 12 +++++ crates/uitest/tests/parser.rs | 46 +++++++++++++++++++ 5 files changed, 65 insertions(+) create mode 100644 crates/uitest/fixtures/parser/struct_field_missing_comma.fe create mode 100644 crates/uitest/fixtures/parser/struct_field_missing_comma.snap create mode 100644 crates/uitest/tests/parser.rs diff --git a/Cargo.lock b/Cargo.lock index fa17d3bb37..436e7e603e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -660,6 +660,7 @@ dependencies = [ "dir-test", "fe-compiler-test-utils", "fe-driver2", + "fe-hir", "wasm-bindgen-test", ] diff --git a/crates/uitest/Cargo.toml b/crates/uitest/Cargo.toml index 08323f4203..e0f9df2978 100644 --- a/crates/uitest/Cargo.toml +++ b/crates/uitest/Cargo.toml @@ -10,6 +10,7 @@ publish = false [dependencies] driver = { path = "../driver2", package = "fe-driver2" } +hir = { path = "../hir", package = "fe-hir" } fe-compiler-test-utils = { path = "../test-utils" } dir-test = "0.1" wasm-bindgen-test = "0.3" diff --git a/crates/uitest/fixtures/parser/struct_field_missing_comma.fe b/crates/uitest/fixtures/parser/struct_field_missing_comma.fe new file mode 100644 index 0000000000..2a644665c7 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_field_missing_comma.fe @@ -0,0 +1,5 @@ +struct S { + x: u8 + y: i8 + ,z: i8, +} diff --git a/crates/uitest/fixtures/parser/struct_field_missing_comma.snap b/crates/uitest/fixtures/parser/struct_field_missing_comma.snap new file mode 100644 index 0000000000..e5b1bdc3c0 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_field_missing_comma.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/struct_field_missing_comma.fe +--- +error[1-0001]: expected comma after field definition + ┌─ struct_field_missing_comma.fe:2:10 + │ +2 │ x: u8 + │ ^ expected comma after field definition + + diff --git a/crates/uitest/tests/parser.rs b/crates/uitest/tests/parser.rs new file mode 100644 index 0000000000..03663a31db --- /dev/null +++ b/crates/uitest/tests/parser.rs @@ -0,0 +1,46 @@ +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use driver::DriverDataBase; +use fe_compiler_test_utils::snap_test; +use hir::{analysis_pass::AnalysisPassManager, ParsingPass}; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/fixtures/parser", + glob: "*.fe" 
+)] +fn run_parser(fixture: Fixture<&str>) { + let mut driver = DriverDataBase::default(); + let path = Path::new(fixture.path()); + let top_mod = driver.top_mod_from_file(path, fixture.content()); + driver.run_on_file_with_pass_manager(top_mod, init_parser_pass); + let diags = driver.format_diags(); + snap_test!(diags, fixture.path()); +} + +fn init_parser_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager +} + +#[cfg(target_family = "wasm")] +mod wasm { + use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/fixtures/name_resolution", + glob: "*.fe", + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn run_parser(fixture: Fixture<&str>) { + let mut driver = DriverDataBase::default(); + let path = Path::new(fixture.path()); + let top_mod = driver.top_mod_from_file(path, fixture.content()); + driver.run_on_file_with_pass_manager(top_mod, init_parser_pass); + } +} From 69a989189ae322c6748f6885b1a257d748260a42 Mon Sep 17 00:00:00 2001 From: Grant Wuerker Date: Fri, 25 Aug 2023 10:47:55 -0600 Subject: [PATCH 255/678] clippy --- crates/analyzer/src/db.rs | 1 + crates/analyzer/src/db/queries/module.rs | 2 +- crates/codegen/src/db.rs | 1 + crates/codegen/src/yul/runtime/mod.rs | 2 +- crates/common/src/db.rs | 1 + crates/mir/src/db.rs | 1 + crates/mir/src/lower/function.rs | 2 +- crates/parser/src/grammar/types.rs | 1 + 8 files changed, 8 insertions(+), 3 deletions(-) diff --git a/crates/analyzer/src/db.rs b/crates/analyzer/src/db.rs index a8646e5c51..7ecf6078d1 100644 --- a/crates/analyzer/src/db.rs +++ b/crates/analyzer/src/db.rs @@ -1,3 +1,4 @@ +#![allow(clippy::arc_with_non_send_sync)] use crate::{ context::{Analysis, Constant, FunctionBody}, errors::{ConstEvalError, TypeError}, diff --git a/crates/analyzer/src/db/queries/module.rs b/crates/analyzer/src/db/queries/module.rs index f397cdb0d1..057e6a7267 100644 --- a/crates/analyzer/src/db/queries/module.rs +++ b/crates/analyzer/src/db/queries/module.rs @@ -564,7 +564,7 @@ pub fn module_used_item_map( ) .value; - items.extend(Rc::try_unwrap(prelude_items).unwrap().into_iter()); + items.extend(Rc::try_unwrap(prelude_items).unwrap()); } Analysis::new(Rc::new(items), diagnostics.into()) diff --git a/crates/codegen/src/db.rs b/crates/codegen/src/db.rs index de52a2a681..ce036795a3 100644 --- a/crates/codegen/src/db.rs +++ b/crates/codegen/src/db.rs @@ -1,3 +1,4 @@ +#![allow(clippy::arc_with_non_send_sync)] use std::rc::Rc; use fe_abi::{contract::AbiContract, event::AbiEvent, function::AbiFunction, types::AbiType}; diff --git a/crates/codegen/src/yul/runtime/mod.rs b/crates/codegen/src/yul/runtime/mod.rs index 5cb6fb7a64..a658b4b4aa 100644 --- a/crates/codegen/src/yul/runtime/mod.rs +++ b/crates/codegen/src/yul/runtime/mod.rs @@ -383,7 +383,7 @@ impl RuntimeProvider for DefaultRuntimeProvider { } let deref_ty = ptr_ty.deref(db.upcast()); - let args = std::iter::once(ptr).chain(args.into_iter()).collect(); + let args = std::iter::once(ptr).chain(args).collect(); let legalized_ty = db.codegen_legalized_type(ptr_ty); if deref_ty.is_enum(db.upcast()) { let mut name = format!("enum_init_{}", ptr_ty.0); diff --git a/crates/common/src/db.rs b/crates/common/src/db.rs index 0b06596423..51a9afceb0 100644 --- a/crates/common/src/db.rs +++ b/crates/common/src/db.rs @@ -1,3 +1,4 @@ +#![allow(clippy::arc_with_non_send_sync)] use crate::files::{File, 
SourceFileId, Utf8Path}; use codespan_reporting as cs; use salsa; diff --git a/crates/mir/src/db.rs b/crates/mir/src/db.rs index 8c160a31ee..fc930318de 100644 --- a/crates/mir/src/db.rs +++ b/crates/mir/src/db.rs @@ -1,3 +1,4 @@ +#![allow(clippy::arc_with_non_send_sync)] use std::{collections::BTreeMap, rc::Rc}; use fe_analyzer::{ diff --git a/crates/mir/src/lower/function.rs b/crates/mir/src/lower/function.rs index a48d229522..0ae4ffbc92 100644 --- a/crates/mir/src/lower/function.rs +++ b/crates/mir/src/lower/function.rs @@ -1082,7 +1082,7 @@ impl<'db, 'a> BodyLowerHelper<'db, 'a> { let enum_args = if data_ty.is_unit(self.db) { vec![tag, self.make_unit()] } else { - std::iter::once(tag).chain(args.into_iter()).collect() + std::iter::once(tag).chain(args).collect() }; self.builder.aggregate_construct(ty, enum_args, source) } diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs index f4b8c9c169..d557e04c43 100644 --- a/crates/parser/src/grammar/types.rs +++ b/crates/parser/src/grammar/types.rs @@ -83,6 +83,7 @@ pub fn parse_struct_def( )) } +#[allow(clippy::unnecessary_literal_unwrap)] /// Parse a [`ModuleStmt::Enum`]. /// # Panics /// Panics if the next token isn't [`TokenKind::Enum`]. From cdc03e6efa43d62da7bdd51d4f45deaef8556a54 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 29 Aug 2023 11:04:23 -0500 Subject: [PATCH 256/678] language server workspace file cache sketch --- Cargo.lock | 10 ++ crates/language-server/Cargo.toml | 1 + crates/language-server/src/goto.rs | 4 +- .../src/handlers/notifications.rs | 14 ++ crates/language-server/src/main.rs | 1 + crates/language-server/src/workspace.rs | 164 ++++++++++++++++++ .../test_files/messy/dangling.fe | 0 .../test_files/messy/foo/bar/fe.toml | 0 .../test_files/messy/foo/bar/src/main.fe | 0 .../test_files/nested_ingots/fe.toml | 0 .../nested_ingots/ingots/foo/fe.toml | 0 .../nested_ingots/ingots/foo/src/main.fe | 0 .../test_files/nested_ingots/src/lib.fe | 0 .../test_files/single_ingot/fe.toml | 0 .../test_files/single_ingot/src/lib.fe | 0 15 files changed, 192 insertions(+), 2 deletions(-) create mode 100644 crates/language-server/src/workspace.rs create mode 100644 crates/language-server/test_files/messy/dangling.fe create mode 100644 crates/language-server/test_files/messy/foo/bar/fe.toml create mode 100644 crates/language-server/test_files/messy/foo/bar/src/main.fe create mode 100644 crates/language-server/test_files/nested_ingots/fe.toml create mode 100644 crates/language-server/test_files/nested_ingots/ingots/foo/fe.toml create mode 100644 crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe create mode 100644 crates/language-server/test_files/nested_ingots/src/lib.fe create mode 100644 crates/language-server/test_files/single_ingot/fe.toml create mode 100644 crates/language-server/test_files/single_ingot/src/lib.fe diff --git a/Cargo.lock b/Cargo.lock index cd8bd4eeaf..bdc0e64f7d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1126,6 +1126,7 @@ dependencies = [ "log", "lsp-server", "lsp-types", + "patricia_tree", "rowan", "salsa-2022", "serde", @@ -1940,6 +1941,15 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" +[[package]] +name = "patricia_tree" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "457d51c8a92e0fe2c45cd1abf6c64e5be012097756462f956483f3149fcd9649" +dependencies = [ + "bitflags", +] + [[package]] name = 
"percent-encoding" version = "2.3.0" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index d1d36366d1..f5d5ec7fce 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -32,3 +32,4 @@ fxhash = "0.2.1" dir-test = "0.1" fe-compiler-test-utils = { path = "../test-utils" } log = "0.4" +patricia_tree = "0.6.2" diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 02996cad1a..05bb988c9c 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -133,7 +133,7 @@ mod tests { .join("\n"); cursor_path_map.insert(*cursor, path); }, - EarlyResolvedPath::Partial { res, unresolved_from } => { + EarlyResolvedPath::Partial { res, unresolved_from: _ } => { let path = res.pretty_path(&db).unwrap(); cursor_path_map.insert(*cursor, path); }, @@ -180,7 +180,7 @@ mod tests { bucket.iter().map(|x| x.pretty_path(&db).unwrap()).collect::>() .join("\n") }, - EarlyResolvedPath::Partial { res, unresolved_from } => { + EarlyResolvedPath::Partial { res, unresolved_from: _ } => { res.pretty_path(&db).unwrap() }, }; diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 0bd37e8188..014f1a976f 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -48,6 +48,20 @@ pub(crate) fn handle_document_did_change( send_diagnostics(state, diagnostics, params.text_document.uri.clone()) } +pub(crate) fn handle_workspace_did_change_folders( + state: &mut ServerState, + note: lsp_server::Notification, +) -> Result<(), Error> { + let params = lsp_types::DidChangeWorkspaceFoldersParams::deserialize(note.params)?; + // let response_message = lsp_server::Response { + // id: lsp_server::RequestId::Num(0), + // result: None, + // error: None, + // }; + // state.send_response(response_message)?; + Ok(()) +} + fn send_diagnostics( state: &mut ServerState, diagnostics: Vec, diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index d49f4ac04e..279ecd3a64 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -4,6 +4,7 @@ mod db; mod util; mod diagnostics; mod goto; +mod workspace; use db::Jar; mod handlers { diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs new file mode 100644 index 0000000000..c66b40c5a7 --- /dev/null +++ b/crates/language-server/src/workspace.rs @@ -0,0 +1,164 @@ +use std::{ + collections::BTreeSet, + path::{Path, PathBuf}, +}; + +use common::{ + input::{IngotKind, Version}, + InputFile, InputIngot, +}; +use patricia_tree::StringPatriciaMap; + +use crate::db::LanguageServerDatabase; + +const FE_CONFIG_SUFFIX: &str = "fe.toml"; + +trait IngotFileContext { + fn get_input_for_file_path(&self, path: &str) -> Option; + fn get_ingot_for_file_path(&self, path: &str) -> Option; +} + +struct Ingot { + local: InputIngot, + external: InputIngot, +} + +pub struct LocalIngotContext { + db: LanguageServerDatabase, + // cache `InputIngot` for path + ingot: InputIngot, + external_ingots: StringPatriciaMap, + // cache `InputFile` for path + files: StringPatriciaMap, +} + +fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { + let ingot_path = ingot_path + .strip_suffix(&FE_CONFIG_SUFFIX) + .unwrap_or(ingot_path); + file_path.starts_with(ingot_path) +} + +fn get_containing_ingot<'a, T>(ingots: &'a StringPatriciaMap, path: &'a str) -> 
Option { + ingots + .get_longest_common_prefix(path) + .filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) + .map(|(_, ingot)| *ingot) +} + +impl LocalIngotContext { + pub fn new(db: LanguageServerDatabase, config_path: &str) -> Option { + let ingot = InputIngot::new( + &db, + config_path, + IngotKind::Local, + Version::new(0, 0, 0), + BTreeSet::new(), + ); + Some(Self { + db, + ingot, + external_ingots: StringPatriciaMap::new(), + files: StringPatriciaMap::new(), + }) + } +} + +impl IngotFileContext for LocalIngotContext { + fn get_input_for_file_path(&self, path: &str) -> Option { + self.files.get(path).map_or_else( + || { + let ingot = self.get_ingot_for_file_path(path)?; + let file = InputFile::new(&self.db, ingot, path.into(), "".into()); + self.files.insert(path.to_string(), file.clone()); + Some(file) + }, + |file| Some(*file), + ) + } + + fn get_ingot_for_file_path(&self, path: &str) -> Option { + get_containing_ingot(&self.external_ingots, path).or_else(|| Some(self.ingot.clone())) + } +} + +struct StandaloneIngotContext { + db: LanguageServerDatabase, + ingots: StringPatriciaMap, + files: StringPatriciaMap, +} + +impl StandaloneIngotContext { + pub fn new(db: LanguageServerDatabase) -> Self { + Self { + db, + ingots: StringPatriciaMap::new(), + files: StringPatriciaMap::new(), + } + } +} + +impl IngotFileContext for StandaloneIngotContext { + fn get_input_for_file_path(&self, path: &str) -> Option { + self.files.get(path).map_or_else( + || { + let ingot = self.get_ingot_for_file_path(path)?; + let file = InputFile::new(&self.db, ingot, path.into(), "".into()); + self.files.insert(path.to_string(), file.clone()); + Some(file) + }, + |file| Some(*file), + ) + } + + fn get_ingot_for_file_path(&self, path: &str) -> Option { + get_containing_ingot(&self.ingots, path) + } +} + +struct Workspace { + db: LanguageServerDatabase, + ingot_contexts: StringPatriciaMap, + standalone_ingot_contexts: StandaloneIngotContext, +} + +impl Workspace { + pub fn new(db: LanguageServerDatabase) -> Self { + Self { + db, + ingot_contexts: StringPatriciaMap::new(), + standalone_ingot_contexts: StandaloneIngotContext::new(db), + } + } + + pub fn get_ingot_context(&self, config_path: &str) -> Option { + self.ingot_contexts.get(config_path).map_or_else( + || { + let ingot = InputIngot::new( + &self.db, + config_path, + IngotKind::Local, + Version::new(0, 0, 0), + BTreeSet::new(), + ); + let context = LocalIngotContext::new(self.db, config_path); + self.ingot_contexts + .insert(config_path.to_string(), context.unwrap()); + context + }, + |context| Some(*context), + ) + } + + pub fn get_ingot_for_file_path(&self, path: &str) -> Option { + get_containing_ingot(&self.ingot_contexts, path).map_or_else( + || self.standalone_ingot_contexts.get_ingot_for_file_path(path), + |ingot| Some(ingot.ingot), + ) + } + + pub fn get_input_file_for_file_path(&self, path: &str) -> Option { + self.get_ingot_for_file_path(path) + .map_or_else(|| None, |ingot| Some(ingot.root_file(&self.db))) + } +} diff --git a/crates/language-server/test_files/messy/dangling.fe b/crates/language-server/test_files/messy/dangling.fe new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/messy/foo/bar/fe.toml b/crates/language-server/test_files/messy/foo/bar/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/messy/foo/bar/src/main.fe b/crates/language-server/test_files/messy/foo/bar/src/main.fe new file mode 100644 index 0000000000..e69de29bb2 
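[Editorial sketch, not part of this patch] The workspace cache above resolves a source file to its enclosing ingot with a longest-common-prefix lookup over ingot directories: each `fe.toml` config path is stripped to its directory before being used as a key, and a prefix hit only counts if the file actually lives under that directory (`ingot_contains_file`). A minimal standalone sketch of that lookup, assuming only the `patricia_tree::StringPatriciaMap` calls already used in workspace.rs (`insert`, `get_longest_common_prefix`) and illustrative paths borrowed from the `test_files` fixtures added here:

    use patricia_tree::StringPatriciaMap;

    // Mirrors `ingot_contains_file` above: a prefix hit only counts if the
    // file really lives under that ingot directory.
    fn ingot_contains_file(ingot_dir: &str, file_path: &str) -> bool {
        file_path.starts_with(ingot_dir)
    }

    fn main() {
        let mut ingots: StringPatriciaMap<&'static str> = StringPatriciaMap::new();
        // Keys are ingot directories, i.e. config paths with the trailing `fe.toml` stripped.
        ingots.insert("test_files/single_ingot/".to_string(), "single_ingot");
        ingots.insert("test_files/nested_ingots/ingots/foo/".to_string(), "foo");

        let file = "test_files/nested_ingots/ingots/foo/src/main.fe";
        let found = ingots
            .get_longest_common_prefix(file)
            .filter(|(ingot_dir, _)| ingot_contains_file(ingot_dir, file))
            .map(|(_, name)| *name);
        // The inner ingot wins because it shares the longest key prefix with the file path.
        assert_eq!(found, Some("foo"));
    }

The same longest-prefix rule is what later lets the `nested_ingots` fixture distinguish the inner `ingots/foo` ingot from the enclosing one, and lets `messy/dangling.fe` fall back to a standalone ingot.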
diff --git a/crates/language-server/test_files/nested_ingots/fe.toml b/crates/language-server/test_files/nested_ingots/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/nested_ingots/ingots/foo/fe.toml b/crates/language-server/test_files/nested_ingots/ingots/foo/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe b/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/nested_ingots/src/lib.fe b/crates/language-server/test_files/nested_ingots/src/lib.fe new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/single_ingot/fe.toml b/crates/language-server/test_files/single_ingot/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/single_ingot/src/lib.fe b/crates/language-server/test_files/single_ingot/src/lib.fe new file mode 100644 index 0000000000..e69de29bb2 From d9191cbcec86a1aa8c91f61ba23bb52a9b488691 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 30 Aug 2023 12:33:02 -0500 Subject: [PATCH 257/678] language server workspace building --- crates/language-server/src/workspace.rs | 106 ++++++++++-------------- 1 file changed, 45 insertions(+), 61 deletions(-) diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index c66b40c5a7..a14e9238ca 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -1,7 +1,4 @@ -use std::{ - collections::BTreeSet, - path::{Path, PathBuf}, -}; +use std::collections::BTreeSet; use common::{ input::{IngotKind, Version}, @@ -14,8 +11,8 @@ use crate::db::LanguageServerDatabase; const FE_CONFIG_SUFFIX: &str = "fe.toml"; trait IngotFileContext { - fn get_input_for_file_path(&self, path: &str) -> Option; - fn get_ingot_for_file_path(&self, path: &str) -> Option; + fn get_input_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option; + fn get_ingot_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option; } struct Ingot { @@ -24,10 +21,8 @@ struct Ingot { } pub struct LocalIngotContext { - db: LanguageServerDatabase, - // cache `InputIngot` for path ingot: InputIngot, - external_ingots: StringPatriciaMap, + // external_ingots: StringPatriciaMap, // cache `InputFile` for path files: StringPatriciaMap, } @@ -39,11 +34,11 @@ fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { file_path.starts_with(ingot_path) } -fn get_containing_ingot<'a, T>(ingots: &'a StringPatriciaMap, path: &'a str) -> Option { +fn get_containing_ingot<'a, T>(ingots: &'a StringPatriciaMap, path: &'a str) -> Option<&'a T> { ingots .get_longest_common_prefix(path) .filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) - .map(|(_, ingot)| *ingot) + .map(|(_, ingot)| ingot) } impl LocalIngotContext { @@ -56,42 +51,40 @@ impl LocalIngotContext { BTreeSet::new(), ); Some(Self { - db, ingot, - external_ingots: StringPatriciaMap::new(), + // external_ingots: StringPatriciaMap::new(), files: StringPatriciaMap::new(), }) } } impl IngotFileContext for LocalIngotContext { - fn get_input_for_file_path(&self, path: &str) -> Option { - self.files.get(path).map_or_else( + fn get_input_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { + let ingot = self.get_ingot_for_file_path(db, path)?; + let 
input = self.files.get(path).map_or_else( || { - let ingot = self.get_ingot_for_file_path(path)?; - let file = InputFile::new(&self.db, ingot, path.into(), "".into()); - self.files.insert(path.to_string(), file.clone()); + let file = InputFile::new(db, ingot, path.into(), "".into()); Some(file) }, |file| Some(*file), - ) + ); + self.files.insert(path.to_string(), input.unwrap()); + input } - fn get_ingot_for_file_path(&self, path: &str) -> Option { - get_containing_ingot(&self.external_ingots, path).or_else(|| Some(self.ingot.clone())) + fn get_ingot_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { + Some(self.ingot) } } struct StandaloneIngotContext { - db: LanguageServerDatabase, ingots: StringPatriciaMap, files: StringPatriciaMap, } impl StandaloneIngotContext { - pub fn new(db: LanguageServerDatabase) -> Self { + pub fn new() -> Self { Self { - db, ingots: StringPatriciaMap::new(), files: StringPatriciaMap::new(), } @@ -99,66 +92,57 @@ impl StandaloneIngotContext { } impl IngotFileContext for StandaloneIngotContext { - fn get_input_for_file_path(&self, path: &str) -> Option { - self.files.get(path).map_or_else( + fn get_input_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { + let ingot = self.get_ingot_for_file_path(db, path)?; + let input = self.files.get(path).map_or_else( || { - let ingot = self.get_ingot_for_file_path(path)?; - let file = InputFile::new(&self.db, ingot, path.into(), "".into()); - self.files.insert(path.to_string(), file.clone()); + let file = InputFile::new(db, ingot, path.into(), "".into()); Some(file) }, |file| Some(*file), - ) + ); + self.files.insert(path.to_string(), input.unwrap()); + input } - fn get_ingot_for_file_path(&self, path: &str) -> Option { - get_containing_ingot(&self.ingots, path) + fn get_ingot_for_file_path(&mut self, _db: &LanguageServerDatabase, path: &str) -> Option { + get_containing_ingot(&self.ingots, path).as_deref().copied() } } -struct Workspace { - db: LanguageServerDatabase, +pub(crate) struct Workspace { ingot_contexts: StringPatriciaMap, standalone_ingot_contexts: StandaloneIngotContext, } impl Workspace { - pub fn new(db: LanguageServerDatabase) -> Self { + pub fn new() -> Self { Self { - db, ingot_contexts: StringPatriciaMap::new(), - standalone_ingot_contexts: StandaloneIngotContext::new(db), + standalone_ingot_contexts: StandaloneIngotContext::new(), } } - pub fn get_ingot_context(&self, config_path: &str) -> Option { - self.ingot_contexts.get(config_path).map_or_else( - || { - let ingot = InputIngot::new( - &self.db, - config_path, - IngotKind::Local, - Version::new(0, 0, 0), - BTreeSet::new(), - ); - let context = LocalIngotContext::new(self.db, config_path); - self.ingot_contexts - .insert(config_path.to_string(), context.unwrap()); - context - }, - |context| Some(*context), - ) + pub fn get_ingot_context(&mut self, db: LanguageServerDatabase, config_path: &str) -> Option<&LocalIngotContext> { + if self.ingot_contexts.contains_key(config_path) { + return self.ingot_contexts.get(config_path); + } else { + let ingot_context = LocalIngotContext::new(db, config_path)?; + self.ingot_contexts.insert(config_path.to_string(), ingot_context); + return self.ingot_contexts.get(config_path); + } } - pub fn get_ingot_for_file_path(&self, path: &str) -> Option { - get_containing_ingot(&self.ingot_contexts, path).map_or_else( - || self.standalone_ingot_contexts.get_ingot_for_file_path(path), - |ingot| Some(ingot.ingot), + pub fn get_ingot_for_file_path(&mut self, db: 
&LanguageServerDatabase, path: &str) -> Option { + let ctx = get_containing_ingot(&self.ingot_contexts, path); + ctx.map_or_else( + || self.standalone_ingot_contexts.get_ingot_for_file_path(db, path), + |ingot_context| Some(ingot_context.ingot.clone()), ) } - pub fn get_input_file_for_file_path(&self, path: &str) -> Option { - self.get_ingot_for_file_path(path) - .map_or_else(|| None, |ingot| Some(ingot.root_file(&self.db))) + pub fn get_input_file_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { + self.get_ingot_for_file_path(db, path) + .map_or_else(|| None, |ingot| Some(ingot.root_file(db))) } } From 7be890bd2b6683e3cc8704c22c9800964d724166 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 31 Aug 2023 22:16:32 -0500 Subject: [PATCH 258/678] language server workspace compiling + tests failing --- crates/language-server/src/db.rs | 30 ++----- crates/language-server/src/goto.rs | 26 +++--- .../src/handlers/notifications.rs | 15 ++-- .../language-server/src/handlers/request.rs | 4 +- crates/language-server/src/state.rs | 7 +- crates/language-server/src/workspace.rs | 87 ++++++++++++------- 6 files changed, 89 insertions(+), 80 deletions(-) diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index dc9a96bb26..31c4b7000a 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -1,4 +1,6 @@ -use std::{collections::BTreeSet, path}; +use std::{collections::BTreeSet, path, ops::DerefMut, borrow::BorrowMut}; + +use super::workspace; use common::{ diagnostics::CompleteDiagnostic, @@ -13,8 +15,9 @@ use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, HirAnalysisDb, }; +use patricia_tree::StringPatriciaMap; -use crate::goto::Cursor; +use crate::{goto::Cursor, workspace::{LocalIngotContext, StandaloneIngotContext, IngotFileContext, get_containing_ingot}}; #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::diagnostics::file_line_starts); @@ -50,28 +53,6 @@ impl LanguageServerDatabase { }; } - pub fn top_mod_from_file(&mut self, file_path: &path::Path, source: &str) -> TopLevelMod { - let kind = IngotKind::StandAlone; - - // We set the ingot version to 0.0.0 for stand-alone file. 
- let version = Version::new(0, 0, 0); - let root_file = file_path; - let ingot = InputIngot::new( - self, - file_path.parent().unwrap().as_os_str().to_str().unwrap(), - kind, - version, - BTreeSet::new(), - ); - - let file_name = root_file.file_name().unwrap().to_str().unwrap(); - let file = InputFile::new(self, ingot, file_name.into(), source.to_string()); - ingot.set_root_file(self, file); - ingot.set_files(self, [file].into()); - - map_file_to_mod(self, file) - } - pub fn find_enclosing_item(&mut self, top_mod: TopLevelMod, cursor: Cursor) -> Option { let items = top_mod.scope_graph(self.as_hir_db()).items_dfs(self.as_hir_db()); @@ -102,6 +83,7 @@ impl LanguageServerDatabase { }); diags } + } impl salsa::Database for LanguageServerDatabase { diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 05bb988c9c..a6d2eba966 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -86,6 +86,8 @@ pub fn goto_enclosing_path(db: &mut LanguageServerDatabase, top_mod: TopLevelMod #[cfg(test)] mod tests { + use crate::workspace::Workspace; + use super::*; use fe_compiler_test_utils::snap_test; use dir_test::{dir_test, Fixture}; @@ -115,26 +117,27 @@ mod tests { glob: "goto*.fe" )] fn test_goto_enclosing_path(fixture: Fixture<&str>) { - let mut db = LanguageServerDatabase::default(); + let mut db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = db.top_mod_from_file(path, fixture.content()); + let top_mod = workspace.top_mod_from_file(&mut db, path, fixture.content()); let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); let mut cursor_path_map: FxHashMap = FxHashMap::default(); cursors.iter().for_each(|cursor| { - let resolved_path = goto_enclosing_path(&mut db, top_mod, *cursor); + let resolved_path = goto_enclosing_path(db, top_mod, *cursor); match resolved_path { Some(path) => match path { EarlyResolvedPath::Full(bucket) => { - let path = bucket.iter().map(|x| x.pretty_path(&db).unwrap()).collect::>() + let path = bucket.iter().map(|x| x.pretty_path(db).unwrap()).collect::>() .join("\n"); cursor_path_map.insert(*cursor, path); }, EarlyResolvedPath::Partial { res, unresolved_from: _ } => { - let path = res.pretty_path(&db).unwrap(); + let path = res.pretty_path(db).unwrap(); cursor_path_map.insert(*cursor, path); }, }, @@ -157,11 +160,12 @@ mod tests { glob: "smallest_enclosing*.fe" )] fn test_smallest_enclosing_path(fixture: Fixture<&str>) { - let mut db = LanguageServerDatabase::default(); + let db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = db.top_mod_from_file(path, fixture.content()); + let top_mod = workspace.top_mod_from_file(db, path, fixture.content()); - let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); + let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); let mut cursor_path_map: FxHashMap = FxHashMap::default(); @@ -173,15 +177,15 @@ mod tests { let path_map = path_collector.path_map; let enclosing_path = smallest_enclosing_path(*cursor, &path_map); - let resolved_enclosing_path = hir_analysis::name_resolution::resolve_path_early(&mut db, enclosing_path.unwrap().0, enclosing_path.unwrap().1); + let resolved_enclosing_path = hir_analysis::name_resolution::resolve_path_early(db, enclosing_path.unwrap().0, enclosing_path.unwrap().1); let res = match 
resolved_enclosing_path { EarlyResolvedPath::Full(bucket) => { - bucket.iter().map(|x| x.pretty_path(&db).unwrap()).collect::>() + bucket.iter().map(|x| x.pretty_path(db).unwrap()).collect::>() .join("\n") }, EarlyResolvedPath::Partial { res, unresolved_from: _ } => { - res.pretty_path(&db).unwrap() + res.pretty_path(db).unwrap() }, }; cursor_path_map.insert(*cursor, res); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 014f1a976f..1f1b92a567 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -3,9 +3,11 @@ use serde::Deserialize; use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDatabase}; -fn string_diagnostics(db: &mut LanguageServerDatabase, path: &str, src: &str) -> Vec { +fn string_diagnostics(state: &mut ServerState, path: &str, src: &str) -> Vec { + let db = &mut state.db; + let workspace = &mut state.workspace; let file_path = std::path::Path::new(path); - let top_mod = db.top_mod_from_file(file_path, src); + let top_mod = workspace.top_mod_from_file(db, file_path, src); db.run_on_top_mod(top_mod); db.finalize_diags() } @@ -16,7 +18,7 @@ pub(crate) fn get_diagnostics( uri: lsp_types::Url, ) -> Result, Error> { let diags = string_diagnostics( - &mut state.db, + state, uri.to_file_path().unwrap().to_str().unwrap(), text.as_str(), ); @@ -53,12 +55,7 @@ pub(crate) fn handle_workspace_did_change_folders( note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidChangeWorkspaceFoldersParams::deserialize(note.params)?; - // let response_message = lsp_server::Response { - // id: lsp_server::RequestId::Num(0), - // result: None, - // error: None, - // }; - // state.send_response(response_message)?; + Ok(()) } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index f268fa4d6f..a4a9c95d86 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -37,7 +37,7 @@ pub(crate) fn handle_hover( file_text.as_str(), ); let file_path = std::path::Path::new(file_path); - let top_mod = state.db.top_mod_from_file(file_path, file_text.as_str()); + let top_mod = state.workspace.top_mod_from_file(&mut state.db, file_path, file_text.as_str()); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); let goto_info = match goto_info { @@ -91,7 +91,7 @@ pub(crate) fn handle_goto_definition( // Get the module and the goto info let file_path = std::path::Path::new(params.text_document.uri.path()); - let top_mod = state.db.top_mod_from_file(file_path, file_text.as_str()); + let top_mod = state.workspace.top_mod_from_file(&mut state.db, file_path, file_text.as_str()); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); // Convert the goto info to a Location diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index ca06d1686b..37f1a4aea5 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,6 +1,7 @@ use std::sync::{Arc, Mutex}; use crate::db::LanguageServerDatabase; +use crate::workspace::Workspace; use log::{ Record, Level, Metadata, info }; use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; @@ -14,8 +15,9 @@ use crate::handlers::request::handle_goto_definition; use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; pub struct ServerState { - pub sender: 
Arc>>, - pub db: LanguageServerDatabase, + pub(crate) sender: Arc>>, + pub(crate) db: LanguageServerDatabase, + pub(crate) workspace: Workspace, } impl ServerState { @@ -24,6 +26,7 @@ impl ServerState { ServerState { sender, db: LanguageServerDatabase::default(), + workspace: Workspace::default(), } } diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index a14e9238ca..fe1b27f080 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -1,18 +1,19 @@ -use std::collections::BTreeSet; +use std::{collections::BTreeSet, path::Path}; use common::{ input::{IngotKind, Version}, InputFile, InputIngot, }; +use hir::{hir_def::TopLevelMod, lower::map_file_to_mod}; use patricia_tree::StringPatriciaMap; use crate::db::LanguageServerDatabase; const FE_CONFIG_SUFFIX: &str = "fe.toml"; -trait IngotFileContext { - fn get_input_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option; - fn get_ingot_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option; +pub(crate) trait IngotFileContext { + fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option; + fn get_ingot_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option; } struct Ingot { @@ -20,11 +21,12 @@ struct Ingot { external: InputIngot, } -pub struct LocalIngotContext { - ingot: InputIngot, +// derive `Copy` for `Ingot` because `StringPatriciaMap` requires `Copy` for its value type. +pub(crate) struct LocalIngotContext { + pub ingot: InputIngot, // external_ingots: StringPatriciaMap, // cache `InputFile` for path - files: StringPatriciaMap, + pub files: StringPatriciaMap, } fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { @@ -34,17 +36,17 @@ fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { file_path.starts_with(ingot_path) } -fn get_containing_ingot<'a, T>(ingots: &'a StringPatriciaMap, path: &'a str) -> Option<&'a T> { +pub(crate) fn get_containing_ingot<'a, T>(ingots: &'a mut StringPatriciaMap, path: &'a str) -> Option<&'a mut T> { ingots - .get_longest_common_prefix(path) + .get_longest_common_prefix_mut(path) .filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) .map(|(_, ingot)| ingot) } impl LocalIngotContext { - pub fn new(db: LanguageServerDatabase, config_path: &str) -> Option { + pub fn new(db: &LanguageServerDatabase, config_path: &str) -> Option { let ingot = InputIngot::new( - &db, + db, config_path, IngotKind::Local, Version::new(0, 0, 0), @@ -59,7 +61,7 @@ impl LocalIngotContext { } impl IngotFileContext for LocalIngotContext { - fn get_input_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { + fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { let ingot = self.get_ingot_for_file_path(db, path)?; let input = self.files.get(path).map_or_else( || { @@ -72,12 +74,12 @@ impl IngotFileContext for LocalIngotContext { input } - fn get_ingot_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { + fn get_ingot_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { Some(self.ingot) } } -struct StandaloneIngotContext { +pub(crate) struct StandaloneIngotContext { ingots: StringPatriciaMap, files: StringPatriciaMap, } @@ -92,7 +94,7 @@ impl StandaloneIngotContext { } impl IngotFileContext for StandaloneIngotContext { - fn get_input_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) 
-> Option { + fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { let ingot = self.get_ingot_for_file_path(db, path)?; let input = self.files.get(path).map_or_else( || { @@ -105,25 +107,25 @@ impl IngotFileContext for StandaloneIngotContext { input } - fn get_ingot_for_file_path(&mut self, _db: &LanguageServerDatabase, path: &str) -> Option { - get_containing_ingot(&self.ingots, path).as_deref().copied() + fn get_ingot_for_file_path(&mut self, _db: &mut LanguageServerDatabase, path: &str) -> Option { + get_containing_ingot(&mut self.ingots, path).as_deref().copied() } } pub(crate) struct Workspace { - ingot_contexts: StringPatriciaMap, - standalone_ingot_contexts: StandaloneIngotContext, + pub(crate) ingot_contexts: StringPatriciaMap, + pub(crate) standalone_ingot_context: StandaloneIngotContext, } impl Workspace { - pub fn new() -> Self { + pub fn default() -> Self { Self { ingot_contexts: StringPatriciaMap::new(), - standalone_ingot_contexts: StandaloneIngotContext::new(), + standalone_ingot_context: StandaloneIngotContext::new(), } } - pub fn get_ingot_context(&mut self, db: LanguageServerDatabase, config_path: &str) -> Option<&LocalIngotContext> { + pub fn get_ingot_context(&mut self, db: &LanguageServerDatabase, config_path: &str) -> Option<&LocalIngotContext> { if self.ingot_contexts.contains_key(config_path) { return self.ingot_contexts.get(config_path); } else { @@ -133,16 +135,37 @@ impl Workspace { } } - pub fn get_ingot_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { - let ctx = get_containing_ingot(&self.ingot_contexts, path); - ctx.map_or_else( - || self.standalone_ingot_contexts.get_ingot_for_file_path(db, path), - |ingot_context| Some(ingot_context.ingot.clone()), - ) + pub fn top_mod_from_file(&mut self, db: &mut LanguageServerDatabase, file_path: &Path, source: &str) -> TopLevelMod { + // let workspace = &mut self.workspace; + // create a new scope in which `self` is not mutable: + let file = self.get_input_for_file_path(db, file_path.to_str().unwrap()).unwrap(); + file.set_text(db).to(source.to_string()); + // use salsa2022 setter to set the file's `text` field + let ingot = file.ingot(db); + let mut files = ingot.files(db).clone(); + files.insert(file); + ingot.set_files(db, files); + map_file_to_mod(db, file) } - pub fn get_input_file_for_file_path(&mut self, db: &LanguageServerDatabase, path: &str) -> Option { - self.get_ingot_for_file_path(db, path) - .map_or_else(|| None, |ingot| Some(ingot.root_file(db))) - } } + +impl IngotFileContext for Workspace { + fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { + let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + if ctx.is_some() { + Some(ctx.unwrap().get_input_for_file_path(db, path).unwrap()) + } else { + (&mut self.standalone_ingot_context).get_input_for_file_path(db, path) + } + } + + fn get_ingot_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { + let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + if ctx.is_some() { + Some(ctx.unwrap().get_ingot_for_file_path(db, path).unwrap()) + } else { + (&mut self.standalone_ingot_context).get_ingot_for_file_path(db, path) + } + } +} \ No newline at end of file From 76b896134d517edf46e52be08fffda73c886064d Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 1 Sep 2023 19:46:01 -0500 Subject: [PATCH 259/678] language server workspace cache passing tests --- crates/language-server/src/db.rs | 12 ++--- 
crates/language-server/src/workspace.rs | 71 ++++++++++++++++++------- 2 files changed, 56 insertions(+), 27 deletions(-) diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 31c4b7000a..8c9f122fb9 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -1,23 +1,17 @@ -use std::{collections::BTreeSet, path, ops::DerefMut, borrow::BorrowMut}; - -use super::workspace; - use common::{ diagnostics::CompleteDiagnostic, - input::{IngotKind, Version}, - InputDb, InputFile, InputIngot, + InputDb, }; use hir::{ analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::{TopLevelMod, ItemKind}, - lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, span::{DynLazySpan, LazySpan}, + HirDb, LowerHirDb, ParsingPass, SpannedHirDb, span::{DynLazySpan, LazySpan}, }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, HirAnalysisDb, }; -use patricia_tree::StringPatriciaMap; -use crate::{goto::Cursor, workspace::{LocalIngotContext, StandaloneIngotContext, IngotFileContext, get_containing_ingot}}; +use crate::goto::Cursor; #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::diagnostics::file_line_starts); diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index fe1b27f080..dd0416227f 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -16,16 +16,8 @@ pub(crate) trait IngotFileContext { fn get_ingot_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option; } -struct Ingot { - local: InputIngot, - external: InputIngot, -} - -// derive `Copy` for `Ingot` because `StringPatriciaMap` requires `Copy` for its value type. 
pub(crate) struct LocalIngotContext { pub ingot: InputIngot, - // external_ingots: StringPatriciaMap, - // cache `InputFile` for path pub files: StringPatriciaMap, } @@ -54,7 +46,6 @@ impl LocalIngotContext { ); Some(Self { ingot, - // external_ingots: StringPatriciaMap::new(), files: StringPatriciaMap::new(), }) } @@ -96,19 +87,34 @@ impl StandaloneIngotContext { impl IngotFileContext for StandaloneIngotContext { fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { let ingot = self.get_ingot_for_file_path(db, path)?; - let input = self.files.get(path).map_or_else( + let input_file = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), "".into()); Some(file) }, |file| Some(*file), ); - self.files.insert(path.to_string(), input.unwrap()); - input + ingot.set_files(db, [input_file.unwrap()].into()); + ingot.set_root_file(db, input_file.unwrap()); + self.files.insert(path.to_string(), input_file.unwrap()); + input_file } fn get_ingot_for_file_path(&mut self, _db: &mut LanguageServerDatabase, path: &str) -> Option { - get_containing_ingot(&mut self.ingots, path).as_deref().copied() + get_containing_ingot(&mut self.ingots, path).as_deref().copied().map_or_else( + || { + let ingot = InputIngot::new( + _db, + path, + IngotKind::StandAlone, + Version::new(0, 0, 0), + BTreeSet::new(), + ); + self.ingots.insert(path.to_string(), ingot); + Some(ingot) + }, + |ingot| Some(ingot), + ) } } @@ -136,11 +142,8 @@ impl Workspace { } pub fn top_mod_from_file(&mut self, db: &mut LanguageServerDatabase, file_path: &Path, source: &str) -> TopLevelMod { - // let workspace = &mut self.workspace; - // create a new scope in which `self` is not mutable: let file = self.get_input_for_file_path(db, file_path.to_str().unwrap()).unwrap(); file.set_text(db).to(source.to_string()); - // use salsa2022 setter to set the file's `text` field let ingot = file.ingot(db); let mut files = ingot.files(db).clone(); files.insert(file); @@ -153,8 +156,8 @@ impl Workspace { impl IngotFileContext for Workspace { fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { let ctx = get_containing_ingot(&mut self.ingot_contexts, path); - if ctx.is_some() { - Some(ctx.unwrap().get_input_for_file_path(db, path).unwrap()) + if let Some(ctx) = ctx { + ctx.get_input_for_file_path(db, path) } else { (&mut self.standalone_ingot_context).get_input_for_file_path(db, path) } @@ -168,4 +171,36 @@ impl IngotFileContext for Workspace { (&mut self.standalone_ingot_context).get_ingot_for_file_path(db, path) } } +} + +#[cfg(test)] +mod tests { + use crate::workspace::{IngotFileContext, Workspace}; + + use super::StandaloneIngotContext; + + #[test] + fn test_standalone_context() { + let mut db = crate::db::LanguageServerDatabase::default(); + let file_path = "tests/data/ingot1/src/main.fe"; + + let ctx = &mut StandaloneIngotContext::new(); + let file = ctx.get_input_for_file_path(&mut db, file_path); + + assert!(file.is_some()); + + let ingot = ctx.get_ingot_for_file_path(&mut db, file_path); + assert!(ingot.is_some()); + assert_eq!(ingot.unwrap().kind(&mut db), common::input::IngotKind::StandAlone); + assert_eq!(ingot.unwrap(), file.unwrap().ingot(&mut db)); + } + + #[test] + fn test_workspace_standalone_ingot() { + let mut workspace = Workspace::default(); + let mut db = crate::db::LanguageServerDatabase::default(); + let file_path = "tests/data/ingot1/src/main.fe"; + let file = workspace.get_input_for_file_path(&mut db, file_path); + 
assert!(file.is_some()); + } } \ No newline at end of file From 10ca1a0d1e9f5a72d7c19ecc90b85ad603128fa2 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Sep 2023 13:37:45 -0500 Subject: [PATCH 260/678] LSP test: find related ingot for .fe file --- crates/language-server/src/workspace.rs | 214 ++++++++++++++++++------ 1 file changed, 167 insertions(+), 47 deletions(-) diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index dd0416227f..0465c5d7b2 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -10,10 +10,21 @@ use patricia_tree::StringPatriciaMap; use crate::db::LanguageServerDatabase; const FE_CONFIG_SUFFIX: &str = "fe.toml"; +fn ingot_directory_key(path: &str) -> String { + path.strip_suffix(FE_CONFIG_SUFFIX).unwrap_or(path).to_string() +} pub(crate) trait IngotFileContext { - fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option; - fn get_ingot_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option; + fn input_from_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option; + fn ingot_from_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option; } pub(crate) struct LocalIngotContext { @@ -28,7 +39,10 @@ fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { file_path.starts_with(ingot_path) } -pub(crate) fn get_containing_ingot<'a, T>(ingots: &'a mut StringPatriciaMap, path: &'a str) -> Option<&'a mut T> { +pub(crate) fn get_containing_ingot<'a, T>( + ingots: &'a mut StringPatriciaMap, + path: &'a str, +) -> Option<&'a mut T> { ingots .get_longest_common_prefix_mut(path) .filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) @@ -52,8 +66,12 @@ impl LocalIngotContext { } impl IngotFileContext for LocalIngotContext { - fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { - let ingot = self.get_ingot_for_file_path(db, path)?; + fn input_from_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { + let ingot = self.ingot_from_file_path(db, path)?; let input = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), "".into()); @@ -65,7 +83,11 @@ impl IngotFileContext for LocalIngotContext { input } - fn get_ingot_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { + fn ingot_from_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { Some(self.ingot) } } @@ -85,8 +107,12 @@ impl StandaloneIngotContext { } impl IngotFileContext for StandaloneIngotContext { - fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { - let ingot = self.get_ingot_for_file_path(db, path)?; + fn input_from_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { + let ingot = self.ingot_from_file_path(db, path)?; let input_file = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), "".into()); @@ -100,21 +126,28 @@ impl IngotFileContext for StandaloneIngotContext { input_file } - fn get_ingot_for_file_path(&mut self, _db: &mut LanguageServerDatabase, path: &str) -> Option { - get_containing_ingot(&mut self.ingots, path).as_deref().copied().map_or_else( - || { - let ingot = InputIngot::new( - _db, - path, - IngotKind::StandAlone, - Version::new(0, 0, 0), - BTreeSet::new(), - ); - 
self.ingots.insert(path.to_string(), ingot); - Some(ingot) - }, - |ingot| Some(ingot), - ) + fn ingot_from_file_path( + &mut self, + _db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { + get_containing_ingot(&mut self.ingots, path) + .as_deref() + .copied() + .map_or_else( + || { + let ingot = InputIngot::new( + _db, + path, + IngotKind::StandAlone, + Version::new(0, 0, 0), + BTreeSet::new(), + ); + self.ingots.insert(path.to_string(), ingot); + Some(ingot) + }, + |ingot| Some(ingot), + ) } } @@ -131,18 +164,33 @@ impl Workspace { } } - pub fn get_ingot_context(&mut self, db: &LanguageServerDatabase, config_path: &str) -> Option<&LocalIngotContext> { - if self.ingot_contexts.contains_key(config_path) { - return self.ingot_contexts.get(config_path); + pub fn ingot_context_from_config_path( + &mut self, + db: &LanguageServerDatabase, + config_path: &str, + ) -> Option<&LocalIngotContext> { + let key = &ingot_directory_key(config_path); + if self.ingot_contexts.contains_key(key) { + return self.ingot_contexts.get(key); } else { let ingot_context = LocalIngotContext::new(db, config_path)?; - self.ingot_contexts.insert(config_path.to_string(), ingot_context); - return self.ingot_contexts.get(config_path); + self.ingot_contexts + // .insert(config_path.to_string(), ingot_context); + // instead chop off the trailing fe.toml + .insert(key, ingot_context); + return self.ingot_contexts.get(key); } } - pub fn top_mod_from_file(&mut self, db: &mut LanguageServerDatabase, file_path: &Path, source: &str) -> TopLevelMod { - let file = self.get_input_for_file_path(db, file_path.to_str().unwrap()).unwrap(); + pub fn top_mod_from_file( + &mut self, + db: &mut LanguageServerDatabase, + file_path: &Path, + source: &str, + ) -> TopLevelMod { + let file = self + .input_from_file_path(db, file_path.to_str().unwrap()) + .unwrap(); file.set_text(db).to(source.to_string()); let ingot = file.ingot(db); let mut files = ingot.files(db).clone(); @@ -150,48 +198,58 @@ impl Workspace { ingot.set_files(db, files); map_file_to_mod(db, file) } - } impl IngotFileContext for Workspace { - fn get_input_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { + fn input_from_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { let ctx = get_containing_ingot(&mut self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.get_input_for_file_path(db, path) + ctx.input_from_file_path(db, path) } else { - (&mut self.standalone_ingot_context).get_input_for_file_path(db, path) + (&mut self.standalone_ingot_context).input_from_file_path(db, path) } } - fn get_ingot_for_file_path(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Option { + fn ingot_from_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { let ctx = get_containing_ingot(&mut self.ingot_contexts, path); if ctx.is_some() { - Some(ctx.unwrap().get_ingot_for_file_path(db, path).unwrap()) + Some(ctx.unwrap().ingot_from_file_path(db, path).unwrap()) } else { - (&mut self.standalone_ingot_context).get_ingot_for_file_path(db, path) + (&mut self.standalone_ingot_context).ingot_from_file_path(db, path) } } } #[cfg(test)] mod tests { - use crate::workspace::{IngotFileContext, Workspace}; + use crate::workspace::{IngotFileContext, Workspace, get_containing_ingot}; use super::StandaloneIngotContext; - + #[test] fn test_standalone_context() { let mut db = crate::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; - + let ctx = &mut 
StandaloneIngotContext::new(); - let file = ctx.get_input_for_file_path(&mut db, file_path); - + let file = ctx.input_from_file_path(&mut db, file_path); + assert!(file.is_some()); - - let ingot = ctx.get_ingot_for_file_path(&mut db, file_path); + + let ingot = ctx.ingot_from_file_path(&mut db, file_path); assert!(ingot.is_some()); - assert_eq!(ingot.unwrap().kind(&mut db), common::input::IngotKind::StandAlone); + assert_eq!( + ingot.unwrap().kind(&mut db), + common::input::IngotKind::StandAlone + ); assert_eq!(ingot.unwrap(), file.unwrap().ingot(&mut db)); } @@ -200,7 +258,69 @@ mod tests { let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace.get_input_for_file_path(&mut db, file_path); + let file = workspace.input_from_file_path(&mut db, file_path); + assert!(file.is_some()); + } + + #[test] + fn test_get_containing_ingot() { + let config_path = "tests/data/ingot1/fe.toml"; + let mut workspace = Workspace::default(); + + let _ingot_context_ingot = { + let ingot_context = workspace + .ingot_context_from_config_path(&mut crate::db::LanguageServerDatabase::default(), config_path); + + assert!(ingot_context.is_some()); + ingot_context.map(|ctx| ctx.ingot) + }; + + assert!(workspace.ingot_contexts.len() == 1); + + let file_path = "tests/data/ingot1/src/main.fe"; + assert!(workspace.ingot_contexts.get_longest_common_prefix(file_path).is_some()); + + let containing_ingot = get_containing_ingot(&mut workspace.ingot_contexts, file_path); + + assert!(containing_ingot.as_deref().is_some()); + + + let ingot = workspace + .ingot_from_file_path(&mut crate::db::LanguageServerDatabase::default(), file_path); + assert!(ingot.is_some()); + } + + #[test] + fn test_workspace_local_ingot() { + let config_path = "tests/data/ingot1/fe.toml"; + let mut workspace = Workspace::default(); + let mut db = crate::db::LanguageServerDatabase::default(); + + let ingot_context_ingot = { + let ingot_context = workspace + .ingot_context_from_config_path(&mut db, config_path); + + assert!(ingot_context.is_some()); + ingot_context.map(|ctx| ctx.ingot) + }; + + let file_path = "tests/data/ingot1/src/main.fe"; + let file = workspace + .input_from_file_path(&mut db, file_path); assert!(file.is_some()); + + let ingot = workspace + .ingot_from_file_path(&mut db, file_path); + assert!(ingot.is_some()); + + assert_eq!( + ingot_context_ingot.unwrap().kind(&mut db), + common::input::IngotKind::Local + ); + assert_eq!( + ingot.unwrap().kind(&mut db), + common::input::IngotKind::Local + ); + assert_eq!(ingot_context_ingot.unwrap(), ingot.unwrap()); } -} \ No newline at end of file +} From dc0f12c21a848fde10e1fe6b8f1fe0ac907469a2 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Sep 2023 15:18:17 -0500 Subject: [PATCH 261/678] preliminary LSP workspace sync utils --- Cargo.lock | 1 + crates/language-server/Cargo.toml | 1 + crates/language-server/src/workspace.rs | 168 +++++++++++++++--- .../nested_ingots/ingots/foo/src/main.fe | 1 + 4 files changed, 143 insertions(+), 28 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bdc0e64f7d..c799c519ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1122,6 +1122,7 @@ dependencies = [ "fe-hir-analysis", "fe-macros", "fxhash", + "glob", "indexmap", "log", "lsp-server", diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index f5d5ec7fce..0099c5ee8d 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -33,3 
+33,4 @@ dir-test = "0.1" fe-compiler-test-utils = { path = "../test-utils" } log = "0.4" patricia_tree = "0.6.2" +glob = "0.3.1" diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 0465c5d7b2..4e75501ab7 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -10,8 +10,10 @@ use patricia_tree::StringPatriciaMap; use crate::db::LanguageServerDatabase; const FE_CONFIG_SUFFIX: &str = "fe.toml"; -fn ingot_directory_key(path: &str) -> String { - path.strip_suffix(FE_CONFIG_SUFFIX).unwrap_or(path).to_string() +fn ingot_directory_key(path: String) -> String { + path.strip_suffix(FE_CONFIG_SUFFIX) + .unwrap_or(&path) + .to_string() } pub(crate) trait IngotFileContext { @@ -168,20 +170,72 @@ impl Workspace { &mut self, db: &LanguageServerDatabase, config_path: &str, - ) -> Option<&LocalIngotContext> { - let key = &ingot_directory_key(config_path); + ) -> Option<&mut LocalIngotContext> { + let key = &ingot_directory_key(config_path.into()); if self.ingot_contexts.contains_key(key) { - return self.ingot_contexts.get(key); + return self.ingot_contexts.get_mut(key); } else { let ingot_context = LocalIngotContext::new(db, config_path)?; self.ingot_contexts // .insert(config_path.to_string(), ingot_context); // instead chop off the trailing fe.toml .insert(key, ingot_context); - return self.ingot_contexts.get(key); + return self.ingot_contexts.get_mut(key); } } + fn sync_local_ingots(&mut self, db: &mut LanguageServerDatabase, path: &str) -> () { + let paths = &glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + .unwrap() + .map(|p| p.unwrap().to_str().unwrap().to_string()) + .map(ingot_directory_key) + .collect::>(); + + for path in paths { + self.ingot_context_from_config_path(db, path); + } + + let existing_keys: Vec = self.ingot_contexts.keys().collect(); + + let keys_to_remove: Vec = existing_keys + .iter() + .filter(|key| !paths.contains(key)) + .map(|path| path.into()) + .collect(); + + for key in keys_to_remove { + self.ingot_contexts.remove(ingot_directory_key(key)); + } + } + + fn sync_ingot_files(&mut self, db: &mut LanguageServerDatabase, config_path: &str) -> () { + assert!(config_path.ends_with(FE_CONFIG_SUFFIX)); + + let ingot_root = config_path.strip_suffix(FE_CONFIG_SUFFIX).unwrap(); + let paths = &glob::glob(&format!("{}/**/*.fe", ingot_root)) + .unwrap() + .map(|p| p.unwrap().to_str().unwrap().to_string()) + .collect::>(); + + let ingot_context = self + .ingot_context_from_config_path(db, config_path) + .unwrap(); + let ingot_context_files = &ingot_context.files.keys().collect::>(); + ingot_context_files.iter().for_each(|path| { + if !paths.contains(&path) { + ingot_context.files.remove(path); + } + }); + + paths.iter().for_each(|path| { + if !ingot_context_files.contains(&path) { + let file = ingot_context.input_from_file_path(db, path); + let contents = std::fs::read_to_string(path).unwrap(); + file.unwrap().set_text(db).to(contents); + } + }); + } + pub fn top_mod_from_file( &mut self, db: &mut LanguageServerDatabase, @@ -230,7 +284,7 @@ impl IngotFileContext for Workspace { #[cfg(test)] mod tests { - use crate::workspace::{IngotFileContext, Workspace, get_containing_ingot}; + use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace}; use super::StandaloneIngotContext; @@ -261,29 +315,33 @@ mod tests { let file = workspace.input_from_file_path(&mut db, file_path); assert!(file.is_some()); } - + #[test] fn test_get_containing_ingot() { let config_path = 
"tests/data/ingot1/fe.toml"; let mut workspace = Workspace::default(); - - let _ingot_context_ingot = { - let ingot_context = workspace - .ingot_context_from_config_path(&mut crate::db::LanguageServerDatabase::default(), config_path); - + + let _ingot_context_ingot = { + let ingot_context = workspace.ingot_context_from_config_path( + &mut crate::db::LanguageServerDatabase::default(), + config_path, + ); + assert!(ingot_context.is_some()); ingot_context.map(|ctx| ctx.ingot) }; - + assert!(workspace.ingot_contexts.len() == 1); - + let file_path = "tests/data/ingot1/src/main.fe"; - assert!(workspace.ingot_contexts.get_longest_common_prefix(file_path).is_some()); - + assert!(workspace + .ingot_contexts + .get_longest_common_prefix(file_path) + .is_some()); + let containing_ingot = get_containing_ingot(&mut workspace.ingot_contexts, file_path); - - assert!(containing_ingot.as_deref().is_some()); + assert!(containing_ingot.as_deref().is_some()); let ingot = workspace .ingot_from_file_path(&mut crate::db::LanguageServerDatabase::default(), file_path); @@ -296,23 +354,20 @@ mod tests { let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); - let ingot_context_ingot = { - let ingot_context = workspace - .ingot_context_from_config_path(&mut db, config_path); - + let ingot_context_ingot = { + let ingot_context = workspace.ingot_context_from_config_path(&mut db, config_path); + assert!(ingot_context.is_some()); ingot_context.map(|ctx| ctx.ingot) }; let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace - .input_from_file_path(&mut db, file_path); + let file = workspace.input_from_file_path(&mut db, file_path); assert!(file.is_some()); - let ingot = workspace - .ingot_from_file_path(&mut db, file_path); + let ingot = workspace.ingot_from_file_path(&mut db, file_path); assert!(ingot.is_some()); - + assert_eq!( ingot_context_ingot.unwrap().kind(&mut db), common::input::IngotKind::Local @@ -323,4 +378,61 @@ mod tests { ); assert_eq!(ingot_context_ingot.unwrap(), ingot.unwrap()); } + + #[test] + fn test_sync_nested_ingots() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let path = format!("{}/test_files/nested_ingots", crate_dir); + assert!( + glob::glob(&format!("{}/**/{}", path, super::FE_CONFIG_SUFFIX)) + .unwrap() + .count() + == 2 + ); + + let mut workspace = Workspace::default(); + let mut db = crate::db::LanguageServerDatabase::default(); + + workspace.sync_local_ingots(&mut db, &path); + + assert!(workspace.ingot_contexts.len() == 2); + } + + #[test] + fn test_sync_ingot_files() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let path = format!("{}/test_files/nested_ingots", crate_dir); + assert!( + glob::glob(&format!("{}/**/{}", path, super::FE_CONFIG_SUFFIX)) + .unwrap() + .count() + == 2 + ); + + let mut workspace = Workspace::default(); + let mut db = crate::db::LanguageServerDatabase::default(); + + workspace.sync_local_ingots(&mut db, &path); + + assert!(workspace.ingot_contexts.len() == 2); + + let foo_config = format!("{}/ingots/foo/{}", path, super::FE_CONFIG_SUFFIX); + workspace.sync_ingot_files(&mut db, &foo_config); + + let foo_context = workspace + .ingot_context_from_config_path(&db, &foo_config) + .unwrap(); + + assert!(foo_context.files.len() == 1); + + let foo_files = foo_context.files.keys().collect::>(); + for file in foo_files { + let contents = std::fs::read_to_string(&file).unwrap(); + let file = foo_context + .input_from_file_path(&mut db, &file) + .unwrap(); + + 
assert!(*file.text(&mut db) == contents); + } + } } diff --git a/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe b/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe index e69de29bb2..5b5a7b8335 100644 --- a/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe +++ b/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe @@ -0,0 +1 @@ +let foo = 1; \ No newline at end of file From 7b5a4cdcd010faa2053971a31be8773ab0f2ba74 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Sep 2023 19:00:24 -0500 Subject: [PATCH 262/678] more LSP workspace sync functionality --- crates/language-server/src/workspace.rs | 94 +++++++++++++++++++++++-- 1 file changed, 88 insertions(+), 6 deletions(-) diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 4e75501ab7..3a0e08175f 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -185,14 +185,19 @@ impl Workspace { } fn sync_local_ingots(&mut self, db: &mut LanguageServerDatabase, path: &str) -> () { - let paths = &glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + let config_paths = &glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) + .collect::>(); + + let paths = &config_paths + .into_iter() + .map(|path| path.to_string()) .map(ingot_directory_key) .collect::>(); for path in paths { - self.ingot_context_from_config_path(db, path); + self.ingot_context_from_config_path(db, &path); } let existing_keys: Vec = self.ingot_contexts.keys().collect(); @@ -282,9 +287,41 @@ impl IngotFileContext for Workspace { } } +pub(crate) trait SyncableIngotFileContext { + fn sync(&mut self, db: &mut LanguageServerDatabase, path: String) -> (); +} + +impl SyncableIngotFileContext for Workspace { + fn sync(&mut self, db: &mut LanguageServerDatabase, path: String) -> () { + // first let's sync ingots + self.sync_local_ingots(db, &path); + // collect the ingot config paths + let ingot_paths = glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + .unwrap() + .map(|p| p.unwrap().to_str().unwrap().to_string()) + .collect::>(); + + for ingot_path in ingot_paths { + self.sync_ingot_files(db, &ingot_path); + } + + // now let's sync all files + let paths = glob::glob(&format!("{}/**/*.fe", path)) + .unwrap() + .map(|p| p.unwrap().to_str().unwrap().to_string()) + .collect::>(); + + for path in paths { + self.input_from_file_path(db, &path); + } + } +} + #[cfg(test)] mod tests { - use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace}; + use glob::glob; + + use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; use super::StandaloneIngotContext; @@ -428,11 +465,56 @@ mod tests { let foo_files = foo_context.files.keys().collect::>(); for file in foo_files { let contents = std::fs::read_to_string(&file).unwrap(); - let file = foo_context - .input_from_file_path(&mut db, &file) - .unwrap(); + let file = foo_context.input_from_file_path(&mut db, &file).unwrap(); assert!(*file.text(&mut db) == contents); } } + + #[test] + fn test_dangling_fe_source() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let messy_workspace_path = format!("{}/test_files/messy", crate_dir); + let dangling_path = format!("{}/test_files/messy/dangling.fe", crate_dir); + + let mut workspace = Workspace::default(); + let mut db = crate::db::LanguageServerDatabase::default(); + + 
workspace.sync_local_ingots(&mut db, &messy_workspace_path); + let dangling_file = workspace + .input_from_file_path(&mut db, &dangling_path) + .unwrap(); + + assert_eq!( + dangling_file.ingot(&db).kind(&mut db), + common::input::IngotKind::StandAlone + ); + + // TODO: make it easier to go both ways between an ingot root path and its config path + let ingot_paths = workspace + .ingot_contexts + .values() + .map(|ctx| { + format!( + "{}{}", + ctx.ingot.path(&mut db).to_string(), + FE_CONFIG_SUFFIX + ) + }) + .collect::>(); + + for ingot_path in ingot_paths { + workspace.sync_ingot_files(&mut db, &ingot_path); + } + + let non_dangling_file_path = format!("{}/test_files/messy/foo/bar/src/main.fe", crate_dir); + let non_dangling_input = workspace + .input_from_file_path(&mut db, &non_dangling_file_path) + .unwrap(); + + assert_eq!( + non_dangling_input.ingot(&db).kind(&mut db), + common::input::IngotKind::Local + ); + } } From 19fde0014fa99ab42262d847c3c78fc076fe6678 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Sep 2023 09:05:04 -0500 Subject: [PATCH 263/678] LSP workspace root sync --- Cargo.lock | 5 +- crates/language-server/Cargo.toml | 1 + .../src/handlers/notifications.rs | 23 +++++---- crates/language-server/src/server.rs | 5 +- crates/language-server/src/state.rs | 48 ++++++++++++++----- crates/language-server/src/workspace.rs | 46 ++++++++++++++---- .../test_files/single_ingot/src/foo.fe | 8 ++++ .../test_files/single_ingot/src/lib.fe | 4 ++ 8 files changed, 108 insertions(+), 32 deletions(-) create mode 100644 crates/language-server/test_files/single_ingot/src/foo.fe diff --git a/Cargo.lock b/Cargo.lock index c799c519ee..4c1f38447b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1132,6 +1132,7 @@ dependencies = [ "salsa-2022", "serde", "serde_json", + "url", ] [[package]] @@ -3007,9 +3008,9 @@ checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "url" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" dependencies = [ "form_urlencoded", "idna", diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 0099c5ee8d..1d53425b3e 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -34,3 +34,4 @@ fe-compiler-test-utils = { path = "../test-utils" } log = "0.4" patricia_tree = "0.6.2" glob = "0.3.1" +url = "2.4.1" diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 1f1b92a567..1127d01b10 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -1,7 +1,7 @@ use anyhow::{Result, Error}; use serde::Deserialize; -use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDatabase}; +use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDatabase, workspace::SyncableIngotFileContext}; fn string_diagnostics(state: &mut ServerState, path: &str, src: &str) -> Vec { let db = &mut state.db; @@ -50,14 +50,21 @@ pub(crate) fn handle_document_did_change( send_diagnostics(state, diagnostics, params.text_document.uri.clone()) } -pub(crate) fn handle_workspace_did_change_folders( - state: &mut ServerState, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = 
lsp_types::DidChangeWorkspaceFoldersParams::deserialize(note.params)?; +// pub(crate) fn handle_workspace_did_change_folders( +// state: &mut ServerState, +// note: lsp_server::Notification, +// ) -> Result<(), Error> { +// let params = lsp_types::DidChangeWorkspaceFoldersParams::deserialize(note.params)?; + +// let mut workspace = &mut state.workspace; +// let mut db = &mut state.db; - Ok(()) -} +// let workspace_folder = params.event.added[0].uri.to_file_path().unwrap(); + +// workspace.sync(&mut db, workspace_folder.as_path().to_str().unwrap().into()); + +// Ok(()) +// } fn send_diagnostics( state: &mut ServerState, diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 333a0663aa..f948305924 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,7 +1,7 @@ use super::state::ServerState; use anyhow::Result; use lsp_server::{Connection, Notification}; -use lsp_types::{HoverProviderCapability, ServerCapabilities}; +use lsp_types::{HoverProviderCapability, ServerCapabilities, InitializeParams}; fn server_capabilities() -> ServerCapabilities { ServerCapabilities { @@ -12,6 +12,7 @@ fn server_capabilities() -> ServerCapabilities { )), // goto definition definition_provider: Some(lsp_types::OneOf::Left(true)), + // support for workspace add/remove changes ..Default::default() } } @@ -20,6 +21,7 @@ pub fn run_server() -> Result<()> { let (connection, io_threads) = Connection::stdio(); let (request_id, _initialize_params) = connection.initialize_start()?; + let initialize_params: InitializeParams = serde_json::from_value(_initialize_params)?; // todo: actually use initialization params let capabilities = server_capabilities(); @@ -49,6 +51,7 @@ pub fn run_server() -> Result<()> { let mut state = ServerState::new(connection.sender); let _ = state.init_logger(log::Level::Info); + state.set_workspace_root(initialize_params.root_uri.unwrap().to_string())?; let result = state.run(connection.receiver)?; io_threads.join().unwrap(); diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 37f1a4aea5..759a0dd52e 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,16 +1,19 @@ use std::sync::{Arc, Mutex}; use crate::db::LanguageServerDatabase; -use crate::workspace::Workspace; -use log::{ Record, Level, Metadata, info }; -use anyhow::Result; +use crate::workspace::{SyncableIngotFileContext, Workspace}; +use anyhow::{Context, Result}; use crossbeam_channel::{Receiver, Sender}; +use log::{info, Level, Metadata, Record}; use log::{LevelFilter, SetLoggerError}; use lsp_server::Message; use lsp_types::notification::Notification; use lsp_types::request::Request; -use crate::handlers::notifications::handle_document_did_change; +use crate::handlers::notifications::{ + handle_document_did_change, + // handle_workspace_did_change_folders, +}; use crate::handlers::request::handle_goto_definition; use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; @@ -23,14 +26,28 @@ pub struct ServerState { impl ServerState { pub fn new(sender: Sender) -> Self { let sender = Arc::new(Mutex::new(sender)); - ServerState { + let state = ServerState { sender, db: LanguageServerDatabase::default(), workspace: Workspace::default(), - } + }; + + state } - - fn send (&mut self, msg: Message) -> Result<()> { + + pub fn set_workspace_root(&mut self, root_uri: String) -> anyhow::Result<()> { + let path = url::Url::parse(&root_uri) + .map(|url| 
url.to_file_path()) + .context("Failed to parse root URL")?; + let path = path.unwrap_or(std::path::PathBuf::from(root_uri)); + + info!("Setting workspace root to {:?}", path); + self.workspace + .sync(&mut self.db, path.to_str().unwrap().into()); + Ok(()) + } + + fn send(&mut self, msg: Message) -> Result<()> { let sender = self.sender.lock().unwrap(); sender.send(msg)?; Ok(()) @@ -85,6 +102,9 @@ impl ServerState { lsp_types::notification::DidChangeTextDocument::METHOD => { handle_document_did_change(self, note)? } + // lsp_types::notification::DidChangeWorkspaceFolders::METHOD => { + // handle_workspace_did_change_folders(self, note)? + // } _ => {} } } @@ -97,8 +117,11 @@ impl ServerState { Ok(()) } - pub fn init_logger(&self, level:Level) -> Result<(), SetLoggerError> { - let logger = LspLogger { level, sender: self.sender.clone() }; + pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { + let logger = LspLogger { + level, + sender: self.sender.clone(), + }; let static_logger = Box::leak(Box::new(logger)); log::set_logger(static_logger)?; log::set_max_level(LevelFilter::Debug); @@ -106,14 +129,13 @@ impl ServerState { } } - pub(crate) struct LspLogger { level: Level, sender: Arc>>, } impl LspLogger { - fn send (&self, msg: Message) -> Result<()> { + fn send(&self, msg: Message) -> Result<()> { let sender = self.sender.lock().unwrap(); sender.send(msg)?; Ok(()) @@ -149,4 +171,4 @@ impl log::Log for LspLogger { } fn flush(&self) {} -} \ No newline at end of file +} diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 3a0e08175f..d6f365601b 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -5,6 +5,7 @@ use common::{ InputFile, InputIngot, }; use hir::{hir_def::TopLevelMod, lower::map_file_to_mod}; +use log::info; use patricia_tree::StringPatriciaMap; use crate::db::LanguageServerDatabase; @@ -215,30 +216,54 @@ impl Workspace { fn sync_ingot_files(&mut self, db: &mut LanguageServerDatabase, config_path: &str) -> () { assert!(config_path.ends_with(FE_CONFIG_SUFFIX)); + info!("Syncing ingot at {}", config_path); let ingot_root = config_path.strip_suffix(FE_CONFIG_SUFFIX).unwrap(); let paths = &glob::glob(&format!("{}/**/*.fe", ingot_root)) .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); + + info!("Found {} files in ingot", paths.len()); + info!("Syncing ingot files: {:?}", paths); let ingot_context = self .ingot_context_from_config_path(db, config_path) .unwrap(); - let ingot_context_files = &ingot_context.files.keys().collect::>(); - ingot_context_files.iter().for_each(|path| { + let ingot_context_file_keys = &ingot_context.files.keys().collect::>(); + ingot_context_file_keys.iter().for_each(|path| { if !paths.contains(&path) { ingot_context.files.remove(path); } }); paths.iter().for_each(|path| { - if !ingot_context_files.contains(&path) { + if !ingot_context_file_keys.contains(&path) { let file = ingot_context.input_from_file_path(db, path); let contents = std::fs::read_to_string(path).unwrap(); file.unwrap().set_text(db).to(contents); } }); + + let ingot_context_files = ingot_context + .files + .values() + .map(|x| *x) + .collect::>(); + ingot_context.ingot.set_files(db, ingot_context_files); + + // find the root file, which is either at `./src/main.fe` or `./src/lib.fe` + let root_file = ingot_context + .files + .values() + .find(|file| { + file.path(db).ends_with("src/main.fe") || file.path(db).ends_with("src/lib.fe") + }) + .map(|file| 
*file); + + if let Some(root_file) = root_file { + ingot_context.ingot.set_root_file(db, root_file); + } } pub fn top_mod_from_file( @@ -251,10 +276,10 @@ impl Workspace { .input_from_file_path(db, file_path.to_str().unwrap()) .unwrap(); file.set_text(db).to(source.to_string()); - let ingot = file.ingot(db); - let mut files = ingot.files(db).clone(); - files.insert(file); - ingot.set_files(db, files); + // let ingot = file.ingot(db); + // let mut files = ingot.files(db).clone(); + // files.insert(file); + // ingot.set_files(db, files); map_file_to_mod(db, file) } } @@ -293,6 +318,7 @@ pub(crate) trait SyncableIngotFileContext { impl SyncableIngotFileContext for Workspace { fn sync(&mut self, db: &mut LanguageServerDatabase, path: String) -> () { + info!("Syncing workspace at {}", path); // first let's sync ingots self.sync_local_ingots(db, &path); // collect the ingot config paths @@ -301,10 +327,12 @@ impl SyncableIngotFileContext for Workspace { .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); + info!("Found {} ingots", ingot_paths.len()); + for ingot_path in ingot_paths { self.sync_ingot_files(db, &ingot_path); } - + // now let's sync all files let paths = glob::glob(&format!("{}/**/*.fe", path)) .unwrap() @@ -404,6 +432,8 @@ mod tests { let ingot = workspace.ingot_from_file_path(&mut db, file_path); assert!(ingot.is_some()); + + assert_eq!(file.map(|f| f.ingot(&mut db)).unwrap(), ingot.unwrap()); assert_eq!( ingot_context_ingot.unwrap().kind(&mut db), diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe new file mode 100644 index 0000000000..ae8b70b24e --- /dev/null +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -0,0 +1,8 @@ +pub fn foo() { + let x = 5; + x +} + +pub struct Foo { + pub x: i32, +} \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.fe b/crates/language-server/test_files/single_ingot/src/lib.fe index e69de29bb2..bd8d541810 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.fe +++ b/crates/language-server/test_files/single_ingot/src/lib.fe @@ -0,0 +1,4 @@ +mod foo; +use foo; + +let y: foo::Foo; \ No newline at end of file From 2d07af60a22ba8fc2cb58d4f380fbb8b39d37a01 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Sep 2023 09:54:04 -0500 Subject: [PATCH 264/678] LSP debug hover ingot info --- .../language-server/src/handlers/request.rs | 50 +++++++++++++++++-- crates/language-server/src/state.rs | 5 ++ 2 files changed, 51 insertions(+), 4 deletions(-) diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index a4a9c95d86..f54a6c0755 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,13 +1,16 @@ use std::io::BufRead; +use common::{input::IngotKind, InputIngot}; use hir_analysis::name_resolution::EarlyResolvedPath; +use log::info; use lsp_server::Response; use serde::Deserialize; use crate::{ goto::{goto_enclosing_path, Cursor}, state::ServerState, - util::{to_offset_from_position, to_lsp_location_from_scope}, + util::{to_lsp_location_from_scope, to_offset_from_position}, + workspace::IngotFileContext, }; pub(crate) fn handle_hover( @@ -37,7 +40,43 @@ pub(crate) fn handle_hover( file_text.as_str(), ); let file_path = std::path::Path::new(file_path); - let top_mod = state.workspace.top_mod_from_file(&mut state.db, file_path, file_text.as_str()); + info!("getting hover info for 
file_path: {:?}", file_path); + let ingot = state + .workspace + .input_from_file_path(&mut state.db, file_path.to_str().unwrap()) + .map(|input| input.ingot(&state.db)); + + // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); + + let ingot_info: Option = { + let ingot_type = match ingot { + Some(ingot) => match ingot.kind(&mut state.db) { + IngotKind::StandAlone => None, + IngotKind::Local => Some("Local ingot"), + IngotKind::External => Some("External ingot"), + IngotKind::Std => Some("Standard library"), + }, + None => Some("No ingot information available"), + }; + let ingot_file_count = ingot.unwrap().files(&mut state.db).len(); + let ingot_path = ingot + .unwrap() + .path(&mut state.db) + .strip_prefix(&state.root_path.clone().unwrap_or("".into())) + .unwrap(); + + match ingot_type { + Some(ingot_type) => Some(format!( + "{} with {} files at path: {:?}", + ingot_type, ingot_file_count, ingot_path + )), + None => None, + } + }; + + let top_mod = state + .workspace + .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); let goto_info = match goto_info { @@ -58,10 +97,11 @@ pub(crate) fn handle_hover( lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value: format!( - "### Hovering over:\n```{}```\n\n{}\n\n### Goto Info: \n\n{}", + "### Hovering over:\n```{}```\n\n{}\n\n### Goto Info: \n\n{}\n\n### Ingot info: \n\n{:?}", &line, serde_json::to_string_pretty(¶ms).unwrap(), goto_info, + ingot_info, ), }, )), @@ -91,7 +131,9 @@ pub(crate) fn handle_goto_definition( // Get the module and the goto info let file_path = std::path::Path::new(params.text_document.uri.path()); - let top_mod = state.workspace.top_mod_from_file(&mut state.db, file_path, file_text.as_str()); + let top_mod = state + .workspace + .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); // Convert the goto info to a Location diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 759a0dd52e..1d214a0baa 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,3 +1,4 @@ +use std::path::PathBuf; use std::sync::{Arc, Mutex}; use crate::db::LanguageServerDatabase; @@ -21,6 +22,7 @@ pub struct ServerState { pub(crate) sender: Arc>>, pub(crate) db: LanguageServerDatabase, pub(crate) workspace: Workspace, + pub(crate) root_path: Option, } impl ServerState { @@ -30,6 +32,7 @@ impl ServerState { sender, db: LanguageServerDatabase::default(), workspace: Workspace::default(), + root_path: None }; state @@ -44,6 +47,8 @@ impl ServerState { info!("Setting workspace root to {:?}", path); self.workspace .sync(&mut self.db, path.to_str().unwrap().into()); + + self.root_path = Some(path); Ok(()) } From e80628e499cf3f7ceb24bf750ab0dff0c0e33dc9 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Sep 2023 10:36:33 -0500 Subject: [PATCH 265/678] lsp workspace sync --- .../language-server/src/handlers/request.rs | 4 +- crates/language-server/src/server.rs | 11 ++-- crates/language-server/src/state.rs | 16 ------ crates/language-server/src/workspace.rs | 54 +++++++++++++------ 4 files changed, 48 insertions(+), 37 deletions(-) diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index f54a6c0755..2654e1140d 100644 --- a/crates/language-server/src/handlers/request.rs +++ 
b/crates/language-server/src/handlers/request.rs @@ -62,8 +62,8 @@ pub(crate) fn handle_hover( let ingot_path = ingot .unwrap() .path(&mut state.db) - .strip_prefix(&state.root_path.clone().unwrap_or("".into())) - .unwrap(); + .strip_prefix(&state.workspace.root_path.clone().unwrap_or("".into())) + .ok(); match ingot_type { Some(ingot_type) => Some(format!( diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index f948305924..969d8c3633 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -1,7 +1,7 @@ use super::state::ServerState; use anyhow::Result; use lsp_server::{Connection, Notification}; -use lsp_types::{HoverProviderCapability, ServerCapabilities, InitializeParams}; +use lsp_types::{HoverProviderCapability, InitializeParams, ServerCapabilities}; fn server_capabilities() -> ServerCapabilities { ServerCapabilities { @@ -51,10 +51,13 @@ pub fn run_server() -> Result<()> { let mut state = ServerState::new(connection.sender); let _ = state.init_logger(log::Level::Info); - state.set_workspace_root(initialize_params.root_uri.unwrap().to_string())?; + state.workspace.set_workspace_root( + &mut state.db, + initialize_params.root_uri.unwrap().to_file_path().ok(), + ); let result = state.run(connection.receiver)?; - + io_threads.join().unwrap(); - + Ok(result) } diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index 1d214a0baa..c895459251 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -22,7 +22,6 @@ pub struct ServerState { pub(crate) sender: Arc>>, pub(crate) db: LanguageServerDatabase, pub(crate) workspace: Workspace, - pub(crate) root_path: Option, } impl ServerState { @@ -32,26 +31,11 @@ impl ServerState { sender, db: LanguageServerDatabase::default(), workspace: Workspace::default(), - root_path: None }; state } - pub fn set_workspace_root(&mut self, root_uri: String) -> anyhow::Result<()> { - let path = url::Url::parse(&root_uri) - .map(|url| url.to_file_path()) - .context("Failed to parse root URL")?; - let path = path.unwrap_or(std::path::PathBuf::from(root_uri)); - - info!("Setting workspace root to {:?}", path); - self.workspace - .sync(&mut self.db, path.to_str().unwrap().into()); - - self.root_path = Some(path); - Ok(()) - } - fn send(&mut self, msg: Message) -> Result<()> { let sender = self.sender.lock().unwrap(); sender.send(msg)?; diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index d6f365601b..e8629d0750 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -1,5 +1,9 @@ -use std::{collections::BTreeSet, path::Path}; +use std::{ + collections::BTreeSet, + path::{Path, PathBuf}, +}; +use anyhow::{anyhow, Result}; use common::{ input::{IngotKind, Version}, InputFile, InputIngot, @@ -157,6 +161,7 @@ impl IngotFileContext for StandaloneIngotContext { pub(crate) struct Workspace { pub(crate) ingot_contexts: StringPatriciaMap, pub(crate) standalone_ingot_context: StandaloneIngotContext, + pub(crate) root_path: Option, } impl Workspace { @@ -164,9 +169,19 @@ impl Workspace { Self { ingot_contexts: StringPatriciaMap::new(), standalone_ingot_context: StandaloneIngotContext::new(), + root_path: None, } } + pub fn set_workspace_root( + &mut self, + db: &mut LanguageServerDatabase, + root_path: Option, + ) { + let path = root_path.clone(); + self.sync(db); + } + pub fn ingot_context_from_config_path( &mut self, db: 
&LanguageServerDatabase, @@ -223,7 +238,7 @@ impl Workspace { .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); - + info!("Found {} files in ingot", paths.len()); info!("Syncing ingot files: {:?}", paths); @@ -251,7 +266,7 @@ impl Workspace { .map(|x| *x) .collect::>(); ingot_context.ingot.set_files(db, ingot_context_files); - + // find the root file, which is either at `./src/main.fe` or `./src/lib.fe` let root_file = ingot_context .files @@ -260,7 +275,7 @@ impl Workspace { file.path(db).ends_with("src/main.fe") || file.path(db).ends_with("src/lib.fe") }) .map(|file| *file); - + if let Some(root_file) = root_file { ingot_context.ingot.set_root_file(db, root_file); } @@ -313,35 +328,44 @@ impl IngotFileContext for Workspace { } pub(crate) trait SyncableIngotFileContext { - fn sync(&mut self, db: &mut LanguageServerDatabase, path: String) -> (); + fn sync(&mut self, db: &mut LanguageServerDatabase) -> Result<()>; } impl SyncableIngotFileContext for Workspace { - fn sync(&mut self, db: &mut LanguageServerDatabase, path: String) -> () { - info!("Syncing workspace at {}", path); - // first let's sync ingots - self.sync_local_ingots(db, &path); - // collect the ingot config paths + fn sync(&mut self, db: &mut LanguageServerDatabase) -> Result<()> { + let path = { + let path = &self.root_path; + path.clone().unwrap() + }; + + let path = path.to_str().unwrap(); + + info!("Syncing workspace at {:?}", path); + self.sync_local_ingots(db, path); + let ingot_paths = glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + .ok() .unwrap() - .map(|p| p.unwrap().to_str().unwrap().to_string()) + .filter_map(Result::ok) + .filter_map(|p| p.to_str().map(|s| s.to_string())) .collect::>(); - + info!("Found {} ingots", ingot_paths.len()); for ingot_path in ingot_paths { self.sync_ingot_files(db, &ingot_path); } - // now let's sync all files let paths = glob::glob(&format!("{}/**/*.fe", path)) + .ok() .unwrap() - .map(|p| p.unwrap().to_str().unwrap().to_string()) + .filter_map(|p| p.ok().unwrap().to_str().map(|s| s.to_string())) .collect::>(); for path in paths { self.input_from_file_path(db, &path); } + Ok(()) } } @@ -432,7 +456,7 @@ mod tests { let ingot = workspace.ingot_from_file_path(&mut db, file_path); assert!(ingot.is_some()); - + assert_eq!(file.map(|f| f.ingot(&mut db)).unwrap(), ingot.unwrap()); assert_eq!( From 1228edc58da900ae119ed6972e375dc108b797c4 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Sep 2023 16:52:46 -0500 Subject: [PATCH 266/678] LSP test for goto on ingot w/multiple module files --- crates/language-server/src/goto.rs | 62 ++++++++++++++++++- .../language-server/src/handlers/request.rs | 4 +- crates/language-server/src/server.rs | 2 +- crates/language-server/src/workspace.rs | 32 ++++++++-- .../test_files/single_ingot/src/lib.fe | 20 +++++- .../test_files/single_ingot/src/lib.snap | 35 +++++++++++ 6 files changed, 143 insertions(+), 12 deletions(-) create mode 100644 crates/language-server/test_files/single_ingot/src/lib.snap diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index a6d2eba966..5c8bd17ed6 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -86,9 +86,10 @@ pub fn goto_enclosing_path(db: &mut LanguageServerDatabase, top_mod: TopLevelMod #[cfg(test)] mod tests { - use crate::workspace::Workspace; + use crate::workspace::{Workspace, IngotFileContext}; use super::*; + use common::input::IngotKind; use fe_compiler_test_utils::snap_test; use dir_test::{dir_test, 
Fixture}; use std::path::Path; @@ -110,7 +111,64 @@ mod tests { cursors } - + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/single_ingot", + glob: "**/lib.fe", + )] + fn test_goto_multiple_files(fixture: Fixture<&str>) { + let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let ingot_base_dir = Path::new(&cargo_manifest_dir).join("test_files/single_ingot"); + + let db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); + + let _ = workspace.set_workspace_root(db, &Some(ingot_base_dir.clone())); + + let fe_source_path = ingot_base_dir.join(fixture.path()); + let input = workspace.input_from_file_path(db, fixture.path()); + assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); + + let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()); + + let ingot = workspace.ingot_from_file_path(db, fixture.path()); + assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); + + let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); + let mut cursor_path_map: FxHashMap = FxHashMap::default(); + + cursors.iter().for_each(|cursor|{ + let early_resolution = goto_enclosing_path(db, top_mod, *cursor); + + let goto_info = match early_resolution { + Some(EarlyResolvedPath::Full(bucket)) => + if bucket.len() > 0 { + bucket + .iter() + .map(|x| x.pretty_path(db).unwrap()) + .collect::>() + .join("\n") + } else { + String::from("`NameResBucket` is empty") + }, + Some(EarlyResolvedPath::Partial { + res, + unresolved_from: _, + }) => res.pretty_path(db).unwrap(), + None => String::from("No resolution available"), + }; + + cursor_path_map.insert(*cursor, goto_info); + }); + + let result = format!( + "{}\n---\n{}", + fixture.content(), + cursor_path_map.iter().map(|(cursor, path)| { + format!("cursor position: {:?}, path: {:?}", cursor, path) + }).collect::>().join("\n") + ); + snap_test!(result, fixture.path()); + } #[dir_test( dir: "$CARGO_MANIFEST_DIR/test_files", diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 2654e1140d..60b8df0292 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -77,9 +77,9 @@ pub(crate) fn handle_hover( let top_mod = state .workspace .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); - let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); + let early_resolution = goto_enclosing_path(&mut state.db, top_mod, cursor); - let goto_info = match goto_info { + let goto_info = match early_resolution { Some(EarlyResolvedPath::Full(bucket)) => bucket .iter() .map(|x| x.pretty_path(&state.db).unwrap()) diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 969d8c3633..aa347c36fc 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -53,7 +53,7 @@ pub fn run_server() -> Result<()> { let _ = state.init_logger(log::Level::Info); state.workspace.set_workspace_root( &mut state.db, - initialize_params.root_uri.unwrap().to_file_path().ok(), + &initialize_params.root_uri.unwrap().to_file_path().ok(), ); let result = state.run(connection.receiver)?; diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index e8629d0750..1309cc97d7 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -3,7 +3,7 @@ use std::{ path::{Path, PathBuf}, }; -use anyhow::{anyhow, Result}; +use 
anyhow::Result; use common::{ input::{IngotKind, Version}, InputFile, InputIngot, @@ -176,10 +176,11 @@ impl Workspace { pub fn set_workspace_root( &mut self, db: &mut LanguageServerDatabase, - root_path: Option, - ) { + root_path: &Option, + ) -> Result<()> { let path = root_path.clone(); - self.sync(db); + self.root_path = path; + self.sync(db) } pub fn ingot_context_from_config_path( @@ -245,6 +246,7 @@ impl Workspace { let ingot_context = self .ingot_context_from_config_path(db, config_path) .unwrap(); + let ingot_context_file_keys = &ingot_context.files.keys().collect::>(); ingot_context_file_keys.iter().for_each(|path| { if !paths.contains(&path) { @@ -265,6 +267,7 @@ impl Workspace { .values() .map(|x| *x) .collect::>(); + ingot_context.ingot.set_files(db, ingot_context_files); // find the root file, which is either at `./src/main.fe` or `./src/lib.fe` @@ -277,6 +280,7 @@ impl Workspace { .map(|file| *file); if let Some(root_file) = root_file { + info!("Setting root file for ingot: {:?}", root_file.path(db)); ingot_context.ingot.set_root_file(db, root_file); } } @@ -469,6 +473,26 @@ mod tests { ); assert_eq!(ingot_context_ingot.unwrap(), ingot.unwrap()); } + + #[test] + fn test_sync_single_ingot() { + let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let ingot_base_dir = std::path::Path::new(&cargo_manifest_dir).join("test_files/single_ingot/"); + let ingot_config_path = &ingot_base_dir.join("fe.toml"); + + let mut workspace = Workspace::default(); + let mut db = crate::db::LanguageServerDatabase::default(); + + let _ = workspace.set_workspace_root(&mut db, &Some(ingot_base_dir.clone())); + // panic!("wtf? {:?}", ingot_base_dir); + + assert_eq!(workspace.ingot_contexts.len(), 1); + + let fe_source_path = ingot_base_dir.join("src/main.fe"); + let input = workspace.input_from_file_path(&mut db, fe_source_path.to_str().unwrap()); + assert!(input.is_some()); + assert!(input.unwrap().ingot(&mut db).kind(&mut db) == common::input::IngotKind::Local); + } #[test] fn test_sync_nested_ingots() { diff --git a/crates/language-server/test_files/single_ingot/src/lib.fe b/crates/language-server/test_files/single_ingot/src/lib.fe index bd8d541810..8c40266d0c 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.fe +++ b/crates/language-server/test_files/single_ingot/src/lib.fe @@ -1,4 +1,18 @@ -mod foo; -use foo; +use foo::Foo; -let y: foo::Foo; \ No newline at end of file +mod baz { + use super::Foo; + + struct Bar { + x: Foo + } + + fn bar() -> () { + let x: Foo; + } +} + +fn bar() -> () { + let y: Foo; + let z: baz::Bar; +} \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.snap b/crates/language-server/test_files/single_ingot/src/lib.snap new file mode 100644 index 0000000000..1fdb815e01 --- /dev/null +++ b/crates/language-server/test_files/single_ingot/src/lib.snap @@ -0,0 +1,35 @@ +--- +source: crates/language-server/src/goto.rs +assertion_line: 170 +expression: result +input_file: crates/language-server/test_files/single_ingot/src/lib.fe +--- +use foo::Foo; + +mod baz { + use super::Foo; + + struct Bar { + x: Foo + } + + fn bar() -> () { + let x: Foo; + } +} + +fn bar() -> () { + let y: Foo; + let z: baz::Bar; +} +--- +cursor position: 125, path: "lib::foo::Foo" +cursor position: 122, path: "`NameResBucket` is empty" +cursor position: 167, path: "lib::foo::Foo" +cursor position: 180, path: "`NameResBucket` is empty" +cursor position: 164, path: "`NameResBucket` is empty" +cursor position: 183, path: 
"lib::baz::Bar" +cursor position: 61, path: "No resolution available" +cursor position: 78, path: "lib::foo::Foo" +cursor position: 21, path: "No resolution available" +cursor position: 30, path: "No resolution available" From d232a39953ced53c08fc7e2533f8637327c5afb6 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 7 Sep 2023 11:29:42 -0500 Subject: [PATCH 267/678] language server goto definition fix --- .../language-server/src/handlers/request.rs | 36 +++++++++++++++---- crates/language-server/src/server.rs | 2 +- crates/language-server/src/util.rs | 12 ++++--- .../test_files/single_ingot/src/foo.fe | 4 +-- .../test_files/single_ingot/src/lib.fe | 12 +++---- .../test_files/single_ingot/src/lib.snap | 28 +++++++-------- 6 files changed, 59 insertions(+), 35 deletions(-) diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 60b8df0292..5b1eee29ba 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -3,7 +3,7 @@ use std::io::BufRead; use common::{input::IngotKind, InputIngot}; use hir_analysis::name_resolution::EarlyResolvedPath; use log::info; -use lsp_server::Response; +use lsp_server::{Response, ResponseError}; use serde::Deserialize; use crate::{ @@ -123,6 +123,7 @@ pub(crate) fn handle_goto_definition( state: &mut ServerState, req: lsp_server::Request, ) -> Result<(), anyhow::Error> { + info!("handling goto definition request: {:?}", req); let params = TextDocumentPositionParams::deserialize(req.params)?; // Convert the position to an offset in the file @@ -149,22 +150,43 @@ pub(crate) fn handle_goto_definition( } None => return Ok(()), }; + + // info!("scopes: {:?}", scopes); let locations = scopes - .into_iter() - .filter_map(|scope| scope) + .iter() + .filter_map(|scope| scope.clone()) .map(|scope| to_lsp_location_from_scope(scope, &state.db)) - .filter_map(|location| location.ok()) .collect::>(); + + let errors = scopes + .iter() + .filter_map(|scope| scope.clone()) + .map(|scope| to_lsp_location_from_scope(scope, &state.db)) + .filter_map(|scope| scope.err()) + .map(|err| err.to_string()) + .collect::>().join("\n"); + + let error = if errors.len() > 0 { + Some(ResponseError { + code: lsp_types::error_codes::SERVER_CANCELLED as i32, + message: errors, + data: None, + }) + } else { + None + }; // Send the response let response_message = Response { id: req.id, result: Some(serde_json::to_value( - lsp_types::GotoDefinitionResponse::Array(locations), + lsp_types::GotoDefinitionResponse::Array(locations.into_iter().filter_map(|x| x.ok()).collect()), )?), - error: None, - }; + error +}; + + info!("goto definition response: {:?}", response_message); state.send_response(response_message)?; Ok(()) diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index aa347c36fc..a8b768e05a 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -54,7 +54,7 @@ pub fn run_server() -> Result<()> { state.workspace.set_workspace_root( &mut state.db, &initialize_params.root_uri.unwrap().to_file_path().ok(), - ); + )?; let result = state.run(connection.receiver)?; io_threads.join().unwrap(); diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index 5ceb5ddf82..dcaeb15c1e 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,6 +1,6 @@ use common::{diagnostics::{Severity, CompleteDiagnostic, Span}, InputDb}; use 
hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; -use log::error; +use log::{error, info}; use lsp_types::Position; @@ -25,14 +25,16 @@ pub(crate) fn to_offset_from_position(position: Position, text: &str) -> rowan:: pub(crate) fn to_lsp_range_from_span(span: Span, db: &dyn InputDb) -> Result> { let text = span.file.text(db); let line_offsets = calculate_line_offsets(text); + let start = span.range.start(); + let end = span.range.end(); let start_line = line_offsets - .binary_search(&span.range.start().into()) - .map_err(|_| "Failed to find start line")?; + .binary_search(&start.into()) + .unwrap_or_else(|x| x - 1); let end_line = line_offsets - .binary_search(&span.range.end().into()) - .map_err(|_| "Failed to find end line")?; + .binary_search(&end.into()) + .unwrap_or_else(|x| x - 1); let start_character: usize = usize::from(span.range.start()) - line_offsets[start_line]; let end_character: usize = usize::from(span.range.end()) - line_offsets[end_line]; diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe index ae8b70b24e..c2251ee70c 100644 --- a/crates/language-server/test_files/single_ingot/src/foo.fe +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -1,8 +1,8 @@ pub fn foo() { - let x = 5; + let x = 5 x } pub struct Foo { - pub x: i32, + pub x: i32 } \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.fe b/crates/language-server/test_files/single_ingot/src/lib.fe index 8c40266d0c..cc513131dc 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.fe +++ b/crates/language-server/test_files/single_ingot/src/lib.fe @@ -1,18 +1,18 @@ -use foo::Foo; +use foo::Foo mod baz { - use super::Foo; + use super::Foo - struct Bar { + pub struct Bar { x: Foo } fn bar() -> () { - let x: Foo; + let x: Foo } } fn bar() -> () { - let y: Foo; - let z: baz::Bar; + let y: Foo + let z: baz::Bar } \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.snap b/crates/language-server/test_files/single_ingot/src/lib.snap index 1fdb815e01..07d1a8119c 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.snap +++ b/crates/language-server/test_files/single_ingot/src/lib.snap @@ -4,32 +4,32 @@ assertion_line: 170 expression: result input_file: crates/language-server/test_files/single_ingot/src/lib.fe --- -use foo::Foo; +use foo::Foo mod baz { - use super::Foo; + use super::Foo - struct Bar { + pub struct Bar { x: Foo } fn bar() -> () { - let x: Foo; + let x: Foo } } fn bar() -> () { - let y: Foo; - let z: baz::Bar; + let y: Foo + let z: baz::Bar } --- -cursor position: 125, path: "lib::foo::Foo" -cursor position: 122, path: "`NameResBucket` is empty" -cursor position: 167, path: "lib::foo::Foo" -cursor position: 180, path: "`NameResBucket` is empty" -cursor position: 164, path: "`NameResBucket` is empty" +cursor position: 80, path: "lib::foo::Foo" +cursor position: 29, path: "No resolution available" cursor position: 183, path: "lib::baz::Bar" -cursor position: 61, path: "No resolution available" -cursor position: 78, path: "lib::foo::Foo" +cursor position: 180, path: "`NameResBucket` is empty" +cursor position: 168, path: "lib::foo::Foo" cursor position: 21, path: "No resolution available" -cursor position: 30, path: "No resolution available" +cursor position: 165, path: "`NameResBucket` is empty" +cursor position: 127, path: "lib::foo::Foo" +cursor position: 124, path: "`NameResBucket` is 
empty" +cursor position: 60, path: "No resolution available" From b6d3ec5930beb30f91e71cc81115983148d8c75f Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 7 Sep 2023 11:38:12 -0500 Subject: [PATCH 268/678] formatting; remove unused imports --- crates/language-server/src/db.rs | 45 ++++-- crates/language-server/src/diagnostics.rs | 1 - crates/language-server/src/goto.rs | 134 +++++++++++------- .../src/handlers/notifications.rs | 27 ++-- .../language-server/src/handlers/request.rs | 21 +-- crates/language-server/src/main.rs | 6 +- crates/language-server/src/state.rs | 10 +- crates/language-server/src/util.rs | 60 +++++--- crates/language-server/src/workspace.rs | 16 +-- 9 files changed, 190 insertions(+), 130 deletions(-) diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 8c9f122fb9..d02796dc7a 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -1,10 +1,10 @@ -use common::{ - diagnostics::CompleteDiagnostic, - InputDb, -}; +use common::{diagnostics::CompleteDiagnostic, InputDb}; use hir::{ - analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::{TopLevelMod, ItemKind}, - HirDb, LowerHirDb, ParsingPass, SpannedHirDb, span::{DynLazySpan, LazySpan}, + analysis_pass::AnalysisPassManager, + diagnostics::DiagnosticVoucher, + hir_def::{ItemKind, TopLevelMod}, + span::{DynLazySpan, LazySpan}, + HirDb, LowerHirDb, ParsingPass, SpannedHirDb, }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, @@ -18,14 +18,22 @@ pub struct Jar(crate::diagnostics::file_line_starts); pub trait LanguageServerDb: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb -{ } +{ +} impl LanguageServerDb for DB where DB: Sized + salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb -{ } - +{ +} -#[salsa::db(common::Jar, hir::Jar, hir::LowerJar, hir::SpannedJar, hir_analysis::Jar, Jar)] +#[salsa::db( + common::Jar, + hir::Jar, + hir::LowerJar, + hir::SpannedJar, + hir_analysis::Jar, + Jar +)] pub struct LanguageServerDatabase { storage: salsa::Storage, diags: Vec>, @@ -47,15 +55,23 @@ impl LanguageServerDatabase { }; } - pub fn find_enclosing_item(&mut self, top_mod: TopLevelMod, cursor: Cursor) -> Option { - let items = top_mod.scope_graph(self.as_hir_db()).items_dfs(self.as_hir_db()); + pub fn find_enclosing_item( + &mut self, + top_mod: TopLevelMod, + cursor: Cursor, + ) -> Option { + let items = top_mod + .scope_graph(self.as_hir_db()) + .items_dfs(self.as_hir_db()); let mut smallest_enclosing_item = None; let mut smallest_range_size = None; for item in items { let lazy_item_span = DynLazySpan::from(item.lazy_span()); - let item_span = lazy_item_span.resolve(SpannedHirDb::as_spanned_hir_db(self)).unwrap(); + let item_span = lazy_item_span + .resolve(SpannedHirDb::as_spanned_hir_db(self)) + .unwrap(); if item_span.range.contains(cursor) { let range_size = item_span.range.end() - item_span.range.start(); @@ -68,7 +84,7 @@ impl LanguageServerDatabase { return smallest_enclosing_item; } - + pub fn finalize_diags(&self) -> Vec { let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { @@ -77,7 +93,6 @@ impl LanguageServerDatabase { }); diags } - } impl salsa::Database for LanguageServerDatabase { diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 93ee94c983..4d1d577218 100644 --- 
a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -12,7 +12,6 @@ use hir::diagnostics::DiagnosticVoucher; use crate::db::{LanguageServerDatabase, LanguageServerDb}; - pub trait ToCsDiag { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic; } diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 5c8bd17ed6..7af2452e4e 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -1,6 +1,6 @@ use fxhash::FxHashMap; use hir::{ - hir_def::{scope_graph::ScopeId, PathId, TopLevelMod, ItemKind}, + hir_def::{scope_graph::ScopeId, ItemKind, PathId, TopLevelMod}, visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, HirDb, }; @@ -33,9 +33,7 @@ impl<'db> Visitor for PathSpanCollector<'db> { fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { let Some(span) = ctxt .span() - .map(|lazy_span| lazy_span.resolve( - self.db.as_spanned_hir_db() - )) + .map(|lazy_span| lazy_span.resolve(self.db.as_spanned_hir_db())) .flatten() else { return; @@ -46,7 +44,7 @@ impl<'db> Visitor for PathSpanCollector<'db> { } } -fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option{ +fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option { let mut smallest_enclosing_path = None; let mut smallest_range_size = None; @@ -63,7 +61,11 @@ fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option Option { +pub fn goto_enclosing_path( + db: &mut LanguageServerDatabase, + top_mod: TopLevelMod, + cursor: Cursor, +) -> Option { // Find the innermost item enclosing the cursor. let item: ItemKind = db.find_enclosing_item(top_mod, cursor)?; @@ -86,15 +88,18 @@ pub fn goto_enclosing_path(db: &mut LanguageServerDatabase, top_mod: TopLevelMod #[cfg(test)] mod tests { - use crate::workspace::{Workspace, IngotFileContext}; + use crate::workspace::{IngotFileContext, Workspace}; use super::*; use common::input::IngotKind; - use fe_compiler_test_utils::snap_test; use dir_test::{dir_test, Fixture}; + use fe_compiler_test_utils::snap_test; use std::path::Path; - fn extract_multiple_cursor_positions_from_spans(db: &mut LanguageServerDatabase, top_mod: TopLevelMod) -> Vec { + fn extract_multiple_cursor_positions_from_spans( + db: &mut LanguageServerDatabase, + top_mod: TopLevelMod, + ) -> Vec { let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); let mut path_collector = PathSpanCollector::new(&db); path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); @@ -121,55 +126,60 @@ mod tests { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); - + let _ = workspace.set_workspace_root(db, &Some(ingot_base_dir.clone())); - + let fe_source_path = ingot_base_dir.join(fixture.path()); let input = workspace.input_from_file_path(db, fixture.path()); assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); - - let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()); - + + let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()); + let ingot = workspace.ingot_from_file_path(db, fixture.path()); assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); let mut cursor_path_map: FxHashMap = FxHashMap::default(); - cursors.iter().for_each(|cursor|{ + cursors.iter().for_each(|cursor| { let early_resolution = goto_enclosing_path(db, top_mod, *cursor); let 
goto_info = match early_resolution { - Some(EarlyResolvedPath::Full(bucket)) => - if bucket.len() > 0 { + Some(EarlyResolvedPath::Full(bucket)) => { + if bucket.len() > 0 { bucket - .iter() - .map(|x| x.pretty_path(db).unwrap()) - .collect::>() - .join("\n") - } else { - String::from("`NameResBucket` is empty") - }, + .iter() + .map(|x| x.pretty_path(db).unwrap()) + .collect::>() + .join("\n") + } else { + String::from("`NameResBucket` is empty") + } + } Some(EarlyResolvedPath::Partial { res, unresolved_from: _, }) => res.pretty_path(db).unwrap(), None => String::from("No resolution available"), }; - + cursor_path_map.insert(*cursor, goto_info); }); - + let result = format!( "{}\n---\n{}", fixture.content(), - cursor_path_map.iter().map(|(cursor, path)| { - format!("cursor position: {:?}, path: {:?}", cursor, path) - }).collect::>().join("\n") + cursor_path_map + .iter() + .map(|(cursor, path)| { + format!("cursor position: {:?}, path: {:?}", cursor, path) + }) + .collect::>() + .join("\n") ); snap_test!(result, fixture.path()); } - + #[dir_test( dir: "$CARGO_MANIFEST_DIR/test_files", glob: "goto*.fe" @@ -181,7 +191,7 @@ mod tests { let top_mod = workspace.top_mod_from_file(&mut db, path, fixture.content()); let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); - + let mut cursor_path_map: FxHashMap = FxHashMap::default(); cursors.iter().for_each(|cursor| { @@ -190,25 +200,33 @@ mod tests { match resolved_path { Some(path) => match path { EarlyResolvedPath::Full(bucket) => { - let path = bucket.iter().map(|x| x.pretty_path(db).unwrap()).collect::>() - .join("\n"); + let path = bucket + .iter() + .map(|x| x.pretty_path(db).unwrap()) + .collect::>() + .join("\n"); cursor_path_map.insert(*cursor, path); - }, - EarlyResolvedPath::Partial { res, unresolved_from: _ } => { + } + EarlyResolvedPath::Partial { + res, + unresolved_from: _, + } => { let path = res.pretty_path(db).unwrap(); cursor_path_map.insert(*cursor, path); - }, + } }, - None => {}, + None => {} }; }); let result = format!( "{}\n---\n{}", fixture.content(), - cursor_path_map.iter().map(|(cursor, path)| { - format!("cursor position: {:?}, path: {}", cursor, path) - }).collect::>().join("\n") + cursor_path_map + .iter() + .map(|(cursor, path)| { format!("cursor position: {:?}, path: {}", cursor, path) }) + .collect::>() + .join("\n") ); snap_test!(result, fixture.path()); } @@ -224,7 +242,7 @@ mod tests { let top_mod = workspace.top_mod_from_file(db, path, fixture.content()); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); - + let mut cursor_path_map: FxHashMap = FxHashMap::default(); cursors.iter().for_each(|cursor| { @@ -234,17 +252,23 @@ mod tests { let path_map = path_collector.path_map; let enclosing_path = smallest_enclosing_path(*cursor, &path_map); - - let resolved_enclosing_path = hir_analysis::name_resolution::resolve_path_early(db, enclosing_path.unwrap().0, enclosing_path.unwrap().1); - + + let resolved_enclosing_path = hir_analysis::name_resolution::resolve_path_early( + db, + enclosing_path.unwrap().0, + enclosing_path.unwrap().1, + ); + let res = match resolved_enclosing_path { - EarlyResolvedPath::Full(bucket) => { - bucket.iter().map(|x| x.pretty_path(db).unwrap()).collect::>() - .join("\n") - }, - EarlyResolvedPath::Partial { res, unresolved_from: _ } => { - res.pretty_path(db).unwrap() - }, + EarlyResolvedPath::Full(bucket) => bucket + .iter() + .map(|x| x.pretty_path(db).unwrap()) + .collect::>() + .join("\n"), + EarlyResolvedPath::Partial { + res, + 
unresolved_from: _, + } => res.pretty_path(db).unwrap(), }; cursor_path_map.insert(*cursor, res); }); @@ -252,9 +276,11 @@ mod tests { let result = format!( "{}\n---\n{}", fixture.content(), - cursor_path_map.iter().map(|(cursor, path)| { - format!("cursor position: {:?}, path: {}", cursor, path) - }).collect::>().join("\n") + cursor_path_map + .iter() + .map(|(cursor, path)| { format!("cursor position: {:?}, path: {}", cursor, path) }) + .collect::>() + .join("\n") ); snap_test!(result, fixture.path()); } diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 1127d01b10..2bb8d584f0 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -1,9 +1,13 @@ -use anyhow::{Result, Error}; +use anyhow::{Error, Result}; use serde::Deserialize; -use crate::{state::ServerState, util::diag_to_lsp, db::LanguageServerDatabase, workspace::SyncableIngotFileContext}; +use crate::{state::ServerState, util::diag_to_lsp}; -fn string_diagnostics(state: &mut ServerState, path: &str, src: &str) -> Vec { +fn string_diagnostics( + state: &mut ServerState, + path: &str, + src: &str, +) -> Vec { let db = &mut state.db; let workspace = &mut state.workspace; let file_path = std::path::Path::new(path); @@ -22,11 +26,14 @@ pub(crate) fn get_diagnostics( uri.to_file_path().unwrap().to_str().unwrap(), text.as_str(), ); - + let diagnostics = diags.into_iter().flat_map(|diag| { - diag_to_lsp(diag, &state.db).iter().map(|x| x.clone()).collect::>() + diag_to_lsp(diag, &state.db) + .iter() + .map(|x| x.clone()) + .collect::>() }); - + Ok(diagnostics.collect()) } @@ -55,14 +62,14 @@ pub(crate) fn handle_document_did_change( // note: lsp_server::Notification, // ) -> Result<(), Error> { // let params = lsp_types::DidChangeWorkspaceFoldersParams::deserialize(note.params)?; - + // let mut workspace = &mut state.workspace; // let mut db = &mut state.db; // let workspace_folder = params.event.added[0].uri.to_file_path().unwrap(); - + // workspace.sync(&mut db, workspace_folder.as_path().to_str().unwrap().into()); - + // Ok(()) // } @@ -83,6 +90,6 @@ fn send_diagnostics( let sender = state.sender.lock().unwrap(); sender.send(response)?; - + Ok(()) } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 5b1eee29ba..2519e22d09 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,6 +1,6 @@ use std::io::BufRead; -use common::{input::IngotKind, InputIngot}; +use common::input::IngotKind; use hir_analysis::name_resolution::EarlyResolvedPath; use log::info; use lsp_server::{Response, ResponseError}; @@ -150,7 +150,7 @@ pub(crate) fn handle_goto_definition( } None => return Ok(()), }; - + // info!("scopes: {:?}", scopes); let locations = scopes @@ -158,15 +158,16 @@ pub(crate) fn handle_goto_definition( .filter_map(|scope| scope.clone()) .map(|scope| to_lsp_location_from_scope(scope, &state.db)) .collect::>(); - + let errors = scopes .iter() .filter_map(|scope| scope.clone()) .map(|scope| to_lsp_location_from_scope(scope, &state.db)) .filter_map(|scope| scope.err()) .map(|err| err.to_string()) - .collect::>().join("\n"); - + .collect::>() + .join("\n"); + let error = if errors.len() > 0 { Some(ResponseError { code: lsp_types::error_codes::SERVER_CANCELLED as i32, @@ -181,11 +182,13 @@ pub(crate) fn handle_goto_definition( let response_message = Response { id: req.id, result: 
Some(serde_json::to_value( - lsp_types::GotoDefinitionResponse::Array(locations.into_iter().filter_map(|x| x.ok()).collect()), + lsp_types::GotoDefinitionResponse::Array( + locations.into_iter().filter_map(|x| x.ok()).collect(), + ), )?), - error -}; - + error, + }; + info!("goto definition response: {:?}", response_message); state.send_response(response_message)?; diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 279ecd3a64..b2b7ed240d 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,9 +1,9 @@ -mod server; -mod state; mod db; -mod util; mod diagnostics; mod goto; +mod server; +mod state; +mod util; mod workspace; use db::Jar; diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index c895459251..ddab91b660 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -1,9 +1,8 @@ -use std::path::PathBuf; use std::sync::{Arc, Mutex}; use crate::db::LanguageServerDatabase; -use crate::workspace::{SyncableIngotFileContext, Workspace}; -use anyhow::{Context, Result}; +use crate::workspace::Workspace; +use anyhow::Result; use crossbeam_channel::{Receiver, Sender}; use log::{info, Level, Metadata, Record}; use log::{LevelFilter, SetLoggerError}; @@ -11,10 +10,7 @@ use lsp_server::Message; use lsp_types::notification::Notification; use lsp_types::request::Request; -use crate::handlers::notifications::{ - handle_document_did_change, - // handle_workspace_did_change_folders, -}; +use crate::handlers::notifications::handle_document_did_change; use crate::handlers::request::handle_goto_definition; use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index dcaeb15c1e..c1438696cd 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -1,9 +1,11 @@ -use common::{diagnostics::{Severity, CompleteDiagnostic, Span}, InputDb}; +use common::{ + diagnostics::{CompleteDiagnostic, Severity, Span}, + InputDb, +}; use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; -use log::{error, info}; +use log::error; use lsp_types::Position; - pub(crate) fn calculate_line_offsets(text: &str) -> Vec { text.lines() .scan(0, |state, line| { @@ -22,7 +24,10 @@ pub(crate) fn to_offset_from_position(position: Position, text: &str) -> rowan:: rowan::TextSize::from((line_offset + character_offset) as u32) } -pub(crate) fn to_lsp_range_from_span(span: Span, db: &dyn InputDb) -> Result> { +pub(crate) fn to_lsp_range_from_span( + span: Span, + db: &dyn InputDb, +) -> Result> { let text = span.file.text(db); let line_offsets = calculate_line_offsets(text); let start = span.range.start(); @@ -45,9 +50,16 @@ pub(crate) fn to_lsp_range_from_span(span: Span, db: &dyn InputDb) -> Result Result> { - let lazy_span = scope.name_span(db.as_hir_db()).ok_or("Failed to get name span")?; - let span = lazy_span.resolve(db.as_spanned_hir_db()).ok_or("Failed to resolve span")?; +pub(crate) fn to_lsp_location_from_scope( + scope: ScopeId, + db: &dyn SpannedHirDb, +) -> Result> { + let lazy_span = scope + .name_span(db.as_hir_db()) + .ok_or("Failed to get name span")?; + let span = lazy_span + .resolve(db.as_spanned_hir_db()) + .ok_or("Failed to resolve span")?; let uri = span.file.abs_path(db.as_input_db()); let range = to_lsp_range_from_span(span, db.as_input_db())?; let uri = lsp_types::Url::from_file_path(uri).map_err(|_| 
"Failed to convert path to URL")?; @@ -62,31 +74,33 @@ pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeveri } } -pub(crate) fn diag_to_lsp(diag: CompleteDiagnostic, db: &dyn InputDb) -> Vec { +pub(crate) fn diag_to_lsp( + diag: CompleteDiagnostic, + db: &dyn InputDb, +) -> Vec { diag.sub_diagnostics .into_iter() .map(|sub| { let lsp_range = to_lsp_range_from_span(sub.span.unwrap(), db); - + match lsp_range { - Ok(range) => - Some(lsp_types::Diagnostic { - range, - severity: Some(severity_to_lsp(diag.severity)), - code: None, - source: None, - message: sub.message.clone(), - related_information: None, - tags: None, - code_description: None, - data: None // for code actions - }), + Ok(range) => Some(lsp_types::Diagnostic { + range, + severity: Some(severity_to_lsp(diag.severity)), + code: None, + source: None, + message: sub.message.clone(), + related_information: None, + tags: None, + code_description: None, + data: None, // for code actions + }), Err(_) => { error!("Failed to convert span to range"); - None + None } } }) .filter_map(|x| x) .collect() - } \ No newline at end of file +} diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 1309cc97d7..b8bc05a529 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -92,8 +92,8 @@ impl IngotFileContext for LocalIngotContext { fn ingot_from_file_path( &mut self, - db: &mut LanguageServerDatabase, - path: &str, + _db: &mut LanguageServerDatabase, + _path: &str, ) -> Option { Some(self.ingot) } @@ -341,7 +341,7 @@ impl SyncableIngotFileContext for Workspace { let path = &self.root_path; path.clone().unwrap() }; - + let path = path.to_str().unwrap(); info!("Syncing workspace at {:?}", path); @@ -375,7 +375,6 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { - use glob::glob; use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; @@ -473,13 +472,14 @@ mod tests { ); assert_eq!(ingot_context_ingot.unwrap(), ingot.unwrap()); } - + #[test] fn test_sync_single_ingot() { let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); - let ingot_base_dir = std::path::Path::new(&cargo_manifest_dir).join("test_files/single_ingot/"); - let ingot_config_path = &ingot_base_dir.join("fe.toml"); - + let ingot_base_dir = + std::path::Path::new(&cargo_manifest_dir).join("test_files/single_ingot/"); + let _ingot_config_path = &ingot_base_dir.join("fe.toml"); + let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); From 33dccaeb3233af2c862dfec69533e3200fa4bc9a Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 7 Sep 2023 11:49:54 -0500 Subject: [PATCH 269/678] add backtrace to LSP crash output --- .vscode/launch.json | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 940181cf5d..3b7fb3e6a5 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -15,7 +15,11 @@ ], "preLaunchTask": "compile-vscode-extension", "request": "launch", - "type": "extensionHost" + "type": "extensionHost", + // we need to enable backtrace on the extension host + "env": { + "RUST_BACKTRACE": "1" + } }, ] } \ No newline at end of file From 6578453cd6a6629e1ae783b776f0c97c7cbeaf90 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 7 Sep 2023 11:50:28 -0500 Subject: [PATCH 270/678] lsp top mod error fix --- crates/language-server/src/goto.rs | 6 +++--- 
crates/language-server/src/handlers/notifications.rs | 2 +- crates/language-server/src/handlers/request.rs | 6 ++++-- crates/language-server/src/workspace.rs | 9 +++------ 4 files changed, 11 insertions(+), 12 deletions(-) diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 7af2452e4e..aa31e0c5cd 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -133,7 +133,7 @@ mod tests { let input = workspace.input_from_file_path(db, fixture.path()); assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); - let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()); + let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()).unwrap(); let ingot = workspace.ingot_from_file_path(db, fixture.path()); assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); @@ -188,7 +188,7 @@ mod tests { let mut db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = workspace.top_mod_from_file(&mut db, path, fixture.content()); + let top_mod = workspace.top_mod_from_file(&mut db, path, fixture.content()).unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); @@ -239,7 +239,7 @@ mod tests { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = workspace.top_mod_from_file(db, path, fixture.content()); + let top_mod = workspace.top_mod_from_file(db, path, fixture.content()).unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 2bb8d584f0..87e2a0e0e9 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -12,7 +12,7 @@ fn string_diagnostics( let workspace = &mut state.workspace; let file_path = std::path::Path::new(path); let top_mod = workspace.top_mod_from_file(db, file_path, src); - db.run_on_top_mod(top_mod); + db.run_on_top_mod(top_mod.unwrap()); db.finalize_diags() } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 2519e22d09..7848a27a9b 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -76,7 +76,8 @@ pub(crate) fn handle_hover( let top_mod = state .workspace - .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); + .top_mod_from_file(&mut state.db, file_path, file_text.as_str()) + .unwrap(); let early_resolution = goto_enclosing_path(&mut state.db, top_mod, cursor); let goto_info = match early_resolution { @@ -134,7 +135,8 @@ pub(crate) fn handle_goto_definition( let file_path = std::path::Path::new(params.text_document.uri.path()); let top_mod = state .workspace - .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); + .top_mod_from_file(&mut state.db, file_path, file_text.as_str()) + .unwrap(); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); // Convert the goto info to a Location diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index b8bc05a529..de30ab76b9 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -290,16 +290,13 @@ impl Workspace { db: &mut LanguageServerDatabase, file_path: 
&Path, source: &str, - ) -> TopLevelMod { + ) -> Result { let file = self .input_from_file_path(db, file_path.to_str().unwrap()) .unwrap(); file.set_text(db).to(source.to_string()); - // let ingot = file.ingot(db); - // let mut files = ingot.files(db).clone(); - // files.insert(file); - // ingot.set_files(db, files); - map_file_to_mod(db, file) + let top_mod = map_file_to_mod(db, file); + Ok(top_mod) } } From 450d16662a7c7b1b32f28bb09a2118b10ce0234f Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 7 Sep 2023 12:16:36 -0500 Subject: [PATCH 271/678] Revert "lsp top mod error fix" This reverts commit f2f4b94f964fbc45544bab31a6ce86eceeea3a51. --- crates/language-server/src/goto.rs | 6 +++--- crates/language-server/src/handlers/notifications.rs | 2 +- crates/language-server/src/handlers/request.rs | 6 ++---- crates/language-server/src/workspace.rs | 9 ++++++--- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index aa31e0c5cd..7af2452e4e 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -133,7 +133,7 @@ mod tests { let input = workspace.input_from_file_path(db, fixture.path()); assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); - let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()).unwrap(); + let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()); let ingot = workspace.ingot_from_file_path(db, fixture.path()); assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); @@ -188,7 +188,7 @@ mod tests { let mut db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = workspace.top_mod_from_file(&mut db, path, fixture.content()).unwrap(); + let top_mod = workspace.top_mod_from_file(&mut db, path, fixture.content()); let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); @@ -239,7 +239,7 @@ mod tests { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = workspace.top_mod_from_file(db, path, fixture.content()).unwrap(); + let top_mod = workspace.top_mod_from_file(db, path, fixture.content()); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 87e2a0e0e9..2bb8d584f0 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -12,7 +12,7 @@ fn string_diagnostics( let workspace = &mut state.workspace; let file_path = std::path::Path::new(path); let top_mod = workspace.top_mod_from_file(db, file_path, src); - db.run_on_top_mod(top_mod.unwrap()); + db.run_on_top_mod(top_mod); db.finalize_diags() } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 7848a27a9b..2519e22d09 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -76,8 +76,7 @@ pub(crate) fn handle_hover( let top_mod = state .workspace - .top_mod_from_file(&mut state.db, file_path, file_text.as_str()) - .unwrap(); + .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); let early_resolution = goto_enclosing_path(&mut state.db, top_mod, cursor); let goto_info = match early_resolution { @@ -135,8 +134,7 @@ 
pub(crate) fn handle_goto_definition( let file_path = std::path::Path::new(params.text_document.uri.path()); let top_mod = state .workspace - .top_mod_from_file(&mut state.db, file_path, file_text.as_str()) - .unwrap(); + .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); // Convert the goto info to a Location diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index de30ab76b9..b8bc05a529 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -290,13 +290,16 @@ impl Workspace { db: &mut LanguageServerDatabase, file_path: &Path, source: &str, - ) -> Result { + ) -> TopLevelMod { let file = self .input_from_file_path(db, file_path.to_str().unwrap()) .unwrap(); file.set_text(db).to(source.to_string()); - let top_mod = map_file_to_mod(db, file); - Ok(top_mod) + // let ingot = file.ingot(db); + // let mut files = ingot.files(db).clone(); + // files.insert(file); + // ingot.set_files(db, files); + map_file_to_mod(db, file) } } From 235c8d6b57336de3a7b933546e2a3f84306df7fc Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 7 Sep 2023 13:38:54 -0500 Subject: [PATCH 272/678] prevent multiple ingots referencing same file inputs --- crates/language-server/src/goto.rs | 8 ++-- .../src/handlers/notifications.rs | 2 +- .../language-server/src/handlers/request.rs | 4 +- crates/language-server/src/server.rs | 2 +- crates/language-server/src/workspace.rs | 44 ++++++++++++++----- 5 files changed, 40 insertions(+), 20 deletions(-) diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 7af2452e4e..6af1960f95 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -127,13 +127,13 @@ mod tests { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); - let _ = workspace.set_workspace_root(db, &Some(ingot_base_dir.clone())); + let _ = workspace.set_workspace_root(db, ingot_base_dir.clone()); let fe_source_path = ingot_base_dir.join(fixture.path()); let input = workspace.input_from_file_path(db, fixture.path()); assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); - let top_mod = workspace.top_mod_from_file(db, &fe_source_path, fixture.content()); + let top_mod = workspace.top_mod_from_file(db, &fe_source_path, Some(fixture.content())); let ingot = workspace.ingot_from_file_path(db, fixture.path()); assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); @@ -188,7 +188,7 @@ mod tests { let mut db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = workspace.top_mod_from_file(&mut db, path, fixture.content()); + let top_mod = workspace.top_mod_from_file(&mut db, path, Some(fixture.content())); let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); @@ -239,7 +239,7 @@ mod tests { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = workspace.top_mod_from_file(db, path, fixture.content()); + let top_mod = workspace.top_mod_from_file(db, path, Some(fixture.content())); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 2bb8d584f0..b006f209e2 100644 --- 
a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -11,7 +11,7 @@ fn string_diagnostics( let db = &mut state.db; let workspace = &mut state.workspace; let file_path = std::path::Path::new(path); - let top_mod = workspace.top_mod_from_file(db, file_path, src); + let top_mod = workspace.top_mod_from_file(db, file_path, Some(src)); db.run_on_top_mod(top_mod); db.finalize_diags() } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 2519e22d09..fdc3b9077f 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -76,7 +76,7 @@ pub(crate) fn handle_hover( let top_mod = state .workspace - .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); + .top_mod_from_file(&mut state.db, file_path, Some(file_text.as_str())); let early_resolution = goto_enclosing_path(&mut state.db, top_mod, cursor); let goto_info = match early_resolution { @@ -134,7 +134,7 @@ pub(crate) fn handle_goto_definition( let file_path = std::path::Path::new(params.text_document.uri.path()); let top_mod = state .workspace - .top_mod_from_file(&mut state.db, file_path, file_text.as_str()); + .top_mod_from_file(&mut state.db, file_path, Some(file_text.as_str())); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); // Convert the goto info to a Location diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index a8b768e05a..537132537d 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -53,7 +53,7 @@ pub fn run_server() -> Result<()> { let _ = state.init_logger(log::Level::Info); state.workspace.set_workspace_root( &mut state.db, - &initialize_params.root_uri.unwrap().to_file_path().ok(), + initialize_params.root_uri.unwrap().to_file_path().ok().unwrap(), )?; let result = state.run(connection.receiver)?; diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index b8bc05a529..d0ca33e8eb 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -176,10 +176,10 @@ impl Workspace { pub fn set_workspace_root( &mut self, db: &mut LanguageServerDatabase, - root_path: &Option, + root_path: PathBuf, ) -> Result<()> { let path = root_path.clone(); - self.root_path = path; + self.root_path = Some(path); self.sync(db) } @@ -235,7 +235,7 @@ impl Workspace { info!("Syncing ingot at {}", config_path); let ingot_root = config_path.strip_suffix(FE_CONFIG_SUFFIX).unwrap(); - let paths = &glob::glob(&format!("{}/**/*.fe", ingot_root)) + let paths = &glob::glob(&format!("{}/src/**/*.fe", ingot_root)) .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); @@ -289,17 +289,19 @@ impl Workspace { &mut self, db: &mut LanguageServerDatabase, file_path: &Path, - source: &str, + source: Option<&str>, ) -> TopLevelMod { let file = self .input_from_file_path(db, file_path.to_str().unwrap()) .unwrap(); - file.set_text(db).to(source.to_string()); - // let ingot = file.ingot(db); - // let mut files = ingot.files(db).clone(); - // files.insert(file); - // ingot.set_files(db, files); - map_file_to_mod(db, file) + if let Some(src) = source { + file.set_text(db).to(src.to_string()); + } + let top_mod = map_file_to_mod(db, file); + + info!("top mod: {:?} from file: {:?}", top_mod, file); + + top_mod } } @@ -360,7 +362,7 @@ impl SyncableIngotFileContext for Workspace { 
self.sync_ingot_files(db, &ingot_path); } - let paths = glob::glob(&format!("{}/**/*.fe", path)) + let paths = glob::glob(&format!("{}/src/**/*.fe", path)) .ok() .unwrap() .filter_map(|p| p.ok().unwrap().to_str().map(|s| s.to_string())) @@ -376,6 +378,7 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { + use std::path::{Path, PathBuf}; use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; use super::StandaloneIngotContext; @@ -483,7 +486,7 @@ mod tests { let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); - let _ = workspace.set_workspace_root(&mut db, &Some(ingot_base_dir.clone())); + let _ = workspace.set_workspace_root(&mut db, ingot_base_dir.clone()); // panic!("wtf? {:?}", ingot_base_dir); assert_eq!(workspace.ingot_contexts.len(), 1); @@ -511,6 +514,23 @@ mod tests { workspace.sync_local_ingots(&mut db, &path); assert!(workspace.ingot_contexts.len() == 2); + + let _ = workspace.set_workspace_root(&mut db, PathBuf::from(&path)); + + // get all top level modules for .fe files in the workspace + let fe_files = glob::glob(&format!("{}/**/*.fe", path)) + .unwrap() + .filter_map(Result::ok) + .map(|p| p.to_str().unwrap().to_string()) + .collect::>(); + + for src_path in fe_files { + let _file = workspace.input_from_file_path(&mut db, &src_path).unwrap(); + + // this would panic if a file has been added to multiple ingots + let _top_mod = workspace.top_mod_from_file(&mut db, Path::new(&src_path), None); + } + } #[test] From a657143ad95377e32545fa3f518b8159f7823b1d Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 8 Sep 2023 12:26:44 -0500 Subject: [PATCH 273/678] style and formatting --- crates/language-server/src/db.rs | 4 +- crates/language-server/src/goto.rs | 45 ++++--- .../src/handlers/notifications.rs | 23 ++-- .../language-server/src/handlers/request.rs | 51 ++++---- crates/language-server/src/main.rs | 4 +- crates/language-server/src/server.rs | 13 +- crates/language-server/src/state.rs | 11 +- crates/language-server/src/util.rs | 20 ++- crates/language-server/src/workspace.rs | 119 +++++++++--------- 9 files changed, 140 insertions(+), 150 deletions(-) diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index d02796dc7a..af47ab1e09 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -46,7 +46,7 @@ impl LanguageServerDatabase { pub fn run_on_file_with_pass_manager(&mut self, top_mod: TopLevelMod, pm_builder: F) where - F: FnOnce(&LanguageServerDatabase) -> AnalysisPassManager<'_>, + F: FnOnce(&Self) -> AnalysisPassManager<'_>, { self.diags.clear(); self.diags = { @@ -82,7 +82,7 @@ impl LanguageServerDatabase { } } - return smallest_enclosing_item; + smallest_enclosing_item } pub fn finalize_diags(&self) -> Vec { diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 6af1960f95..6eaf8e725e 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -10,8 +10,8 @@ use crate::db::{LanguageServerDatabase, LanguageServerDb}; use common::diagnostics::Span; use hir::span::LazySpan; -pub(crate) type GotoEnclosingPath = (PathId, ScopeId); -pub(crate) type GotoPathMap = FxHashMap; +pub type GotoEnclosingPath = (PathId, ScopeId); +pub type GotoPathMap = FxHashMap; pub struct PathSpanCollector<'db> { path_map: GotoPathMap, @@ -27,14 +27,13 @@ impl<'db> PathSpanCollector<'db> { } } -pub(crate) type Cursor = rowan::TextSize; +pub type Cursor = 
rowan::TextSize; impl<'db> Visitor for PathSpanCollector<'db> { fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { let Some(span) = ctxt .span() - .map(|lazy_span| lazy_span.resolve(self.db.as_spanned_hir_db())) - .flatten() + .and_then(|lazy_span| lazy_span.resolve(self.db.as_spanned_hir_db())) else { return; }; @@ -58,7 +57,7 @@ fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option Vec { let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); - let mut path_collector = PathSpanCollector::new(&db); + let mut path_collector = PathSpanCollector::new(db); path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); let path_map = path_collector.path_map; @@ -141,12 +140,12 @@ mod tests { let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); let mut cursor_path_map: FxHashMap = FxHashMap::default(); - cursors.iter().for_each(|cursor| { + for cursor in cursors.iter() { let early_resolution = goto_enclosing_path(db, top_mod, *cursor); let goto_info = match early_resolution { Some(EarlyResolvedPath::Full(bucket)) => { - if bucket.len() > 0 { + if !bucket.is_empty() { bucket .iter() .map(|x| x.pretty_path(db).unwrap()) @@ -164,16 +163,14 @@ mod tests { }; cursor_path_map.insert(*cursor, goto_info); - }); + } let result = format!( "{}\n---\n{}", fixture.content(), cursor_path_map .iter() - .map(|(cursor, path)| { - format!("cursor position: {:?}, path: {:?}", cursor, path) - }) + .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path:?}") }) .collect::>() .join("\n") ); @@ -185,16 +182,16 @@ mod tests { glob: "goto*.fe" )] fn test_goto_enclosing_path(fixture: Fixture<&str>) { - let mut db = &mut LanguageServerDatabase::default(); + let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let path = Path::new(fixture.path()); - let top_mod = workspace.top_mod_from_file(&mut db, path, Some(fixture.content())); + let top_mod = workspace.top_mod_from_file(db, path, Some(fixture.content())); - let cursors = extract_multiple_cursor_positions_from_spans(&mut db, top_mod); + let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); let mut cursor_path_map: FxHashMap = FxHashMap::default(); - cursors.iter().for_each(|cursor| { + for cursor in cursors.iter() { let resolved_path = goto_enclosing_path(db, top_mod, *cursor); match resolved_path { @@ -217,14 +214,14 @@ mod tests { }, None => {} }; - }); + } let result = format!( "{}\n---\n{}", fixture.content(), cursor_path_map .iter() - .map(|(cursor, path)| { format!("cursor position: {:?}, path: {}", cursor, path) }) + .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path}") }) .collect::>() .join("\n") ); @@ -245,9 +242,9 @@ mod tests { let mut cursor_path_map: FxHashMap = FxHashMap::default(); - cursors.iter().for_each(|cursor| { + for cursor in cursors.iter() { let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); - let mut path_collector = PathSpanCollector::new(&db); + let mut path_collector = PathSpanCollector::new(db); path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); let path_map = path_collector.path_map; @@ -271,14 +268,14 @@ mod tests { } => res.pretty_path(db).unwrap(), }; cursor_path_map.insert(*cursor, res); - }); + } let result = format!( "{}\n---\n{}", fixture.content(), cursor_path_map .iter() - .map(|(cursor, path)| { format!("cursor position: {:?}, path: {}", cursor, path) }) + .map(|(cursor, path)| { format!("cursor position: 
{cursor:?}, path: {path}") }) .collect::>() .join("\n") ); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index b006f209e2..bf181429e7 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -16,7 +16,7 @@ fn string_diagnostics( db.finalize_diags() } -pub(crate) fn get_diagnostics( +pub fn get_diagnostics( state: &mut ServerState, text: String, uri: lsp_types::Url, @@ -27,34 +27,31 @@ pub(crate) fn get_diagnostics( text.as_str(), ); - let diagnostics = diags.into_iter().flat_map(|diag| { - diag_to_lsp(diag, &state.db) - .iter() - .map(|x| x.clone()) - .collect::>() - }); + let diagnostics = diags + .into_iter() + .flat_map(|diag| diag_to_lsp(diag, &state.db).clone()); Ok(diagnostics.collect()) } -pub(crate) fn handle_document_did_open( +pub fn handle_document_did_open( state: &mut ServerState, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; let text = params.text_document.text; let diagnostics = get_diagnostics(state, text, params.text_document.uri.clone())?; - send_diagnostics(state, diagnostics, params.text_document.uri.clone()) + send_diagnostics(state, diagnostics, params.text_document.uri) } -pub(crate) fn handle_document_did_change( +pub fn handle_document_did_change( state: &mut ServerState, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidChangeTextDocumentParams::deserialize(note.params)?; let text = params.content_changes[0].text.clone(); let diagnostics = get_diagnostics(state, text, params.text_document.uri.clone())?; - send_diagnostics(state, diagnostics, params.text_document.uri.clone()) + send_diagnostics(state, diagnostics, params.text_document.uri) } // pub(crate) fn handle_workspace_did_change_folders( @@ -79,8 +76,8 @@ fn send_diagnostics( uri: lsp_types::Url, ) -> Result<(), Error> { let result = lsp_types::PublishDiagnosticsParams { - uri: uri, - diagnostics: diagnostics, + uri, + diagnostics, version: None, }; let response = lsp_server::Message::Notification(lsp_server::Notification { diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index fdc3b9077f..81f2599a0e 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,7 +1,7 @@ use std::io::BufRead; use common::input::IngotKind; -use hir_analysis::name_resolution::EarlyResolvedPath; +use hir_analysis::name_resolution::{EarlyResolvedPath, NameRes}; use log::info; use lsp_server::{Response, ResponseError}; use serde::Deserialize; @@ -13,7 +13,7 @@ use crate::{ workspace::IngotFileContext, }; -pub(crate) fn handle_hover( +pub fn handle_hover( state: &mut ServerState, req: lsp_server::Request, ) -> Result<(), anyhow::Error> { @@ -65,18 +65,15 @@ pub(crate) fn handle_hover( .strip_prefix(&state.workspace.root_path.clone().unwrap_or("".into())) .ok(); - match ingot_type { - Some(ingot_type) => Some(format!( - "{} with {} files at path: {:?}", - ingot_type, ingot_file_count, ingot_path - )), - None => None, - } + ingot_type.map(|ingot_type| { + format!("{ingot_type} with {ingot_file_count} files at path: {ingot_path:?}") + }) }; - let top_mod = state - .workspace - .top_mod_from_file(&mut state.db, file_path, Some(file_text.as_str())); + let top_mod = + state + .workspace + .top_mod_from_file(&mut state.db, file_path, 
Some(file_text.as_str())); let early_resolution = goto_enclosing_path(&mut state.db, top_mod, cursor); let goto_info = match early_resolution { @@ -93,8 +90,7 @@ pub(crate) fn handle_hover( }; let result = lsp_types::Hover { - contents: lsp_types::HoverContents::Markup(lsp_types::MarkupContent::from( - lsp_types::MarkupContent { + contents: lsp_types::HoverContents::Markup(lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value: format!( "### Hovering over:\n```{}```\n\n{}\n\n### Goto Info: \n\n{}\n\n### Ingot info: \n\n{:?}", @@ -103,8 +99,7 @@ pub(crate) fn handle_hover( goto_info, ingot_info, ), - }, - )), + }), range: None, }; let response_message = Response { @@ -119,7 +114,7 @@ pub(crate) fn handle_hover( use lsp_types::TextDocumentPositionParams; -pub(crate) fn handle_goto_definition( +pub fn handle_goto_definition( state: &mut ServerState, req: lsp_server::Request, ) -> Result<(), anyhow::Error> { @@ -132,15 +127,16 @@ pub(crate) fn handle_goto_definition( // Get the module and the goto info let file_path = std::path::Path::new(params.text_document.uri.path()); - let top_mod = state - .workspace - .top_mod_from_file(&mut state.db, file_path, Some(file_text.as_str())); + let top_mod = + state + .workspace + .top_mod_from_file(&mut state.db, file_path, Some(file_text.as_str())); let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor); // Convert the goto info to a Location let scopes = match goto_info { Some(EarlyResolvedPath::Full(bucket)) => { - bucket.iter().map(|x| x.scope()).collect::>() + bucket.iter().map(NameRes::scope).collect::>() } Some(EarlyResolvedPath::Partial { res, @@ -155,20 +151,20 @@ pub(crate) fn handle_goto_definition( let locations = scopes .iter() - .filter_map(|scope| scope.clone()) + .filter_map(|scope| *scope) .map(|scope| to_lsp_location_from_scope(scope, &state.db)) .collect::>(); let errors = scopes .iter() - .filter_map(|scope| scope.clone()) + .filter_map(|scope| *scope) .map(|scope| to_lsp_location_from_scope(scope, &state.db)) - .filter_map(|scope| scope.err()) + .filter_map(std::result::Result::err) .map(|err| err.to_string()) .collect::>() .join("\n"); - let error = if errors.len() > 0 { + let error = if !errors.is_empty() { Some(ResponseError { code: lsp_types::error_codes::SERVER_CANCELLED as i32, message: errors, @@ -183,7 +179,10 @@ pub(crate) fn handle_goto_definition( id: req.id, result: Some(serde_json::to_value( lsp_types::GotoDefinitionResponse::Array( - locations.into_iter().filter_map(|x| x.ok()).collect(), + locations + .into_iter() + .filter_map(std::result::Result::ok) + .collect(), ), )?), error, diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index b2b7ed240d..e5f3e7dcac 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -8,8 +8,8 @@ mod workspace; use db::Jar; mod handlers { - pub(crate) mod notifications; - pub(crate) mod request; + pub mod notifications; + pub mod request; } use server::run_server; diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs index 537132537d..5dbbe8d1a6 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/server.rs @@ -27,7 +27,7 @@ pub fn run_server() -> Result<()> { let capabilities = server_capabilities(); let initialize_result = lsp_types::InitializeResult { - capabilities: capabilities, + capabilities, server_info: Some(lsp_types::ServerInfo { name: String::from("fe-language-server"), version: 
Some(String::from(env!("CARGO_PKG_VERSION"))), @@ -53,11 +53,16 @@ pub fn run_server() -> Result<()> { let _ = state.init_logger(log::Level::Info); state.workspace.set_workspace_root( &mut state.db, - initialize_params.root_uri.unwrap().to_file_path().ok().unwrap(), + initialize_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), )?; - let result = state.run(connection.receiver)?; + let result = state.run(connection.receiver); io_threads.join().unwrap(); - Ok(result) + result } diff --git a/crates/language-server/src/state.rs b/crates/language-server/src/state.rs index ddab91b660..9c4ba3351c 100644 --- a/crates/language-server/src/state.rs +++ b/crates/language-server/src/state.rs @@ -23,13 +23,12 @@ pub struct ServerState { impl ServerState { pub fn new(sender: Sender) -> Self { let sender = Arc::new(Mutex::new(sender)); - let state = ServerState { + + Self { sender, db: LanguageServerDatabase::default(), workspace: Workspace::default(), - }; - - state + } } fn send(&mut self, msg: Message) -> Result<()> { @@ -114,7 +113,7 @@ impl ServerState { } } -pub(crate) struct LspLogger { +pub struct LspLogger { level: Level, sender: Arc>>, } @@ -147,7 +146,7 @@ impl log::Log for LspLogger { Level::Debug => lsp_types::MessageType::LOG, Level::Trace => lsp_types::MessageType::LOG, }, - message: message, + message, }) .unwrap(), }, diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index c1438696cd..dfc063c162 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -6,7 +6,7 @@ use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; use log::error; use lsp_types::Position; -pub(crate) fn calculate_line_offsets(text: &str) -> Vec { +pub fn calculate_line_offsets(text: &str) -> Vec { text.lines() .scan(0, |state, line| { let offset = *state; @@ -16,7 +16,7 @@ pub(crate) fn calculate_line_offsets(text: &str) -> Vec { .collect() } -pub(crate) fn to_offset_from_position(position: Position, text: &str) -> rowan::TextSize { +pub fn to_offset_from_position(position: Position, text: &str) -> rowan::TextSize { let line_offsets: Vec = calculate_line_offsets(text); let line_offset = line_offsets[position.line as usize]; let character_offset = position.character as usize; @@ -24,7 +24,7 @@ pub(crate) fn to_offset_from_position(position: Position, text: &str) -> rowan:: rowan::TextSize::from((line_offset + character_offset) as u32) } -pub(crate) fn to_lsp_range_from_span( +pub fn to_lsp_range_from_span( span: Span, db: &dyn InputDb, ) -> Result> { @@ -50,7 +50,7 @@ pub(crate) fn to_lsp_range_from_span( }) } -pub(crate) fn to_lsp_location_from_scope( +pub fn to_lsp_location_from_scope( scope: ScopeId, db: &dyn SpannedHirDb, ) -> Result> { @@ -66,7 +66,7 @@ pub(crate) fn to_lsp_location_from_scope( Ok(lsp_types::Location { uri, range }) } -pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { +pub fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { match severity { Severity::Error => lsp_types::DiagnosticSeverity::ERROR, Severity::Warning => lsp_types::DiagnosticSeverity::WARNING, @@ -74,13 +74,10 @@ pub(crate) fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeveri } } -pub(crate) fn diag_to_lsp( - diag: CompleteDiagnostic, - db: &dyn InputDb, -) -> Vec { +pub fn diag_to_lsp(diag: CompleteDiagnostic, db: &dyn InputDb) -> Vec { diag.sub_diagnostics .into_iter() - .map(|sub| { + .filter_map(|sub| { let lsp_range = 
to_lsp_range_from_span(sub.span.unwrap(), db); match lsp_range { @@ -89,7 +86,7 @@ pub(crate) fn diag_to_lsp( severity: Some(severity_to_lsp(diag.severity)), code: None, source: None, - message: sub.message.clone(), + message: sub.message, related_information: None, tags: None, code_description: None, @@ -101,6 +98,5 @@ pub(crate) fn diag_to_lsp( } } }) - .filter_map(|x| x) .collect() } diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index d0ca33e8eb..9bdb149e9c 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -21,7 +21,7 @@ fn ingot_directory_key(path: String) -> String { .to_string() } -pub(crate) trait IngotFileContext { +pub trait IngotFileContext { fn input_from_file_path( &mut self, db: &mut LanguageServerDatabase, @@ -34,7 +34,7 @@ pub(crate) trait IngotFileContext { ) -> Option; } -pub(crate) struct LocalIngotContext { +pub struct LocalIngotContext { pub ingot: InputIngot, pub files: StringPatriciaMap, } @@ -46,7 +46,7 @@ fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { file_path.starts_with(ingot_path) } -pub(crate) fn get_containing_ingot<'a, T>( +pub fn get_containing_ingot<'a, T>( ingots: &'a mut StringPatriciaMap, path: &'a str, ) -> Option<&'a mut T> { @@ -81,12 +81,12 @@ impl IngotFileContext for LocalIngotContext { let ingot = self.ingot_from_file_path(db, path)?; let input = self.files.get(path).map_or_else( || { - let file = InputFile::new(db, ingot, path.into(), "".into()); + let file = InputFile::new(db, ingot, path.into(), String::new()); Some(file) }, |file| Some(*file), ); - self.files.insert(path.to_string(), input.unwrap()); + self.files.insert(path, input.unwrap()); input } @@ -99,7 +99,7 @@ impl IngotFileContext for LocalIngotContext { } } -pub(crate) struct StandaloneIngotContext { +pub struct StandaloneIngotContext { ingots: StringPatriciaMap, files: StringPatriciaMap, } @@ -122,14 +122,14 @@ impl IngotFileContext for StandaloneIngotContext { let ingot = self.ingot_from_file_path(db, path)?; let input_file = self.files.get(path).map_or_else( || { - let file = InputFile::new(db, ingot, path.into(), "".into()); + let file = InputFile::new(db, ingot, path.into(), String::new()); Some(file) }, |file| Some(*file), ); ingot.set_files(db, [input_file.unwrap()].into()); ingot.set_root_file(db, input_file.unwrap()); - self.files.insert(path.to_string(), input_file.unwrap()); + self.files.insert(path, input_file.unwrap()); input_file } @@ -150,15 +150,15 @@ impl IngotFileContext for StandaloneIngotContext { Version::new(0, 0, 0), BTreeSet::new(), ); - self.ingots.insert(path.to_string(), ingot); + self.ingots.insert(path, ingot); Some(ingot) }, - |ingot| Some(ingot), + Some, ) } } -pub(crate) struct Workspace { +pub struct Workspace { pub(crate) ingot_contexts: StringPatriciaMap, pub(crate) standalone_ingot_context: StandaloneIngotContext, pub(crate) root_path: Option, @@ -178,7 +178,7 @@ impl Workspace { db: &mut LanguageServerDatabase, root_path: PathBuf, ) -> Result<()> { - let path = root_path.clone(); + let path = root_path; self.root_path = Some(path); self.sync(db) } @@ -191,30 +191,29 @@ impl Workspace { let key = &ingot_directory_key(config_path.into()); if self.ingot_contexts.contains_key(key) { return self.ingot_contexts.get_mut(key); - } else { - let ingot_context = LocalIngotContext::new(db, config_path)?; - self.ingot_contexts - // .insert(config_path.to_string(), ingot_context); - // instead chop off the trailing fe.toml - .insert(key, 
ingot_context); - return self.ingot_contexts.get_mut(key); } + let ingot_context = LocalIngotContext::new(db, config_path)?; + self.ingot_contexts + // .insert(config_path.to_string(), ingot_context); + // instead chop off the trailing fe.toml + .insert(key, ingot_context); + return self.ingot_contexts.get_mut(key); } - fn sync_local_ingots(&mut self, db: &mut LanguageServerDatabase, path: &str) -> () { - let config_paths = &glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + fn sync_local_ingots(&mut self, db: &mut LanguageServerDatabase, path: &str) { + let config_paths = &glob::glob(&format!("{path}/**/{FE_CONFIG_SUFFIX}")) .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); let paths = &config_paths - .into_iter() - .map(|path| path.to_string()) + .iter() + .map(std::string::ToString::to_string) .map(ingot_directory_key) .collect::>(); for path in paths { - self.ingot_context_from_config_path(db, &path); + self.ingot_context_from_config_path(db, path); } let existing_keys: Vec = self.ingot_contexts.keys().collect(); @@ -222,7 +221,7 @@ impl Workspace { let keys_to_remove: Vec = existing_keys .iter() .filter(|key| !paths.contains(key)) - .map(|path| path.into()) + .map(std::convert::Into::into) .collect(); for key in keys_to_remove { @@ -230,12 +229,12 @@ impl Workspace { } } - fn sync_ingot_files(&mut self, db: &mut LanguageServerDatabase, config_path: &str) -> () { + fn sync_ingot_files(&mut self, db: &mut LanguageServerDatabase, config_path: &str) { assert!(config_path.ends_with(FE_CONFIG_SUFFIX)); info!("Syncing ingot at {}", config_path); let ingot_root = config_path.strip_suffix(FE_CONFIG_SUFFIX).unwrap(); - let paths = &glob::glob(&format!("{}/src/**/*.fe", ingot_root)) + let paths = &glob::glob(&format!("{ingot_root}/src/**/*.fe")) .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); @@ -248,24 +247,24 @@ impl Workspace { .unwrap(); let ingot_context_file_keys = &ingot_context.files.keys().collect::>(); - ingot_context_file_keys.iter().for_each(|path| { - if !paths.contains(&path) { + for path in ingot_context_file_keys.iter() { + if !paths.contains(path) { ingot_context.files.remove(path); } - }); + } - paths.iter().for_each(|path| { - if !ingot_context_file_keys.contains(&path) { + for path in paths.iter() { + if !ingot_context_file_keys.contains(path) { let file = ingot_context.input_from_file_path(db, path); let contents = std::fs::read_to_string(path).unwrap(); file.unwrap().set_text(db).to(contents); } - }); + } let ingot_context_files = ingot_context .files .values() - .map(|x| *x) + .copied() .collect::>(); ingot_context.ingot.set_files(db, ingot_context_files); @@ -277,7 +276,7 @@ impl Workspace { .find(|file| { file.path(db).ends_with("src/main.fe") || file.path(db).ends_with("src/lib.fe") }) - .map(|file| *file); + .copied(); if let Some(root_file) = root_file { info!("Setting root file for ingot: {:?}", root_file.path(db)); @@ -298,7 +297,7 @@ impl Workspace { file.set_text(db).to(src.to_string()); } let top_mod = map_file_to_mod(db, file); - + info!("top mod: {:?} from file: {:?}", top_mod, file); top_mod @@ -315,7 +314,7 @@ impl IngotFileContext for Workspace { if let Some(ctx) = ctx { ctx.input_from_file_path(db, path) } else { - (&mut self.standalone_ingot_context).input_from_file_path(db, path) + self.standalone_ingot_context.input_from_file_path(db, path) } } @@ -328,12 +327,12 @@ impl IngotFileContext for Workspace { if ctx.is_some() { Some(ctx.unwrap().ingot_from_file_path(db, path).unwrap()) } else { - 
(&mut self.standalone_ingot_context).ingot_from_file_path(db, path) + self.standalone_ingot_context.ingot_from_file_path(db, path) } } } -pub(crate) trait SyncableIngotFileContext { +pub trait SyncableIngotFileContext { fn sync(&mut self, db: &mut LanguageServerDatabase) -> Result<()>; } @@ -349,11 +348,11 @@ impl SyncableIngotFileContext for Workspace { info!("Syncing workspace at {:?}", path); self.sync_local_ingots(db, path); - let ingot_paths = glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + let ingot_paths = glob::glob(&format!("{path}/**/{FE_CONFIG_SUFFIX}")) .ok() .unwrap() .filter_map(Result::ok) - .filter_map(|p| p.to_str().map(|s| s.to_string())) + .filter_map(|p| p.to_str().map(std::string::ToString::to_string)) .collect::>(); info!("Found {} ingots", ingot_paths.len()); @@ -362,10 +361,15 @@ impl SyncableIngotFileContext for Workspace { self.sync_ingot_files(db, &ingot_path); } - let paths = glob::glob(&format!("{}/src/**/*.fe", path)) + let paths = glob::glob(&format!("{path}/src/**/*.fe")) .ok() .unwrap() - .filter_map(|p| p.ok().unwrap().to_str().map(|s| s.to_string())) + .filter_map(|p| { + p.ok() + .unwrap() + .to_str() + .map(std::string::ToString::to_string) + }) .collect::>(); for path in paths { @@ -378,8 +382,8 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { - use std::path::{Path, PathBuf}; use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; + use std::path::{Path, PathBuf}; use super::StandaloneIngotContext; @@ -500,7 +504,7 @@ mod tests { #[test] fn test_sync_nested_ingots() { let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); - let path = format!("{}/test_files/nested_ingots", crate_dir); + let path = format!("{crate_dir}/test_files/nested_ingots"); assert!( glob::glob(&format!("{}/**/{}", path, super::FE_CONFIG_SUFFIX)) .unwrap() @@ -514,29 +518,28 @@ mod tests { workspace.sync_local_ingots(&mut db, &path); assert!(workspace.ingot_contexts.len() == 2); - + let _ = workspace.set_workspace_root(&mut db, PathBuf::from(&path)); // get all top level modules for .fe files in the workspace - let fe_files = glob::glob(&format!("{}/**/*.fe", path)) + let fe_files = glob::glob(&format!("{path}/**/*.fe")) .unwrap() .filter_map(Result::ok) .map(|p| p.to_str().unwrap().to_string()) .collect::>(); - + for src_path in fe_files { let _file = workspace.input_from_file_path(&mut db, &src_path).unwrap(); - + // this would panic if a file has been added to multiple ingots let _top_mod = workspace.top_mod_from_file(&mut db, Path::new(&src_path), None); } - } #[test] fn test_sync_ingot_files() { let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); - let path = format!("{}/test_files/nested_ingots", crate_dir); + let path = format!("{crate_dir}/test_files/nested_ingots"); assert!( glob::glob(&format!("{}/**/{}", path, super::FE_CONFIG_SUFFIX)) .unwrap() @@ -572,8 +575,8 @@ mod tests { #[test] fn test_dangling_fe_source() { let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); - let messy_workspace_path = format!("{}/test_files/messy", crate_dir); - let dangling_path = format!("{}/test_files/messy/dangling.fe", crate_dir); + let messy_workspace_path = format!("{crate_dir}/test_files/messy"); + let dangling_path = format!("{crate_dir}/test_files/messy/dangling.fe"); let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); @@ -592,20 +595,14 @@ mod tests { let ingot_paths = workspace .ingot_contexts .values() - .map(|ctx| { - format!( - 
"{}{}", - ctx.ingot.path(&mut db).to_string(), - FE_CONFIG_SUFFIX - ) - }) + .map(|ctx| format!("{}{}", ctx.ingot.path(&mut db), FE_CONFIG_SUFFIX)) .collect::>(); for ingot_path in ingot_paths { workspace.sync_ingot_files(&mut db, &ingot_path); } - let non_dangling_file_path = format!("{}/test_files/messy/foo/bar/src/main.fe", crate_dir); + let non_dangling_file_path = format!("{crate_dir}/test_files/messy/foo/bar/src/main.fe"); let non_dangling_input = workspace .input_from_file_path(&mut db, &non_dangling_file_path) .unwrap(); From c1194306fa8e063dc91cb52bd71ac6695b72d0a4 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 14 Sep 2023 14:43:39 -0500 Subject: [PATCH 274/678] notes --- crates/language-server/src/goto.rs | 1 + .../src/handlers/notifications.rs | 17 +---------------- crates/language-server/src/workspace.rs | 5 +---- 3 files changed, 3 insertions(+), 20 deletions(-) diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 6eaf8e725e..5f34f00e40 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -72,6 +72,7 @@ pub fn goto_enclosing_path( let mut path_collector = PathSpanCollector::new(db); path_collector.visit_item(&mut visitor_ctxt, item); + // can we do this without the cache? let path_map = path_collector.path_map; // Find the path that encloses the cursor. diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index bf181429e7..0659ca725e 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -16,6 +16,7 @@ fn string_diagnostics( db.finalize_diags() } +// todo: handle diagnostics referencing multiple files pub fn get_diagnostics( state: &mut ServerState, text: String, @@ -54,22 +55,6 @@ pub fn handle_document_did_change( send_diagnostics(state, diagnostics, params.text_document.uri) } -// pub(crate) fn handle_workspace_did_change_folders( -// state: &mut ServerState, -// note: lsp_server::Notification, -// ) -> Result<(), Error> { -// let params = lsp_types::DidChangeWorkspaceFoldersParams::deserialize(note.params)?; - -// let mut workspace = &mut state.workspace; -// let mut db = &mut state.db; - -// let workspace_folder = params.event.added[0].uri.to_file_path().unwrap(); - -// workspace.sync(&mut db, workspace_folder.as_path().to_str().unwrap().into()); - -// Ok(()) -// } - fn send_diagnostics( state: &mut ServerState, diagnostics: Vec, diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 9bdb149e9c..ab9bf9665c 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -193,10 +193,7 @@ impl Workspace { return self.ingot_contexts.get_mut(key); } let ingot_context = LocalIngotContext::new(db, config_path)?; - self.ingot_contexts - // .insert(config_path.to_string(), ingot_context); - // instead chop off the trailing fe.toml - .insert(key, ingot_context); + self.ingot_contexts.insert(key, ingot_context); return self.ingot_contexts.get_mut(key); } From 6c32ee329696c8033cba103939da39c43b197274 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 15 Sep 2023 15:49:12 -0500 Subject: [PATCH 275/678] Initial subdiagnostics support --- crates/language-server/src/goto.rs | 6 +- .../src/handlers/notifications.rs | 38 +++++++----- .../language-server/src/handlers/request.rs | 9 +-- crates/language-server/src/util.rs | 58 +++++++++++-------- 4 files changed, 63 insertions(+), 48 deletions(-) 
diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 5f34f00e40..2b57a14458 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -141,7 +141,7 @@ mod tests { let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); let mut cursor_path_map: FxHashMap = FxHashMap::default(); - for cursor in cursors.iter() { + for cursor in &cursors { let early_resolution = goto_enclosing_path(db, top_mod, *cursor); let goto_info = match early_resolution { @@ -192,7 +192,7 @@ mod tests { let mut cursor_path_map: FxHashMap = FxHashMap::default(); - for cursor in cursors.iter() { + for cursor in &cursors { let resolved_path = goto_enclosing_path(db, top_mod, *cursor); match resolved_path { @@ -243,7 +243,7 @@ mod tests { let mut cursor_path_map: FxHashMap = FxHashMap::default(); - for cursor in cursors.iter() { + for cursor in &cursors { let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); let mut path_collector = PathSpanCollector::new(db); path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 0659ca725e..3f0b9a1082 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -1,4 +1,7 @@ + + use anyhow::{Error, Result}; +use fxhash::FxHashMap; use serde::Deserialize; use crate::{state::ServerState, util::diag_to_lsp}; @@ -21,7 +24,7 @@ pub fn get_diagnostics( state: &mut ServerState, text: String, uri: lsp_types::Url, -) -> Result, Error> { +) -> Result, Error> { let diags = string_diagnostics( state, uri.to_file_path().unwrap().to_str().unwrap(), @@ -42,7 +45,7 @@ pub fn handle_document_did_open( let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; let text = params.text_document.text; let diagnostics = get_diagnostics(state, text, params.text_document.uri.clone())?; - send_diagnostics(state, diagnostics, params.text_document.uri) + send_diagnostics(state, diagnostics) } pub fn handle_document_did_change( @@ -52,26 +55,29 @@ pub fn handle_document_did_change( let params = lsp_types::DidChangeTextDocumentParams::deserialize(note.params)?; let text = params.content_changes[0].text.clone(); let diagnostics = get_diagnostics(state, text, params.text_document.uri.clone())?; - send_diagnostics(state, diagnostics, params.text_document.uri) + send_diagnostics(state, diagnostics) } fn send_diagnostics( state: &mut ServerState, - diagnostics: Vec, - uri: lsp_types::Url, + diagnostics: FxHashMap ) -> Result<(), Error> { - let result = lsp_types::PublishDiagnosticsParams { - uri, - diagnostics, - version: None, - }; - let response = lsp_server::Message::Notification(lsp_server::Notification { - method: String::from("textDocument/publishDiagnostics"), - params: serde_json::to_value(result).unwrap(), + let results = diagnostics.into_iter().map(|(uri, diag)| { + let result = lsp_types::PublishDiagnosticsParams { + uri, + diagnostics: vec![diag], + version: None, + }; + lsp_server::Message::Notification(lsp_server::Notification { + method: String::from("textDocument/publishDiagnostics"), + params: serde_json::to_value(result).unwrap(), + }) + }); + + results.for_each(|result| { + let sender = state.sender.lock().unwrap(); + sender.send(result); }); - - let sender = state.sender.lock().unwrap(); - sender.send(response)?; Ok(()) } diff --git a/crates/language-server/src/handlers/request.rs 
b/crates/language-server/src/handlers/request.rs index 81f2599a0e..641861dac0 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -164,15 +164,12 @@ pub fn handle_goto_definition( .collect::>() .join("\n"); - let error = if !errors.is_empty() { - Some(ResponseError { + let error = (!errors.is_empty()).then_some( + ResponseError { code: lsp_types::error_codes::SERVER_CANCELLED as i32, message: errors, data: None, - }) - } else { - None - }; + }); // Send the response let response_message = Response { diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index dfc063c162..85a44fcad4 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -2,9 +2,11 @@ use common::{ diagnostics::{CompleteDiagnostic, Severity, Span}, InputDb, }; +use fxhash::FxHashMap; use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; use log::error; use lsp_types::Position; +use url::Url; pub fn calculate_line_offsets(text: &str) -> Vec { text.lines() @@ -74,29 +76,39 @@ pub fn severity_to_lsp(severity: Severity) -> lsp_types::DiagnosticSeverity { } } -pub fn diag_to_lsp(diag: CompleteDiagnostic, db: &dyn InputDb) -> Vec { - diag.sub_diagnostics - .into_iter() - .filter_map(|sub| { - let lsp_range = to_lsp_range_from_span(sub.span.unwrap(), db); +pub fn diag_to_lsp( + diag: CompleteDiagnostic, + db: &dyn InputDb, +) -> FxHashMap { + let mut result = FxHashMap::::default(); + diag.sub_diagnostics.into_iter().for_each(|sub| { + let uri = sub.span.as_ref().unwrap().file.abs_path(db); + let lsp_range = to_lsp_range_from_span(sub.span.unwrap(), db); + + // todo: generalize this to handle other kinds of URLs besides file URLs + let uri = Url::from_file_path(uri).unwrap(); - match lsp_range { - Ok(range) => Some(lsp_types::Diagnostic { - range, - severity: Some(severity_to_lsp(diag.severity)), - code: None, - source: None, - message: sub.message, - related_information: None, - tags: None, - code_description: None, - data: None, // for code actions - }), - Err(_) => { - error!("Failed to convert span to range"); - None - } + match lsp_range { + Ok(range) => { + result.insert( + uri, + lsp_types::Diagnostic { + range, + severity: Some(severity_to_lsp(diag.severity)), + code: None, + source: None, + message: sub.message, + related_information: None, + tags: None, + code_description: None, + data: None, // for code actions + }, + ); } - }) - .collect() + Err(_) => { + error!("Failed to convert span to range"); + } + } + }); + result } From 565e3fb67e0e7bc532e8e6a7d5da39c4c79913c3 Mon Sep 17 00:00:00 2001 From: Sean Billig Date: Mon, 18 Sep 2023 17:46:23 -0700 Subject: [PATCH 276/678] Add enum record variant fields to scope graph --- crates/hir/src/hir_def/scope_graph.rs | 76 ++++++++++++++++++++++++--- crates/hir/src/lower/scope_builder.rs | 29 +++++++--- crates/hir/src/visitor.rs | 23 ++++---- 3 files changed, 103 insertions(+), 25 deletions(-) diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index ac5c44d0e8..66699efa9c 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -10,7 +10,7 @@ use crate::{ use super::{ scope_graph_viz::ScopeGraphFormatter, Body, Enum, ExprId, Func, FuncParamLabel, IdentId, - IngotId, ItemKind, TopLevelMod, Use, Visibility, + IngotId, ItemKind, TopLevelMod, Use, VariantKind, Visibility, }; /// Represents a scope relation graph in a top-level module. 
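The remaining hunks of this patch let a field scope hang off either an item (a struct or contract) or a specific enum variant, which is what makes record-variant fields such as those in `enum Foo { X { a: i8, b: i8 } }` addressable in the scope graph. The sketch below is only a simplified standalone model of that addressing scheme; plain strings stand in for the crate's interned `ItemKind`/`Enum` handles, and the real `FieldParent` and `ScopeId` are the ones defined in the hunks that follow.

    // Simplified model of the scope addressing added below. Strings stand in
    // for interned item handles; the index layout matches the real types:
    // Field(parent, field index) and Variant(enum item, variant index).
    #[derive(Debug, Clone, PartialEq, Eq)]
    enum FieldParent {
        Item(String),           // a struct or contract
        Variant(String, usize), // (enum, variant index)
    }

    #[derive(Debug, Clone, PartialEq, Eq)]
    enum ScopeId {
        Variant(String, usize),    // (enum, variant index)
        Field(FieldParent, usize), // (parent, field index)
    }

    fn main() {
        // enum Foo { X { a: i8, b: i8 } }
        let variant_x = ScopeId::Variant("Foo".into(), 0);
        // field `b` of record variant `X` now gets its own scope:
        let field_b = ScopeId::Field(FieldParent::Variant("Foo".into(), 0), 1);
        // a plain struct field keeps an item parent:
        let struct_field = ScopeId::Field(FieldParent::Item("MyStruct".into()), 0);
        println!("{variant_x:?} {field_b:?} {struct_field:?}");
    }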
@@ -82,7 +82,7 @@ pub enum ScopeId { FuncParam(ItemKind, usize), /// A field scope. - Field(ItemKind, usize), + Field(FieldParent, usize), /// A variant scope. Variant(ItemKind, usize), @@ -97,7 +97,8 @@ impl ScopeId { ScopeId::Item(item) => item.top_mod(db), ScopeId::GenericParam(item, _) => item.top_mod(db), ScopeId::FuncParam(item, _) => item.top_mod(db), - ScopeId::Field(item, _) => item.top_mod(db), + ScopeId::Field(FieldParent::Item(item), _) => item.top_mod(db), + ScopeId::Field(FieldParent::Variant(item, _), _) => item.top_mod(db), ScopeId::Variant(item, _) => item.top_mod(db), ScopeId::Block(body, _) => body.top_mod(db), } @@ -122,7 +123,8 @@ impl ScopeId { ScopeId::Item(item) => item, ScopeId::GenericParam(item, _) => item, ScopeId::FuncParam(item, _) => item, - ScopeId::Field(item, _) => item, + ScopeId::Field(FieldParent::Item(item), _) => item, + ScopeId::Field(FieldParent::Variant(item, _), _) => item, ScopeId::Variant(item, _) => item, ScopeId::Block(body, _) => body.into(), } @@ -261,11 +263,18 @@ impl ScopeId { enum_.variants(db).data(db)[idx].name.to_opt() } - ScopeId::Field(parent, idx) => match parent { + ScopeId::Field(FieldParent::Item(parent), idx) => match parent { ItemKind::Struct(s) => s.fields(db).data(db)[idx].name.to_opt(), ItemKind::Contract(c) => c.fields(db).data(db)[idx].name.to_opt(), _ => unreachable!(), }, + ScopeId::Field(FieldParent::Variant(parent, vidx), fidx) => { + let enum_: Enum = parent.try_into().unwrap(); + match enum_.variants(db).data(db)[vidx].kind { + VariantKind::Record(fields) => fields.data(db)[fidx].name.to_opt(), + _ => unreachable!(), + } + } ScopeId::FuncParam(parent, idx) => { let func: Func = parent.try_into().unwrap(); @@ -297,11 +306,24 @@ impl ScopeId { Some(enum_.lazy_span().variants().variant(idx).name().into()) } - ScopeId::Field(parent, idx) => match parent { + ScopeId::Field(FieldParent::Item(parent), idx) => match parent { ItemKind::Struct(s) => Some(s.lazy_span().fields().field(idx).name().into()), ItemKind::Contract(c) => Some(c.lazy_span().fields().field(idx).name().into()), _ => unreachable!(), }, + ScopeId::Field(FieldParent::Variant(parent, vidx), fidx) => { + let enum_: Enum = parent.try_into().unwrap(); + Some( + enum_ + .lazy_span() + .variants() + .variant(vidx) + .fields() + .field(fidx) + .name() + .into(), + ) + } ScopeId::FuncParam(parent, idx) => { let func: Func = parent.try_into().unwrap(); @@ -353,6 +375,12 @@ impl ScopeId { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum FieldParent { + Item(ItemKind), + Variant(ItemKind, usize), +} + struct ScopeGraphItemIterDfs<'a> { db: &'a dyn HirDb, graph: &'a ScopeGraph, @@ -542,7 +570,13 @@ pub struct AnonEdge(); #[cfg(test)] mod tests { - use crate::{hir_def::ItemKind, test_db::TestDb}; + use crate::{ + hir_def::{ + scope_graph::{FieldParent, ScopeId}, + ItemKind, + }, + test_db::TestDb, + }; #[test] fn item_tree() { @@ -555,7 +589,7 @@ mod tests { fn baz() } } - + enum MyEnum {} mod baz { @@ -581,4 +615,30 @@ mod tests { } } } + + #[test] + fn enum_record_fields() { + let mut db = TestDb::default(); + + let text = r#" + enum Foo { + X { a: i8, b: i8 }, + } + "#; + + let file = db.standalone_file(text); + let scope_graph = db.parse_source(file); + let root = scope_graph.top_mod.scope(); + let enum_ = scope_graph.children(root).next().unwrap(); + assert!(matches!(enum_.item(), ItemKind::Enum(_))); + + let variant = scope_graph.children(enum_).next().unwrap(); + assert!(matches!(variant, ScopeId::Variant(_, _))); + + let field = 
scope_graph.children(variant).next().unwrap(); + assert!(matches!( + field, + ScopeId::Field(FieldParent::Variant(_, _), _) + )); + } } diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 6b36a027bf..6b4ce57bc5 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -3,9 +3,9 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::{ hir_def::{ - scope_graph::{EdgeKind, Scope, ScopeEdge, ScopeGraph, ScopeId}, + scope_graph::{EdgeKind, FieldParent, Scope, ScopeEdge, ScopeGraph, ScopeId}, Body, ExprId, FieldDefListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, - TopLevelMod, TrackedItemId, Use, VariantDefListId, Visibility, + TopLevelMod, TrackedItemId, Use, VariantDefListId, VariantKind, Visibility, }, HirDb, }; @@ -141,7 +141,11 @@ impl<'db> ScopeGraphBuilder<'db> { Struct(inner) => { self.graph.add_lex_edge(item_node, parent_node); - self.add_field_scope(item_node, inner.into(), inner.fields(self.db)); + self.add_field_scope( + item_node, + FieldParent::Item(inner.into()), + inner.fields(self.db), + ); self.add_generic_param_scope( item_node, inner.into(), @@ -156,7 +160,11 @@ impl<'db> ScopeGraphBuilder<'db> { Contract(inner) => { self.graph.add_lex_edge(item_node, parent_node); - self.add_field_scope(item_node, inner.into(), inner.fields(self.db)); + self.add_field_scope( + item_node, + FieldParent::Item(inner.into()), + inner.fields(self.db), + ); inner .name(self.db) .to_opt() @@ -310,11 +318,11 @@ impl<'db> ScopeGraphBuilder<'db> { fn add_field_scope( &mut self, parent_node: NodeId, - parent_item: ItemKind, + parent: FieldParent, fields: FieldDefListId, ) { for (i, field) in fields.data(self.db).iter().enumerate() { - let scope_id = ScopeId::Field(parent_item, i); + let scope_id = ScopeId::Field(parent, i); let scope_data = Scope::new(scope_id, field.vis); let field_node = self.graph.push(scope_id, scope_data); @@ -336,17 +344,22 @@ impl<'db> ScopeGraphBuilder<'db> { ) { let parent_vis = parent_item.vis(self.db); - for (i, field) in variants.data(self.db).iter().enumerate() { + for (i, variant) in variants.data(self.db).iter().enumerate() { let scope_id = ScopeId::Variant(parent_item, i); let scope_data = Scope::new(scope_id, parent_vis); let variant_node = self.graph.push(scope_id, scope_data); self.graph.add_lex_edge(variant_node, parent_node); - let kind = field + let kind = variant .name .to_opt() .map(EdgeKind::variant) .unwrap_or_else(EdgeKind::anon); + + if let VariantKind::Record(fields) = variant.kind { + self.add_field_scope(variant_node, FieldParent::Variant(parent_item, i), fields) + } + self.graph.add_edge(parent_node, variant_node, kind) } } diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 75669ea388..edf1c48450 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -2,13 +2,14 @@ use std::{marker::PhantomData, mem}; use crate::{ hir_def::{ - attr, scope_graph::ScopeId, Body, CallArg, Const, Contract, Enum, Expr, ExprId, Field, - FieldDef, FieldDefListId, FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, - FuncParamName, GenericArg, GenericArgListId, GenericParam, GenericParamListId, IdentId, - Impl, ImplTrait, ItemKind, LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, - StmtId, Struct, TopLevelMod, Trait, TupleTypeId, TypeAlias, TypeBound, TypeId, TypeKind, - Use, UseAlias, UsePathId, UsePathSegment, VariantDef, VariantDefListId, VariantKind, - WhereClauseId, WherePredicate, + attr, + 
scope_graph::{FieldParent, ScopeId}, + Body, CallArg, Const, Contract, Enum, Expr, ExprId, Field, FieldDef, FieldDefListId, + FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, FuncParamName, GenericArg, + GenericArgListId, GenericParam, GenericParamListId, IdentId, Impl, ImplTrait, ItemKind, + LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, StmtId, Struct, TopLevelMod, + Trait, TupleTypeId, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, UsePathId, + UsePathSegment, VariantDef, VariantDefListId, VariantKind, WhereClauseId, WherePredicate, }, span::{lazy_spans::*, transition::ChainRoot, SpanDowncast}, HirDb, @@ -1491,10 +1492,14 @@ pub fn walk_field_def_list( ) where V: Visitor + ?Sized, { - let parent_item = ctxt.scope().item(); + let parent = match ctxt.scope() { + ScopeId::Item(item) => FieldParent::Item(item), + ScopeId::Variant(item, idx) => FieldParent::Variant(item, idx), + _ => unreachable!(), + }; for (idx, field) in fields.data(ctxt.db).iter().enumerate() { ctxt.with_new_scoped_ctxt( - ScopeId::Field(parent_item, idx), + ScopeId::Field(parent, idx), |span| span.field_moved(idx), |ctxt| { visitor.visit_field_def(ctxt, field); From e6af5817cc14450ba5cb4bf2eceb977f0978015f Mon Sep 17 00:00:00 2001 From: Sean Billig Date: Thu, 21 Sep 2023 10:14:30 -0700 Subject: [PATCH 277/678] Visitor support for enum record variants --- crates/hir/src/visitor.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index edf1c48450..c6d0b173c0 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1575,9 +1575,11 @@ pub fn walk_variant_def( |span| span.tuple_type_moved(), |ctxt| visitor.visit_tuple_type(ctxt, t), ), - VariantKind::Record(_) => { - todo!() - } + + VariantKind::Record(fields) => ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| visitor.visit_field_def_list(ctxt, fields), + ), } } From 5e69ea4052349a0a3408fadc16387605947949cf Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 7 Sep 2023 14:11:50 +0200 Subject: [PATCH 278/678] Define `Ty` for inference engine --- Cargo.lock | 1 + crates/hir-analysis/Cargo.toml | 1 + crates/hir-analysis/src/infer/mod.rs | 1 + crates/hir-analysis/src/infer/ty.rs | 149 +++++++++++++++++++++++++++ crates/hir-analysis/src/lib.rs | 4 + crates/hir/src/hir_def/params.rs | 8 +- 6 files changed, 163 insertions(+), 1 deletion(-) create mode 100644 crates/hir-analysis/src/infer/mod.rs create mode 100644 crates/hir-analysis/src/infer/ty.rs diff --git a/Cargo.lock b/Cargo.lock index 436e7e603e..dc95a76c9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1078,6 +1078,7 @@ dependencies = [ "fe-hir", "fe-macros", "itertools", + "lazy_static", "rustc-hash", "salsa-2022", "smallvec", diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index 96d1144ff2..c3667e363e 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -14,6 +14,7 @@ rustc-hash = "1.1.0" either = "1.8" derive_more = "0.99" itertools = "0.10" +lazy_static = "1.4" hir = { path = "../hir", package = "fe-hir" } common = { path = "../common2", package = "fe-common2" } diff --git a/crates/hir-analysis/src/infer/mod.rs b/crates/hir-analysis/src/infer/mod.rs new file mode 100644 index 0000000000..87caf6050e --- /dev/null +++ b/crates/hir-analysis/src/infer/mod.rs @@ -0,0 +1 @@ +pub mod ty; diff --git a/crates/hir-analysis/src/infer/ty.rs b/crates/hir-analysis/src/infer/ty.rs new file mode 100644 index 0000000000..94576cae78 
--- /dev/null +++ b/crates/hir-analysis/src/infer/ty.rs @@ -0,0 +1,149 @@ +use hir::hir_def::{Contract, Enum, Struct}; + +use crate::HirAnalysisDb; + +#[salsa::interned] +pub struct Ty { + data: TyData, +} + +impl Ty { + pub fn kind<'db>(self, db: &'db dyn HirAnalysisDb) -> &'db Kind { + ty_kind(db, self) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TyData { + /// Type variables. + TyVar(TyVar), + + // Type application, e.g., `Option` is represented as `TApp(TyConst(Option), + // TyConst(i32)`. + TApp(Box, Box), + + TyConst(TyConst), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Kind { + /// Represents monotypes, `*`. + Star, + + /// Represents higher order types. + /// e.g., + /// `* -> *` or `(* -> *) -> *` + Abs(Box, Box), +} + +impl Kind { + fn abs(lhs: Kind, rhs: Kind) -> Self { + Kind::Abs(Box::new(lhs), Box::new(rhs)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TyVar { + id: u32, + kind: Kind, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TyConst { + Primitive(PrimTy), + Abs, + Adt(AdtTy), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum PrimTy { + Bool, + U8, + U16, + U32, + U64, + U128, + U256, + I8, + I16, + I32, + I64, + I128, + I256, + String, + Array, + Tuple(usize), + Ptr, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AdtTy { + id: AdtId, + kind: Kind, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum AdtId { + Enum(Enum), + Struct(Struct), + Contract(Contract), +} + +pub(super) trait HasKind { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind; +} + +impl HasKind for TyData { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { + match self { + TyData::TyVar(ty_var) => ty_var.kind(db), + TyData::TApp(lhs, _) => match lhs.kind(db) { + Kind::Abs(_, rhs) => *rhs, + _ => unreachable!(), + }, + TyData::TyConst(ty_const) => ty_const.kind(db), + } + } +} + +impl HasKind for TyVar { + fn kind(&self, _db: &dyn HirAnalysisDb) -> Kind { + self.kind.clone() + } +} + +impl HasKind for TyConst { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { + match self { + TyConst::Primitive(prim) => prim.kind(db), + TyConst::Abs => Kind::abs(Kind::Star, Kind::abs(Kind::Star, Kind::Star)), + TyConst::Adt(adt) => adt.kind(db), + } + } +} + +impl HasKind for PrimTy { + fn kind(&self, _: &dyn HirAnalysisDb) -> Kind { + match self { + Self::Array => (0..2) + .into_iter() + .fold(Kind::Star, |acc, _| Kind::abs(Kind::Star, acc)), + Self::Tuple(n) => (0..*n) + .into_iter() + .fold(Kind::Star, |acc, _| Kind::abs(Kind::Star, acc)), + Self::Ptr => Kind::abs(Kind::Star, Kind::Star), + _ => Kind::Star, + } + } +} + +impl HasKind for AdtTy { + fn kind(&self, _: &dyn HirAnalysisDb) -> Kind { + self.kind.clone() + } +} + +#[salsa::tracked(return_ref)] +pub fn ty_kind(db: &dyn HirAnalysisDb, ty: Ty) -> Kind { + ty.data(db).kind(db) +} diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 7c809f8e62..0e50f71d99 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -7,6 +7,9 @@ pub struct Jar( name_resolution::resolve_imports, name_resolution::diagnostics::NameResolutionDiagAccumulator, name_resolution::diagnostics::ImportResolutionDiagAccumulator, + /// Type inference. 
+ infer::ty::Ty, + infer::ty::ty_kind, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { @@ -16,6 +19,7 @@ pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { } impl HirAnalysisDb for DB where DB: ?Sized + salsa::DbWithJar + HirDb {} +pub mod infer; pub mod name_resolution; #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 96ab9b2eb7..3f6069d235 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -1,4 +1,4 @@ -use crate::hir_def::TypeId; +use crate::{hir_def::TypeId, HirDb}; use super::{Body, IdentId, Partial, PathId}; @@ -14,6 +14,12 @@ pub struct GenericParamListId { pub data: Vec, } +impl GenericParamListId { + pub fn len(&self, db: &dyn HirDb) -> usize { + self.data(db).len() + } +} + #[salsa::interned] pub struct FuncParamListId { #[return_ref] From 70771064dbacdf5b53d31efde70aa80174c1307d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 7 Sep 2023 19:53:08 +0200 Subject: [PATCH 279/678] Define lowered trait representation for trait solving --- crates/hir-analysis/src/infer/mod.rs | 1 + crates/hir-analysis/src/infer/trait_.rs | 41 +++++++++++++++++++++++++ crates/hir-analysis/src/infer/ty.rs | 6 ++-- crates/hir-analysis/src/lib.rs | 4 ++- 4 files changed, 48 insertions(+), 4 deletions(-) create mode 100644 crates/hir-analysis/src/infer/trait_.rs diff --git a/crates/hir-analysis/src/infer/mod.rs b/crates/hir-analysis/src/infer/mod.rs index 87caf6050e..d6606e3e16 100644 --- a/crates/hir-analysis/src/infer/mod.rs +++ b/crates/hir-analysis/src/infer/mod.rs @@ -1 +1,2 @@ +pub mod trait_; pub mod ty; diff --git a/crates/hir-analysis/src/infer/trait_.rs b/crates/hir-analysis/src/infer/trait_.rs new file mode 100644 index 0000000000..e7e4277b36 --- /dev/null +++ b/crates/hir-analysis/src/infer/trait_.rs @@ -0,0 +1,41 @@ +/// This module contains the logic for solving trait bounds. +use hir::hir_def::{Func, Trait}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::ty::{TyId, TyVar}; + +/// `Ty` implements `Trait` with the given type arguments. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Predicate { + pub trait_: TraitInstId, + pub ty: TyId, + pub trait_args: Vec, +} + +/// T is satisfied under the given predicates. +/// i.e., `predicates => T` +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Qualified { + predicates: Vec, + t: T, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TraitImplTable { + pub impls: FxHashMap>, + pub method_table: FxHashMap>, +} + +/// Represents an instantiated trait which is implemented to types. 
+#[salsa::interned] +pub struct TraitInstId { + pub trait_: Trait, + pub args: Vec, + pub super_traits: Vec, +} + +pub struct TraitDef { + pub trait_: Trait, + pub args: Vec>, + pub super_traits: Vec, +} diff --git a/crates/hir-analysis/src/infer/ty.rs b/crates/hir-analysis/src/infer/ty.rs index 94576cae78..07a2531ac1 100644 --- a/crates/hir-analysis/src/infer/ty.rs +++ b/crates/hir-analysis/src/infer/ty.rs @@ -3,11 +3,11 @@ use hir::hir_def::{Contract, Enum, Struct}; use crate::HirAnalysisDb; #[salsa::interned] -pub struct Ty { +pub struct TyId { data: TyData, } -impl Ty { +impl TyId { pub fn kind<'db>(self, db: &'db dyn HirAnalysisDb) -> &'db Kind { ty_kind(db, self) } @@ -144,6 +144,6 @@ impl HasKind for AdtTy { } #[salsa::tracked(return_ref)] -pub fn ty_kind(db: &dyn HirAnalysisDb, ty: Ty) -> Kind { +pub fn ty_kind(db: &dyn HirAnalysisDb, ty: TyId) -> Kind { ty.data(db).kind(db) } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 0e50f71d99..973f9cf162 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -8,8 +8,10 @@ pub struct Jar( name_resolution::diagnostics::NameResolutionDiagAccumulator, name_resolution::diagnostics::ImportResolutionDiagAccumulator, /// Type inference. - infer::ty::Ty, + infer::ty::TyId, infer::ty::ty_kind, + // Trait resolution. + infer::trait_::TraitInstId, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { From a5da0383448c35c6a3af3fcc68bd86f6d0e0d633 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 8 Sep 2023 12:06:16 +0200 Subject: [PATCH 280/678] Add `predicates` to trait/adt definitions --- crates/hir-analysis/src/infer/trait_.rs | 22 +++++----- crates/hir-analysis/src/infer/ty.rs | 55 ++++++++++++++++++------- crates/hir-analysis/src/lib.rs | 2 + 3 files changed, 55 insertions(+), 24 deletions(-) diff --git a/crates/hir-analysis/src/infer/trait_.rs b/crates/hir-analysis/src/infer/trait_.rs index e7e4277b36..9e6afe8563 100644 --- a/crates/hir-analysis/src/infer/trait_.rs +++ b/crates/hir-analysis/src/infer/trait_.rs @@ -2,40 +2,42 @@ use hir::hir_def::{Func, Trait}; use rustc_hash::{FxHashMap, FxHashSet}; -use super::ty::{TyId, TyVar}; +use super::ty::TyId; /// `Ty` implements `Trait` with the given type arguments. -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct Predicate { pub trait_: TraitInstId, pub ty: TyId, pub trait_args: Vec, } -/// T is satisfied under the given predicates. -/// i.e., `predicates => T` +/// Represents an each type which implements a trait. +/// Whenever `predicates` are satisfied, `impl_` is satisfied. #[derive(Clone, Debug, PartialEq, Eq)] -pub struct Qualified { +pub struct Implementor { predicates: Vec, - t: T, + impl_: Predicate, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct TraitImplTable { - pub impls: FxHashMap>, + pub impls: FxHashMap, pub method_table: FxHashMap>, } /// Represents an instantiated trait which is implemented to types. 
#[salsa::interned] pub struct TraitInstId { - pub trait_: Trait, - pub args: Vec, + pub trait_: TraitDef, + pub substs: Vec, pub super_traits: Vec, } +#[salsa::tracked] pub struct TraitDef { pub trait_: Trait, - pub args: Vec>, + pub args: TyId, + pub predicates: Vec, pub super_traits: Vec, } diff --git a/crates/hir-analysis/src/infer/ty.rs b/crates/hir-analysis/src/infer/ty.rs index 07a2531ac1..aa8c5d1e1e 100644 --- a/crates/hir-analysis/src/infer/ty.rs +++ b/crates/hir-analysis/src/infer/ty.rs @@ -2,6 +2,8 @@ use hir::hir_def::{Contract, Enum, Struct}; use crate::HirAnalysisDb; +use super::trait_::Predicate; + #[salsa::interned] pub struct TyId { data: TyData, @@ -15,14 +17,22 @@ impl TyId { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyData { - /// Type variables. + /// Type variable. TyVar(TyVar), + /// Type Parameter. + TyParam(TyParam), + + /// Dependent type, e.g., [T; N: usize] + DependentTy(TyVar, TyId), + // Type application, e.g., `Option` is represented as `TApp(TyConst(Option), // TyConst(i32)`. - TApp(Box, Box), + TApp(Box, Box), TyConst(TyConst), + + Invalid, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -48,11 +58,17 @@ pub struct TyVar { kind: Kind, } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TyParam { + name: usize, + kind: Kind, +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyConst { Primitive(PrimTy), Abs, - Adt(AdtTy), + Adt(AdtDef), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -76,13 +92,7 @@ pub enum PrimTy { Ptr, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct AdtTy { - id: AdtId, - kind: Kind, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] pub enum AdtId { Enum(Enum), Struct(Struct), @@ -97,11 +107,14 @@ impl HasKind for TyData { fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { match self { TyData::TyVar(ty_var) => ty_var.kind(db), + TyData::TyParam(ty_param) => ty_param.kind.clone(), + TyData::DependentTy(_, _) => Kind::Star, TyData::TApp(lhs, _) => match lhs.kind(db) { - Kind::Abs(_, rhs) => *rhs, + Kind::Abs(_, rhs) => *rhs.clone(), _ => unreachable!(), }, TyData::TyConst(ty_const) => ty_const.kind(db), + TyData::Invalid => Kind::Star, } } } @@ -137,9 +150,14 @@ impl HasKind for PrimTy { } } -impl HasKind for AdtTy { - fn kind(&self, _: &dyn HirAnalysisDb) -> Kind { - self.kind.clone() +impl HasKind for AdtDef { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { + let mut kind = Kind::Star; + for param in self.params(db).iter().rev() { + kind = Kind::abs(ty_kind(db, *param).clone(), kind); + } + + kind } } @@ -147,3 +165,12 @@ impl HasKind for AdtTy { pub fn ty_kind(db: &dyn HirAnalysisDb, ty: TyId) -> Kind { ty.data(db).kind(db) } + +#[salsa::tracked] +pub struct AdtDef { + pub adt: AdtId, + #[return_ref] + pub params: Vec, + #[return_ref] + predicates: Vec, +} diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 973f9cf162..dfe637d1eb 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -10,7 +10,9 @@ pub struct Jar( /// Type inference. infer::ty::TyId, infer::ty::ty_kind, + infer::ty::AdtDef, // Trait resolution. 
+ infer::trait_::TraitDef, infer::trait_::TraitInstId, ); From fde1704432f734b0467b2a67c65e11c6c2b6458e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 8 Sep 2023 12:20:03 +0200 Subject: [PATCH 281/678] Define `AdtVariant` --- crates/hir-analysis/src/infer/ty.rs | 37 ++++++++++++++++++----------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/crates/hir-analysis/src/infer/ty.rs b/crates/hir-analysis/src/infer/ty.rs index aa8c5d1e1e..c45afdcce8 100644 --- a/crates/hir-analysis/src/infer/ty.rs +++ b/crates/hir-analysis/src/infer/ty.rs @@ -15,6 +15,29 @@ impl TyId { } } +#[salsa::tracked] +pub struct AdtDef { + pub adt: AdtId, + #[return_ref] + pub params: Vec, + pub variants: Vec, + #[return_ref] + predicates: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AdtVariant { + ty: TyId, + /// Fields of the variant. + /// If the parent is an struct, the length of the vector is always 1. + fields: Vec, +} + +#[salsa::tracked(return_ref)] +pub fn ty_kind(db: &dyn HirAnalysisDb, ty: TyId) -> Kind { + ty.data(db).kind(db) +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyData { /// Type variable. @@ -160,17 +183,3 @@ impl HasKind for AdtDef { kind } } - -#[salsa::tracked(return_ref)] -pub fn ty_kind(db: &dyn HirAnalysisDb, ty: TyId) -> Kind { - ty.data(db).kind(db) -} - -#[salsa::tracked] -pub struct AdtDef { - pub adt: AdtId, - #[return_ref] - pub params: Vec, - #[return_ref] - predicates: Vec, -} From 8c15c9b95e2cfb02b65fde998f16efe7d77fa31a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 8 Sep 2023 17:38:55 +0200 Subject: [PATCH 282/678] Add universal type, postpone the implementation of dependent types --- crates/hir-analysis/src/infer/trait_.rs | 4 +- crates/hir-analysis/src/infer/ty.rs | 50 +++++++++++++++++-------- 2 files changed, 36 insertions(+), 18 deletions(-) diff --git a/crates/hir-analysis/src/infer/trait_.rs b/crates/hir-analysis/src/infer/trait_.rs index 9e6afe8563..7f3ad46d17 100644 --- a/crates/hir-analysis/src/infer/trait_.rs +++ b/crates/hir-analysis/src/infer/trait_.rs @@ -31,13 +31,11 @@ pub struct TraitImplTable { pub struct TraitInstId { pub trait_: TraitDef, pub substs: Vec, - pub super_traits: Vec, } #[salsa::tracked] pub struct TraitDef { pub trait_: Trait, - pub args: TyId, - pub predicates: Vec, + pub args: Vec, pub super_traits: Vec, } diff --git a/crates/hir-analysis/src/infer/ty.rs b/crates/hir-analysis/src/infer/ty.rs index c45afdcce8..a314d645c8 100644 --- a/crates/hir-analysis/src/infer/ty.rs +++ b/crates/hir-analysis/src/infer/ty.rs @@ -1,9 +1,7 @@ -use hir::hir_def::{Contract, Enum, Struct}; +use hir::hir_def::{Contract, Enum, IdentId, Struct}; use crate::HirAnalysisDb; -use super::trait_::Predicate; - #[salsa::interned] pub struct TyId { data: TyData, @@ -21,8 +19,6 @@ pub struct AdtDef { #[return_ref] pub params: Vec, pub variants: Vec, - #[return_ref] - predicates: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -46,15 +42,21 @@ pub enum TyData { /// Type Parameter. TyParam(TyParam), - /// Dependent type, e.g., [T; N: usize] - DependentTy(TyVar, TyId), + TyAll(TyAll), - // Type application, e.g., `Option` is represented as `TApp(TyConst(Option), - // TyConst(i32)`. - TApp(Box, Box), + // Type application, + // e.g.,`TApp(TyConst(Option), TyConst(i32))`. + TApp(TyId, TyId), TyConst(TyConst), + // TODO: DependentTy, + // TermTy(TermTy) + // DependentTyAll(TyAll, TyConst), + // DependentTyParam(TyVar, TyConst), + + // Invalid type which means the type is not defined. 
+ // This type can be unified with any other types. Invalid, } @@ -67,6 +69,9 @@ pub enum Kind { /// e.g., /// `* -> *` or `(* -> *) -> *` Abs(Box, Box), + + /// `Any` kind is set to the type iff the type is `Invalid`. + Any, } impl Kind { @@ -83,7 +88,19 @@ pub struct TyVar { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TyParam { - name: usize, + name: IdentId, + kind: Kind, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TyAll { + index: usize, + kind: Kind, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TAll { + idx: usize, kind: Kind, } @@ -131,13 +148,16 @@ impl HasKind for TyData { match self { TyData::TyVar(ty_var) => ty_var.kind(db), TyData::TyParam(ty_param) => ty_param.kind.clone(), - TyData::DependentTy(_, _) => Kind::Star, - TyData::TApp(lhs, _) => match lhs.kind(db) { - Kind::Abs(_, rhs) => *rhs.clone(), + TyData::TyAll(ty_all) => ty_all.kind.clone(), + TyData::TApp(lhs, rhs) => match lhs.kind(db) { + Kind::Abs(k_arg, k_ret) => { + debug_assert!(rhs.kind(db) == k_arg.as_ref()); + k_ret.as_ref().clone() + } _ => unreachable!(), }, TyData::TyConst(ty_const) => ty_const.kind(db), - TyData::Invalid => Kind::Star, + TyData::Invalid => Kind::Any, } } } From 2bd8e66fd28438148d90668add4ed30dc36ba7a8 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 22:09:40 +0200 Subject: [PATCH 283/678] Implement type lowering and ADT lowering --- Cargo.lock | 1 - crates/hir-analysis/Cargo.toml | 1 - crates/hir-analysis/src/infer/diagnostics.rs | 31 ++ crates/hir-analysis/src/infer/lower.rs | 398 ++++++++++++++++++ crates/hir-analysis/src/infer/mod.rs | 2 + crates/hir-analysis/src/infer/ty.rs | 142 +++++-- crates/hir-analysis/src/lib.rs | 9 + .../hir-analysis/src/name_resolution/mod.rs | 2 +- crates/hir/src/hir_def/types.rs | 1 + crates/hir/src/span/types.rs | 2 +- crates/hir/src/visitor.rs | 4 +- 11 files changed, 551 insertions(+), 42 deletions(-) create mode 100644 crates/hir-analysis/src/infer/diagnostics.rs create mode 100644 crates/hir-analysis/src/infer/lower.rs diff --git a/Cargo.lock b/Cargo.lock index dc95a76c9f..436e7e603e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1078,7 +1078,6 @@ dependencies = [ "fe-hir", "fe-macros", "itertools", - "lazy_static", "rustc-hash", "salsa-2022", "smallvec", diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index c3667e363e..96d1144ff2 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -14,7 +14,6 @@ rustc-hash = "1.1.0" either = "1.8" derive_more = "0.99" itertools = "0.10" -lazy_static = "1.4" hir = { path = "../hir", package = "fe-hir" } common = { path = "../common2", package = "fe-common2" } diff --git a/crates/hir-analysis/src/infer/diagnostics.rs b/crates/hir-analysis/src/infer/diagnostics.rs new file mode 100644 index 0000000000..479456a777 --- /dev/null +++ b/crates/hir-analysis/src/infer/diagnostics.rs @@ -0,0 +1,31 @@ +use hir::span::DynLazySpan; + +#[salsa::accumulator] +pub struct StructDefDiagAccumulator(pub(super) TyLowerDiag); +#[salsa::accumulator] +pub struct EnumDefDiagAccumulator(pub(super) TyLowerDiag); +#[salsa::accumulator] +pub struct ContractDefDiagAccumulator(pub(super) TyLowerDiag); +#[salsa::accumulator] +pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TyLowerDiag { + AssocTy(DynLazySpan), + InvalidType(DynLazySpan), + NotFullyAppliedType(DynLazySpan), +} + +impl TyLowerDiag { + pub(super) fn assoc_ty(span: impl Into) -> Self { + 
Self::AssocTy(span.into()) + } + + pub(super) fn invalid_type(span: impl Into) -> Self { + Self::InvalidType(span.into()) + } + + pub fn not_fully_applied_type(span: impl Into) -> Self { + Self::NotFullyAppliedType(span.into()) + } +} diff --git a/crates/hir-analysis/src/infer/lower.rs b/crates/hir-analysis/src/infer/lower.rs new file mode 100644 index 0000000000..e404174305 --- /dev/null +++ b/crates/hir-analysis/src/infer/lower.rs @@ -0,0 +1,398 @@ +use either::Either; +use hir::{ + hir_def::{ + kw, scope_graph::ScopeId, Contract, Enum, FieldDefListId, GenericArg, GenericArgListId, + GenericParam, ItemKind, Partial, PathId, Struct, TypeAlias as HirTypeAlias, + TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, + }, + span::{types::LazyTySpan, DynLazySpan}, + visitor::prelude::{ + LazyFieldDefListSpan, LazyGenericArgSpan, LazyPathTypeSpan, LazyPtrTypeSpan, + LazyTupleTypeSpan, LazyVariantDefListSpan, + }, +}; + +use crate::{ + infer::diagnostics::{ + ContractDefDiagAccumulator, EnumDefDiagAccumulator, StructDefDiagAccumulator, + }, + name_resolution::{ + resolve_path_early, resolve_segments_early, EarlyResolvedPath, NameDomain, NameResKind, + }, + HirAnalysisDb, +}; + +use super::{ + diagnostics::TyLowerDiag, + ty::{AdtDef, AdtId, AdtVariant, Kind, TyData, TyId, TyParam}, +}; + +#[salsa::tracked] +pub fn lower_struct(db: &dyn HirAnalysisDb, struct_: Struct) -> TyId { + let (ty, diags) = AdtTyBuilder::new(db, struct_.into()).build(); + for diag in diags { + StructDefDiagAccumulator::push(db, diag) + } + ty +} + +#[salsa::tracked] +pub fn lower_enum(db: &dyn HirAnalysisDb, enum_: Enum) -> TyId { + let (ty, diags) = AdtTyBuilder::new(db, enum_.into()).build(); + for diag in diags { + EnumDefDiagAccumulator::push(db, diag) + } + ty +} + +#[salsa::tracked] +pub fn lower_contract(db: &dyn HirAnalysisDb, contract: Contract) -> TyId { + let (ty, diags) = AdtTyBuilder::new(db, contract.into()).build(); + for diag in diags { + ContractDefDiagAccumulator::push(db, diag) + } + ty +} + +#[salsa::tracked] +pub fn lower_type_alias(_db: &dyn HirAnalysisDb, _alias: HirTypeAlias) -> TyAlias { + todo!() +} + +/// Represents a lowered type alias. `TyAlias` itself isn't a type, but +/// can be instantiated to a `TyId` by substituting its type +/// parameters with actual types. +/// +/// NOTE: `TyAlias` can't become an alias to partial applied types, i.e., the +/// right hand side of the alias declaration must be a fully applied type. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TyAlias { + alias_to: TyId, + params: Vec, +} + +impl TyAlias { + fn subst_with(&self, _db: &dyn HirAnalysisDb, _substs: &[TyId]) -> TyId { + todo!() + } +} + +pub(crate) struct TyBuilder<'db> { + db: &'db dyn HirAnalysisDb, + scope: ScopeId, + diags: Vec, +} + +impl<'db> TyBuilder<'db> { + pub(super) fn new(db: &'db dyn HirAnalysisDb, scope: ScopeId) -> Self { + Self { + db, + scope, + diags: Vec::new(), + } + } + + pub(super) fn lower_ty(&mut self, ty: HirTyId, span: LazyTySpan) -> TyId { + match ty.data(self.db.as_hir_db()) { + HirTyKind::Ptr(pointee) => self.lower_ptr(*pointee, span.into_ptr_type()), + + HirTyKind::Path(path, args) => self.lower_path(*path, *args, span.into_path_type()), + + HirTyKind::SelfType => self.lower_self_ty(span), + + HirTyKind::Tuple(elems) => self.lower_tuple(elems, span.into_tuple_type()), + + HirTyKind::Array(_, _) => { + todo!() + } + } + } + + pub(super) fn lower_path( + &mut self, + path: Partial, + args: GenericArgListId, + span: LazyPathTypeSpan, + ) -> TyId { + let path_ty = path + .to_opt() + .map(|path| { + let res = resolve_path_early(self.db, path, self.scope); + self.lower_resolved_path(&res, span.path().into()) + }) + .unwrap_or_else(|| Either::Left(TyId::invalid(self.db))); + + let generic_arg_span = span.generic_args(); + + let arg_tys: Vec<_> = args + .data(self.db.as_hir_db()) + .iter() + .enumerate() + .map(|(idx, arg)| self.lower_generic_arg(arg, generic_arg_span.arg(idx))) + .collect(); + + match path_ty { + Either::Left(ty) => arg_tys + .into_iter() + .fold(ty, |acc, arg| TyId::apply(self.db, acc, arg)), + + Either::Right(alias) => alias.subst_with(self.db, &arg_tys), + } + } + + pub(super) fn lower_self_ty(&mut self, span: LazyTySpan) -> TyId { + let res = resolve_segments_early(self.db, &[Partial::Present(kw::SELF_TY)], self.scope); + self.lower_resolved_path(&res, span.into()).unwrap_left() + } + + fn lower_ptr(&mut self, pointee: Partial, span: LazyPtrTypeSpan) -> TyId { + let pointee = pointee + .to_opt() + .map(|pointee| self.lower_ty(pointee, span.pointee())) + .unwrap_or_else(|| TyId::invalid(self.db)); + + let ptr = TyId::ptr(self.db); + TyId::apply(self.db, ptr, pointee) + } + + fn lower_tuple(&mut self, elems: &[Partial], span: LazyTupleTypeSpan) -> TyId { + let len = elems.len(); + let tuple = TyId::tuple(self.db, len); + elems.iter().enumerate().fold(tuple, |acc, (idx, elem)| { + let elem = elem + .to_opt() + .map(|elem| self.lower_ty(elem, span.elem_ty(idx))) + .unwrap_or_else(|| TyId::invalid(self.db)); + + TyId::apply(self.db, acc, elem) + }) + } + + fn lower_resolved_path( + &mut self, + path: &EarlyResolvedPath, + span: DynLazySpan, + ) -> Either { + let res = match path { + EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { + Ok(res) => res, + + // This error is already handled by the name resolution. + Err(_) => return Either::Left(TyId::invalid(self.db)), + }, + + EarlyResolvedPath::Partial { .. } => { + // TODO: Fix here when we add an associated type. 
+ self.diags.push(TyLowerDiag::assoc_ty(span)); + return Either::Left(TyId::invalid(self.db)); + } + }; + + let scope = match res.kind { + NameResKind::Scope(scope) => scope, + NameResKind::Prim(prim_ty) => { + return Either::Left(TyId::from_hir_prim_ty(self.db, prim_ty)) + } + }; + + let item = match scope { + ScopeId::Item(item) => item, + ScopeId::GenericParam(item, idx) => { + return Either::Left(lower_generic_param(self.db, item, idx)); + } + _ => unreachable!(), + }; + + match item { + ItemKind::Enum(enum_) => Either::Left(lower_enum(self.db, enum_)), + ItemKind::Struct(struct_) => Either::Left(lower_struct(self.db, struct_)), + ItemKind::Contract(contract) => Either::Left(lower_contract(self.db, contract)), + ItemKind::TypeAlias(alias) => Either::Right(lower_type_alias(self.db, alias)), + _ => { + self.diags.push(TyLowerDiag::invalid_type(span)); + Either::Left(TyId::invalid(self.db)) + } + } + } + + pub(super) fn lower_generic_arg(&mut self, arg: &GenericArg, span: LazyGenericArgSpan) -> TyId { + match arg { + GenericArg::Type(ty_arg) => ty_arg + .ty + .to_opt() + .map(|ty| self.lower_ty(ty, span.into_type_arg().ty())) + .unwrap_or_else(|| TyId::invalid(self.db)), + + GenericArg::Const(_) => todo!(), + } + } +} + +struct AdtTyBuilder<'db> { + db: &'db dyn HirAnalysisDb, + adt: AdtId, + params: Vec, + variants: Vec, + diags: Vec, +} + +impl<'db> AdtTyBuilder<'db> { + fn new(db: &'db dyn HirAnalysisDb, adt: AdtId) -> Self { + Self { + db, + adt, + params: Vec::new(), + variants: Vec::new(), + diags: Vec::new(), + } + } + + fn build(mut self) -> (TyId, Vec) { + self.collect_params(); + self.collect_variants(); + + let adt_def = AdtDef::new(self.db, self.adt, self.params, self.variants); + (TyId::adt(self.db, adt_def), self.diags) + } + + fn collect_params(&mut self) { + let hir_db = self.db.as_hir_db(); + let params = match self.adt { + AdtId::Struct(struct_) => struct_.generic_params(hir_db), + AdtId::Enum(enum_) => enum_.generic_params(hir_db), + AdtId::Contract(_) => return, + }; + + for idx in 0..params.len(hir_db) { + let param = lower_generic_param(self.db, self.adt.into(), idx); + self.params.push(param); + } + for idx in 0..params.data(hir_db).len() { + let param_ty = lower_generic_param(self.db, self.adt.into(), idx); + self.params.push(param_ty); + } + } + + fn collect_variants(&mut self) { + match self.adt { + AdtId::Struct(struct_) => { + let span = struct_.lazy_span(); + self.collect_field_types(struct_.fields(self.db.as_hir_db()), span.fields()); + } + + AdtId::Contract(contract) => { + let span = contract.lazy_span(); + self.collect_field_types(contract.fields(self.db.as_hir_db()), span.fields()) + } + + AdtId::Enum(enum_) => { + let span = enum_.lazy_span(); + self.collect_enum_variant_types( + enum_.variants(self.db.as_hir_db()), + span.variants(), + ) + } + }; + } + + fn collect_field_types(&mut self, fields: FieldDefListId, span: LazyFieldDefListSpan) { + fields + .data(self.db.as_hir_db()) + .iter() + .enumerate() + .for_each(|(i, field)| { + let ty = match field.ty.to_opt() { + Some(ty) => { + let mut builder = TyBuilder::new(self.db, self.adt.scope()); + let ty_span = span.field(i).ty(); + + let ty = builder.lower_ty(ty, ty_span.clone()); + let ty = self.verify_fully_applied_type(ty, ty_span.into()); + + self.diags.extend(builder.diags); + ty + } + + None => TyId::invalid(self.db), + }; + + let variant = AdtVariant { + name: field.name, + tys: vec![ty], + }; + self.variants.push(variant); + }) + } + + fn collect_enum_variant_types( + &mut self, + variants: 
VariantDefListId, + span: LazyVariantDefListSpan, + ) { + variants + .data(self.db.as_hir_db()) + .iter() + .enumerate() + .for_each(|(i, variant)| { + let tys = match variant.ty { + Some(ty) => { + let mut builder = TyBuilder::new(self.db, self.adt.scope()); + let ty_span = span.variant(i).ty(); + + let ty = builder.lower_ty(ty, ty_span.clone()); + let ty = self.verify_fully_applied_type(ty, ty_span.into()); + + self.diags.extend(builder.diags); + vec![ty] + } + + None => vec![], + }; + + let variant = AdtVariant { + name: variant.name, + tys, + }; + self.variants.push(variant) + }) + } + + /// Verifies that the type is fully applied type. + /// If the `ty` is not a fully applied type, error diagnostics are + /// accumulated and returns `TyId::invalid()`, otherwise returns given `ty`. + fn verify_fully_applied_type(&mut self, ty: TyId, span: DynLazySpan) -> TyId { + if ty.is_mono_type(self.db) { + ty + } else { + self.diags.push(TyLowerDiag::not_fully_applied_type(span)); + TyId::invalid(self.db) + } + } +} + +fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> TyId { + let params = match item { + ItemKind::Struct(struct_) => struct_.generic_params(db.as_hir_db()), + ItemKind::Enum(enum_) => enum_.generic_params(db.as_hir_db()), + _ => unreachable!(), + }; + + let param = ¶ms.data(db.as_hir_db())[idx]; + match param { + GenericParam::Type(param) => { + if let Some(name) = param.name.to_opt() { + let ty_param = TyParam { + name, + idx, + kind: Kind::Star, + }; + TyId::new(db, TyData::TyParam(ty_param)) + } else { + TyId::new(db, TyData::Invalid) + } + } + GenericParam::Const(_) => { + todo!() + } + } +} diff --git a/crates/hir-analysis/src/infer/mod.rs b/crates/hir-analysis/src/infer/mod.rs index d6606e3e16..22b2418935 100644 --- a/crates/hir-analysis/src/infer/mod.rs +++ b/crates/hir-analysis/src/infer/mod.rs @@ -1,2 +1,4 @@ +pub mod diagnostics; +pub mod lower; pub mod trait_; pub mod ty; diff --git a/crates/hir-analysis/src/infer/ty.rs b/crates/hir-analysis/src/infer/ty.rs index a314d645c8..1e1e299788 100644 --- a/crates/hir-analysis/src/infer/ty.rs +++ b/crates/hir-analysis/src/infer/ty.rs @@ -1,4 +1,8 @@ -use hir::hir_def::{Contract, Enum, IdentId, Struct}; +use hir::hir_def::{ + prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, + scope_graph::ScopeId, + Contract, Enum, IdentId, ItemKind, Partial, Struct, +}; use crate::HirAnalysisDb; @@ -11,6 +15,59 @@ impl TyId { pub fn kind<'db>(self, db: &'db dyn HirAnalysisDb) -> &'db Kind { ty_kind(db, self) } + + pub(super) fn ptr(db: &dyn HirAnalysisDb) -> Self { + Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::Ptr))) + } + + pub(super) fn tuple(db: &dyn HirAnalysisDb, n: usize) -> Self { + Self::new(db, TyData::TyCon(TyConcrete::tuple(n))) + } + + pub(super) fn adt(db: &dyn HirAnalysisDb, adt: AdtDef) -> Self { + Self::new(db, TyData::TyCon(TyConcrete::Adt(adt))) + } + + pub(super) fn apply(db: &dyn HirAnalysisDb, ty: Self, arg: Self) -> Self { + Self::new(db, TyData::TyApp(ty, arg)) + } + + pub(super) fn invalid(db: &dyn HirAnalysisDb) -> Self { + Self::new(db, TyData::Invalid) + } + + pub(super) fn from_hir_prim_ty(db: &dyn HirAnalysisDb, hir_prim: HirPrimTy) -> Self { + match hir_prim { + HirPrimTy::Bool => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::Bool))), + + HirPrimTy::Int(int_ty) => match int_ty { + HirIntTy::I8 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::I8))), + HirIntTy::I16 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::I16))), + HirIntTy::I32 => 
Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::I32))), + HirIntTy::I64 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::I64))), + HirIntTy::I128 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::I128))), + HirIntTy::I256 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::I256))), + }, + + HirPrimTy::Uint(uint_ty) => match uint_ty { + HirUintTy::U8 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::U8))), + HirUintTy::U16 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::U16))), + HirUintTy::U32 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::U32))), + HirUintTy::U64 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::U64))), + HirUintTy::U128 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::U128))), + HirUintTy::U256 => Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::U256))), + }, + } + } + + /// Returns true if the type is declared as a monotype or fully applied + /// type. + pub(super) fn is_mono_type(self, db: &dyn HirAnalysisDb) -> bool { + match self.kind(db) { + Kind::Abs(_, _) => false, + _ => true, + } + } } #[salsa::tracked] @@ -23,10 +80,11 @@ pub struct AdtDef { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct AdtVariant { - ty: TyId, + pub name: Partial, /// Fields of the variant. - /// If the parent is an struct, the length of the vector is always 1. - fields: Vec, + /// If the adt is an struct or contract, the length of the vector is always + /// 1. + pub tys: Vec, } #[salsa::tracked(return_ref)] @@ -42,18 +100,16 @@ pub enum TyData { /// Type Parameter. TyParam(TyParam), - TyAll(TyAll), - // Type application, // e.g.,`TApp(TyConst(Option), TyConst(i32))`. - TApp(TyId, TyId), + TyApp(TyId, TyId), - TyConst(TyConst), + TyCon(TyConcrete), // TODO: DependentTy, // TermTy(TermTy) - // DependentTyAll(TyAll, TyConst), - // DependentTyParam(TyVar, TyConst), + // DependentTyParam(TyParam, TyConst), + // DependentTyVar(TyVar, TyConst), // Invalid type which means the type is not defined. // This type can be unified with any other types. 
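The kind bookkeeping in this file is easiest to see on a toy model: a unary constructor such as `Ptr` carries kind `* -> *`, each `TyApp` peels one arrow off, and a type counts as fully applied (what `is_mono_type` checks and `verify_fully_applied_type` enforces) exactly when it is left with `*`. The sketch below reimplements only that arithmetic as standalone code; it is not the crate's salsa-interned `Kind`/`TyId` API.

    // Toy model of the kind arithmetic behind `ty_kind`/`is_mono_type`:
    // applying a constructor of kind (k1 -> k2) to an argument of kind k1
    // yields k2, and a type is fully applied when its kind is Star.
    #[derive(Debug, Clone, PartialEq, Eq)]
    enum Kind {
        Star,
        Abs(Box<Kind>, Box<Kind>),
    }

    fn abs(lhs: Kind, rhs: Kind) -> Kind {
        Kind::Abs(Box::new(lhs), Box::new(rhs))
    }

    // Mirrors the TyApp arm of the HasKind impl: check the argument kind,
    // then return the constructor's result kind.
    fn apply(ctor: &Kind, arg: &Kind) -> Kind {
        match ctor {
            Kind::Abs(expected, ret) => {
                assert_eq!(arg, expected.as_ref(), "kind mismatch");
                ret.as_ref().clone()
            }
            Kind::Star => panic!("cannot apply a fully applied type"),
        }
    }

    fn is_mono(kind: &Kind) -> bool {
        !matches!(kind, Kind::Abs(..))
    }

    fn main() {
        let star = Kind::Star;
        let ptr = abs(star.clone(), star.clone());                      // Ptr  : * -> *
        let pair = abs(star.clone(), abs(star.clone(), star.clone()));  // (,)  : * -> * -> *

        let ptr_i32 = apply(&ptr, &star);   // Ptr<i32>  : *
        let half_pair = apply(&pair, &star); // (i32, _) : * -> *

        assert!(is_mono(&ptr_i32));
        assert!(!is_mono(&half_pair)); // the kind a field type must not have
        println!("ok");
    }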
@@ -82,35 +138,30 @@ impl Kind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TyVar { - id: u32, - kind: Kind, + pub id: u32, + pub kind: Kind, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TyParam { - name: IdentId, - kind: Kind, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TyAll { - index: usize, - kind: Kind, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TAll { - idx: usize, - kind: Kind, + pub name: IdentId, + pub idx: usize, + pub kind: Kind, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TyConst { - Primitive(PrimTy), +pub enum TyConcrete { + Prim(PrimTy), Abs, Adt(AdtDef), } +impl TyConcrete { + pub(super) fn tuple(n: usize) -> Self { + Self::Prim(PrimTy::Tuple(n)) + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum PrimTy { Bool, @@ -132,13 +183,33 @@ pub enum PrimTy { Ptr, } -#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] pub enum AdtId { Enum(Enum), Struct(Struct), Contract(Contract), } +impl Into for AdtId { + fn into(self) -> ItemKind { + match self { + Self::Enum(enum_) => ItemKind::Enum(enum_), + Self::Struct(struct_) => ItemKind::Struct(struct_), + Self::Contract(contract) => ItemKind::Contract(contract), + } + } +} + +impl AdtId { + pub(super) fn scope(self) -> ScopeId { + match self { + Self::Enum(enum_) => enum_.scope(), + Self::Struct(struct_) => struct_.scope(), + Self::Contract(contract_) => contract_.scope(), + } + } +} + pub(super) trait HasKind { fn kind(&self, db: &dyn HirAnalysisDb) -> Kind; } @@ -148,15 +219,14 @@ impl HasKind for TyData { match self { TyData::TyVar(ty_var) => ty_var.kind(db), TyData::TyParam(ty_param) => ty_param.kind.clone(), - TyData::TyAll(ty_all) => ty_all.kind.clone(), - TyData::TApp(lhs, rhs) => match lhs.kind(db) { + TyData::TyCon(ty_const) => ty_const.kind(db), + TyData::TyApp(lhs, rhs) => match lhs.kind(db) { Kind::Abs(k_arg, k_ret) => { debug_assert!(rhs.kind(db) == k_arg.as_ref()); k_ret.as_ref().clone() } _ => unreachable!(), }, - TyData::TyConst(ty_const) => ty_const.kind(db), TyData::Invalid => Kind::Any, } } @@ -168,12 +238,12 @@ impl HasKind for TyVar { } } -impl HasKind for TyConst { +impl HasKind for TyConcrete { fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { match self { - TyConst::Primitive(prim) => prim.kind(db), - TyConst::Abs => Kind::abs(Kind::Star, Kind::abs(Kind::Star, Kind::Star)), - TyConst::Adt(adt) => adt.kind(db), + TyConcrete::Prim(prim) => prim.kind(db), + TyConcrete::Abs => Kind::abs(Kind::Star, Kind::abs(Kind::Star, Kind::Star)), + TyConcrete::Adt(adt) => adt.kind(db), } } } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index dfe637d1eb..c9aeab9cb5 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -11,9 +11,18 @@ pub struct Jar( infer::ty::TyId, infer::ty::ty_kind, infer::ty::AdtDef, + /// Type lowering. + infer::lower::lower_enum, + infer::lower::lower_struct, + infer::lower::lower_contract, + infer::lower::lower_type_alias, // Trait resolution. 
infer::trait_::TraitDef, infer::trait_::TraitInstId, + infer::diagnostics::StructDefDiagAccumulator, + infer::diagnostics::EnumDefDiagAccumulator, + infer::diagnostics::TypeAliasDefDiagAccumulator, + infer::diagnostics::ContractDefDiagAccumulator, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index b963fce2b6..38b4fd270f 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -8,7 +8,7 @@ mod visibility_checker; use either::Either; pub use import_resolver::ResolvedImports; pub use name_resolver::{ - NameDerivation, NameDomain, NameQuery, NameRes, NameResBucket, QueryDirective, + NameDerivation, NameDomain, NameQuery, NameRes, NameResBucket, NameResKind, QueryDirective, }; pub use path_resolver::EarlyResolvedPath; diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index e46c880295..0152d85cab 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -2,6 +2,7 @@ use super::{Body, GenericArgListId, Partial, PathId}; #[salsa::interned] pub struct TypeId { + #[return_ref] pub data: TypeKind, } diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs index b71d55f018..82163b7af3 100644 --- a/crates/hir/src/span/types.rs +++ b/crates/hir/src/span/types.rs @@ -46,7 +46,7 @@ define_lazy_span_node!( (star, star), } @node { - (ty, inner, LazyTySpan), + (pointee, inner, LazyTySpan), } ); diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index c6d0b173c0..68f021690a 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1626,7 +1626,7 @@ where TypeKind::Ptr(ty) => { if let Some(ty) = ty.to_opt() { ctxt.with_new_ctxt( - |ctxt| ctxt.into_ptr_type().ty(), + |ctxt| ctxt.into_ptr_type().pointee(), |ctxt| { visitor.visit_ty(ctxt, ty); }, @@ -1646,7 +1646,7 @@ where ctxt.with_new_ctxt( |span| span.generic_args_moved(), |ctxt| { - visitor.visit_generic_arg_list(ctxt, generic_args); + visitor.visit_generic_arg_list(ctxt, *generic_args); }, ); }, From 2a62684e5e7b6bc1dc5dfe10e3ae9100154f24fb Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 22:37:09 +0200 Subject: [PATCH 284/678] Add utility methods to retrieve all ADT types in a top mod --- crates/hir/src/hir_def/item.rs | 51 ++++++++++++++++++++++++++++++++++ crates/hir/src/lib.rs | 4 +++ 2 files changed, 55 insertions(+) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 34292dac04..a9afccb8e3 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -283,6 +283,57 @@ impl TopLevelMod { // Please change here if we introduce it. Visibility::Public } + + /// Returns all structs in the top level module including ones in nested + /// modules. + pub fn all_structs<'db>(self, db: &'db dyn HirDb) -> &'db Vec { + all_structs_in_top_mod(db, self) + } + + /// Returns all enums in the top level module including ones in nested + /// modules. + pub fn all_enums<'db>(self, db: &'db dyn HirDb) -> &'db Vec { + all_enums_in_top_mod(db, self) + } + + /// Returns all contracts in the top level module including ones in nested + /// modules. 
+ pub fn all_contracts<'db>(self, db: &'db dyn HirDb) -> &'db Vec { + all_contracts_in_top_mod(db, self) + } +} + +#[salsa::tracked(return_ref)] +pub fn all_structs_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { + top_mod + .children_nested(db) + .filter_map(|item| match item { + ItemKind::Struct(struct_) => Some(struct_), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_enums_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { + top_mod + .children_non_nested(db) + .filter_map(|item| match item { + ItemKind::Enum(enum_) => Some(enum_), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_contracts_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { + top_mod + .children_non_nested(db) + .filter_map(|item| match item { + ItemKind::Contract(contract) => Some(contract), + _ => None, + }) + .collect() } #[salsa::tracked] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 7a343eaaec..21fca1ecd2 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -49,6 +49,10 @@ pub struct Jar( hir_def::TypeId, hir_def::TupleTypeId, hir_def::UsePathId, + /// Utility methods for analysis. + hir_def::all_structs_in_top_mod, + hir_def::all_enums_in_top_mod, + hir_def::all_contracts_in_top_mod, /// Accumulated diagnostics. ParseErrorAccumulator, /// Private tracked functions. These are not part of the public API, and From f602a520c84f1fd7879915fce001cfc818f0a0ac Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 22:48:03 +0200 Subject: [PATCH 285/678] Implement `TypeDefAnalysisPass` --- crates/common2/src/diagnostics.rs | 5 +- crates/hir-analysis/src/infer/diagnostics.rs | 67 +++++++++++++++++++- crates/hir-analysis/src/infer/mod.rs | 52 +++++++++++++++ 3 files changed, 119 insertions(+), 5 deletions(-) diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs index 564fd70a61..577ac740e4 100644 --- a/crates/common2/src/diagnostics.rs +++ b/crates/common2/src/diagnostics.rs @@ -144,7 +144,7 @@ pub enum DiagnosticPass { NameResolution, - TyCheck, + TypeDefinition, ExternalAnalysis(ExternalAnalysisKey), } @@ -154,8 +154,7 @@ impl DiagnosticPass { match self { Self::Parse => 1, Self::NameResolution => 2, - - Self::TyCheck => 3, + Self::TypeDefinition => 3, Self::ExternalAnalysis(_) => std::u16::MAX, } diff --git a/crates/hir-analysis/src/infer/diagnostics.rs b/crates/hir-analysis/src/infer/diagnostics.rs index 479456a777..7ed40404d3 100644 --- a/crates/hir-analysis/src/infer/diagnostics.rs +++ b/crates/hir-analysis/src/infer/diagnostics.rs @@ -1,4 +1,10 @@ -use hir::span::DynLazySpan; +use common::diagnostics::{ + CompleteDiagnostic, DiagnosticPass, GlobalErrorCode, LabelStyle, Severity, SubDiagnostic, +}; +use hir::{ + diagnostics::DiagnosticVoucher, + span::{DynLazySpan, LazySpan}, +}; #[salsa::accumulator] pub struct StructDefDiagAccumulator(pub(super) TyLowerDiag); @@ -11,9 +17,9 @@ pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyLowerDiag { - AssocTy(DynLazySpan), InvalidType(DynLazySpan), NotFullyAppliedType(DynLazySpan), + AssocTy(DynLazySpan), } impl TyLowerDiag { @@ -28,4 +34,61 @@ impl TyLowerDiag { pub fn not_fully_applied_type(span: impl Into) -> Self { Self::NotFullyAppliedType(span.into()) } + + fn local_code(&self) -> u16 { + match self { + Self::InvalidType(_) => 0, + Self::NotFullyAppliedType(_) => 1, + Self::AssocTy(_) => 2, + } + } + + fn message(&self) -> String { + match 
self { + Self::InvalidType(_) => "expected type".to_string(), + Self::NotFullyAppliedType(_) => "expected fully applied type".to_string(), + Self::AssocTy(_) => "associated type is not supported ".to_string(), + } + } + + fn sub_diags(&self, db: &dyn hir::SpannedHirDb) -> Vec { + match self { + Self::InvalidType(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "expected type here".to_string(), + span.resolve(db), + )], + + Self::NotFullyAppliedType(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "expected fully applied type here".to_string(), + span.resolve(db), + )], + + Self::AssocTy(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "associated type is not implemented".to_string(), + span.resolve(db), + )], + } + } + + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl DiagnosticVoucher for TyLowerDiag { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::TypeDefinition, self.local_code()) + } + + fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } } diff --git a/crates/hir-analysis/src/infer/mod.rs b/crates/hir-analysis/src/infer/mod.rs index 22b2418935..238ebcf4ee 100644 --- a/crates/hir-analysis/src/infer/mod.rs +++ b/crates/hir-analysis/src/infer/mod.rs @@ -1,4 +1,56 @@ +use hir::analysis_pass::ModuleAnalysisPass; + +use crate::HirAnalysisDb; + pub mod diagnostics; pub mod lower; pub mod trait_; pub mod ty; + +pub struct TypeDefAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: hir::hir_def::TopLevelMod, + ) -> Vec> { + let mut diags = vec![]; + let hir_db = self.db.as_hir_db(); + for struct_ in top_mod.all_structs(hir_db) { + lower::lower_struct(self.db, *struct_); + diags.extend( + lower::lower_struct::accumulated::( + self.db, *struct_, + ) + .into_iter() + .map(|diag| Box::new(diag) as _), + ) + } + + for enum_ in top_mod.all_enums(hir_db) { + lower::lower_enum(self.db, *enum_); + diags.extend( + lower::lower_enum::accumulated::( + self.db, *enum_, + ) + .into_iter() + .map(|diag| Box::new(diag) as _), + ) + } + + for contract in top_mod.all_contracts(hir_db) { + lower::lower_contract(self.db, *contract); + diags.extend( + lower::lower_contract::accumulated::( + self.db, *contract, + ) + .into_iter() + .map(|diag| Box::new(diag) as _), + ) + } + + diags + } +} From 3363c5a8bb7b9d34ee1248f62716c3205e67d21a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 22:49:56 +0200 Subject: [PATCH 286/678] Rename module name from `infer` to `ty` --- crates/hir-analysis/src/lib.rs | 28 +++++++++---------- .../src/{infer => ty}/diagnostics.rs | 0 .../hir-analysis/src/{infer => ty}/lower.rs | 6 ++-- crates/hir-analysis/src/{infer => ty}/mod.rs | 0 .../hir-analysis/src/{infer => ty}/trait_.rs | 0 crates/hir-analysis/src/{infer => ty}/ty.rs | 0 6 files changed, 17 insertions(+), 17 deletions(-) rename crates/hir-analysis/src/{infer => ty}/diagnostics.rs (100%) rename crates/hir-analysis/src/{infer => ty}/lower.rs (99%) rename crates/hir-analysis/src/{infer => ty}/mod.rs (100%) rename crates/hir-analysis/src/{infer => ty}/trait_.rs (100%) rename crates/hir-analysis/src/{infer => ty}/ty.rs (100%) diff --git a/crates/hir-analysis/src/lib.rs 
b/crates/hir-analysis/src/lib.rs index c9aeab9cb5..d01b51e2c3 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -8,21 +8,21 @@ pub struct Jar( name_resolution::diagnostics::NameResolutionDiagAccumulator, name_resolution::diagnostics::ImportResolutionDiagAccumulator, /// Type inference. - infer::ty::TyId, - infer::ty::ty_kind, - infer::ty::AdtDef, + ty::ty::TyId, + ty::ty::ty_kind, + ty::ty::AdtDef, /// Type lowering. - infer::lower::lower_enum, - infer::lower::lower_struct, - infer::lower::lower_contract, - infer::lower::lower_type_alias, + ty::lower::lower_enum, + ty::lower::lower_struct, + ty::lower::lower_contract, + ty::lower::lower_type_alias, // Trait resolution. - infer::trait_::TraitDef, - infer::trait_::TraitInstId, - infer::diagnostics::StructDefDiagAccumulator, - infer::diagnostics::EnumDefDiagAccumulator, - infer::diagnostics::TypeAliasDefDiagAccumulator, - infer::diagnostics::ContractDefDiagAccumulator, + ty::trait_::TraitDef, + ty::trait_::TraitInstId, + ty::diagnostics::StructDefDiagAccumulator, + ty::diagnostics::EnumDefDiagAccumulator, + ty::diagnostics::TypeAliasDefDiagAccumulator, + ty::diagnostics::ContractDefDiagAccumulator, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { @@ -32,8 +32,8 @@ pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { } impl HirAnalysisDb for DB where DB: ?Sized + salsa::DbWithJar + HirDb {} -pub mod infer; pub mod name_resolution; +pub mod ty; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Spanned { diff --git a/crates/hir-analysis/src/infer/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs similarity index 100% rename from crates/hir-analysis/src/infer/diagnostics.rs rename to crates/hir-analysis/src/ty/diagnostics.rs diff --git a/crates/hir-analysis/src/infer/lower.rs b/crates/hir-analysis/src/ty/lower.rs similarity index 99% rename from crates/hir-analysis/src/infer/lower.rs rename to crates/hir-analysis/src/ty/lower.rs index e404174305..455e4d6174 100644 --- a/crates/hir-analysis/src/infer/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -13,12 +13,12 @@ use hir::{ }; use crate::{ - infer::diagnostics::{ - ContractDefDiagAccumulator, EnumDefDiagAccumulator, StructDefDiagAccumulator, - }, name_resolution::{ resolve_path_early, resolve_segments_early, EarlyResolvedPath, NameDomain, NameResKind, }, + ty::diagnostics::{ + ContractDefDiagAccumulator, EnumDefDiagAccumulator, StructDefDiagAccumulator, + }, HirAnalysisDb, }; diff --git a/crates/hir-analysis/src/infer/mod.rs b/crates/hir-analysis/src/ty/mod.rs similarity index 100% rename from crates/hir-analysis/src/infer/mod.rs rename to crates/hir-analysis/src/ty/mod.rs diff --git a/crates/hir-analysis/src/infer/trait_.rs b/crates/hir-analysis/src/ty/trait_.rs similarity index 100% rename from crates/hir-analysis/src/infer/trait_.rs rename to crates/hir-analysis/src/ty/trait_.rs diff --git a/crates/hir-analysis/src/infer/ty.rs b/crates/hir-analysis/src/ty/ty.rs similarity index 100% rename from crates/hir-analysis/src/infer/ty.rs rename to crates/hir-analysis/src/ty/ty.rs From b7a5b4ce60af5fc3a3135c54da57803385afa0a0 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 22:51:54 +0200 Subject: [PATCH 287/678] Register `TypeDefAnalysisPass` to driver's pass manager --- crates/driver2/src/lib.rs | 2 ++ crates/hir-analysis/src/ty/mod.rs | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index 39b58d96e9..b045ec784b 100644 --- a/crates/driver2/src/lib.rs +++ 
b/crates/driver2/src/lib.rs @@ -17,6 +17,7 @@ use hir::{ }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, + ty::TypeDefAnalysisPass, HirAnalysisDb, }; @@ -144,5 +145,6 @@ fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TypeDefAnalysisPass::new(db))); pass_manager } diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 238ebcf4ee..fd3c25718e 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -11,6 +11,12 @@ pub struct TypeDefAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, } +impl<'db> TypeDefAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { fn run_on_module( &mut self, From e1a8c3dfc247bcd786020733600f4b570dc50d10 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 23:16:08 +0200 Subject: [PATCH 288/678] Add check for kind mismatch in type level application --- crates/hir-analysis/src/ty/diagnostics.rs | 35 +++++++++++++---- crates/hir-analysis/src/ty/lower.rs | 30 ++++++++++---- crates/hir-analysis/src/ty/ty.rs | 48 ++++++++++++++++++++--- 3 files changed, 93 insertions(+), 20 deletions(-) diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 7ed40404d3..8d72868f00 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -6,6 +6,10 @@ use hir::{ span::{DynLazySpan, LazySpan}, }; +use crate::HirAnalysisDb; + +use super::ty::TyId; + #[salsa::accumulator] pub struct StructDefDiagAccumulator(pub(super) TyLowerDiag); #[salsa::accumulator] @@ -19,27 +23,37 @@ pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); pub enum TyLowerDiag { InvalidType(DynLazySpan), NotFullyAppliedType(DynLazySpan), + KindMismatch(DynLazySpan, String), AssocTy(DynLazySpan), } impl TyLowerDiag { - pub(super) fn assoc_ty(span: impl Into) -> Self { - Self::AssocTy(span.into()) + pub(super) fn assoc_ty(span: DynLazySpan) -> Self { + Self::AssocTy(span) } - pub(super) fn invalid_type(span: impl Into) -> Self { - Self::InvalidType(span.into()) + pub(super) fn invalid_type(span: DynLazySpan) -> Self { + Self::InvalidType(span) } - pub fn not_fully_applied_type(span: impl Into) -> Self { - Self::NotFullyAppliedType(span.into()) + pub fn not_fully_applied_type(span: DynLazySpan) -> Self { + Self::NotFullyAppliedType(span) + } + + pub fn kind_mismatch(db: &dyn HirAnalysisDb, abs: TyId, arg: TyId, span: DynLazySpan) -> Self { + let k_abs = abs.kind(db); + let k_arg = arg.kind(db); + + let msg = format!("can't apply `{}` kind to `{}` kind", k_arg, k_abs); + Self::KindMismatch(span, msg.into()) } fn local_code(&self) -> u16 { match self { Self::InvalidType(_) => 0, Self::NotFullyAppliedType(_) => 1, - Self::AssocTy(_) => 2, + Self::KindMismatch(_, _) => 2, + Self::AssocTy(_) => 3, } } @@ -47,6 +61,7 @@ impl TyLowerDiag { match self { Self::InvalidType(_) => "expected type".to_string(), Self::NotFullyAppliedType(_) => "expected fully applied type".to_string(), + Self::KindMismatch(_, _) => "kind mismatch in type application".to_string(), Self::AssocTy(_) => "associated type is not supported 
".to_string(), } } @@ -65,6 +80,12 @@ impl TyLowerDiag { span.resolve(db), )], + Self::KindMismatch(span, msg) => vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], + Self::AssocTy(span) => vec![SubDiagnostic::new( LabelStyle::Primary, "associated type is not implemented".to_string(), diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 455e4d6174..8818c11b10 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -132,9 +132,9 @@ impl<'db> TyBuilder<'db> { .collect(); match path_ty { - Either::Left(ty) => arg_tys - .into_iter() - .fold(ty, |acc, arg| TyId::apply(self.db, acc, arg)), + Either::Left(ty) => arg_tys.into_iter().enumerate().fold(ty, |acc, (idx, arg)| { + self.ty_app(acc, arg, generic_arg_span.arg(idx).into()) + }), Either::Right(alias) => alias.subst_with(self.db, &arg_tys), } @@ -152,7 +152,7 @@ impl<'db> TyBuilder<'db> { .unwrap_or_else(|| TyId::invalid(self.db)); let ptr = TyId::ptr(self.db); - TyId::apply(self.db, ptr, pointee) + self.ty_app(ptr, pointee, span.into()) } fn lower_tuple(&mut self, elems: &[Partial], span: LazyTupleTypeSpan) -> TyId { @@ -164,10 +164,23 @@ impl<'db> TyBuilder<'db> { .map(|elem| self.lower_ty(elem, span.elem_ty(idx))) .unwrap_or_else(|| TyId::invalid(self.db)); - TyId::apply(self.db, acc, elem) + self.ty_app(acc, elem, span.elem_ty(idx).into()) }) } + /// Perform type level application. + /// If type application is not possible for the given `abs`/`arg` pair, + /// diagnostics are accumulated then returns` TyId::invalid()`. + fn ty_app(&mut self, abs: TyId, arg: TyId, span: DynLazySpan) -> TyId { + if let Some(ty) = TyId::apply(self.db, abs, arg) { + ty + } else { + self.diags + .push(TyLowerDiag::kind_mismatch(self.db, abs, arg, span)); + TyId::invalid(self.db) + } + } + fn lower_resolved_path( &mut self, path: &EarlyResolvedPath, @@ -215,7 +228,7 @@ impl<'db> TyBuilder<'db> { } } - pub(super) fn lower_generic_arg(&mut self, arg: &GenericArg, span: LazyGenericArgSpan) -> TyId { + fn lower_generic_arg(&mut self, arg: &GenericArg, span: LazyGenericArgSpan) -> TyId { match arg { GenericArg::Type(ty_arg) => ty_arg .ty @@ -358,8 +371,9 @@ impl<'db> AdtTyBuilder<'db> { } /// Verifies that the type is fully applied type. - /// If the `ty` is not a fully applied type, error diagnostics are - /// accumulated and returns `TyId::invalid()`, otherwise returns given `ty`. + /// If the `ty` is not a fully applied type, diagnostics are + /// accumulated then returns `TyId::invalid()`, otherwise returns given + /// `ty`. fn verify_fully_applied_type(&mut self, ty: TyId, span: DynLazySpan) -> TyId { if ty.is_mono_type(self.db) { ty diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 1e1e299788..6beea01857 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -1,3 +1,5 @@ +use std::fmt; + use hir::hir_def::{ prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, scope_graph::ScopeId, @@ -8,7 +10,7 @@ use crate::HirAnalysisDb; #[salsa::interned] pub struct TyId { - data: TyData, + pub data: TyData, } impl TyId { @@ -28,8 +30,20 @@ impl TyId { Self::new(db, TyData::TyCon(TyConcrete::Adt(adt))) } - pub(super) fn apply(db: &dyn HirAnalysisDb, ty: Self, arg: Self) -> Self { - Self::new(db, TyData::TyApp(ty, arg)) + /// Perform type level application. + /// If the kind is mismatched, return None. 
+ pub(super) fn apply(db: &dyn HirAnalysisDb, ty: Self, arg: Self) -> Option { + let k_ty = ty.kind(db); + let k_arg = arg.kind(db); + if k_ty.is_any() || k_arg.is_any() { + return Some(Self::new(db, TyData::Invalid)); + } + + if k_ty.is_applicable(&k_arg) { + Some(Self::new(db, TyData::TyApp(ty, arg))) + } else { + None + } } pub(super) fn invalid(db: &dyn HirAnalysisDb) -> Self { @@ -134,6 +148,30 @@ impl Kind { fn abs(lhs: Kind, rhs: Kind) -> Self { Kind::Abs(Box::new(lhs), Box::new(rhs)) } + + fn is_applicable(&self, rhs: &Self) -> bool { + match self { + Self::Abs(k_arg, _) => k_arg.as_ref() == rhs, + _ => false, + } + } + + fn is_any(&self) -> bool { + match self { + Self::Any => true, + _ => false, + } + } +} + +impl fmt::Display for Kind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Star => write!(f, "*"), + Self::Abs(lhs, rhs) => write!(f, "({} -> {})", lhs, rhs), + Self::Any => write!(f, "Any"), + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -220,9 +258,9 @@ impl HasKind for TyData { TyData::TyVar(ty_var) => ty_var.kind(db), TyData::TyParam(ty_param) => ty_param.kind.clone(), TyData::TyCon(ty_const) => ty_const.kind(db), - TyData::TyApp(lhs, rhs) => match lhs.kind(db) { + TyData::TyApp(abs, arg) => match abs.kind(db) { Kind::Abs(k_arg, k_ret) => { - debug_assert!(rhs.kind(db) == k_arg.as_ref()); + debug_assert!(k_arg.as_ref() == arg.kind(db)); k_ret.as_ref().clone() } _ => unreachable!(), From 4326b2c6fe39c732946d0858d3d036a3e8fa983a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 23:42:56 +0200 Subject: [PATCH 289/678] Fix missing `SelfTy` edge in `struct/enum/contract` --- crates/hir/src/lower/scope_builder.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs index 6b4ce57bc5..fb3747534d 100644 --- a/crates/hir/src/lower/scope_builder.rs +++ b/crates/hir/src/lower/scope_builder.rs @@ -151,6 +151,8 @@ impl<'db> ScopeGraphBuilder<'db> { inner.into(), inner.generic_params(self.db), ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); inner .name(self.db) .to_opt() @@ -165,6 +167,9 @@ impl<'db> ScopeGraphBuilder<'db> { FieldParent::Item(inner.into()), inner.fields(self.db), ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); + inner .name(self.db) .to_opt() @@ -180,6 +185,8 @@ impl<'db> ScopeGraphBuilder<'db> { inner.into(), inner.generic_params(self.db), ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); inner .name(self.db) .to_opt() From 74421a84d367823cc0b2c2c52dc03458c53d3b82 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 23:50:08 +0200 Subject: [PATCH 290/678] Add uitest for mismatched kind in type level application --- crates/uitest/fixtures/ty/kind_mismatch.fe | 8 ++++ crates/uitest/fixtures/ty/kind_mismatch.snap | 12 ++++++ crates/uitest/tests/ty.rs | 39 ++++++++++++++++++++ 3 files changed, 59 insertions(+) create mode 100644 crates/uitest/fixtures/ty/kind_mismatch.fe create mode 100644 crates/uitest/fixtures/ty/kind_mismatch.snap create mode 100644 crates/uitest/tests/ty.rs diff --git a/crates/uitest/fixtures/ty/kind_mismatch.fe b/crates/uitest/fixtures/ty/kind_mismatch.fe new file mode 100644 index 0000000000..3ad62cefe6 --- /dev/null +++ b/crates/uitest/fixtures/ty/kind_mismatch.fe @@ -0,0 +1,8 @@ +pub struct Foo { + t: T + u: U +} + +pub struct Bar { + foo: Foo +} \ No newline at end of file diff --git 
a/crates/uitest/fixtures/ty/kind_mismatch.snap b/crates/uitest/fixtures/ty/kind_mismatch.snap new file mode 100644 index 0000000000..583a248aca --- /dev/null +++ b/crates/uitest/fixtures/ty/kind_mismatch.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/kind_mismatch.fe +--- +error[3-0002]: kind mismatch in type application + ┌─ kind_mismatch.fe:7:19 + │ +7 │ foo: Foo + │ ^^^ can't apply `(* -> (* -> (* -> (* -> *))))` kind to `(* -> (* -> (* -> *)))` kind + + diff --git a/crates/uitest/tests/ty.rs b/crates/uitest/tests/ty.rs new file mode 100644 index 0000000000..c8e366cf70 --- /dev/null +++ b/crates/uitest/tests/ty.rs @@ -0,0 +1,39 @@ +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use driver::DriverDataBase; +use fe_compiler_test_utils::snap_test; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/fixtures/ty", + glob: "*.fe" +)] +fn run_ty(fixture: Fixture<&str>) { + let mut driver = DriverDataBase::default(); + let path = Path::new(fixture.path()); + let top_mod = driver.top_mod_from_file(path, fixture.content()); + driver.run_on_top_mod(top_mod); + let diags = driver.format_diags(); + snap_test!(diags, fixture.path()); +} + +#[cfg(target_family = "wasm")] +mod wasm { + use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/fixtures/ty", + glob: "*.fe", + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn run_ty(fixture: Fixture<&str>) { + let mut driver = DriverDataBase::default(); + let path = Path::new(fixture.path()); + let top_mod = driver.top_mod_from_file(path, fixture.content()); + driver.run_on_top_mod(top_mod); + } +} From 199582e02f1f8d8d7df847ed1077dcb55b3f5936 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 10 Sep 2023 23:50:26 +0200 Subject: [PATCH 291/678] Add uitest for non fully applied type in field/variant --- crates/uitest/fixtures/ty/not_fully_applied.fe | 12 ++++++++++++ .../uitest/fixtures/ty/not_fully_applied.snap | 18 ++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 crates/uitest/fixtures/ty/not_fully_applied.fe create mode 100644 crates/uitest/fixtures/ty/not_fully_applied.snap diff --git a/crates/uitest/fixtures/ty/not_fully_applied.fe b/crates/uitest/fixtures/ty/not_fully_applied.fe new file mode 100644 index 0000000000..8a08195a38 --- /dev/null +++ b/crates/uitest/fixtures/ty/not_fully_applied.fe @@ -0,0 +1,12 @@ +pub struct Foo { + t: T + u: U +} + +pub struct Bar { + f: Foo +} + +pub enum Baz { + Variant(Foo) +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/not_fully_applied.snap b/crates/uitest/fixtures/ty/not_fully_applied.snap new file mode 100644 index 0000000000..dee7f43116 --- /dev/null +++ b/crates/uitest/fixtures/ty/not_fully_applied.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/not_fully_applied.fe +--- +error[3-0001]: expected fully applied type + ┌─ not_fully_applied.fe:7:8 + │ +7 │ f: Foo + │ ^^^^^^^^ expected fully applied type here + +error[3-0002]: kind mismatch in type application + ┌─ not_fully_applied.fe:11:13 + │ +11 │ Variant(Foo) + │ ^^^^^^^^ can't apply `(* -> (* -> (* -> *)))` kind to `(* -> *)` kind + + From 311623ae3da327d371e95edfa5039daabecf648a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 11 Sep 2023 00:19:50 +0200 Subject: [PATCH 292/678] Improve kind related error messages --- crates/hir-analysis/src/ty/lower.rs | 37 +++++++++++++------ 
crates/hir-analysis/src/ty/mod.rs | 2 +- crates/uitest/fixtures/ty/kind_mismatch.snap | 2 +- .../uitest/fixtures/ty/not_fully_applied.fe | 17 ++++++--- .../uitest/fixtures/ty/not_fully_applied.snap | 16 +++++--- 5 files changed, 50 insertions(+), 24 deletions(-) diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 8818c11b10..9e27697632 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -159,12 +159,17 @@ impl<'db> TyBuilder<'db> { let len = elems.len(); let tuple = TyId::tuple(self.db, len); elems.iter().enumerate().fold(tuple, |acc, (idx, elem)| { - let elem = elem + let elem_ty = elem .to_opt() .map(|elem| self.lower_ty(elem, span.elem_ty(idx))) .unwrap_or_else(|| TyId::invalid(self.db)); + let (elem_ty, diag) = + verify_fully_applied_type(self.db, elem_ty, span.elem_ty(idx).into()); + if let Some(diag) = diag { + self.diags.push(diag); + } - self.ty_app(acc, elem, span.elem_ty(idx).into()) + self.ty_app(acc, elem_ty, span.elem_ty(idx).into()) }) } @@ -280,10 +285,6 @@ impl<'db> AdtTyBuilder<'db> { let param = lower_generic_param(self.db, self.adt.into(), idx); self.params.push(param); } - for idx in 0..params.data(hir_db).len() { - let param_ty = lower_generic_param(self.db, self.adt.into(), idx); - self.params.push(param_ty); - } } fn collect_variants(&mut self) { @@ -375,12 +376,26 @@ impl<'db> AdtTyBuilder<'db> { /// accumulated then returns `TyId::invalid()`, otherwise returns given /// `ty`. fn verify_fully_applied_type(&mut self, ty: TyId, span: DynLazySpan) -> TyId { - if ty.is_mono_type(self.db) { - ty - } else { - self.diags.push(TyLowerDiag::not_fully_applied_type(span)); - TyId::invalid(self.db) + let (ty, diag) = verify_fully_applied_type(self.db, ty, span); + if let Some(diag) = diag { + self.diags.push(diag); } + ty + } +} + +fn verify_fully_applied_type( + db: &dyn HirAnalysisDb, + ty: TyId, + span: DynLazySpan, +) -> (TyId, Option) { + if ty.is_mono_type(db) { + (ty, None) + } else { + ( + TyId::invalid(db), + TyLowerDiag::not_fully_applied_type(span).into(), + ) } } diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index fd3c25718e..a24951d592 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -49,7 +49,7 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { for contract in top_mod.all_contracts(hir_db) { lower::lower_contract(self.db, *contract); diags.extend( - lower::lower_contract::accumulated::( + lower::lower_contract::accumulated::( self.db, *contract, ) .into_iter() diff --git a/crates/uitest/fixtures/ty/kind_mismatch.snap b/crates/uitest/fixtures/ty/kind_mismatch.snap index 583a248aca..9b784f47bb 100644 --- a/crates/uitest/fixtures/ty/kind_mismatch.snap +++ b/crates/uitest/fixtures/ty/kind_mismatch.snap @@ -7,6 +7,6 @@ error[3-0002]: kind mismatch in type application ┌─ kind_mismatch.fe:7:19 │ 7 │ foo: Foo - │ ^^^ can't apply `(* -> (* -> (* -> (* -> *))))` kind to `(* -> (* -> (* -> *)))` kind + │ ^^^ can't apply `(* -> (* -> *))` kind to `(* -> *)` kind diff --git a/crates/uitest/fixtures/ty/not_fully_applied.fe b/crates/uitest/fixtures/ty/not_fully_applied.fe index 8a08195a38..dd20ff68b1 100644 --- a/crates/uitest/fixtures/ty/not_fully_applied.fe +++ b/crates/uitest/fixtures/ty/not_fully_applied.fe @@ -1,12 +1,17 @@ -pub struct Foo { +pub struct Gen { t: T u: U } -pub struct Bar { - f: Foo +pub struct MyS { + f: Gen + u: (i32, i32) } -pub enum Baz { - Variant(Foo) -} \ No newline at end of file 
+pub enum MyE { + Variant(Gen) +} + +pub contract MyC { + f: Gen +} diff --git a/crates/uitest/fixtures/ty/not_fully_applied.snap b/crates/uitest/fixtures/ty/not_fully_applied.snap index dee7f43116..3317604041 100644 --- a/crates/uitest/fixtures/ty/not_fully_applied.snap +++ b/crates/uitest/fixtures/ty/not_fully_applied.snap @@ -6,13 +6,19 @@ input_file: crates/uitest/fixtures/ty/not_fully_applied.fe error[3-0001]: expected fully applied type ┌─ not_fully_applied.fe:7:8 │ -7 │ f: Foo +7 │ f: Gen │ ^^^^^^^^ expected fully applied type here -error[3-0002]: kind mismatch in type application - ┌─ not_fully_applied.fe:11:13 +error[3-0001]: expected fully applied type + ┌─ not_fully_applied.fe:12:13 + │ +12 │ Variant(Gen) + │ ^^^^^^^^ expected fully applied type here + +error[3-0001]: expected fully applied type + ┌─ not_fully_applied.fe:16:8 │ -11 │ Variant(Foo) - │ ^^^^^^^^ can't apply `(* -> (* -> (* -> *)))` kind to `(* -> *)` kind +16 │ f: Gen + │ ^^^^^^^^ expected fully applied type here From 771a37a01827167086c6689aa62628f430da5948 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 11 Sep 2023 16:10:13 +0200 Subject: [PATCH 293/678] Introduce `AdtRef` and `AdtRefId` --- crates/hir-analysis/src/lib.rs | 9 +-- crates/hir-analysis/src/ty/diagnostics.rs | 6 +- crates/hir-analysis/src/ty/lower.rs | 77 ++++++++++------------- crates/hir-analysis/src/ty/mod.rs | 58 ++++++++--------- crates/hir-analysis/src/ty/ty.rs | 55 +++++++++++----- 5 files changed, 100 insertions(+), 105 deletions(-) diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index d01b51e2c3..750f282631 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -11,18 +11,15 @@ pub struct Jar( ty::ty::TyId, ty::ty::ty_kind, ty::ty::AdtDef, + ty::ty::AdtRefId, /// Type lowering. - ty::lower::lower_enum, - ty::lower::lower_struct, - ty::lower::lower_contract, + ty::lower::lower_adt, ty::lower::lower_type_alias, // Trait resolution. 
ty::trait_::TraitDef, ty::trait_::TraitInstId, - ty::diagnostics::StructDefDiagAccumulator, - ty::diagnostics::EnumDefDiagAccumulator, + ty::diagnostics::AdtDefDiagAccumulator, ty::diagnostics::TypeAliasDefDiagAccumulator, - ty::diagnostics::ContractDefDiagAccumulator, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 8d72868f00..a311e6faee 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -11,11 +11,7 @@ use crate::HirAnalysisDb; use super::ty::TyId; #[salsa::accumulator] -pub struct StructDefDiagAccumulator(pub(super) TyLowerDiag); -#[salsa::accumulator] -pub struct EnumDefDiagAccumulator(pub(super) TyLowerDiag); -#[salsa::accumulator] -pub struct ContractDefDiagAccumulator(pub(super) TyLowerDiag); +pub struct AdtDefDiagAccumulator(pub(super) TyLowerDiag); #[salsa::accumulator] pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 9e27697632..f15e8eb2d0 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -1,9 +1,9 @@ use either::Either; use hir::{ hir_def::{ - kw, scope_graph::ScopeId, Contract, Enum, FieldDefListId, GenericArg, GenericArgListId, - GenericParam, ItemKind, Partial, PathId, Struct, TypeAlias as HirTypeAlias, - TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, + kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, + ItemKind, Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, + TypeKind as HirTyKind, VariantDefListId, }, span::{types::LazyTySpan, DynLazySpan}, visitor::prelude::{ @@ -16,40 +16,20 @@ use crate::{ name_resolution::{ resolve_path_early, resolve_segments_early, EarlyResolvedPath, NameDomain, NameResKind, }, - ty::diagnostics::{ - ContractDefDiagAccumulator, EnumDefDiagAccumulator, StructDefDiagAccumulator, - }, + ty::diagnostics::AdtDefDiagAccumulator, HirAnalysisDb, }; use super::{ diagnostics::TyLowerDiag, - ty::{AdtDef, AdtId, AdtVariant, Kind, TyData, TyId, TyParam}, + ty::{AdtDef, AdtRef, AdtRefId, AdtVariant, Kind, TyData, TyId, TyParam}, }; #[salsa::tracked] -pub fn lower_struct(db: &dyn HirAnalysisDb, struct_: Struct) -> TyId { - let (ty, diags) = AdtTyBuilder::new(db, struct_.into()).build(); - for diag in diags { - StructDefDiagAccumulator::push(db, diag) - } - ty -} - -#[salsa::tracked] -pub fn lower_enum(db: &dyn HirAnalysisDb, enum_: Enum) -> TyId { - let (ty, diags) = AdtTyBuilder::new(db, enum_.into()).build(); - for diag in diags { - EnumDefDiagAccumulator::push(db, diag) - } - ty -} - -#[salsa::tracked] -pub fn lower_contract(db: &dyn HirAnalysisDb, contract: Contract) -> TyId { - let (ty, diags) = AdtTyBuilder::new(db, contract.into()).build(); +pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> TyId { + let (ty, diags) = AdtTyBuilder::new(db, adt).build(); for diag in diags { - ContractDefDiagAccumulator::push(db, diag) + AdtDefDiagAccumulator::push(db, diag) } ty } @@ -222,9 +202,18 @@ impl<'db> TyBuilder<'db> { }; match item { - ItemKind::Enum(enum_) => Either::Left(lower_enum(self.db, enum_)), - ItemKind::Struct(struct_) => Either::Left(lower_struct(self.db, struct_)), - ItemKind::Contract(contract) => Either::Left(lower_contract(self.db, contract)), + ItemKind::Enum(enum_) => { + let adt_ref = AdtRefId::from_enum(self.db, enum_); + Either::Left(lower_adt(self.db, 
adt_ref)) + } + ItemKind::Struct(struct_) => { + let adt_ref = AdtRefId::from_struct(self.db, struct_); + Either::Left(lower_adt(self.db, adt_ref)) + } + ItemKind::Contract(contract) => { + let adt_ref = AdtRefId::from_contract(self.db, contract); + Either::Left(lower_adt(self.db, adt_ref)) + } ItemKind::TypeAlias(alias) => Either::Right(lower_type_alias(self.db, alias)), _ => { self.diags.push(TyLowerDiag::invalid_type(span)); @@ -248,14 +237,14 @@ impl<'db> TyBuilder<'db> { struct AdtTyBuilder<'db> { db: &'db dyn HirAnalysisDb, - adt: AdtId, + adt: AdtRefId, params: Vec, variants: Vec, diags: Vec, } impl<'db> AdtTyBuilder<'db> { - fn new(db: &'db dyn HirAnalysisDb, adt: AdtId) -> Self { + fn new(db: &'db dyn HirAnalysisDb, adt: AdtRefId) -> Self { Self { db, adt, @@ -275,31 +264,31 @@ impl<'db> AdtTyBuilder<'db> { fn collect_params(&mut self) { let hir_db = self.db.as_hir_db(); - let params = match self.adt { - AdtId::Struct(struct_) => struct_.generic_params(hir_db), - AdtId::Enum(enum_) => enum_.generic_params(hir_db), - AdtId::Contract(_) => return, + let params = match self.adt.data(self.db) { + AdtRef::Struct(struct_) => struct_.generic_params(hir_db), + AdtRef::Enum(enum_) => enum_.generic_params(hir_db), + AdtRef::Contract(_) => return, }; for idx in 0..params.len(hir_db) { - let param = lower_generic_param(self.db, self.adt.into(), idx); + let param = lower_generic_param(self.db, self.adt.as_item(self.db), idx); self.params.push(param); } } fn collect_variants(&mut self) { - match self.adt { - AdtId::Struct(struct_) => { + match self.adt.data(self.db) { + AdtRef::Struct(struct_) => { let span = struct_.lazy_span(); self.collect_field_types(struct_.fields(self.db.as_hir_db()), span.fields()); } - AdtId::Contract(contract) => { + AdtRef::Contract(contract) => { let span = contract.lazy_span(); self.collect_field_types(contract.fields(self.db.as_hir_db()), span.fields()) } - AdtId::Enum(enum_) => { + AdtRef::Enum(enum_) => { let span = enum_.lazy_span(); self.collect_enum_variant_types( enum_.variants(self.db.as_hir_db()), @@ -317,7 +306,7 @@ impl<'db> AdtTyBuilder<'db> { .for_each(|(i, field)| { let ty = match field.ty.to_opt() { Some(ty) => { - let mut builder = TyBuilder::new(self.db, self.adt.scope()); + let mut builder = TyBuilder::new(self.db, self.adt.data(self.db).scope()); let ty_span = span.field(i).ty(); let ty = builder.lower_ty(ty, ty_span.clone()); @@ -350,7 +339,7 @@ impl<'db> AdtTyBuilder<'db> { .for_each(|(i, variant)| { let tys = match variant.ty { Some(ty) => { - let mut builder = TyBuilder::new(self.db, self.adt.scope()); + let mut builder = TyBuilder::new(self.db, self.adt.scope(self.db)); let ty_span = span.variant(i).ty(); let ty = builder.lower_ty(ty, ty_span.clone()); diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index a24951d592..736824657f 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -2,6 +2,8 @@ use hir::analysis_pass::ModuleAnalysisPass; use crate::HirAnalysisDb; +use self::{diagnostics::AdtDefDiagAccumulator, ty::AdtRefId}; + pub mod diagnostics; pub mod lower; pub mod trait_; @@ -22,41 +24,31 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { &mut self, top_mod: hir::hir_def::TopLevelMod, ) -> Vec> { - let mut diags = vec![]; let hir_db = self.db.as_hir_db(); - for struct_ in top_mod.all_structs(hir_db) { - lower::lower_struct(self.db, *struct_); - diags.extend( - lower::lower_struct::accumulated::( - self.db, *struct_, - ) - .into_iter() - .map(|diag| 
Box::new(diag) as _), - ) - } - - for enum_ in top_mod.all_enums(hir_db) { - lower::lower_enum(self.db, *enum_); - diags.extend( - lower::lower_enum::accumulated::( - self.db, *enum_, - ) - .into_iter() - .map(|diag| Box::new(diag) as _), + let adts = top_mod + .all_structs(hir_db) + .iter() + .map(|s| AdtRefId::from_struct(self.db, *s)) + .chain( + top_mod + .all_enums(hir_db) + .iter() + .map(|e| AdtRefId::from_enum(self.db, *e)), ) - } - - for contract in top_mod.all_contracts(hir_db) { - lower::lower_contract(self.db, *contract); - diags.extend( - lower::lower_contract::accumulated::( - self.db, *contract, - ) + .chain( + top_mod + .all_contracts(hir_db) + .iter() + .map(|c| AdtRefId::from_contract(self.db, *c)), + ); + + adts.map(|adt| { + lower::lower_adt(self.db, adt); + lower::lower_adt::accumulated::(self.db, adt) .into_iter() - .map(|diag| Box::new(diag) as _), - ) - } - - diags + .map(|diag| Box::new(diag) as _) + }) + .flatten() + .collect() } } diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 6beea01857..183e444a02 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -86,7 +86,7 @@ impl TyId { #[salsa::tracked] pub struct AdtDef { - pub adt: AdtId, + pub adt: AdtRefId, #[return_ref] pub params: Vec, pub variants: Vec, @@ -221,29 +221,50 @@ pub enum PrimTy { Ptr, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] -pub enum AdtId { - Enum(Enum), - Struct(Struct), - Contract(Contract), +#[salsa::interned] +pub struct AdtRefId { + pub data: AdtRef, } -impl Into for AdtId { - fn into(self) -> ItemKind { - match self { - Self::Enum(enum_) => ItemKind::Enum(enum_), - Self::Struct(struct_) => ItemKind::Struct(struct_), - Self::Contract(contract) => ItemKind::Contract(contract), +impl AdtRefId { + pub fn scope(self, db: &dyn HirAnalysisDb) -> ScopeId { + self.data(db).scope() + } + + pub fn as_item(self, db: &dyn HirAnalysisDb) -> ItemKind { + match self.data(db) { + AdtRef::Enum(e) => e.into(), + AdtRef::Struct(s) => s.into(), + AdtRef::Contract(c) => c.into(), } } + + pub fn from_enum(db: &dyn HirAnalysisDb, enum_: Enum) -> Self { + Self::new(db, AdtRef::Enum(enum_)) + } + + pub fn from_struct(db: &dyn HirAnalysisDb, struct_: Struct) -> Self { + Self::new(db, AdtRef::Struct(struct_)) + } + + pub fn from_contract(db: &dyn HirAnalysisDb, contract: Contract) -> Self { + Self::new(db, AdtRef::Contract(contract)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum AdtRef { + Enum(Enum), + Struct(Struct), + Contract(Contract), } -impl AdtId { - pub(super) fn scope(self) -> ScopeId { +impl AdtRef { + pub fn scope(self) -> ScopeId { match self { - Self::Enum(enum_) => enum_.scope(), - Self::Struct(struct_) => struct_.scope(), - Self::Contract(contract_) => contract_.scope(), + Self::Enum(e) => e.scope(), + Self::Struct(s) => s.scope(), + Self::Contract(c) => c.scope(), } } } From 140ae74bc7a0a388bd9f99d8a0385e49eec495ec Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 11 Sep 2023 22:18:16 +0200 Subject: [PATCH 294/678] Make field type evaluation lazy; Define `InvalidCause` --- crates/hir-analysis/src/ty/diagnostics.rs | 56 ++++++++- crates/hir-analysis/src/ty/lower.rs | 138 ++++++---------------- crates/hir-analysis/src/ty/ty.rs | 88 +++++++++++--- 3 files changed, 155 insertions(+), 127 deletions(-) diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index a311e6faee..03d4ebda7a 100644 --- 
a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -8,7 +8,7 @@ use hir::{ use crate::HirAnalysisDb; -use super::ty::TyId; +use super::ty::{AdtRefId, TyId}; #[salsa::accumulator] pub struct AdtDefDiagAccumulator(pub(super) TyLowerDiag); @@ -20,14 +20,14 @@ pub enum TyLowerDiag { InvalidType(DynLazySpan), NotFullyAppliedType(DynLazySpan), KindMismatch(DynLazySpan, String), + RecursiveType { + primary_span: DynLazySpan, + cycle_participants: Vec, + }, AssocTy(DynLazySpan), } impl TyLowerDiag { - pub(super) fn assoc_ty(span: DynLazySpan) -> Self { - Self::AssocTy(span) - } - pub(super) fn invalid_type(span: DynLazySpan) -> Self { Self::InvalidType(span) } @@ -44,20 +44,43 @@ impl TyLowerDiag { Self::KindMismatch(span, msg.into()) } + pub(super) fn recursive_type( + db: &dyn HirAnalysisDb, + primary_span: DynLazySpan, + participants: Vec, + ) -> Self { + let cycle_participants = participants.into_iter().map(|p| p.name_span(db)).collect(); + + Self::RecursiveType { + primary_span, + cycle_participants, + } + } + + pub(super) fn assoc_ty(span: DynLazySpan) -> Self { + Self::AssocTy(span) + } + fn local_code(&self) -> u16 { match self { Self::InvalidType(_) => 0, Self::NotFullyAppliedType(_) => 1, Self::KindMismatch(_, _) => 2, - Self::AssocTy(_) => 3, + Self::RecursiveType { .. } => 3, + Self::AssocTy(_) => 4, } } fn message(&self) -> String { match self { Self::InvalidType(_) => "expected type".to_string(), + Self::NotFullyAppliedType(_) => "expected fully applied type".to_string(), + Self::KindMismatch(_, _) => "kind mismatch in type application".to_string(), + + Self::RecursiveType { .. } => "recursive type is not allowed".to_string(), + Self::AssocTy(_) => "associated type is not supported ".to_string(), } } @@ -82,6 +105,27 @@ impl TyLowerDiag { span.resolve(db), )], + Self::RecursiveType { + primary_span, + cycle_participants, + } => { + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + "causing cycle here".to_string(), + primary_span.resolve(db), + )]; + + diags.extend(cycle_participants.iter().map(|span| { + SubDiagnostic::new( + LabelStyle::Secondary, + format!("this type is part of the cycle"), + span.resolve(db), + ) + })); + + diags + } + Self::AssocTy(span) => vec![SubDiagnostic::new( LabelStyle::Primary, "associated type is not implemented".to_string(), diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index f15e8eb2d0..c419850004 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -6,10 +6,7 @@ use hir::{ TypeKind as HirTyKind, VariantDefListId, }, span::{types::LazyTySpan, DynLazySpan}, - visitor::prelude::{ - LazyFieldDefListSpan, LazyGenericArgSpan, LazyPathTypeSpan, LazyPtrTypeSpan, - LazyTupleTypeSpan, LazyVariantDefListSpan, - }, + visitor::prelude::{LazyGenericArgSpan, LazyPathTypeSpan, LazyPtrTypeSpan, LazyTupleTypeSpan}, }; use crate::{ @@ -22,7 +19,7 @@ use crate::{ use super::{ diagnostics::TyLowerDiag, - ty::{AdtDef, AdtRef, AdtRefId, AdtVariant, Kind, TyData, TyId, TyParam}, + ty::{AdtDef, AdtRef, AdtRefId, AdtVariant, InvalidCause, Kind, TyData, TyId, TyParam}, }; #[salsa::tracked] @@ -100,7 +97,7 @@ impl<'db> TyBuilder<'db> { let res = resolve_path_early(self.db, path, self.scope); self.lower_resolved_path(&res, span.path().into()) }) - .unwrap_or_else(|| Either::Left(TyId::invalid(self.db))); + .unwrap_or_else(|| Either::Left(TyId::invalid(self.db, InvalidCause::Other))); let generic_arg_span = span.generic_args(); @@ -129,7 
+126,7 @@ impl<'db> TyBuilder<'db> { let pointee = pointee .to_opt() .map(|pointee| self.lower_ty(pointee, span.pointee())) - .unwrap_or_else(|| TyId::invalid(self.db)); + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); let ptr = TyId::ptr(self.db); self.ty_app(ptr, pointee, span.into()) @@ -142,12 +139,8 @@ impl<'db> TyBuilder<'db> { let elem_ty = elem .to_opt() .map(|elem| self.lower_ty(elem, span.elem_ty(idx))) - .unwrap_or_else(|| TyId::invalid(self.db)); - let (elem_ty, diag) = - verify_fully_applied_type(self.db, elem_ty, span.elem_ty(idx).into()); - if let Some(diag) = diag { - self.diags.push(diag); - } + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); + let elem_ty = verify_fully_applied_type(self.db, elem_ty, span.elem_ty(idx).into()); self.ty_app(acc, elem_ty, span.elem_ty(idx).into()) }) @@ -157,13 +150,7 @@ impl<'db> TyBuilder<'db> { /// If type application is not possible for the given `abs`/`arg` pair, /// diagnostics are accumulated then returns` TyId::invalid()`. fn ty_app(&mut self, abs: TyId, arg: TyId, span: DynLazySpan) -> TyId { - if let Some(ty) = TyId::apply(self.db, abs, arg) { - ty - } else { - self.diags - .push(TyLowerDiag::kind_mismatch(self.db, abs, arg, span)); - TyId::invalid(self.db) - } + TyId::apply(self.db, abs, arg) } fn lower_resolved_path( @@ -176,13 +163,11 @@ impl<'db> TyBuilder<'db> { Ok(res) => res, // This error is already handled by the name resolution. - Err(_) => return Either::Left(TyId::invalid(self.db)), + Err(_) => return Either::Left(TyId::invalid(self.db, InvalidCause::Other)), }, EarlyResolvedPath::Partial { .. } => { - // TODO: Fix here when we add an associated type. - self.diags.push(TyLowerDiag::assoc_ty(span)); - return Either::Left(TyId::invalid(self.db)); + return Either::Left(TyId::invalid(self.db, InvalidCause::AssocTy)); } }; @@ -215,10 +200,7 @@ impl<'db> TyBuilder<'db> { Either::Left(lower_adt(self.db, adt_ref)) } ItemKind::TypeAlias(alias) => Either::Right(lower_type_alias(self.db, alias)), - _ => { - self.diags.push(TyLowerDiag::invalid_type(span)); - Either::Left(TyId::invalid(self.db)) - } + _ => Either::Left(TyId::invalid(self.db, InvalidCause::ReferenceToNonType)), } } @@ -228,7 +210,7 @@ impl<'db> TyBuilder<'db> { .ty .to_opt() .map(|ty| self.lower_ty(ty, span.into_type_arg().ty())) - .unwrap_or_else(|| TyId::invalid(self.db)), + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)), GenericArg::Const(_) => todo!(), } @@ -280,75 +262,42 @@ impl<'db> AdtTyBuilder<'db> { match self.adt.data(self.db) { AdtRef::Struct(struct_) => { let span = struct_.lazy_span(); - self.collect_field_types(struct_.fields(self.db.as_hir_db()), span.fields()); + self.collect_field_types(struct_.fields(self.db.as_hir_db())); } AdtRef::Contract(contract) => { let span = contract.lazy_span(); - self.collect_field_types(contract.fields(self.db.as_hir_db()), span.fields()) + self.collect_field_types(contract.fields(self.db.as_hir_db())) } AdtRef::Enum(enum_) => { let span = enum_.lazy_span(); - self.collect_enum_variant_types( - enum_.variants(self.db.as_hir_db()), - span.variants(), - ) + self.collect_enum_variant_types(enum_.variants(self.db.as_hir_db())) } }; } - fn collect_field_types(&mut self, fields: FieldDefListId, span: LazyFieldDefListSpan) { - fields - .data(self.db.as_hir_db()) - .iter() - .enumerate() - .for_each(|(i, field)| { - let ty = match field.ty.to_opt() { - Some(ty) => { - let mut builder = TyBuilder::new(self.db, self.adt.data(self.db).scope()); - let ty_span = 
span.field(i).ty(); - - let ty = builder.lower_ty(ty, ty_span.clone()); - let ty = self.verify_fully_applied_type(ty, ty_span.into()); - - self.diags.extend(builder.diags); - ty - } - - None => TyId::invalid(self.db), - }; - - let variant = AdtVariant { - name: field.name, - tys: vec![ty], - }; - self.variants.push(variant); - }) + fn collect_field_types(&mut self, fields: FieldDefListId) { + fields.data(self.db.as_hir_db()).iter().for_each(|field| { + let variant = AdtVariant { + name: field.name, + tys: vec![field.ty], + }; + self.variants.push(variant); + }) } - fn collect_enum_variant_types( - &mut self, - variants: VariantDefListId, - span: LazyVariantDefListSpan, - ) { + fn collect_enum_variant_types(&mut self, variants: VariantDefListId) { variants .data(self.db.as_hir_db()) .iter() .enumerate() .for_each(|(i, variant)| { + // TODO: FIX here when record variant is introduced. let tys = match variant.ty { Some(ty) => { - let mut builder = TyBuilder::new(self.db, self.adt.scope(self.db)); - let ty_span = span.variant(i).ty(); - - let ty = builder.lower_ty(ty, ty_span.clone()); - let ty = self.verify_fully_applied_type(ty, ty_span.into()); - - self.diags.extend(builder.diags); - vec![ty] + vec![Some(ty).into()] } - None => vec![], }; @@ -359,33 +308,6 @@ impl<'db> AdtTyBuilder<'db> { self.variants.push(variant) }) } - - /// Verifies that the type is fully applied type. - /// If the `ty` is not a fully applied type, diagnostics are - /// accumulated then returns `TyId::invalid()`, otherwise returns given - /// `ty`. - fn verify_fully_applied_type(&mut self, ty: TyId, span: DynLazySpan) -> TyId { - let (ty, diag) = verify_fully_applied_type(self.db, ty, span); - if let Some(diag) = diag { - self.diags.push(diag); - } - ty - } -} - -fn verify_fully_applied_type( - db: &dyn HirAnalysisDb, - ty: TyId, - span: DynLazySpan, -) -> (TyId, Option) { - if ty.is_mono_type(db) { - (ty, None) - } else { - ( - TyId::invalid(db), - TyLowerDiag::not_fully_applied_type(span).into(), - ) - } } fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> TyId { @@ -406,7 +328,7 @@ fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> Ty }; TyId::new(db, TyData::TyParam(ty_param)) } else { - TyId::new(db, TyData::Invalid) + TyId::invalid(db, InvalidCause::Other) } } GenericParam::Const(_) => { @@ -414,3 +336,11 @@ fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> Ty } } } + +fn verify_fully_applied_type(db: &dyn HirAnalysisDb, ty: TyId, span: DynLazySpan) -> TyId { + if ty.is_mono_type(db) { + ty + } else { + TyId::invalid(db, InvalidCause::NotFullyApplied) + } +} diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 183e444a02..44ad6c478a 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -1,9 +1,12 @@ use std::fmt; -use hir::hir_def::{ - prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, - scope_graph::ScopeId, - Contract, Enum, IdentId, ItemKind, Partial, Struct, +use hir::{ + hir_def::{ + prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, + scope_graph::ScopeId, + Contract, Enum, IdentId, ItemKind, Partial, Struct, TypeId as HirTyId, + }, + span::DynLazySpan, }; use crate::HirAnalysisDb; @@ -18,6 +21,20 @@ impl TyId { ty_kind(db, self) } + pub fn is_invalid(self, db: &dyn HirAnalysisDb) -> bool { + match self.data(db) { + TyData::Invalid(_) => true, + _ => false, + } + } + + pub fn invalid_cause(self, db: &dyn HirAnalysisDb) 
-> Option { + match self.data(db) { + TyData::Invalid(cause) => Some(cause), + _ => None, + } + } + pub(super) fn ptr(db: &dyn HirAnalysisDb) -> Self { Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::Ptr))) } @@ -31,23 +48,24 @@ impl TyId { } /// Perform type level application. - /// If the kind is mismatched, return None. - pub(super) fn apply(db: &dyn HirAnalysisDb, ty: Self, arg: Self) -> Option { - let k_ty = ty.kind(db); - let k_arg = arg.kind(db); - if k_ty.is_any() || k_arg.is_any() { - return Some(Self::new(db, TyData::Invalid)); + /// If the kind is mismatched, return `TyData::Invalid`. + pub(super) fn apply(db: &dyn HirAnalysisDb, abs: Self, arg: Self) -> TyId { + if abs.is_invalid(db) || arg.is_invalid(db) { + return TyId::invalid(db, InvalidCause::Other); } + let k_ty = abs.kind(db); + let k_arg = arg.kind(db); + if k_ty.is_applicable(&k_arg) { - Some(Self::new(db, TyData::TyApp(ty, arg))) + Self::new(db, TyData::TyApp(abs, arg)) } else { - None + Self::invalid(db, InvalidCause::KindMismatch { abs, arg }) } } - pub(super) fn invalid(db: &dyn HirAnalysisDb) -> Self { - Self::new(db, TyData::Invalid) + pub(super) fn invalid(db: &dyn HirAnalysisDb, cause: InvalidCause) -> Self { + Self::new(db, TyData::Invalid(cause)) } pub(super) fn from_hir_prim_ty(db: &dyn HirAnalysisDb, hir_prim: HirPrimTy) -> Self { @@ -98,7 +116,7 @@ pub struct AdtVariant { /// Fields of the variant. /// If the adt is an struct or contract, the length of the vector is always /// 1. - pub tys: Vec, + pub tys: Vec>, } #[salsa::tracked(return_ref)] @@ -127,7 +145,26 @@ pub enum TyData { // Invalid type which means the type is not defined. // This type can be unified with any other types. - Invalid, + Invalid(InvalidCause), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum InvalidCause { + /// Type is not fully applied where it is required. + NotFullyApplied, + + /// Kind mismatch in type level application. + KindMismatch { abs: TyId, arg: TyId }, + + /// Associated Type is not allowed at the moment. + AssocTy, + + /// Type is not defined even though the name resolution is succeeded. + ReferenceToNonType, + + /// `Other` indicates the cause is already reported in other analysis + /// passes, e.g., parser or name resolution. 
+ Other, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -239,6 +276,23 @@ impl AdtRefId { } } + pub fn name(self, db: &dyn HirAnalysisDb) -> IdentId { + let hir_db = db.as_hir_db(); + match self.data(db) { + AdtRef::Enum(e) => e.name(hir_db), + AdtRef::Struct(s) => s.name(hir_db), + AdtRef::Contract(c) => c.name(hir_db), + } + .to_opt() + .unwrap_or_else(|| IdentId::new(hir_db, "".to_string())) + } + + pub fn name_span(self, db: &dyn HirAnalysisDb) -> DynLazySpan { + self.scope(db) + .name_span(db.as_hir_db()) + .unwrap_or_else(|| DynLazySpan::invalid()) + } + pub fn from_enum(db: &dyn HirAnalysisDb, enum_: Enum) -> Self { Self::new(db, AdtRef::Enum(enum_)) } @@ -286,7 +340,7 @@ impl HasKind for TyData { } _ => unreachable!(), }, - TyData::Invalid => Kind::Any, + TyData::Invalid(_) => Kind::Any, } } } From 6bf8f2af5da3ad44998577095eb253044a775c83 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 11 Sep 2023 22:32:18 +0200 Subject: [PATCH 295/678] Add `lower_hir_ty` tracked fn --- crates/hir-analysis/src/lib.rs | 1 + crates/hir-analysis/src/ty/lower.rs | 107 ++++++++++------------------ crates/hir-analysis/src/ty/ty.rs | 9 +-- 3 files changed, 39 insertions(+), 78 deletions(-) diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 750f282631..e87e379c42 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -13,6 +13,7 @@ pub struct Jar( ty::ty::AdtDef, ty::ty::AdtRefId, /// Type lowering. + ty::lower::lower_hir_ty, ty::lower::lower_adt, ty::lower::lower_type_alias, // Trait resolution. diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index c419850004..a36746e68c 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -1,12 +1,8 @@ use either::Either; -use hir::{ - hir_def::{ - kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, - ItemKind, Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, - TypeKind as HirTyKind, VariantDefListId, - }, - span::{types::LazyTySpan, DynLazySpan}, - visitor::prelude::{LazyGenericArgSpan, LazyPathTypeSpan, LazyPtrTypeSpan, LazyTupleTypeSpan}, +use hir::hir_def::{ + kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, ItemKind, + Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, + VariantDefListId, }; use crate::{ @@ -22,6 +18,11 @@ use super::{ ty::{AdtDef, AdtRef, AdtRefId, AdtVariant, InvalidCause, Kind, TyData, TyId, TyParam}, }; +#[salsa::tracked] +pub fn lower_hir_ty(db: &dyn HirAnalysisDb, ty: HirTyId, scope: ScopeId) -> TyId { + TyBuilder::new(db, scope).lower_ty(ty) +} + #[salsa::tracked] pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> TyId { let (ty, diags) = AdtTyBuilder::new(db, adt).build(); @@ -57,27 +58,22 @@ impl TyAlias { pub(crate) struct TyBuilder<'db> { db: &'db dyn HirAnalysisDb, scope: ScopeId, - diags: Vec, } impl<'db> TyBuilder<'db> { pub(super) fn new(db: &'db dyn HirAnalysisDb, scope: ScopeId) -> Self { - Self { - db, - scope, - diags: Vec::new(), - } + Self { db, scope } } - pub(super) fn lower_ty(&mut self, ty: HirTyId, span: LazyTySpan) -> TyId { + pub(super) fn lower_ty(&mut self, ty: HirTyId) -> TyId { match ty.data(self.db.as_hir_db()) { - HirTyKind::Ptr(pointee) => self.lower_ptr(*pointee, span.into_ptr_type()), + HirTyKind::Ptr(pointee) => self.lower_ptr(*pointee), - HirTyKind::Path(path, args) => self.lower_path(*path, *args, span.into_path_type()), + 
HirTyKind::Path(path, args) => self.lower_path(*path, *args), - HirTyKind::SelfType => self.lower_self_ty(span), + HirTyKind::SelfType => self.lower_self_ty(), - HirTyKind::Tuple(elems) => self.lower_tuple(elems, span.into_tuple_type()), + HirTyKind::Tuple(elems) => self.lower_tuple(elems), HirTyKind::Array(_, _) => { todo!() @@ -85,79 +81,62 @@ impl<'db> TyBuilder<'db> { } } - pub(super) fn lower_path( - &mut self, - path: Partial, - args: GenericArgListId, - span: LazyPathTypeSpan, - ) -> TyId { + pub(super) fn lower_path(&mut self, path: Partial, args: GenericArgListId) -> TyId { let path_ty = path .to_opt() .map(|path| { let res = resolve_path_early(self.db, path, self.scope); - self.lower_resolved_path(&res, span.path().into()) + self.lower_resolved_path(&res) }) .unwrap_or_else(|| Either::Left(TyId::invalid(self.db, InvalidCause::Other))); - let generic_arg_span = span.generic_args(); - let arg_tys: Vec<_> = args .data(self.db.as_hir_db()) .iter() - .enumerate() - .map(|(idx, arg)| self.lower_generic_arg(arg, generic_arg_span.arg(idx))) + .map(|arg| self.lower_generic_arg(arg)) .collect(); match path_ty { - Either::Left(ty) => arg_tys.into_iter().enumerate().fold(ty, |acc, (idx, arg)| { - self.ty_app(acc, arg, generic_arg_span.arg(idx).into()) - }), + Either::Left(ty) => arg_tys + .into_iter() + .fold(ty, |acc, arg| TyId::app(self.db, acc, arg)), Either::Right(alias) => alias.subst_with(self.db, &arg_tys), } } - pub(super) fn lower_self_ty(&mut self, span: LazyTySpan) -> TyId { + pub(super) fn lower_self_ty(&mut self) -> TyId { let res = resolve_segments_early(self.db, &[Partial::Present(kw::SELF_TY)], self.scope); - self.lower_resolved_path(&res, span.into()).unwrap_left() + self.lower_resolved_path(&res).unwrap_left() } - fn lower_ptr(&mut self, pointee: Partial, span: LazyPtrTypeSpan) -> TyId { + fn lower_ptr(&mut self, pointee: Partial) -> TyId { let pointee = pointee .to_opt() - .map(|pointee| self.lower_ty(pointee, span.pointee())) + .map(|pointee| self.lower_ty(pointee)) .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); let ptr = TyId::ptr(self.db); - self.ty_app(ptr, pointee, span.into()) + TyId::app(self.db, ptr, pointee) } - fn lower_tuple(&mut self, elems: &[Partial], span: LazyTupleTypeSpan) -> TyId { + fn lower_tuple(&mut self, elems: &[Partial]) -> TyId { let len = elems.len(); let tuple = TyId::tuple(self.db, len); - elems.iter().enumerate().fold(tuple, |acc, (idx, elem)| { + elems.iter().fold(tuple, |acc, elem| { let elem_ty = elem .to_opt() - .map(|elem| self.lower_ty(elem, span.elem_ty(idx))) + .map(|elem| self.lower_ty(elem)) .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); - let elem_ty = verify_fully_applied_type(self.db, elem_ty, span.elem_ty(idx).into()); + if !elem_ty.is_mono_type(self.db) { + return TyId::invalid(self.db, InvalidCause::NotFullyApplied); + } - self.ty_app(acc, elem_ty, span.elem_ty(idx).into()) + TyId::app(self.db, acc, elem_ty) }) } - /// Perform type level application. - /// If type application is not possible for the given `abs`/`arg` pair, - /// diagnostics are accumulated then returns` TyId::invalid()`. 
- fn ty_app(&mut self, abs: TyId, arg: TyId, span: DynLazySpan) -> TyId { - TyId::apply(self.db, abs, arg) - } - - fn lower_resolved_path( - &mut self, - path: &EarlyResolvedPath, - span: DynLazySpan, - ) -> Either { + fn lower_resolved_path(&mut self, path: &EarlyResolvedPath) -> Either { let res = match path { EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { Ok(res) => res, @@ -204,12 +183,12 @@ impl<'db> TyBuilder<'db> { } } - fn lower_generic_arg(&mut self, arg: &GenericArg, span: LazyGenericArgSpan) -> TyId { + fn lower_generic_arg(&mut self, arg: &GenericArg) -> TyId { match arg { GenericArg::Type(ty_arg) => ty_arg .ty .to_opt() - .map(|ty| self.lower_ty(ty, span.into_type_arg().ty())) + .map(|ty| self.lower_ty(ty)) .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)), GenericArg::Const(_) => todo!(), @@ -261,17 +240,14 @@ impl<'db> AdtTyBuilder<'db> { fn collect_variants(&mut self) { match self.adt.data(self.db) { AdtRef::Struct(struct_) => { - let span = struct_.lazy_span(); self.collect_field_types(struct_.fields(self.db.as_hir_db())); } AdtRef::Contract(contract) => { - let span = contract.lazy_span(); self.collect_field_types(contract.fields(self.db.as_hir_db())) } AdtRef::Enum(enum_) => { - let span = enum_.lazy_span(); self.collect_enum_variant_types(enum_.variants(self.db.as_hir_db())) } }; @@ -291,8 +267,7 @@ impl<'db> AdtTyBuilder<'db> { variants .data(self.db.as_hir_db()) .iter() - .enumerate() - .for_each(|(i, variant)| { + .for_each(|variant| { // TODO: FIX here when record variant is introduced. let tys = match variant.ty { Some(ty) => { @@ -336,11 +311,3 @@ fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> Ty } } } - -fn verify_fully_applied_type(db: &dyn HirAnalysisDb, ty: TyId, span: DynLazySpan) -> TyId { - if ty.is_mono_type(db) { - ty - } else { - TyId::invalid(db, InvalidCause::NotFullyApplied) - } -} diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 44ad6c478a..13adc27da6 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -49,7 +49,7 @@ impl TyId { /// Perform type level application. /// If the kind is mismatched, return `TyData::Invalid`. 
- pub(super) fn apply(db: &dyn HirAnalysisDb, abs: Self, arg: Self) -> TyId { + pub(super) fn app(db: &dyn HirAnalysisDb, abs: Self, arg: Self) -> TyId { if abs.is_invalid(db) || arg.is_invalid(db) { return TyId::invalid(db, InvalidCause::Other); } @@ -192,13 +192,6 @@ impl Kind { _ => false, } } - - fn is_any(&self) -> bool { - match self { - Self::Any => true, - _ => false, - } - } } impl fmt::Display for Kind { From c9a9c1facd6909623a75f5d92653b66ef6c65d49 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 11 Sep 2023 22:42:33 +0200 Subject: [PATCH 296/678] Add `scope` field and `ty` method to `AdtVariant` --- crates/hir-analysis/src/ty/lower.rs | 26 ++++++++++++++------------ crates/hir-analysis/src/ty/ty.rs | 16 ++++++++++++++++ 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index a36746e68c..b16e0d3eed 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -254,20 +254,25 @@ impl<'db> AdtTyBuilder<'db> { } fn collect_field_types(&mut self, fields: FieldDefListId) { - fields.data(self.db.as_hir_db()).iter().for_each(|field| { - let variant = AdtVariant { - name: field.name, - tys: vec![field.ty], - }; - self.variants.push(variant); - }) + fields + .data(self.db.as_hir_db()) + .iter() + .enumerate() + .for_each(|(i, field)| { + let scope = ScopeId::Field(self.adt.as_item(self.db), i); + let variant = AdtVariant::new(field.name, vec![field.ty], scope); + self.variants.push(variant); + }) } fn collect_enum_variant_types(&mut self, variants: VariantDefListId) { variants .data(self.db.as_hir_db()) .iter() - .for_each(|variant| { + .enumerate() + .for_each(|(i, variant)| { + let scope = ScopeId::Variant(self.adt.as_item(self.db), i); + // TODO: FIX here when record variant is introduced. let tys = match variant.ty { Some(ty) => { @@ -276,10 +281,7 @@ impl<'db> AdtTyBuilder<'db> { None => vec![], }; - let variant = AdtVariant { - name: variant.name, - tys, - }; + let variant = AdtVariant::new(variant.name, tys, scope); self.variants.push(variant) }) } diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 13adc27da6..9218e6a76a 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -11,6 +11,8 @@ use hir::{ use crate::HirAnalysisDb; +use super::lower::lower_hir_ty; + #[salsa::interned] pub struct TyId { pub data: TyData, @@ -117,6 +119,20 @@ pub struct AdtVariant { /// If the adt is an struct or contract, the length of the vector is always /// 1. 
pub tys: Vec>, + scope: ScopeId, +} +impl AdtVariant { + pub fn ty(&self, db: &dyn HirAnalysisDb, i: usize) -> TyId { + if let Some(ty) = self.tys[i].to_opt() { + lower_hir_ty(db, ty, self.scope) + } else { + TyId::invalid(db, InvalidCause::Other) + } + } + + pub(super) fn new(name: Partial, tys: Vec>, scope: ScopeId) -> Self { + Self { name, tys, scope } + } } #[salsa::tracked(return_ref)] From a8fe23fbbe4164836f01d086ea984e1cb3eb91b6 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 12 Sep 2023 01:25:44 +0200 Subject: [PATCH 297/678] Implement Adt definition analysis --- crates/hir-analysis/src/lib.rs | 1 + crates/hir-analysis/src/ty/diagnostics.rs | 22 +- crates/hir-analysis/src/ty/lower.rs | 200 ++++++++++++++---- crates/hir-analysis/src/ty/mod.rs | 3 +- crates/hir-analysis/src/ty/ty.rs | 3 - crates/hir/src/visitor.rs | 12 ++ crates/uitest/fixtures/ty/kind_mismatch.snap | 6 +- .../uitest/fixtures/ty/not_fully_applied.snap | 10 +- 8 files changed, 188 insertions(+), 69 deletions(-) diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index e87e379c42..71c7031dbe 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -15,6 +15,7 @@ pub struct Jar( /// Type lowering. ty::lower::lower_hir_ty, ty::lower::lower_adt, + ty::lower::analyze_adt, ty::lower::lower_type_alias, // Trait resolution. ty::trait_::TraitDef, diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 03d4ebda7a..dde82a5fd3 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -17,7 +17,6 @@ pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyLowerDiag { - InvalidType(DynLazySpan), NotFullyAppliedType(DynLazySpan), KindMismatch(DynLazySpan, String), RecursiveType { @@ -28,10 +27,6 @@ pub enum TyLowerDiag { } impl TyLowerDiag { - pub(super) fn invalid_type(span: DynLazySpan) -> Self { - Self::InvalidType(span) - } - pub fn not_fully_applied_type(span: DynLazySpan) -> Self { Self::NotFullyAppliedType(span) } @@ -63,18 +58,15 @@ impl TyLowerDiag { fn local_code(&self) -> u16 { match self { - Self::InvalidType(_) => 0, - Self::NotFullyAppliedType(_) => 1, - Self::KindMismatch(_, _) => 2, - Self::RecursiveType { .. } => 3, - Self::AssocTy(_) => 4, + Self::NotFullyAppliedType(_) => 0, + Self::KindMismatch(_, _) => 1, + Self::RecursiveType { .. 
} => 2, + Self::AssocTy(_) => 3, } } fn message(&self) -> String { match self { - Self::InvalidType(_) => "expected type".to_string(), - Self::NotFullyAppliedType(_) => "expected fully applied type".to_string(), Self::KindMismatch(_, _) => "kind mismatch in type application".to_string(), @@ -87,12 +79,6 @@ impl TyLowerDiag { fn sub_diags(&self, db: &dyn hir::SpannedHirDb) -> Vec { match self { - Self::InvalidType(span) => vec![SubDiagnostic::new( - LabelStyle::Primary, - "expected type here".to_string(), - span.resolve(db), - )], - Self::NotFullyAppliedType(span) => vec![SubDiagnostic::new( LabelStyle::Primary, "expected fully applied type here".to_string(), diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index b16e0d3eed..21e381724a 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -1,8 +1,11 @@ use either::Either; -use hir::hir_def::{ - kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, ItemKind, - Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, - VariantDefListId, +use hir::{ + hir_def::{ + kw, scope_graph::ScopeId, FieldDef, FieldDefListId, GenericArg, GenericArgListId, + GenericParam, ItemKind, Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, + TypeKind as HirTyKind, VariantDefListId, + }, + visitor::prelude::*, }; use crate::{ @@ -24,16 +27,29 @@ pub fn lower_hir_ty(db: &dyn HirAnalysisDb, ty: HirTyId, scope: ScopeId) -> TyId } #[salsa::tracked] -pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> TyId { - let (ty, diags) = AdtTyBuilder::new(db, adt).build(); - for diag in diags { - AdtDefDiagAccumulator::push(db, diag) +pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> AdtDef { + AdtTyBuilder::new(db, adt).build() +} + +#[salsa::tracked] +pub fn analyze_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) { + let mut analyzer = AdtDefAnalysisVisitor { + db, + accumulated: Vec::new(), + scope: adt.scope(db), + }; + let item = adt.as_item(db); + + let mut ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); + analyzer.visit_item(&mut ctxt, item); + + for diag in analyzer.accumulated { + AdtDefDiagAccumulator::push(db, diag); } - ty } #[salsa::tracked] -pub fn lower_type_alias(_db: &dyn HirAnalysisDb, _alias: HirTypeAlias) -> TyAlias { +pub(crate) fn lower_type_alias(_db: &dyn HirAnalysisDb, _alias: HirTypeAlias) -> TyAlias { todo!() } @@ -44,18 +60,130 @@ pub fn lower_type_alias(_db: &dyn HirAnalysisDb, _alias: HirTypeAlias) -> TyAlia /// NOTE: `TyAlias` can't become an alias to partial applied types, i.e., the /// right hand side of the alias declaration must be a fully applied type. 
#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TyAlias { +pub(crate) struct TyAlias { alias_to: TyId, params: Vec, } +pub(super) fn collect_ty_lower_diags( + db: &dyn HirAnalysisDb, + ty: HirTyId, + span: LazyTySpan, + scope: ScopeId, +) -> Vec { + let mut ctxt = VisitorCtxt::new(db.as_hir_db(), span); + let mut accumulator = TyDiagAccumulator { + db, + accumulated: Vec::new(), + scope, + }; + + accumulator.visit_ty(&mut ctxt, ty); + accumulator.accumulated +} + impl TyAlias { fn subst_with(&self, _db: &dyn HirAnalysisDb, _substs: &[TyId]) -> TyId { todo!() } } -pub(crate) struct TyBuilder<'db> { +struct TyDiagAccumulator<'db> { + db: &'db dyn HirAnalysisDb, + accumulated: Vec, + scope: ScopeId, +} + +impl<'db> TyDiagAccumulator<'db> { + fn accumulate(&mut self, cause: InvalidCause, span: LazyTySpan) { + let span: DynLazySpan = span.into(); + match cause { + InvalidCause::NotFullyApplied => { + let diag = TyLowerDiag::not_fully_applied_type(span); + self.accumulated.push(diag); + } + + InvalidCause::KindMismatch { abs, arg } => { + let diag = TyLowerDiag::kind_mismatch(self.db, abs, arg, span); + self.accumulated.push(diag); + } + + InvalidCause::AssocTy => { + let diag = TyLowerDiag::assoc_ty(span); + self.accumulated.push(diag); + } + + // NOTE: We can `InvalidCause::Other` because it's already reported by other passes. + InvalidCause::Other => {} + } + } +} + +impl<'db> Visitor for TyDiagAccumulator<'db> { + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { + let ty = lower_hir_ty(self.db, hir_ty, self.scope); + if let Some(cause) = ty.invalid_cause(self.db) { + self.accumulate(cause, ctxt.span().unwrap()); + } + + walk_ty(self, ctxt, hir_ty); + } +} + +struct AdtDefAnalysisVisitor<'db> { + db: &'db dyn HirAnalysisDb, + accumulated: Vec, + scope: ScopeId, +} + +impl<'db> AdtDefAnalysisVisitor<'db> { + // This method ensures that field/variant types are fully applied. + fn verify_fully_applied(&mut self, ty: HirTyId, span: DynLazySpan) { + let ty = lower_hir_ty(self.db, ty, self.scope); + if !ty.is_mono_type(self.db) { + self.accumulated + .push(TyLowerDiag::not_fully_applied_type(span)); + } + } +} + +impl<'db> Visitor for AdtDefAnalysisVisitor<'db> { + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { + self.accumulated.extend(collect_ty_lower_diags( + self.db, + hir_ty, + ctxt.span().unwrap(), + self.scope, + )); + + // We don't call `walk_ty` to make sure that we don't visit ty + // recursively, which is visited by `collect_ty_lower_diags`. + } + + fn visit_field_def(&mut self, ctxt: &mut VisitorCtxt<'_, LazyFieldDefSpan>, field: &FieldDef) { + if let Some(ty) = field.ty.to_opt() { + self.verify_fully_applied(ty, ctxt.span().unwrap().ty().into()); + } + + walk_field_def(self, ctxt, field); + } + + fn visit_variant_def( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefSpan>, + variant: &hir::hir_def::VariantDef, + ) { + if let Some(ty) = variant.ty { + self.verify_fully_applied(ty, ctxt.span().unwrap().ty().into()); + } + + walk_variant_def(self, ctxt, variant); + } + + // TODO: We need to check cycle type. 
+} + +struct TyBuilder<'db> { db: &'db dyn HirAnalysisDb, scope: ScopeId, } @@ -113,7 +241,7 @@ impl<'db> TyBuilder<'db> { fn lower_ptr(&mut self, pointee: Partial) -> TyId { let pointee = pointee .to_opt() - .map(|pointee| self.lower_ty(pointee)) + .map(|pointee| lower_hir_ty(self.db, pointee, self.scope)) .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); let ptr = TyId::ptr(self.db); @@ -126,7 +254,7 @@ impl<'db> TyBuilder<'db> { elems.iter().fold(tuple, |acc, elem| { let elem_ty = elem .to_opt() - .map(|elem| self.lower_ty(elem)) + .map(|elem| lower_hir_ty(self.db, elem, self.scope)) .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); if !elem_ty.is_mono_type(self.db) { return TyId::invalid(self.db, InvalidCause::NotFullyApplied); @@ -168,18 +296,22 @@ impl<'db> TyBuilder<'db> { match item { ItemKind::Enum(enum_) => { let adt_ref = AdtRefId::from_enum(self.db, enum_); - Either::Left(lower_adt(self.db, adt_ref)) + let adt = lower_adt(self.db, adt_ref); + Either::Left(TyId::adt(self.db, adt)) } ItemKind::Struct(struct_) => { let adt_ref = AdtRefId::from_struct(self.db, struct_); - Either::Left(lower_adt(self.db, adt_ref)) + let adt = lower_adt(self.db, adt_ref); + Either::Left(TyId::adt(self.db, adt)) } ItemKind::Contract(contract) => { let adt_ref = AdtRefId::from_contract(self.db, contract); - Either::Left(lower_adt(self.db, adt_ref)) + let adt = lower_adt(self.db, adt_ref); + Either::Left(TyId::adt(self.db, adt)) } ItemKind::TypeAlias(alias) => Either::Right(lower_type_alias(self.db, alias)), - _ => Either::Left(TyId::invalid(self.db, InvalidCause::ReferenceToNonType)), + // This should be handled in the name resolution. + _ => Either::Left(TyId::invalid(self.db, InvalidCause::Other)), } } @@ -188,7 +320,7 @@ impl<'db> TyBuilder<'db> { GenericArg::Type(ty_arg) => ty_arg .ty .to_opt() - .map(|ty| self.lower_ty(ty)) + .map(|ty| lower_hir_ty(self.db, ty, self.scope)) .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)), GenericArg::Const(_) => todo!(), @@ -201,7 +333,6 @@ struct AdtTyBuilder<'db> { adt: AdtRefId, params: Vec, variants: Vec, - diags: Vec, } impl<'db> AdtTyBuilder<'db> { @@ -211,16 +342,13 @@ impl<'db> AdtTyBuilder<'db> { adt, params: Vec::new(), variants: Vec::new(), - diags: Vec::new(), } } - fn build(mut self) -> (TyId, Vec) { + fn build(mut self) -> AdtDef { self.collect_params(); self.collect_variants(); - - let adt_def = AdtDef::new(self.db, self.adt, self.params, self.variants); - (TyId::adt(self.db, adt_def), self.diags) + AdtDef::new(self.db, self.adt, self.params, self.variants) } fn collect_params(&mut self) { @@ -254,25 +382,21 @@ impl<'db> AdtTyBuilder<'db> { } fn collect_field_types(&mut self, fields: FieldDefListId) { - fields - .data(self.db.as_hir_db()) - .iter() - .enumerate() - .for_each(|(i, field)| { - let scope = ScopeId::Field(self.adt.as_item(self.db), i); - let variant = AdtVariant::new(field.name, vec![field.ty], scope); - self.variants.push(variant); - }) + let scope = self.adt.scope(self.db); + + fields.data(self.db.as_hir_db()).iter().for_each(|field| { + let variant = AdtVariant::new(field.name, vec![field.ty], scope); + self.variants.push(variant); + }) } fn collect_enum_variant_types(&mut self, variants: VariantDefListId) { + let scope = self.adt.scope(self.db); + variants .data(self.db.as_hir_db()) .iter() - .enumerate() - .for_each(|(i, variant)| { - let scope = ScopeId::Variant(self.adt.as_item(self.db), i); - + .for_each(|variant| { // TODO: FIX here when record variant is introduced. 
let tys = match variant.ty { Some(ty) => { diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 736824657f..9b40497755 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -43,8 +43,7 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { ); adts.map(|adt| { - lower::lower_adt(self.db, adt); - lower::lower_adt::accumulated::(self.db, adt) + lower::analyze_adt::accumulated::(self.db, adt) .into_iter() .map(|diag| Box::new(diag) as _) }) diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 9218e6a76a..d4363aa866 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -175,9 +175,6 @@ pub enum InvalidCause { /// Associated Type is not allowed at the moment. AssocTy, - /// Type is not defined even though the name resolution is succeeded. - ReferenceToNonType, - /// `Other` indicates the cause is already reported in other analysis /// passes, e.g., parser or name resolution. Other, diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 68f021690a..7772610a27 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1803,6 +1803,18 @@ impl<'db, T> VisitorCtxt<'db, T> where T: LazySpan, { + pub fn new(db: &'db dyn HirDb, span: T) -> Self + where + T: Into, + { + Self { + db, + span: span.into(), + scope_stack: Vec::new(), + _t: PhantomData, + } + } + pub fn db(&self) -> &'db dyn HirDb { self.db } diff --git a/crates/uitest/fixtures/ty/kind_mismatch.snap b/crates/uitest/fixtures/ty/kind_mismatch.snap index 9b784f47bb..2139d7f371 100644 --- a/crates/uitest/fixtures/ty/kind_mismatch.snap +++ b/crates/uitest/fixtures/ty/kind_mismatch.snap @@ -3,10 +3,10 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/kind_mismatch.fe --- -error[3-0002]: kind mismatch in type application - ┌─ kind_mismatch.fe:7:19 +error[3-0001]: kind mismatch in type application + ┌─ kind_mismatch.fe:7:10 │ 7 │ foo: Foo - │ ^^^ can't apply `(* -> (* -> *))` kind to `(* -> *)` kind + │ ^^^^^^^^^^^^^ can't apply `(* -> (* -> *))` kind to `(* -> *)` kind diff --git a/crates/uitest/fixtures/ty/not_fully_applied.snap b/crates/uitest/fixtures/ty/not_fully_applied.snap index 3317604041..feadb17246 100644 --- a/crates/uitest/fixtures/ty/not_fully_applied.snap +++ b/crates/uitest/fixtures/ty/not_fully_applied.snap @@ -3,19 +3,19 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/not_fully_applied.fe --- -error[3-0001]: expected fully applied type +error[3-0000]: expected fully applied type ┌─ not_fully_applied.fe:7:8 │ 7 │ f: Gen │ ^^^^^^^^ expected fully applied type here -error[3-0001]: expected fully applied type - ┌─ not_fully_applied.fe:12:13 +error[3-0000]: expected fully applied type + ┌─ not_fully_applied.fe:12:12 │ 12 │ Variant(Gen) - │ ^^^^^^^^ expected fully applied type here + │ ^^^^^^^^^^ expected fully applied type here -error[3-0001]: expected fully applied type +error[3-0000]: expected fully applied type ┌─ not_fully_applied.fe:16:8 │ 16 │ f: Gen From b26a8a6314e702b243c49800b1715ff032b5d599 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 12 Sep 2023 21:03:41 +0200 Subject: [PATCH 298/678] Implement `TyVisitor` --- crates/hir-analysis/src/ty/mod.rs | 1 + crates/hir-analysis/src/ty/visitor.rs | 62 +++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 crates/hir-analysis/src/ty/visitor.rs diff --git 
a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 9b40497755..74d7c32e56 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -8,6 +8,7 @@ pub mod diagnostics; pub mod lower; pub mod trait_; pub mod ty; +pub mod visitor; pub struct TypeDefAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, diff --git a/crates/hir-analysis/src/ty/visitor.rs b/crates/hir-analysis/src/ty/visitor.rs new file mode 100644 index 0000000000..ca58ff86c6 --- /dev/null +++ b/crates/hir-analysis/src/ty/visitor.rs @@ -0,0 +1,62 @@ +use crate::HirAnalysisDb; + +use super::ty::{AdtDef, InvalidCause, PrimTy, TyConcrete, TyData, TyId, TyParam, TyVar}; + +pub trait TyVisitor { + fn visit_ty(&mut self, db: &dyn HirAnalysisDb, ty: TyId) { + walk_ty(self, db, ty) + } + + #[allow(unused_variables)] + fn visit_var(&mut self, db: &dyn HirAnalysisDb, var: &TyVar) {} + + #[allow(unused_variables)] + fn visit_param(&self, db: &dyn HirAnalysisDb, ty_param: &TyParam) {} + + fn visit_app(&mut self, db: &dyn HirAnalysisDb, abs: TyId, arg: TyId) { + self.visit_ty(db, abs); + self.visit_ty(db, arg); + } + + #[allow(unused_variables)] + fn visit_ty_con(&mut self, db: &dyn HirAnalysisDb, ty_con: &TyConcrete) { + walk_ty_con(self, db, ty_con); + } + + #[allow(unused_variables)] + fn visit_invalid(&mut self, db: &dyn HirAnalysisDb, cause: &InvalidCause) {} + + #[allow(unused_variables)] + fn visit_prim(&mut self, db: &dyn HirAnalysisDb, prim: &PrimTy) {} + + #[allow(unused_variables)] + fn visit_adt(&mut self, db: &dyn HirAnalysisDb, adt: AdtDef) {} +} + +pub fn walk_ty(visitor: &mut V, db: &dyn HirAnalysisDb, ty: TyId) +where + V: TyVisitor + ?Sized, +{ + match ty.data(db) { + TyData::TyVar(var) => visitor.visit_var(db, &var), + + TyData::TyParam(param) => visitor.visit_param(db, ¶m), + + TyData::TyApp(abs, arg) => visitor.visit_app(db, abs, arg), + + TyData::TyCon(ty_con) => visitor.visit_ty_con(db, &ty_con), + + TyData::Invalid(cause) => visitor.visit_invalid(db, &cause), + } +} + +pub fn walk_ty_con(visitor: &mut V, db: &dyn HirAnalysisDb, ty_con: &TyConcrete) +where + V: TyVisitor + ?Sized, +{ + match ty_con { + TyConcrete::Prim(prim) => visitor.visit_prim(db, prim), + TyConcrete::Adt(adt) => visitor.visit_adt(db, *adt), + TyConcrete::Abs => {} + } +} From cf70432730d442b19ba85df9ec20709a79245984 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 12 Sep 2023 21:52:51 +0200 Subject: [PATCH 299/678] Implement recursive type check --- crates/hir-analysis/src/lib.rs | 1 + crates/hir-analysis/src/ty/diagnostics.rs | 39 +++----- crates/hir-analysis/src/ty/lower.rs | 91 +++++++++++++++++-- crates/hir-analysis/src/ty/ty.rs | 58 +++++++++++- crates/hir/src/hir_def/scope_graph.rs | 3 +- crates/uitest/fixtures/ty/recursive_type.fe | 24 +++++ crates/uitest/fixtures/ty/recursive_type.snap | 46 ++++++++++ 7 files changed, 227 insertions(+), 35 deletions(-) create mode 100644 crates/uitest/fixtures/ty/recursive_type.fe create mode 100644 crates/uitest/fixtures/ty/recursive_type.snap diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 71c7031dbe..3dd0f21e2b 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -17,6 +17,7 @@ pub struct Jar( ty::lower::lower_adt, ty::lower::analyze_adt, ty::lower::lower_type_alias, + ty::lower::check_recursive_adt, // Trait resolution. 
ty::trait_::TraitDef, ty::trait_::TraitInstId, diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index dde82a5fd3..30d79cb830 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -8,7 +8,7 @@ use hir::{ use crate::HirAnalysisDb; -use super::ty::{AdtRefId, TyId}; +use super::ty::TyId; #[salsa::accumulator] pub struct AdtDefDiagAccumulator(pub(super) TyLowerDiag); @@ -21,7 +21,7 @@ pub enum TyLowerDiag { KindMismatch(DynLazySpan, String), RecursiveType { primary_span: DynLazySpan, - cycle_participants: Vec, + field_span: DynLazySpan, }, AssocTy(DynLazySpan), } @@ -39,16 +39,10 @@ impl TyLowerDiag { Self::KindMismatch(span, msg.into()) } - pub(super) fn recursive_type( - db: &dyn HirAnalysisDb, - primary_span: DynLazySpan, - participants: Vec, - ) -> Self { - let cycle_participants = participants.into_iter().map(|p| p.name_span(db)).collect(); - + pub(super) fn recursive_type(primary_span: DynLazySpan, field_span: DynLazySpan) -> Self { Self::RecursiveType { primary_span, - cycle_participants, + field_span, } } @@ -93,23 +87,20 @@ impl TyLowerDiag { Self::RecursiveType { primary_span, - cycle_participants, + field_span, } => { - let mut diags = vec![SubDiagnostic::new( - LabelStyle::Primary, - "causing cycle here".to_string(), - primary_span.resolve(db), - )]; - - diags.extend(cycle_participants.iter().map(|span| { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "recursive type definition".to_string(), + primary_span.resolve(db), + ), SubDiagnostic::new( LabelStyle::Secondary, - format!("this type is part of the cycle"), - span.resolve(db), - ) - })); - - diags + "recursion occurs here".to_string(), + field_span.resolve(db), + ), + ] } Self::AssocTy(span) => vec![SubDiagnostic::new( diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 21e381724a..73e8c33f3f 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -5,8 +5,10 @@ use hir::{ GenericParam, ItemKind, Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, }, - visitor::prelude::*, + visitor::prelude::{walk_ty as hir_walk_ty, *}, }; +use rustc_hash::FxHashSet; +use salsa::function::Configuration; use crate::{ name_resolution::{ @@ -18,7 +20,8 @@ use crate::{ use super::{ diagnostics::TyLowerDiag, - ty::{AdtDef, AdtRef, AdtRefId, AdtVariant, InvalidCause, Kind, TyData, TyId, TyParam}, + ty::{AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, TyData, TyId, TyParam}, + visitor::{walk_ty, TyVisitor}, }; #[salsa::tracked] @@ -46,6 +49,10 @@ pub fn analyze_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) { for diag in analyzer.accumulated { AdtDefDiagAccumulator::push(db, diag); } + + if let Some(diag) = check_recursive_adt(db, adt) { + AdtDefDiagAccumulator::push(db, diag); + } } #[salsa::tracked] @@ -126,7 +133,7 @@ impl<'db> Visitor for TyDiagAccumulator<'db> { self.accumulate(cause, ctxt.span().unwrap()); } - walk_ty(self, ctxt, hir_ty); + hir_walk_ty(self, ctxt, hir_ty); } } @@ -332,7 +339,7 @@ struct AdtTyBuilder<'db> { db: &'db dyn HirAnalysisDb, adt: AdtRefId, params: Vec, - variants: Vec, + variants: Vec, } impl<'db> AdtTyBuilder<'db> { @@ -385,7 +392,7 @@ impl<'db> AdtTyBuilder<'db> { let scope = self.adt.scope(self.db); fields.data(self.db.as_hir_db()).iter().for_each(|field| { - let variant = AdtVariant::new(field.name, vec![field.ty], scope); + let variant = AdtField::new(field.name, 
vec![field.ty], scope); self.variants.push(variant); }) } @@ -405,7 +412,7 @@ impl<'db> AdtTyBuilder<'db> { None => vec![], }; - let variant = AdtVariant::new(variant.name, tys, scope); + let variant = AdtField::new(variant.name, tys, scope); self.variants.push(variant) }) } @@ -437,3 +444,75 @@ fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> Ty } } } + +#[salsa::tracked(recovery_fn = check_recursive_adt_impl)] +pub(crate) fn check_recursive_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> Option { + let adt_def = lower_adt(db, adt); + + for field in adt_def.fields(db) { + for ty in field.iter_types(db) { + for adt_ref in ty.collect_direct_adts(db) { + check_recursive_adt(db, adt_ref); + } + } + } + + None +} + +fn check_recursive_adt_impl( + db: &dyn HirAnalysisDb, + cycle: &salsa::Cycle, + adt: AdtRefId, +) -> Option { + let participants: FxHashSet<_> = cycle + .participant_keys() + .map(|key| check_recursive_adt::key_from_id(key.key_index())) + .collect(); + + let adt_def = lower_adt(db, adt); + for (i, field) in adt_def.fields(db).iter().enumerate() { + for ty in field.iter_types(db) { + for field_adt_ref in ty.collect_direct_adts(db) { + if participants.contains(&field_adt_ref) && participants.contains(&adt) { + let diag = TyLowerDiag::recursive_type( + adt.name_span(db), + adt_def.variant_ty_span(db, i), + ); + return Some(diag); + } + } + } + } + + None +} + +impl TyId { + /// Collect all adts inside types which are not wrapped by indirect type + /// wrapper like pointer or reference. + fn collect_direct_adts(self, db: &dyn HirAnalysisDb) -> FxHashSet { + let mut collector = AdtCollector { + adts: FxHashSet::default(), + }; + + walk_ty(&mut collector, db, self); + collector.adts + } +} + +struct AdtCollector { + adts: FxHashSet, +} + +impl TyVisitor for AdtCollector { + fn visit_app(&mut self, db: &dyn HirAnalysisDb, abs: TyId, arg: TyId) { + if !abs.is_indirect(db) { + walk_ty(self, db, arg) + } + } + + fn visit_adt(&mut self, db: &dyn HirAnalysisDb, adt: AdtDef) { + self.adts.insert(adt.adt_ref(db)); + } +} diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index d4363aa866..be7516dbb6 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -66,6 +66,20 @@ impl TyId { } } + /// Returns `true` if the type is a pointer or a pointer application. + pub(super) fn is_ptr(self, db: &dyn HirAnalysisDb) -> bool { + match self.data(db) { + TyData::TyCon(TyConcrete::Prim(PrimTy::Ptr)) => true, + TyData::TyApp(abs, _) => abs.is_ptr(db), + _ => false, + } + } + + pub(super) fn is_indirect(self, db: &dyn HirAnalysisDb) -> bool { + // TODO: FiX here when reference type is introduced. 
+ self.is_ptr(db) + } + pub(super) fn invalid(db: &dyn HirAnalysisDb, cause: InvalidCause) -> Self { Self::new(db, TyData::Invalid(cause)) } @@ -106,14 +120,42 @@ impl TyId { #[salsa::tracked] pub struct AdtDef { - pub adt: AdtRefId, + pub adt_ref: AdtRefId, #[return_ref] pub params: Vec, - pub variants: Vec, + #[return_ref] + pub fields: Vec, +} + +impl AdtDef { + pub fn variant_ty_span(self, db: &dyn HirAnalysisDb, idx: usize) -> DynLazySpan { + match self.adt_ref(db).data(db) { + AdtRef::Enum(e) => e + .lazy_span() + .variants_moved() + .variant_moved(idx) + .ty_moved() + .into(), + + AdtRef::Struct(s) => s + .lazy_span() + .fields_moved() + .field_moved(idx) + .ty_moved() + .into(), + + AdtRef::Contract(c) => c + .lazy_span() + .fields_moved() + .field_moved(idx) + .ty_moved() + .into(), + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct AdtVariant { +pub struct AdtField { pub name: Partial, /// Fields of the variant. /// If the adt is an struct or contract, the length of the vector is always @@ -121,7 +163,7 @@ pub struct AdtVariant { pub tys: Vec>, scope: ScopeId, } -impl AdtVariant { +impl AdtField { pub fn ty(&self, db: &dyn HirAnalysisDb, i: usize) -> TyId { if let Some(ty) = self.tys[i].to_opt() { lower_hir_ty(db, ty, self.scope) @@ -130,6 +172,14 @@ impl AdtVariant { } } + pub fn iter_types<'a>(&'a self, db: &'a dyn HirAnalysisDb) -> impl Iterator + 'a { + (0..self.num_types()).into_iter().map(|i| self.ty(db, i)) + } + + pub fn num_types(&self) -> usize { + self.tys.len() + } + pub(super) fn new(name: Partial, tys: Vec>, scope: ScopeId) -> Self { Self { name, tys, scope } } diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs index 66699efa9c..74ab87753a 100644 --- a/crates/hir/src/hir_def/scope_graph.rs +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -117,7 +117,8 @@ impl ScopeId { } } - /// Returns the nearest enclosing item. + /// Returns the nearest item that contains this scope. + /// If the scope is item itself, returns the item. 
pub fn item(self) -> ItemKind { match self { ScopeId::Item(item) => item, diff --git a/crates/uitest/fixtures/ty/recursive_type.fe b/crates/uitest/fixtures/ty/recursive_type.fe new file mode 100644 index 0000000000..3127e7e2d1 --- /dev/null +++ b/crates/uitest/fixtures/ty/recursive_type.fe @@ -0,0 +1,24 @@ +pub struct S1 { + s: S1 +} + +pub struct S2 { + s: S3 +} + +pub struct S3 { + s: S4 +} + +pub struct S4 { + s: S2 +} + +pub struct S5 { + s: S6 + t: T +} + +pub struct S6 { + s: S5 +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/recursive_type.snap b/crates/uitest/fixtures/ty/recursive_type.snap new file mode 100644 index 0000000000..daf6f6bf12 --- /dev/null +++ b/crates/uitest/fixtures/ty/recursive_type.snap @@ -0,0 +1,46 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/recursive_type.fe +--- +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:1:12 + │ +1 │ pub struct S1 { + │ ^^ recursive type definition +2 │ s: S1 + │ -- recursion occurs here + +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:5:12 + │ +5 │ pub struct S2 { + │ ^^ recursive type definition +6 │ s: S3 + │ -- recursion occurs here + +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:9:12 + │ + 9 │ pub struct S3 { + │ ^^ recursive type definition +10 │ s: S4 + │ -- recursion occurs here + +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:13:12 + │ +13 │ pub struct S4 { + │ ^^ recursive type definition +14 │ s: S2 + │ -- recursion occurs here + +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:22:12 + │ +22 │ pub struct S6 { + │ ^^ recursive type definition +23 │ s: S5 + │ ------ recursion occurs here + + From 22536884aa660b002f9a06061fab2b1ca8b56445 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Tue, 12 Sep 2023 22:10:35 +0200 Subject: [PATCH 300/678] Separate ADT analysis implementation from lowering implementation --- crates/hir-analysis/src/lib.rs | 5 +- crates/hir-analysis/src/ty/adt_analysis.rs | 217 ++++++++++++++++++++ crates/hir-analysis/src/ty/lower.rs | 225 +-------------------- crates/hir-analysis/src/ty/mod.rs | 3 +- 4 files changed, 227 insertions(+), 223 deletions(-) create mode 100644 crates/hir-analysis/src/ty/adt_analysis.rs diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 3dd0f21e2b..16acfde2e7 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -15,9 +15,10 @@ pub struct Jar( /// Type lowering. ty::lower::lower_hir_ty, ty::lower::lower_adt, - ty::lower::analyze_adt, ty::lower::lower_type_alias, - ty::lower::check_recursive_adt, + /// ADT analysis. + ty::adt_analysis::check_recursive_adt, + ty::adt_analysis::analyze_adt, // Trait resolution. 
ty::trait_::TraitDef, ty::trait_::TraitInstId, diff --git a/crates/hir-analysis/src/ty/adt_analysis.rs b/crates/hir-analysis/src/ty/adt_analysis.rs new file mode 100644 index 0000000000..b9cada715c --- /dev/null +++ b/crates/hir-analysis/src/ty/adt_analysis.rs @@ -0,0 +1,217 @@ +use hir::{ + hir_def::{scope_graph::ScopeId, FieldDef, TypeId as HirTyId}, + visitor::prelude::{walk_ty as hir_walk_ty, *}, +}; +use rustc_hash::FxHashSet; +use salsa::function::Configuration; + +use crate::{ty::diagnostics::AdtDefDiagAccumulator, HirAnalysisDb}; + +use super::{ + diagnostics::TyLowerDiag, + lower::{lower_adt, lower_hir_ty}, + ty::{AdtDef, AdtRefId, InvalidCause, TyId}, + visitor::{walk_ty, TyVisitor}, +}; + +#[salsa::tracked] +pub fn analyze_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) { + let mut analyzer = AdtDefAnalysisVisitor { + db, + accumulated: Vec::new(), + scope: adt.scope(db), + }; + let item = adt.as_item(db); + + let mut ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); + analyzer.visit_item(&mut ctxt, item); + + for diag in analyzer.accumulated { + AdtDefDiagAccumulator::push(db, diag); + } + + if let Some(diag) = check_recursive_adt(db, adt) { + AdtDefDiagAccumulator::push(db, diag); + } +} + +#[salsa::tracked(recovery_fn = check_recursive_adt_impl)] +pub(crate) fn check_recursive_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> Option { + let adt_def = lower_adt(db, adt); + for field in adt_def.fields(db) { + for ty in field.iter_types(db) { + for adt_ref in ty.collect_direct_adts(db) { + check_recursive_adt(db, adt_ref); + } + } + } + + None +} + +struct AdtDefAnalysisVisitor<'db> { + db: &'db dyn HirAnalysisDb, + accumulated: Vec, + scope: ScopeId, +} + +impl<'db> AdtDefAnalysisVisitor<'db> { + // This method ensures that field/variant types are fully applied. + fn verify_fully_applied(&mut self, ty: HirTyId, span: DynLazySpan) { + let ty = lower_hir_ty(self.db, ty, self.scope); + if !ty.is_mono_type(self.db) { + self.accumulated + .push(TyLowerDiag::not_fully_applied_type(span)); + } + } +} + +impl<'db> Visitor for AdtDefAnalysisVisitor<'db> { + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { + self.accumulated.extend(collect_ty_lower_diags( + self.db, + hir_ty, + ctxt.span().unwrap(), + self.scope, + )); + + // We don't call `walk_ty` to make sure that we don't visit ty + // recursively, which is visited by `collect_ty_lower_diags`. 
+ } + + fn visit_field_def(&mut self, ctxt: &mut VisitorCtxt<'_, LazyFieldDefSpan>, field: &FieldDef) { + if let Some(ty) = field.ty.to_opt() { + self.verify_fully_applied(ty, ctxt.span().unwrap().ty().into()); + } + + walk_field_def(self, ctxt, field); + } + + fn visit_variant_def( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyVariantDefSpan>, + variant: &hir::hir_def::VariantDef, + ) { + if let Some(ty) = variant.ty { + self.verify_fully_applied(ty, ctxt.span().unwrap().ty().into()); + } + + walk_variant_def(self, ctxt, variant); + } +} + +pub(super) fn collect_ty_lower_diags( + db: &dyn HirAnalysisDb, + ty: HirTyId, + span: LazyTySpan, + scope: ScopeId, +) -> Vec { + let mut ctxt = VisitorCtxt::new(db.as_hir_db(), span); + let mut accumulator = TyDiagAccumulator { + db, + accumulated: Vec::new(), + scope, + }; + + accumulator.visit_ty(&mut ctxt, ty); + accumulator.accumulated +} + +struct TyDiagAccumulator<'db> { + db: &'db dyn HirAnalysisDb, + accumulated: Vec, + scope: ScopeId, +} + +impl<'db> TyDiagAccumulator<'db> { + fn accumulate(&mut self, cause: InvalidCause, span: LazyTySpan) { + let span: DynLazySpan = span.into(); + match cause { + InvalidCause::NotFullyApplied => { + let diag = TyLowerDiag::not_fully_applied_type(span); + self.accumulated.push(diag); + } + + InvalidCause::KindMismatch { abs, arg } => { + let diag = TyLowerDiag::kind_mismatch(self.db, abs, arg, span); + self.accumulated.push(diag); + } + + InvalidCause::AssocTy => { + let diag = TyLowerDiag::assoc_ty(span); + self.accumulated.push(diag); + } + + // NOTE: We can `InvalidCause::Other` because it's already reported by other passes. + InvalidCause::Other => {} + } + } +} + +impl<'db> Visitor for TyDiagAccumulator<'db> { + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { + let ty = lower_hir_ty(self.db, hir_ty, self.scope); + if let Some(cause) = ty.invalid_cause(self.db) { + self.accumulate(cause, ctxt.span().unwrap()); + } + + hir_walk_ty(self, ctxt, hir_ty); + } +} + +fn check_recursive_adt_impl( + db: &dyn HirAnalysisDb, + cycle: &salsa::Cycle, + adt: AdtRefId, +) -> Option { + let participants: FxHashSet<_> = cycle + .participant_keys() + .map(|key| check_recursive_adt::key_from_id(key.key_index())) + .collect(); + + let adt_def = lower_adt(db, adt); + for (i, field) in adt_def.fields(db).iter().enumerate() { + for ty in field.iter_types(db) { + for field_adt_ref in ty.collect_direct_adts(db) { + if participants.contains(&field_adt_ref) && participants.contains(&adt) { + let diag = TyLowerDiag::recursive_type( + adt.name_span(db), + adt_def.variant_ty_span(db, i), + ); + return Some(diag); + } + } + } + } + + None +} + +impl TyId { + /// Collect all adts inside types which are not wrapped by indirect type + /// wrapper like pointer or reference. 
+ fn collect_direct_adts(self, db: &dyn HirAnalysisDb) -> FxHashSet { + let mut collector = AdtCollector { + adts: FxHashSet::default(), + }; + + walk_ty(&mut collector, db, self); + collector.adts + } +} + +struct AdtCollector { + adts: FxHashSet, +} + +impl TyVisitor for AdtCollector { + fn visit_app(&mut self, db: &dyn HirAnalysisDb, abs: TyId, arg: TyId) { + if !abs.is_indirect(db) { + walk_ty(self, db, arg) + } + } + + fn visit_adt(&mut self, db: &dyn HirAnalysisDb, adt: AdtDef) { + self.adts.insert(adt.adt_ref(db)); + } +} diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 73e8c33f3f..39cc6d88d6 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -1,28 +1,18 @@ use either::Either; -use hir::{ - hir_def::{ - kw, scope_graph::ScopeId, FieldDef, FieldDefListId, GenericArg, GenericArgListId, - GenericParam, ItemKind, Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, - TypeKind as HirTyKind, VariantDefListId, - }, - visitor::prelude::{walk_ty as hir_walk_ty, *}, +use hir::hir_def::{ + kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, ItemKind, + Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, + VariantDefListId, }; -use rustc_hash::FxHashSet; -use salsa::function::Configuration; use crate::{ name_resolution::{ resolve_path_early, resolve_segments_early, EarlyResolvedPath, NameDomain, NameResKind, }, - ty::diagnostics::AdtDefDiagAccumulator, HirAnalysisDb, }; -use super::{ - diagnostics::TyLowerDiag, - ty::{AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, TyData, TyId, TyParam}, - visitor::{walk_ty, TyVisitor}, -}; +use super::ty::{AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, TyData, TyId, TyParam}; #[salsa::tracked] pub fn lower_hir_ty(db: &dyn HirAnalysisDb, ty: HirTyId, scope: ScopeId) -> TyId { @@ -34,27 +24,6 @@ pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> AdtDef { AdtTyBuilder::new(db, adt).build() } -#[salsa::tracked] -pub fn analyze_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) { - let mut analyzer = AdtDefAnalysisVisitor { - db, - accumulated: Vec::new(), - scope: adt.scope(db), - }; - let item = adt.as_item(db); - - let mut ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); - analyzer.visit_item(&mut ctxt, item); - - for diag in analyzer.accumulated { - AdtDefDiagAccumulator::push(db, diag); - } - - if let Some(diag) = check_recursive_adt(db, adt) { - AdtDefDiagAccumulator::push(db, diag); - } -} - #[salsa::tracked] pub(crate) fn lower_type_alias(_db: &dyn HirAnalysisDb, _alias: HirTypeAlias) -> TyAlias { todo!() @@ -72,124 +41,12 @@ pub(crate) struct TyAlias { params: Vec, } -pub(super) fn collect_ty_lower_diags( - db: &dyn HirAnalysisDb, - ty: HirTyId, - span: LazyTySpan, - scope: ScopeId, -) -> Vec { - let mut ctxt = VisitorCtxt::new(db.as_hir_db(), span); - let mut accumulator = TyDiagAccumulator { - db, - accumulated: Vec::new(), - scope, - }; - - accumulator.visit_ty(&mut ctxt, ty); - accumulator.accumulated -} - impl TyAlias { fn subst_with(&self, _db: &dyn HirAnalysisDb, _substs: &[TyId]) -> TyId { todo!() } } -struct TyDiagAccumulator<'db> { - db: &'db dyn HirAnalysisDb, - accumulated: Vec, - scope: ScopeId, -} - -impl<'db> TyDiagAccumulator<'db> { - fn accumulate(&mut self, cause: InvalidCause, span: LazyTySpan) { - let span: DynLazySpan = span.into(); - match cause { - InvalidCause::NotFullyApplied => { - let diag = TyLowerDiag::not_fully_applied_type(span); - 
self.accumulated.push(diag); - } - - InvalidCause::KindMismatch { abs, arg } => { - let diag = TyLowerDiag::kind_mismatch(self.db, abs, arg, span); - self.accumulated.push(diag); - } - - InvalidCause::AssocTy => { - let diag = TyLowerDiag::assoc_ty(span); - self.accumulated.push(diag); - } - - // NOTE: We can `InvalidCause::Other` because it's already reported by other passes. - InvalidCause::Other => {} - } - } -} - -impl<'db> Visitor for TyDiagAccumulator<'db> { - fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { - let ty = lower_hir_ty(self.db, hir_ty, self.scope); - if let Some(cause) = ty.invalid_cause(self.db) { - self.accumulate(cause, ctxt.span().unwrap()); - } - - hir_walk_ty(self, ctxt, hir_ty); - } -} - -struct AdtDefAnalysisVisitor<'db> { - db: &'db dyn HirAnalysisDb, - accumulated: Vec, - scope: ScopeId, -} - -impl<'db> AdtDefAnalysisVisitor<'db> { - // This method ensures that field/variant types are fully applied. - fn verify_fully_applied(&mut self, ty: HirTyId, span: DynLazySpan) { - let ty = lower_hir_ty(self.db, ty, self.scope); - if !ty.is_mono_type(self.db) { - self.accumulated - .push(TyLowerDiag::not_fully_applied_type(span)); - } - } -} - -impl<'db> Visitor for AdtDefAnalysisVisitor<'db> { - fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { - self.accumulated.extend(collect_ty_lower_diags( - self.db, - hir_ty, - ctxt.span().unwrap(), - self.scope, - )); - - // We don't call `walk_ty` to make sure that we don't visit ty - // recursively, which is visited by `collect_ty_lower_diags`. - } - - fn visit_field_def(&mut self, ctxt: &mut VisitorCtxt<'_, LazyFieldDefSpan>, field: &FieldDef) { - if let Some(ty) = field.ty.to_opt() { - self.verify_fully_applied(ty, ctxt.span().unwrap().ty().into()); - } - - walk_field_def(self, ctxt, field); - } - - fn visit_variant_def( - &mut self, - ctxt: &mut VisitorCtxt<'_, LazyVariantDefSpan>, - variant: &hir::hir_def::VariantDef, - ) { - if let Some(ty) = variant.ty { - self.verify_fully_applied(ty, ctxt.span().unwrap().ty().into()); - } - - walk_variant_def(self, ctxt, variant); - } - - // TODO: We need to check cycle type. -} - struct TyBuilder<'db> { db: &'db dyn HirAnalysisDb, scope: ScopeId, @@ -444,75 +301,3 @@ fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> Ty } } } - -#[salsa::tracked(recovery_fn = check_recursive_adt_impl)] -pub(crate) fn check_recursive_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> Option { - let adt_def = lower_adt(db, adt); - - for field in adt_def.fields(db) { - for ty in field.iter_types(db) { - for adt_ref in ty.collect_direct_adts(db) { - check_recursive_adt(db, adt_ref); - } - } - } - - None -} - -fn check_recursive_adt_impl( - db: &dyn HirAnalysisDb, - cycle: &salsa::Cycle, - adt: AdtRefId, -) -> Option { - let participants: FxHashSet<_> = cycle - .participant_keys() - .map(|key| check_recursive_adt::key_from_id(key.key_index())) - .collect(); - - let adt_def = lower_adt(db, adt); - for (i, field) in adt_def.fields(db).iter().enumerate() { - for ty in field.iter_types(db) { - for field_adt_ref in ty.collect_direct_adts(db) { - if participants.contains(&field_adt_ref) && participants.contains(&adt) { - let diag = TyLowerDiag::recursive_type( - adt.name_span(db), - adt_def.variant_ty_span(db, i), - ); - return Some(diag); - } - } - } - } - - None -} - -impl TyId { - /// Collect all adts inside types which are not wrapped by indirect type - /// wrapper like pointer or reference. 
- fn collect_direct_adts(self, db: &dyn HirAnalysisDb) -> FxHashSet { - let mut collector = AdtCollector { - adts: FxHashSet::default(), - }; - - walk_ty(&mut collector, db, self); - collector.adts - } -} - -struct AdtCollector { - adts: FxHashSet, -} - -impl TyVisitor for AdtCollector { - fn visit_app(&mut self, db: &dyn HirAnalysisDb, abs: TyId, arg: TyId) { - if !abs.is_indirect(db) { - walk_ty(self, db, arg) - } - } - - fn visit_adt(&mut self, db: &dyn HirAnalysisDb, adt: AdtDef) { - self.adts.insert(adt.adt_ref(db)); - } -} diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 74d7c32e56..ec17f0fec6 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -4,6 +4,7 @@ use crate::HirAnalysisDb; use self::{diagnostics::AdtDefDiagAccumulator, ty::AdtRefId}; +pub mod adt_analysis; pub mod diagnostics; pub mod lower; pub mod trait_; @@ -44,7 +45,7 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { ); adts.map(|adt| { - lower::analyze_adt::accumulated::(self.db, adt) + adt_analysis::analyze_adt::accumulated::(self.db, adt) .into_iter() .map(|diag| Box::new(diag) as _) }) From 2c8513618debe82604138298f26b919fcf762d47 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 14 Sep 2023 00:58:18 +0200 Subject: [PATCH 301/678] Implement `Subst` and `apply_subst` --- crates/hir-analysis/src/ty/ty.rs | 49 ++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index be7516dbb6..4d7ebb1457 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -8,6 +8,7 @@ use hir::{ }, span::DynLazySpan, }; +use rustc_hash::FxHashMap; use crate::HirAnalysisDb; @@ -49,6 +50,14 @@ impl TyId { Self::new(db, TyData::TyCon(TyConcrete::Adt(adt))) } + pub(super) fn is_ty_param(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.data(db), TyData::TyParam(_)) + } + + pub(super) fn is_ty_var(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.data(db), TyData::TyVar(_)) + } + /// Perform type level application. /// If the kind is mismatched, return `TyData::Invalid`. pub(super) fn app(db: &dyn HirAnalysisDb, abs: Self, arg: Self) -> TyId { @@ -66,6 +75,21 @@ impl TyId { } } + pub(crate) fn apply_subst(self, db: &dyn HirAnalysisDb, subst: &Subst) -> TyId { + if let Some(to) = subst.get(self) { + return to; + } + + match self.data(db) { + TyData::TyApp(lhs, rhs) => { + let lhs = lhs.apply_subst(db, subst); + let rhs = rhs.apply_subst(db, subst); + TyId::app(db, lhs, rhs) + } + _ => self, + } + } + /// Returns `true` if the type is a pointer or a pointer application. pub(super) fn is_ptr(self, db: &dyn HirAnalysisDb) -> bool { match self.data(db) { @@ -379,6 +403,31 @@ impl AdtRef { } } +#[derive(Default, Clone)] +pub(crate) struct Subst { + inner: FxHashMap, +} + +impl Subst { + pub(crate) fn new() -> Self { + Self::default() + } + + /// Insert a substitution mapping. + /// This method panics when + /// 1. `from` and `to` have different kinds. + /// 2. `from` is not a `TyVar` or `TyParam`. 
+ pub(crate) fn insert(&mut self, db: &dyn HirAnalysisDb, from: TyId, to: TyId) { + debug_assert!(from.kind(db) == to.kind(db)); + debug_assert!(from.is_ty_var(db,) || from.is_ty_param(db)); + self.inner.insert(from, to); + } + + pub(crate) fn get(&self, from: TyId) -> Option { + self.inner.get(&from).copied() + } +} + pub(super) trait HasKind { fn kind(&self, db: &dyn HirAnalysisDb) -> Kind; } From 09a711287b0635ceb74f1c437d2501ab2bdeece0 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 14 Sep 2023 04:21:52 +0200 Subject: [PATCH 302/678] Implement type alias lowering --- crates/driver2/src/lib.rs | 3 +- crates/hir-analysis/src/ty/adt_analysis.rs | 61 +------- crates/hir-analysis/src/ty/diagnostics.rs | 112 +++++++++++-- crates/hir-analysis/src/ty/lower.rs | 147 ++++++++++++++---- crates/hir-analysis/src/ty/mod.rs | 37 ++++- crates/hir-analysis/src/ty/ty.rs | 35 +++-- crates/hir-analysis/src/ty/visitor.rs | 81 +++++++++- crates/hir/src/hir_def/item.rs | 16 ++ crates/hir/src/lib.rs | 1 + .../uitest/fixtures/ty/alias_arg_mismatch.fe | 14 ++ .../fixtures/ty/alias_arg_mismatch.snap | 32 ++++ crates/uitest/fixtures/ty/alias_cycle.fe | 5 + crates/uitest/fixtures/ty/alias_cycle.snap | 30 ++++ crates/uitest/fixtures/ty/alias_non_mono.fe | 7 + crates/uitest/fixtures/ty/alias_non_mono.snap | 12 ++ crates/uitest/fixtures/ty/recursive_type.snap | 10 +- 16 files changed, 484 insertions(+), 119 deletions(-) create mode 100644 crates/uitest/fixtures/ty/alias_arg_mismatch.fe create mode 100644 crates/uitest/fixtures/ty/alias_arg_mismatch.snap create mode 100644 crates/uitest/fixtures/ty/alias_cycle.fe create mode 100644 crates/uitest/fixtures/ty/alias_cycle.snap create mode 100644 crates/uitest/fixtures/ty/alias_non_mono.fe create mode 100644 crates/uitest/fixtures/ty/alias_non_mono.snap diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index b045ec784b..50de2f3d03 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -17,7 +17,7 @@ use hir::{ }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, - ty::TypeDefAnalysisPass, + ty::{TypeAliasAnalysisPass, TypeDefAnalysisPass}, HirAnalysisDb, }; @@ -146,5 +146,6 @@ fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(TypeDefAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TypeAliasAnalysisPass::new(db))); pass_manager } diff --git a/crates/hir-analysis/src/ty/adt_analysis.rs b/crates/hir-analysis/src/ty/adt_analysis.rs index b9cada715c..574f2dd745 100644 --- a/crates/hir-analysis/src/ty/adt_analysis.rs +++ b/crates/hir-analysis/src/ty/adt_analysis.rs @@ -1,6 +1,6 @@ use hir::{ hir_def::{scope_graph::ScopeId, FieldDef, TypeId as HirTyId}, - visitor::prelude::{walk_ty as hir_walk_ty, *}, + visitor::prelude::*, }; use rustc_hash::FxHashSet; use salsa::function::Configuration; @@ -10,8 +10,8 @@ use crate::{ty::diagnostics::AdtDefDiagAccumulator, HirAnalysisDb}; use super::{ diagnostics::TyLowerDiag, lower::{lower_adt, lower_hir_ty}, - ty::{AdtDef, AdtRefId, InvalidCause, TyId}, - visitor::{walk_ty, TyVisitor}, + ty::{AdtDef, AdtRefId, TyId}, + visitor::{walk_ty, TyDiagCollector, TyVisitor}, }; #[salsa::tracked] @@ -102,61 +102,12 @@ impl<'db> Visitor for AdtDefAnalysisVisitor<'db> { pub(super) fn collect_ty_lower_diags( db: &dyn HirAnalysisDb, - ty: 
HirTyId, + hir_ty: HirTyId, span: LazyTySpan, scope: ScopeId, ) -> Vec { - let mut ctxt = VisitorCtxt::new(db.as_hir_db(), span); - let mut accumulator = TyDiagAccumulator { - db, - accumulated: Vec::new(), - scope, - }; - - accumulator.visit_ty(&mut ctxt, ty); - accumulator.accumulated -} - -struct TyDiagAccumulator<'db> { - db: &'db dyn HirAnalysisDb, - accumulated: Vec, - scope: ScopeId, -} - -impl<'db> TyDiagAccumulator<'db> { - fn accumulate(&mut self, cause: InvalidCause, span: LazyTySpan) { - let span: DynLazySpan = span.into(); - match cause { - InvalidCause::NotFullyApplied => { - let diag = TyLowerDiag::not_fully_applied_type(span); - self.accumulated.push(diag); - } - - InvalidCause::KindMismatch { abs, arg } => { - let diag = TyLowerDiag::kind_mismatch(self.db, abs, arg, span); - self.accumulated.push(diag); - } - - InvalidCause::AssocTy => { - let diag = TyLowerDiag::assoc_ty(span); - self.accumulated.push(diag); - } - - // NOTE: We can `InvalidCause::Other` because it's already reported by other passes. - InvalidCause::Other => {} - } - } -} - -impl<'db> Visitor for TyDiagAccumulator<'db> { - fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { - let ty = lower_hir_ty(self.db, hir_ty, self.scope); - if let Some(cause) = ty.invalid_cause(self.db) { - self.accumulate(cause, ctxt.span().unwrap()); - } - - hir_walk_ty(self, ctxt, hir_ty); - } + let collector = TyDiagCollector::new(db, scope); + collector.collect(hir_ty, span) } fn check_recursive_adt_impl( diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 30d79cb830..f2da438b2f 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -3,7 +3,9 @@ use common::diagnostics::{ }; use hir::{ diagnostics::DiagnosticVoucher, + hir_def::TypeAlias as HirTypeAlias, span::{DynLazySpan, LazySpan}, + HirDb, }; use crate::HirAnalysisDb; @@ -18,11 +20,20 @@ pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyLowerDiag { NotFullyAppliedType(DynLazySpan), + TyAppFailed(DynLazySpan, String), KindMismatch(DynLazySpan, String), RecursiveType { primary_span: DynLazySpan, field_span: DynLazySpan, }, + + TypeAliasArgumentMismatch { + span: DynLazySpan, + type_alias: HirTypeAlias, + n_given_arg: usize, + }, + TypeAliasCycle(DynLazySpan), + AssocTy(DynLazySpan), } @@ -31,12 +42,28 @@ impl TyLowerDiag { Self::NotFullyAppliedType(span) } - pub fn kind_mismatch(db: &dyn HirAnalysisDb, abs: TyId, arg: TyId, span: DynLazySpan) -> Self { + pub fn ty_app_failed(db: &dyn HirAnalysisDb, span: DynLazySpan, abs: TyId, arg: TyId) -> Self { let k_abs = abs.kind(db); let k_arg = arg.kind(db); let msg = format!("can't apply `{}` kind to `{}` kind", k_arg, k_abs); - Self::KindMismatch(span, msg.into()) + Self::TyAppFailed(span, msg.into()) + } + + pub fn kind_mismatch( + db: &dyn HirAnalysisDb, + span: DynLazySpan, + expected: TyId, + actual: TyId, + ) -> Self { + debug_assert!(expected.kind(db) != actual.kind(db)); + + let msg = format!( + "expected `{}` kind, but found `{}` kind", + expected.kind(db), + actual.kind(db) + ); + Self::KindMismatch(span, msg) } pub(super) fn recursive_type(primary_span: DynLazySpan, field_span: DynLazySpan) -> Self { @@ -46,6 +73,22 @@ impl TyLowerDiag { } } + pub(super) fn type_alias_argument_mismatch( + span: DynLazySpan, + type_alias: HirTypeAlias, + n_given_arg: usize, + ) -> Self { + Self::TypeAliasArgumentMismatch { + span, + 
type_alias, + n_given_arg, + } + } + + pub(super) fn type_alias_cycle(span: DynLazySpan) -> Self { + Self::TypeAliasCycle(span) + } + pub(super) fn assoc_ty(span: DynLazySpan) -> Self { Self::AssocTy(span) } @@ -53,20 +96,33 @@ impl TyLowerDiag { fn local_code(&self) -> u16 { match self { Self::NotFullyAppliedType(_) => 0, - Self::KindMismatch(_, _) => 1, - Self::RecursiveType { .. } => 2, - Self::AssocTy(_) => 3, + Self::TyAppFailed(_, _) => 1, + Self::KindMismatch(_, _) => 2, + Self::RecursiveType { .. } => 3, + Self::TypeAliasArgumentMismatch { .. } => 4, + Self::TypeAliasCycle(_) => 5, + Self::AssocTy(_) => 6, } } - fn message(&self) -> String { + fn message(&self, db: &dyn HirDb) -> String { match self { Self::NotFullyAppliedType(_) => "expected fully applied type".to_string(), - - Self::KindMismatch(_, _) => "kind mismatch in type application".to_string(), - + Self::TyAppFailed(_, _) => "kind mismatch in type application".to_string(), + Self::KindMismatch(_, _) => "kind mismatch between two types".to_string(), Self::RecursiveType { .. } => "recursive type is not allowed".to_string(), + Self::TypeAliasArgumentMismatch { + type_alias, + n_given_arg, + .. + } => format!( + "type alias expects {} generic arguments, but {} given", + type_alias.generic_params(db).len(db), + n_given_arg + ), + Self::TypeAliasCycle(_) => "recursive type alias cycle is detected".to_string(), + Self::AssocTy(_) => "associated type is not supported ".to_string(), } } @@ -79,6 +135,12 @@ impl TyLowerDiag { span.resolve(db), )], + Self::TyAppFailed(span, msg) => vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], + Self::KindMismatch(span, msg) => vec![SubDiagnostic::new( LabelStyle::Primary, msg.clone(), @@ -103,6 +165,36 @@ impl TyLowerDiag { ] } + Self::TypeAliasArgumentMismatch { + span: primary_span, + type_alias, + .. 
+ } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected {} arguments here", + type_alias + .generic_params(db.as_hir_db()) + .len(db.as_hir_db()) + ), + primary_span.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + format!("type alias defined here"), + type_alias.lazy_span().resolve(db), + ), + ] + } + + Self::TypeAliasCycle(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "cycle happens here".to_string(), + span.resolve(db), + )], + Self::AssocTy(span) => vec![SubDiagnostic::new( LabelStyle::Primary, "associated type is not implemented".to_string(), @@ -124,7 +216,7 @@ impl DiagnosticVoucher for TyLowerDiag { fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { let severity = self.severity(); let error_code = self.error_code(); - let message = self.message(); + let message = self.message(db.as_hir_db()); let sub_diags = self.sub_diags(db); CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 39cc6d88d6..1aa8e2c45b 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -1,18 +1,24 @@ use either::Either; use hir::hir_def::{ - kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, ItemKind, - Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, - VariantDefListId, + kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, + GenericParam as HirGenericParam, GenericParamOwner, ItemKind, Partial, PathId, + TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, }; use crate::{ name_resolution::{ resolve_path_early, resolve_segments_early, EarlyResolvedPath, NameDomain, NameResKind, }, + ty::{ + diagnostics::{TyLowerDiag, TypeAliasDefDiagAccumulator}, + visitor::TyDiagCollector, + }, HirAnalysisDb, }; -use super::ty::{AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, TyData, TyId, TyParam}; +use super::ty::{ + AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, Subst, TyData, TyId, TyParam, +}; #[salsa::tracked] pub fn lower_hir_ty(db: &dyn HirAnalysisDb, ty: HirTyId, scope: ScopeId) -> TyId { @@ -24,9 +30,60 @@ pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> AdtDef { AdtTyBuilder::new(db, adt).build() } -#[salsa::tracked] -pub(crate) fn lower_type_alias(_db: &dyn HirAnalysisDb, _alias: HirTypeAlias) -> TyAlias { - todo!() +#[salsa::tracked(return_ref, recovery_fn = recover_lower_type_alias_cycle)] +pub(crate) fn lower_type_alias(db: &dyn HirAnalysisDb, alias: HirTypeAlias) -> TyAlias { + let params = lower_generic_param_list(db, alias.into()); + + let Some(hir_ty) = alias.ty(db.as_hir_db()).to_opt() else { + return TyAlias { + alias, + alias_to: TyId::invalid(db, InvalidCause::Other), + params, + }; + }; + + let ty = lower_hir_ty(db, hir_ty, alias.scope()); + let alias_to = if ty.is_mono_type(db) { + let collector = TyDiagCollector::new(db, alias.scope()); + let diags = collector.collect(hir_ty, alias.lazy_span().ty()); + if diags.is_empty() { + ty + } else { + diags.into_iter().for_each(|diag| { + TypeAliasDefDiagAccumulator::push(db, diag); + }); + TyId::invalid(db, InvalidCause::Other) + } + } else { + TypeAliasDefDiagAccumulator::push( + db, + TyLowerDiag::not_fully_applied_type(alias.lazy_span().ty().into()), + ); + TyId::invalid(db, InvalidCause::Other) + }; + + TyAlias { + alias, + alias_to, + params, + } +} + +fn 
recover_lower_type_alias_cycle( + db: &dyn HirAnalysisDb, + _cycle: &salsa::Cycle, + alias: HirTypeAlias, +) -> TyAlias { + let diag = TyLowerDiag::type_alias_cycle(alias.lazy_span().ty().into()); + TypeAliasDefDiagAccumulator::push(db, diag); + + let alias_to = TyId::invalid(db, InvalidCause::Other); + let params = lower_generic_param_list(db, alias.into()); + TyAlias { + alias, + alias_to, + params, + } } /// Represents a lowered type alias. `TyAlias` itself isn't a type, but @@ -37,13 +94,40 @@ pub(crate) fn lower_type_alias(_db: &dyn HirAnalysisDb, _alias: HirTypeAlias) -> /// right hand side of the alias declaration must be a fully applied type. #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct TyAlias { + alias: HirTypeAlias, alias_to: TyId, params: Vec, } impl TyAlias { - fn subst_with(&self, _db: &dyn HirAnalysisDb, _substs: &[TyId]) -> TyId { - todo!() + fn apply_subst(&self, db: &dyn HirAnalysisDb, arg_tys: &[TyId]) -> TyId { + if arg_tys.len() != self.params.len() { + return TyId::invalid( + db, + InvalidCause::TypeAliasArgumentMismatch { + alias: self.alias, + n_given_args: arg_tys.len(), + }, + ); + } + let mut subst = Subst::new(); + + for (¶m, &arg) in self.params.iter().zip(arg_tys.iter()) { + let arg = if param.kind(db) != arg.kind(db) { + TyId::invalid( + db, + InvalidCause::KindMismatch { + expected: param, + given: arg, + }, + ) + } else { + arg + }; + subst.insert(db, param, arg); + } + + self.alias_to.apply_subst(db, &subst) } } @@ -93,7 +177,7 @@ impl<'db> TyBuilder<'db> { .into_iter() .fold(ty, |acc, arg| TyId::app(self.db, acc, arg)), - Either::Right(alias) => alias.subst_with(self.db, &arg_tys), + Either::Right(alias) => alias.apply_subst(self.db, &arg_tys), } } @@ -128,7 +212,7 @@ impl<'db> TyBuilder<'db> { }) } - fn lower_resolved_path(&mut self, path: &EarlyResolvedPath) -> Either { + fn lower_resolved_path(&mut self, path: &EarlyResolvedPath) -> Either { let res = match path { EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { Ok(res) => res, @@ -152,7 +236,11 @@ impl<'db> TyBuilder<'db> { let item = match scope { ScopeId::Item(item) => item, ScopeId::GenericParam(item, idx) => { - return Either::Left(lower_generic_param(self.db, item, idx)); + let params = GenericParamOwner::from_item_opt(item) + .unwrap() + .params(self.db.as_hir_db()); + let ty = lower_generic_param(self.db, ¶ms.data(self.db.as_hir_db())[idx], idx); + return Either::Left(ty); } _ => unreachable!(), }; @@ -210,23 +298,13 @@ impl<'db> AdtTyBuilder<'db> { } fn build(mut self) -> AdtDef { - self.collect_params(); + self.collect_generic_params(); self.collect_variants(); AdtDef::new(self.db, self.adt, self.params, self.variants) } - fn collect_params(&mut self) { - let hir_db = self.db.as_hir_db(); - let params = match self.adt.data(self.db) { - AdtRef::Struct(struct_) => struct_.generic_params(hir_db), - AdtRef::Enum(enum_) => enum_.generic_params(hir_db), - AdtRef::Contract(_) => return, - }; - - for idx in 0..params.len(hir_db) { - let param = lower_generic_param(self.db, self.adt.as_item(self.db), idx); - self.params.push(param); - } + fn collect_generic_params(&mut self) { + self.params = lower_generic_param_list(self.db, self.adt.as_item(self.db)); } fn collect_variants(&mut self) { @@ -275,14 +353,21 @@ impl<'db> AdtTyBuilder<'db> { } } -fn lower_generic_param(db: &dyn HirAnalysisDb, item: ItemKind, idx: usize) -> TyId { - let params = match item { - ItemKind::Struct(struct_) => struct_.generic_params(db.as_hir_db()), - ItemKind::Enum(enum_) => 
enum_.generic_params(db.as_hir_db()), - _ => unreachable!(), +fn lower_generic_param_list(db: &dyn HirAnalysisDb, item: ItemKind) -> Vec { + let Some(params_owner) = GenericParamOwner::from_item_opt(item) else { + return Vec::new(); }; - let param = ¶ms.data(db.as_hir_db())[idx]; + params_owner + .params(db.as_hir_db()) + .data(db.as_hir_db()) + .iter() + .enumerate() + .map(|(idx, param)| lower_generic_param(db, param, idx)) + .collect() +} + +fn lower_generic_param(db: &dyn HirAnalysisDb, param: &HirGenericParam, idx: usize) -> TyId { match param { GenericParam::Type(param) => { if let Some(name) = param.name.to_opt() { diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index ec17f0fec6..5f99ded813 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -1,8 +1,11 @@ -use hir::analysis_pass::ModuleAnalysisPass; - use crate::HirAnalysisDb; +use hir::analysis_pass::ModuleAnalysisPass; +use rustc_hash::FxHashSet; -use self::{diagnostics::AdtDefDiagAccumulator, ty::AdtRefId}; +use self::{ + diagnostics::{AdtDefDiagAccumulator, TyLowerDiag, TypeAliasDefDiagAccumulator}, + ty::AdtRefId, +}; pub mod adt_analysis; pub mod diagnostics; @@ -53,3 +56,31 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { .collect() } } + +pub struct TypeAliasAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> TypeAliasAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} +impl<'db> ModuleAnalysisPass for TypeAliasAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: hir::hir_def::TopLevelMod, + ) -> Vec> { + let diags: FxHashSet = top_mod + .all_type_aliases(self.db.as_hir_db()) + .iter() + .map(|&alias| { + lower::lower_type_alias::accumulated::(self.db, alias) + .into_iter() + }) + .flatten() + .collect(); + + diags.into_iter().map(|diag| Box::new(diag) as _).collect() + } +} diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 4d7ebb1457..e0ff87ceb0 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -4,7 +4,8 @@ use hir::{ hir_def::{ prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, scope_graph::ScopeId, - Contract, Enum, IdentId, ItemKind, Partial, Struct, TypeId as HirTyId, + Contract, Enum, IdentId, ItemKind, Partial, Struct, TypeAlias as HirTypeAlias, + TypeId as HirTyId, }, span::DynLazySpan, }; @@ -38,6 +39,15 @@ impl TyId { } } + /// Returns `true` if the type is declared as a monotype or fully applied + /// type. + pub fn is_mono_type(self, db: &dyn HirAnalysisDb) -> bool { + match self.kind(db) { + Kind::Abs(_, _) => false, + _ => true, + } + } + pub(super) fn ptr(db: &dyn HirAnalysisDb) -> Self { Self::new(db, TyData::TyCon(TyConcrete::Prim(PrimTy::Ptr))) } @@ -71,7 +81,7 @@ impl TyId { if k_ty.is_applicable(&k_arg) { Self::new(db, TyData::TyApp(abs, arg)) } else { - Self::invalid(db, InvalidCause::KindMismatch { abs, arg }) + Self::invalid(db, InvalidCause::TyAppFailed { abs, arg }) } } @@ -131,15 +141,6 @@ impl TyId { }, } } - - /// Returns true if the type is declared as a monotype or fully applied - /// type. - pub(super) fn is_mono_type(self, db: &dyn HirAnalysisDb) -> bool { - match self.kind(db) { - Kind::Abs(_, _) => false, - _ => true, - } - } } #[salsa::tracked] @@ -243,12 +244,20 @@ pub enum InvalidCause { /// Type is not fully applied where it is required. NotFullyApplied, - /// Kind mismatch in type level application. 
-    KindMismatch { abs: TyId, arg: TyId },
+    /// Type application failed due to kind mismatch.
+    TyAppFailed { abs: TyId, arg: TyId },
+
+    /// Kind mismatch between two types.
+    KindMismatch { expected: TyId, given: TyId },
 
     /// Associated Type is not allowed at the moment.
     AssocTy,
 
+    TypeAliasArgumentMismatch {
+        alias: HirTypeAlias,
+        n_given_args: usize,
+    },
+
     /// `Other` indicates the cause is already reported in other analysis
     /// passes, e.g., parser or name resolution.
     Other,
diff --git a/crates/hir-analysis/src/ty/visitor.rs index ca58ff86c6..532297e809 100644
--- a/crates/hir-analysis/src/ty/visitor.rs
+++ b/crates/hir-analysis/src/ty/visitor.rs
@@ -1,6 +1,16 @@
+use hir::{
+    hir_def::{scope_graph::ScopeId, TypeId as HirTyId},
+    span::DynLazySpan,
+    visitor::prelude::{walk_ty as hir_walk_ty, *},
+};
+
 use crate::HirAnalysisDb;
 
-use super::ty::{AdtDef, InvalidCause, PrimTy, TyConcrete, TyData, TyId, TyParam, TyVar};
+use super::{
+    diagnostics::TyLowerDiag,
+    lower::lower_hir_ty,
+    ty::{AdtDef, InvalidCause, PrimTy, TyConcrete, TyData, TyId, TyParam, TyVar},
+};
 
 pub trait TyVisitor {
     fn visit_ty(&mut self, db: &dyn HirAnalysisDb, ty: TyId) {
@@ -60,3 +70,72 @@ where
         TyConcrete::Abs => {}
     }
 }
+
+pub(super) struct TyDiagCollector<'db> {
+    db: &'db dyn HirAnalysisDb,
+    accumulated: Vec,
+    scope: ScopeId,
+}
+
+impl<'db> TyDiagCollector<'db> {
+    pub(super) fn new(db: &'db dyn HirAnalysisDb, scope: ScopeId) -> Self {
+        Self {
+            db,
+            accumulated: Vec::new(),
+            scope,
+        }
+    }
+
+    pub(super) fn collect(mut self, hir_ty: HirTyId, span: LazyTySpan) -> Vec {
+        let mut ctxt = VisitorCtxt::new(self.db.as_hir_db(), span);
+        self.visit_ty(&mut ctxt, hir_ty);
+        self.accumulated
+    }
+
+    fn collect_impl(&mut self, cause: InvalidCause, span: LazyTySpan) {
+        let span: DynLazySpan = span.into();
+        match cause {
+            InvalidCause::NotFullyApplied => {
+                let diag = TyLowerDiag::not_fully_applied_type(span);
+                self.accumulated.push(diag);
+            }
+
+            InvalidCause::TyAppFailed { abs, arg } => {
+                let diag = TyLowerDiag::ty_app_failed(self.db, span, abs, arg);
+                self.accumulated.push(diag);
+            }
+
+            InvalidCause::KindMismatch { expected, given } => {
+                let diag = TyLowerDiag::kind_mismatch(self.db, span, expected, given);
+                self.accumulated.push(diag);
+            }
+
+            InvalidCause::TypeAliasArgumentMismatch {
+                alias,
+                n_given_args: n_given_arg,
+            } => {
+                let diag = TyLowerDiag::type_alias_argument_mismatch(span, alias, n_given_arg);
+                self.accumulated.push(diag);
+            }
+
+            InvalidCause::AssocTy => {
+                let diag = TyLowerDiag::assoc_ty(span);
+                self.accumulated.push(diag);
+            }
+
+            // NOTE: We can ignore `InvalidCause::Other` because it's already reported by other passes.
+ InvalidCause::Other => {} + } + } +} + +impl<'db> Visitor for TyDiagCollector<'db> { + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { + let ty = lower_hir_ty(self.db, hir_ty, self.scope); + if let Some(cause) = ty.invalid_cause(self.db) { + self.collect_impl(cause, ctxt.span().unwrap()); + } + + hir_walk_ty(self, ctxt, hir_ty); + } +} diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index a9afccb8e3..1efe8fb28b 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -301,6 +301,12 @@ impl TopLevelMod { pub fn all_contracts<'db>(self, db: &'db dyn HirDb) -> &'db Vec { all_contracts_in_top_mod(db, self) } + + /// Returns all type aliases in the top level module including ones in + /// nested + pub fn all_type_aliases<'db>(self, db: &'db dyn HirDb) -> &'db Vec { + all_type_aliases_in_top_mod(db, self) + } } #[salsa::tracked(return_ref)] @@ -325,6 +331,16 @@ pub fn all_enums_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { .collect() } +#[salsa::tracked(return_ref)] +pub fn all_type_aliases_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { + top_mod + .children_non_nested(db) + .filter_map(|item| match item { + ItemKind::TypeAlias(alias) => Some(alias), + _ => None, + }) + .collect() +} #[salsa::tracked(return_ref)] pub fn all_contracts_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { top_mod diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 21fca1ecd2..bfa63bb91f 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -53,6 +53,7 @@ pub struct Jar( hir_def::all_structs_in_top_mod, hir_def::all_enums_in_top_mod, hir_def::all_contracts_in_top_mod, + hir_def::all_type_aliases_in_top_mod, /// Accumulated diagnostics. ParseErrorAccumulator, /// Private tracked functions. 
These are not part of the public API, and diff --git a/crates/uitest/fixtures/ty/alias_arg_mismatch.fe b/crates/uitest/fixtures/ty/alias_arg_mismatch.fe new file mode 100644 index 0000000000..2e4b6b85c6 --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_arg_mismatch.fe @@ -0,0 +1,14 @@ +struct S2 { + t: T1 + t2: T1 + u: T2 + u2: T2 +} + +pub struct S { + t: T + u: U +} +type T1 = S +type T2 = T1 + diff --git a/crates/uitest/fixtures/ty/alias_arg_mismatch.snap b/crates/uitest/fixtures/ty/alias_arg_mismatch.snap new file mode 100644 index 0000000000..9d88d22aed --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_arg_mismatch.snap @@ -0,0 +1,32 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/alias_arg_mismatch.fe +--- +error[3-0004]: type alias expects 2 generic arguments, but 1 given + ┌─ alias_arg_mismatch.fe:2:8 + │ + 2 │ t: T1 + │ ^^^^^^^ expected 2 arguments here + · +12 │ type T1 = S + │ ----------------------- type alias defined here + +error[3-0004]: type alias expects 1 generic arguments, but 2 given + ┌─ alias_arg_mismatch.fe:4:8 + │ + 4 │ u: T2 + │ ^^^^^^^^^^^^ expected 1 arguments here + · +13 │ type T2 = T1 + │ ------------------ type alias defined here + +error[3-0004]: type alias expects 2 generic arguments, but 1 given + ┌─ alias_arg_mismatch.fe:13:14 + │ +12 │ type T1 = S + │ ----------------------- type alias defined here +13 │ type T2 = T1 + │ ^^^^^ expected 2 arguments here + + diff --git a/crates/uitest/fixtures/ty/alias_cycle.fe b/crates/uitest/fixtures/ty/alias_cycle.fe new file mode 100644 index 0000000000..1249e93804 --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_cycle.fe @@ -0,0 +1,5 @@ +type T1 = T1 + +type T2 = T3 +type T3 = T4 +type T4 = T2 diff --git a/crates/uitest/fixtures/ty/alias_cycle.snap b/crates/uitest/fixtures/ty/alias_cycle.snap new file mode 100644 index 0000000000..f8f22288b2 --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_cycle.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/alias_cycle.fe +--- +error[3-0005]: recursive type alias cycle is detected + ┌─ alias_cycle.fe:1:11 + │ +1 │ type T1 = T1 + │ ^^ cycle happens here + +error[3-0005]: recursive type alias cycle is detected + ┌─ alias_cycle.fe:3:11 + │ +3 │ type T2 = T3 + │ ^^ cycle happens here + +error[3-0005]: recursive type alias cycle is detected + ┌─ alias_cycle.fe:4:11 + │ +4 │ type T3 = T4 + │ ^^ cycle happens here + +error[3-0005]: recursive type alias cycle is detected + ┌─ alias_cycle.fe:5:11 + │ +5 │ type T4 = T2 + │ ^^ cycle happens here + + diff --git a/crates/uitest/fixtures/ty/alias_non_mono.fe b/crates/uitest/fixtures/ty/alias_non_mono.fe new file mode 100644 index 0000000000..22a70a9346 --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_non_mono.fe @@ -0,0 +1,7 @@ +pub struct Foo { + t: T + u: U +} + +pub type T = Foo +pub type T2 = Foo \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/alias_non_mono.snap b/crates/uitest/fixtures/ty/alias_non_mono.snap new file mode 100644 index 0000000000..f8b7d3152d --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_non_mono.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/alias_non_mono.fe +--- +error[3-0000]: expected fully applied type + ┌─ alias_non_mono.fe:7:15 + │ +7 │ pub type T2 = Foo + │ ^^^^^^^^ expected fully applied type here + + diff --git a/crates/uitest/fixtures/ty/recursive_type.snap 
b/crates/uitest/fixtures/ty/recursive_type.snap index daf6f6bf12..666ca1e778 100644 --- a/crates/uitest/fixtures/ty/recursive_type.snap +++ b/crates/uitest/fixtures/ty/recursive_type.snap @@ -3,7 +3,7 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/recursive_type.fe --- -error[3-0002]: recursive type is not allowed +error[3-0003]: recursive type is not allowed ┌─ recursive_type.fe:1:12 │ 1 │ pub struct S1 { @@ -11,7 +11,7 @@ error[3-0002]: recursive type is not allowed 2 │ s: S1 │ -- recursion occurs here -error[3-0002]: recursive type is not allowed +error[3-0003]: recursive type is not allowed ┌─ recursive_type.fe:5:12 │ 5 │ pub struct S2 { @@ -19,7 +19,7 @@ error[3-0002]: recursive type is not allowed 6 │ s: S3 │ -- recursion occurs here -error[3-0002]: recursive type is not allowed +error[3-0003]: recursive type is not allowed ┌─ recursive_type.fe:9:12 │ 9 │ pub struct S3 { @@ -27,7 +27,7 @@ error[3-0002]: recursive type is not allowed 10 │ s: S4 │ -- recursion occurs here -error[3-0002]: recursive type is not allowed +error[3-0003]: recursive type is not allowed ┌─ recursive_type.fe:13:12 │ 13 │ pub struct S4 { @@ -35,7 +35,7 @@ error[3-0002]: recursive type is not allowed 14 │ s: S2 │ -- recursion occurs here -error[3-0002]: recursive type is not allowed +error[3-0003]: recursive type is not allowed ┌─ recursive_type.fe:22:12 │ 22 │ pub struct S6 { From 1fdfad9356e9e16f95f1fdf83e23eaaf5f1809f4 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 14 Sep 2023 17:03:09 +0200 Subject: [PATCH 303/678] Improve kind mismatch error message --- crates/hir-analysis/src/ty/diagnostics.rs | 49 +++------ crates/hir-analysis/src/ty/lower.rs | 5 +- crates/hir-analysis/src/ty/ty.rs | 69 +++++++----- crates/hir-analysis/src/ty/visitor.rs | 103 +++++++++++++++--- .../fixtures/ty/alias_arg_mismatch.snap | 6 +- crates/uitest/fixtures/ty/alias_cycle.snap | 8 +- .../uitest/fixtures/ty/alias_kind_mismatch.fe | 13 +++ .../fixtures/ty/alias_kind_mismatch.snap | 24 ++++ crates/uitest/fixtures/ty/kind_mismatch.fe | 3 + crates/uitest/fixtures/ty/kind_mismatch.snap | 32 +++++- crates/uitest/fixtures/ty/recursive_type.snap | 10 +- 11 files changed, 222 insertions(+), 100 deletions(-) create mode 100644 crates/uitest/fixtures/ty/alias_kind_mismatch.fe create mode 100644 crates/uitest/fixtures/ty/alias_kind_mismatch.snap diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index f2da438b2f..440bb224e2 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -8,9 +8,7 @@ use hir::{ HirDb, }; -use crate::HirAnalysisDb; - -use super::ty::TyId; +use super::ty::Kind; #[salsa::accumulator] pub struct AdtDefDiagAccumulator(pub(super) TyLowerDiag); @@ -20,7 +18,6 @@ pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyLowerDiag { NotFullyAppliedType(DynLazySpan), - TyAppFailed(DynLazySpan, String), KindMismatch(DynLazySpan, String), RecursiveType { primary_span: DynLazySpan, @@ -42,27 +39,15 @@ impl TyLowerDiag { Self::NotFullyAppliedType(span) } - pub fn ty_app_failed(db: &dyn HirAnalysisDb, span: DynLazySpan, abs: TyId, arg: TyId) -> Self { - let k_abs = abs.kind(db); - let k_arg = arg.kind(db); - - let msg = format!("can't apply `{}` kind to `{}` kind", k_arg, k_abs); - Self::TyAppFailed(span, msg.into()) - } + pub fn kind_mismatch(span: DynLazySpan, expected: Option, actual: Kind) -> Self { + let msg = if 
let Some(expected) = expected { + debug_assert!(expected != actual); - pub fn kind_mismatch( - db: &dyn HirAnalysisDb, - span: DynLazySpan, - expected: TyId, - actual: TyId, - ) -> Self { - debug_assert!(expected.kind(db) != actual.kind(db)); + format!("expected `{}` kind, but found `{}` kind", expected, actual,) + } else { + "too many generic arguments".to_string() + }; - let msg = format!( - "expected `{}` kind, but found `{}` kind", - expected.kind(db), - actual.kind(db) - ); Self::KindMismatch(span, msg) } @@ -96,19 +81,17 @@ impl TyLowerDiag { fn local_code(&self) -> u16 { match self { Self::NotFullyAppliedType(_) => 0, - Self::TyAppFailed(_, _) => 1, - Self::KindMismatch(_, _) => 2, - Self::RecursiveType { .. } => 3, - Self::TypeAliasArgumentMismatch { .. } => 4, - Self::TypeAliasCycle(_) => 5, - Self::AssocTy(_) => 6, + Self::KindMismatch(_, _) => 1, + Self::RecursiveType { .. } => 2, + Self::TypeAliasArgumentMismatch { .. } => 3, + Self::TypeAliasCycle(_) => 4, + Self::AssocTy(_) => 5, } } fn message(&self, db: &dyn HirDb) -> String { match self { Self::NotFullyAppliedType(_) => "expected fully applied type".to_string(), - Self::TyAppFailed(_, _) => "kind mismatch in type application".to_string(), Self::KindMismatch(_, _) => "kind mismatch between two types".to_string(), Self::RecursiveType { .. } => "recursive type is not allowed".to_string(), @@ -135,12 +118,6 @@ impl TyLowerDiag { span.resolve(db), )], - Self::TyAppFailed(span, msg) => vec![SubDiagnostic::new( - LabelStyle::Primary, - msg.clone(), - span.resolve(db), - )], - Self::KindMismatch(span, msg) => vec![SubDiagnostic::new( LabelStyle::Primary, msg.clone(), diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 1aa8e2c45b..a853a8f155 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -116,10 +116,7 @@ impl TyAlias { let arg = if param.kind(db) != arg.kind(db) { TyId::invalid( db, - InvalidCause::KindMismatch { - expected: param, - given: arg, - }, + InvalidCause::kind_mismatch(param.kind(db).into(), arg.kind(db)), ) } else { arg diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index e0ff87ceb0..8f9e5abf53 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -71,18 +71,20 @@ impl TyId { /// Perform type level application. /// If the kind is mismatched, return `TyData::Invalid`. pub(super) fn app(db: &dyn HirAnalysisDb, abs: Self, arg: Self) -> TyId { - if abs.is_invalid(db) || arg.is_invalid(db) { - return TyId::invalid(db, InvalidCause::Other); - } - - let k_ty = abs.kind(db); + let k_abs = abs.kind(db); let k_arg = arg.kind(db); - if k_ty.is_applicable(&k_arg) { - Self::new(db, TyData::TyApp(abs, arg)) - } else { - Self::invalid(db, InvalidCause::TyAppFailed { abs, arg }) - } + let arg = match k_abs { + Kind::Abs(k_expected, _) if k_expected.as_ref() == k_arg => arg, + Kind::Abs(k_abs_arg, _) => Self::invalid( + db, + InvalidCause::kind_mismatch(k_abs_arg.as_ref().into(), k_arg), + ), + Kind::Star => Self::invalid(db, InvalidCause::kind_mismatch(None, k_arg)), + Kind::Any => arg, + }; + + Self::new(db, TyData::TyApp(abs, arg)) } pub(crate) fn apply_subst(self, db: &dyn HirAnalysisDb, subst: &Subst) -> TyId { @@ -244,11 +246,8 @@ pub enum InvalidCause { /// Type is not fully applied where it is required. NotFullyApplied, - /// Type application faield due to Kind mismatch. - TyAppFailed { abs: TyId, arg: TyId }, - /// Kind mismatch between two types. 
- KindMismatch { expected: TyId, given: TyId }, + KindMismatch { expected: Option, given: Kind }, /// Associated Type is not allowed at the moment. AssocTy, @@ -263,7 +262,16 @@ pub enum InvalidCause { Other, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +impl InvalidCause { + pub(super) fn kind_mismatch(expected: Option<&Kind>, given: &Kind) -> Self { + Self::KindMismatch { + expected: expected.cloned(), + given: given.clone(), + } + } +} + +#[derive(Debug, Clone, Hash)] pub enum Kind { /// Represents monotypes, `*`. Star, @@ -277,17 +285,23 @@ pub enum Kind { Any, } +impl PartialEq for Kind { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Star, Self::Star) => true, + (Self::Abs(lhs1, rhs1), Self::Abs(lhs2, rhs2)) => lhs1 == lhs2 && rhs1 == rhs2, + (Self::Any, _) => true, + (_, Self::Any) => true, + _ => false, + } + } +} +impl Eq for Kind {} + impl Kind { fn abs(lhs: Kind, rhs: Kind) -> Self { Kind::Abs(Box::new(lhs), Box::new(rhs)) } - - fn is_applicable(&self, rhs: &Self) -> bool { - match self { - Self::Abs(k_arg, _) => k_arg.as_ref() == rhs, - _ => false, - } - } } impl fmt::Display for Kind { @@ -447,12 +461,11 @@ impl HasKind for TyData { TyData::TyVar(ty_var) => ty_var.kind(db), TyData::TyParam(ty_param) => ty_param.kind.clone(), TyData::TyCon(ty_const) => ty_const.kind(db), - TyData::TyApp(abs, arg) => match abs.kind(db) { - Kind::Abs(k_arg, k_ret) => { - debug_assert!(k_arg.as_ref() == arg.kind(db)); - k_ret.as_ref().clone() - } - _ => unreachable!(), + TyData::TyApp(abs, _) => match abs.kind(db) { + // `TyId::app` method handles the kind mismatch, so we don't need to verify it again + // here. + Kind::Abs(_, ret) => ret.as_ref().clone(), + _ => Kind::Any, }, TyData::Invalid(_) => Kind::Any, } diff --git a/crates/hir-analysis/src/ty/visitor.rs b/crates/hir-analysis/src/ty/visitor.rs index 532297e809..fcaa60c728 100644 --- a/crates/hir-analysis/src/ty/visitor.rs +++ b/crates/hir-analysis/src/ty/visitor.rs @@ -73,7 +73,7 @@ where pub(super) struct TyDiagCollector<'db> { db: &'db dyn HirAnalysisDb, - accumulated: Vec, + diags: Vec, scope: ScopeId, } @@ -81,7 +81,7 @@ impl<'db> TyDiagCollector<'db> { pub(super) fn new(db: &'db dyn HirAnalysisDb, scope: ScopeId) -> Self { Self { db, - accumulated: Vec::new(), + diags: Vec::new(), scope, } } @@ -89,25 +89,19 @@ impl<'db> TyDiagCollector<'db> { pub(super) fn collect(mut self, hir_ty: HirTyId, span: LazyTySpan) -> Vec { let mut ctxt = VisitorCtxt::new(self.db.as_hir_db(), span); self.visit_ty(&mut ctxt, hir_ty); - self.accumulated + self.diags } - fn collect_impl(&mut self, cause: InvalidCause, span: LazyTySpan) { - let span: DynLazySpan = span.into(); + fn store_diag(&mut self, cause: InvalidCause, span: DynLazySpan) { match cause { InvalidCause::NotFullyApplied => { let diag = TyLowerDiag::not_fully_applied_type(span); - self.accumulated.push(diag); - } - - InvalidCause::TyAppFailed { abs, arg } => { - let diag = TyLowerDiag::ty_app_failed(self.db, span, abs, arg); - self.accumulated.push(diag); + self.diags.push(diag); } InvalidCause::KindMismatch { expected, given } => { - let diag = TyLowerDiag::kind_mismatch(self.db, span, expected, given); - self.accumulated.push(diag); + let diag = TyLowerDiag::kind_mismatch(span, expected, given); + self.diags.push(diag); } InvalidCause::TypeAliasArgumentMismatch { @@ -115,12 +109,12 @@ impl<'db> TyDiagCollector<'db> { n_given_args: n_given_arg, } => { let diag = TyLowerDiag::type_alias_argument_mismatch(span, alias, n_given_arg); - self.accumulated.push(diag); + 
self.diags.push(diag); } InvalidCause::AssocTy => { let diag = TyLowerDiag::assoc_ty(span); - self.accumulated.push(diag); + self.diags.push(diag); } // NOTE: We can `InvalidCause::Other` because it's already reported by other passes. @@ -132,10 +126,85 @@ impl<'db> TyDiagCollector<'db> { impl<'db> Visitor for TyDiagCollector<'db> { fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { let ty = lower_hir_ty(self.db, hir_ty, self.scope); - if let Some(cause) = ty.invalid_cause(self.db) { - self.collect_impl(cause, ctxt.span().unwrap()); + match ty.data(self.db) { + TyData::Invalid(cause) => { + self.store_diag(cause, ctxt.span().unwrap().into()); + } + + TyData::TyApp(lhs, arg) => { + let mut args = vec![]; + ty_arg_lexical_order(self.db, &mut args, lhs, arg); + for (idx, arg) in args.into_iter().enumerate() { + match arg.data(self.db) { + TyData::Invalid(cause @ InvalidCause::KindMismatch { .. }) => { + let span = ty_args_span(self.db, hir_ty, ctxt.span().unwrap(), idx); + self.store_diag(cause, span); + return; + } + + _ => {} + } + } + } + + _ => {} } hir_walk_ty(self, ctxt, hir_ty); } } + +/// Returns `TyApp` arguments in recursive order. +/// e.g., +/// `TyApp(TyApp(T, A1), TyApp(U, A2))` returns `[A1, TyApp(U, A2]`. +fn ty_arg_lexical_order(db: &dyn HirAnalysisDb, args: &mut Vec, lhs: TyId, arg: TyId) { + match lhs.data(db) { + TyData::TyApp(deep_lhs, deep_arg) => ty_arg_lexical_order(db, args, deep_lhs, deep_arg), + _ => {} + } + + args.push(arg) +} + +fn ty_args_span( + db: &dyn HirAnalysisDb, + ty: HirTyId, + ty_span: LazyTySpan, + idx: usize, +) -> DynLazySpan { + use hir::hir_def::TypeKind as HirTypeKind; + match ty.data(db.as_hir_db()) { + HirTypeKind::Ptr(_) => { + if idx == 0 { + ty_span.into_ptr_type().pointee().into() + } else { + DynLazySpan::invalid() + } + } + + HirTypeKind::Path(..) => ty_span + .into_path_type() + .generic_args_moved() + .arg_moved(idx) + .into(), + + HirTypeKind::Tuple(_) => ty_span.into_tuple_type().elem_ty_moved(idx).into(), + + HirTypeKind::Array(..) => { + let span = ty_span.into_array_type(); + if idx == 0 { + span.elem_moved().into() + } else if idx == 1 { + span.len_moved().into() + } else { + DynLazySpan::invalid() + } + } + + HirTypeKind::SelfType => { + // TODO: Generic args. 
+ + DynLazySpan::invalid() + } + } +} diff --git a/crates/uitest/fixtures/ty/alias_arg_mismatch.snap b/crates/uitest/fixtures/ty/alias_arg_mismatch.snap index 9d88d22aed..f069661ec7 100644 --- a/crates/uitest/fixtures/ty/alias_arg_mismatch.snap +++ b/crates/uitest/fixtures/ty/alias_arg_mismatch.snap @@ -3,7 +3,7 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/alias_arg_mismatch.fe --- -error[3-0004]: type alias expects 2 generic arguments, but 1 given +error[3-0003]: type alias expects 2 generic arguments, but 1 given ┌─ alias_arg_mismatch.fe:2:8 │ 2 │ t: T1 @@ -12,7 +12,7 @@ error[3-0004]: type alias expects 2 generic arguments, but 1 given 12 │ type T1 = S │ ----------------------- type alias defined here -error[3-0004]: type alias expects 1 generic arguments, but 2 given +error[3-0003]: type alias expects 1 generic arguments, but 2 given ┌─ alias_arg_mismatch.fe:4:8 │ 4 │ u: T2 @@ -21,7 +21,7 @@ error[3-0004]: type alias expects 1 generic arguments, but 2 given 13 │ type T2 = T1 │ ------------------ type alias defined here -error[3-0004]: type alias expects 2 generic arguments, but 1 given +error[3-0003]: type alias expects 2 generic arguments, but 1 given ┌─ alias_arg_mismatch.fe:13:14 │ 12 │ type T1 = S diff --git a/crates/uitest/fixtures/ty/alias_cycle.snap b/crates/uitest/fixtures/ty/alias_cycle.snap index f8f22288b2..c1a9b9078a 100644 --- a/crates/uitest/fixtures/ty/alias_cycle.snap +++ b/crates/uitest/fixtures/ty/alias_cycle.snap @@ -3,25 +3,25 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/alias_cycle.fe --- -error[3-0005]: recursive type alias cycle is detected +error[3-0004]: recursive type alias cycle is detected ┌─ alias_cycle.fe:1:11 │ 1 │ type T1 = T1 │ ^^ cycle happens here -error[3-0005]: recursive type alias cycle is detected +error[3-0004]: recursive type alias cycle is detected ┌─ alias_cycle.fe:3:11 │ 3 │ type T2 = T3 │ ^^ cycle happens here -error[3-0005]: recursive type alias cycle is detected +error[3-0004]: recursive type alias cycle is detected ┌─ alias_cycle.fe:4:11 │ 4 │ type T3 = T4 │ ^^ cycle happens here -error[3-0005]: recursive type alias cycle is detected +error[3-0004]: recursive type alias cycle is detected ┌─ alias_cycle.fe:5:11 │ 5 │ type T4 = T2 diff --git a/crates/uitest/fixtures/ty/alias_kind_mismatch.fe b/crates/uitest/fixtures/ty/alias_kind_mismatch.fe new file mode 100644 index 0000000000..a61811063a --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_kind_mismatch.fe @@ -0,0 +1,13 @@ +pub struct S0 { + t: T + u: U +} + +type T1 = S0 +type T2 = S0 +type T3 = S0 + + +pub struct S1 { + t: T3 +} diff --git a/crates/uitest/fixtures/ty/alias_kind_mismatch.snap b/crates/uitest/fixtures/ty/alias_kind_mismatch.snap new file mode 100644 index 0000000000..26e031e4ee --- /dev/null +++ b/crates/uitest/fixtures/ty/alias_kind_mismatch.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/alias_kind_mismatch.fe +--- +error[3-0001]: kind mismatch between two types + ┌─ alias_kind_mismatch.fe:6:24 + │ +6 │ type T1 = S0 + │ ^^^ too many generic arguments + +error[3-0001]: kind mismatch between two types + ┌─ alias_kind_mismatch.fe:7:14 + │ +7 │ type T2 = S0 + │ ^^ expected `*` kind, but found `(* -> (* -> *))` kind + +error[3-0001]: kind mismatch between two types + ┌─ alias_kind_mismatch.fe:12:16 + │ +12 │ t: T3 + │ ^^ expected `*` kind, but found `(* -> (* -> *))` kind + + diff --git 
a/crates/uitest/fixtures/ty/kind_mismatch.fe b/crates/uitest/fixtures/ty/kind_mismatch.fe index 3ad62cefe6..ca6715eb04 100644 --- a/crates/uitest/fixtures/ty/kind_mismatch.fe +++ b/crates/uitest/fixtures/ty/kind_mismatch.fe @@ -5,4 +5,7 @@ pub struct Foo { pub struct Bar { foo: Foo + bar: Foo + baz: Foo + baz: Foo> } \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/kind_mismatch.snap b/crates/uitest/fixtures/ty/kind_mismatch.snap index 2139d7f371..390db2011b 100644 --- a/crates/uitest/fixtures/ty/kind_mismatch.snap +++ b/crates/uitest/fixtures/ty/kind_mismatch.snap @@ -3,10 +3,36 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/kind_mismatch.fe --- -error[3-0001]: kind mismatch in type application - ┌─ kind_mismatch.fe:7:10 +error[2-0001]: `baz` conflicts with other definitions + ┌─ kind_mismatch.fe:9:5 + │ + 9 │ baz: Foo + │ ^^^ `baz` is defined here +10 │ baz: Foo> + │ --- `baz` is redefined here + +error[3-0001]: kind mismatch between two types + ┌─ kind_mismatch.fe:7:19 │ 7 │ foo: Foo - │ ^^^^^^^^^^^^^ can't apply `(* -> (* -> *))` kind to `(* -> *)` kind + │ ^^^ expected `*` kind, but found `(* -> (* -> *))` kind + +error[3-0001]: kind mismatch between two types + ┌─ kind_mismatch.fe:8:14 + │ +8 │ bar: Foo + │ ^^^ expected `*` kind, but found `(* -> (* -> *))` kind + +error[3-0001]: kind mismatch between two types + ┌─ kind_mismatch.fe:9:24 + │ +9 │ baz: Foo + │ ^^^^ too many generic arguments + +error[3-0001]: kind mismatch between two types + ┌─ kind_mismatch.fe:10:33 + │ +10 │ baz: Foo> + │ ^^^^ too many generic arguments diff --git a/crates/uitest/fixtures/ty/recursive_type.snap b/crates/uitest/fixtures/ty/recursive_type.snap index 666ca1e778..daf6f6bf12 100644 --- a/crates/uitest/fixtures/ty/recursive_type.snap +++ b/crates/uitest/fixtures/ty/recursive_type.snap @@ -3,7 +3,7 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/recursive_type.fe --- -error[3-0003]: recursive type is not allowed +error[3-0002]: recursive type is not allowed ┌─ recursive_type.fe:1:12 │ 1 │ pub struct S1 { @@ -11,7 +11,7 @@ error[3-0003]: recursive type is not allowed 2 │ s: S1 │ -- recursion occurs here -error[3-0003]: recursive type is not allowed +error[3-0002]: recursive type is not allowed ┌─ recursive_type.fe:5:12 │ 5 │ pub struct S2 { @@ -19,7 +19,7 @@ error[3-0003]: recursive type is not allowed 6 │ s: S3 │ -- recursion occurs here -error[3-0003]: recursive type is not allowed +error[3-0002]: recursive type is not allowed ┌─ recursive_type.fe:9:12 │ 9 │ pub struct S3 { @@ -27,7 +27,7 @@ error[3-0003]: recursive type is not allowed 10 │ s: S4 │ -- recursion occurs here -error[3-0003]: recursive type is not allowed +error[3-0002]: recursive type is not allowed ┌─ recursive_type.fe:13:12 │ 13 │ pub struct S4 { @@ -35,7 +35,7 @@ error[3-0003]: recursive type is not allowed 14 │ s: S2 │ -- recursion occurs here -error[3-0003]: recursive type is not allowed +error[3-0002]: recursive type is not allowed ┌─ recursive_type.fe:22:12 │ 22 │ pub struct S6 { From 0559b6ef066f86932cadf89e734c5c3ee3ce4a9b Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 14 Sep 2023 17:33:25 +0200 Subject: [PATCH 304/678] Allow `Self` to have generic arguments --- crates/hir-analysis/src/ty/lower.rs | 15 ++++++++++++--- crates/hir-analysis/src/ty/visitor.rs | 10 +++++----- crates/hir/src/hir_def/types.rs | 2 +- crates/hir/src/lower/types.rs | 5 ++++- crates/hir/src/span/types.rs | 15 +++++++++++++++ 
crates/hir/src/visitor.rs | 12 +++++++++++- crates/parser2/src/ast/types.rs | 1 + crates/parser2/src/parser/type_.rs | 3 +++ 8 files changed, 52 insertions(+), 11 deletions(-) diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index a853a8f155..1ebe53f0d1 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -144,7 +144,7 @@ impl<'db> TyBuilder<'db> { HirTyKind::Path(path, args) => self.lower_path(*path, *args), - HirTyKind::SelfType => self.lower_self_ty(), + HirTyKind::SelfType(args) => self.lower_self_ty(*args), HirTyKind::Tuple(elems) => self.lower_tuple(elems), @@ -178,9 +178,18 @@ impl<'db> TyBuilder<'db> { } } - pub(super) fn lower_self_ty(&mut self) -> TyId { + pub(super) fn lower_self_ty(&mut self, args: GenericArgListId) -> TyId { let res = resolve_segments_early(self.db, &[Partial::Present(kw::SELF_TY)], self.scope); - self.lower_resolved_path(&res).unwrap_left() + let self_ty = self.lower_resolved_path(&res).unwrap_left(); + let arg_tys: Vec<_> = args + .data(self.db.as_hir_db()) + .iter() + .map(|arg| self.lower_generic_arg(arg)) + .collect(); + + arg_tys + .into_iter() + .fold(self_ty, |acc, arg| TyId::app(self.db, acc, arg)) } fn lower_ptr(&mut self, pointee: Partial) -> TyId { diff --git a/crates/hir-analysis/src/ty/visitor.rs b/crates/hir-analysis/src/ty/visitor.rs index fcaa60c728..db24a541de 100644 --- a/crates/hir-analysis/src/ty/visitor.rs +++ b/crates/hir-analysis/src/ty/visitor.rs @@ -201,10 +201,10 @@ fn ty_args_span( } } - HirTypeKind::SelfType => { - // TODO: Generic args. - - DynLazySpan::invalid() - } + HirTypeKind::SelfType(_) => ty_span + .into_self_type() + .generic_args_moved() + .arg_moved(idx) + .into(), } } diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index 0152d85cab..2ce79bd1fa 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -12,7 +12,7 @@ pub enum TypeKind { /// The `PathId` is the path to the type, the `Option` is the generic /// arguments. Path(Partial, GenericArgListId), - SelfType, + SelfType(GenericArgListId), /// The `Vec` contains the types of the tuple elements. Tuple(TupleTypeId), /// The first `TypeId` is the element type, the second `Body` is the length. 
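
The lowering change above folds the generic arguments of `Self<...>` onto the lowered target type by repeated `TyId::app` calls, so kind checking happens one argument at a time. The snippet below is a self-contained toy model of that folding step, not code from this patch: the type names, the `app` helper, and the choice to collapse a mismatch straight to `Invalid` (rather than wrapping the offending argument the way `TyId::app` does) are all invented here purely for illustration.

// Toy model (not the crate's API) of folding a generic argument list onto a
// base type and checking kinds as each argument is applied.
#[derive(Debug, Clone, PartialEq)]
enum Kind {
    Star,
    Abs(Box<Kind>, Box<Kind>),
}

#[derive(Debug)]
enum Ty {
    Base(&'static str, Kind),
    App(Box<Ty>, Box<Ty>),
    Invalid,
}

fn kind_of(ty: &Ty) -> Kind {
    match ty {
        Ty::Base(_, k) => k.clone(),
        // An application has the return kind of its abstraction.
        Ty::App(abs, _) => match kind_of(abs) {
            Kind::Abs(_, ret) => *ret,
            Kind::Star => Kind::Star, // over-application; already invalid
        },
        Ty::Invalid => Kind::Star,
    }
}

fn app(abs: Ty, arg: Ty) -> Ty {
    match kind_of(&abs) {
        // Only apply when the argument kind matches what the abstraction expects.
        Kind::Abs(expected, _) if *expected == kind_of(&arg) => {
            Ty::App(Box::new(abs), Box::new(arg))
        }
        _ => Ty::Invalid, // kind mismatch or too many arguments
    }
}

fn main() {
    // `Wrapper` stands in for a `* -> *` type constructor.
    let wrapper = Ty::Base(
        "Wrapper",
        Kind::Abs(Box::new(Kind::Star), Box::new(Kind::Star)),
    );
    let i32_ty = Ty::Base("i32", Kind::Star);

    // Fold the argument list `[i32]` onto the base type, as the lowering does.
    let applied = [i32_ty].into_iter().fold(wrapper, app);
    println!("{:?} has kind {:?}", applied, kind_of(&applied));
}

Running this prints the applied type with kind `Star`; feeding it another `* -> *` constructor instead makes `app` return `Invalid`, which is the situation the kind-mismatch diagnostic above reports.
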
diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index 4699b27dda..e39b30cee9 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -20,7 +20,10 @@ impl TypeId { TypeKind::Path(path, generic_args) } - ast::TypeKind::SelfType(_) => TypeKind::SelfType, + ast::TypeKind::SelfType(ty) => { + let generic_args = GenericArgListId::lower_ast_opt(ctxt, ty.generic_args()); + TypeKind::SelfType(generic_args) + } ast::TypeKind::Tuple(ty) => TypeKind::Tuple(TupleTypeId::lower_ast(ctxt, ty)), diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs index 82163b7af3..9543963ace 100644 --- a/crates/hir/src/span/types.rs +++ b/crates/hir/src/span/types.rs @@ -37,6 +37,10 @@ impl LazyTySpan { pub fn into_array_type(self) -> LazyArrayTypeSpan { LazyArrayTypeSpan(self.0) } + + pub fn into_self_type(self) -> LazySelfTypeSpan { + LazySelfTypeSpan(self.0) + } } define_lazy_span_node!( @@ -84,3 +88,14 @@ define_lazy_span_node!( (len, len, LazyBodySpan), } ); + +define_lazy_span_node!( + LazySelfTypeSpan, + ast::SelfType, + @token { + (self_kw, self_kw), + } + @node { + (generic_args, generic_args, LazyGenericArgListSpan), + } +); diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 7772610a27..aa20ad2600 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1674,7 +1674,17 @@ where }, ), - TypeKind::SelfType => {} + TypeKind::SelfType(generic_args) => ctxt.with_new_ctxt( + |span| span.into_self_type(), + |ctxt| { + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| { + visitor.visit_generic_arg_list(ctxt, *generic_args); + }, + ); + }, + ), } } diff --git a/crates/parser2/src/ast/types.rs b/crates/parser2/src/ast/types.rs index 0c9ddb6147..2a7b17ac72 100644 --- a/crates/parser2/src/ast/types.rs +++ b/crates/parser2/src/ast/types.rs @@ -70,6 +70,7 @@ impl SelfType { support::token(self.syntax(), SK::SelfTypeKw) } } +impl super::GenericArgsOwner for SelfType {} ast_node! { /// A tuple type. diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs index 7c0c94b646..fc59af5387 100644 --- a/crates/parser2/src/parser/type_.rs +++ b/crates/parser2/src/parser/type_.rs @@ -58,6 +58,9 @@ impl super::Parse for SelfTypeScope { fn parse(&mut self, parser: &mut Parser) { parser.set_newline_as_trivia(false); parser.bump_expected(SyntaxKind::SelfTypeKw); + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default(), None); + } } } define_scope! 
{

From 5267ab6515f2f62a714ef95ca39c936d9441cdb4 Mon Sep 17 00:00:00 2001
From: Yoshitomo Nakanishi
Date: Thu, 14 Sep 2023 17:59:16 +0200
Subject: [PATCH 305/678] Disallow type aliases from having type bounds

---
 crates/hir/src/hir_def/item.rs | 1 -
 crates/hir/src/lower/item.rs | 2 -
 crates/hir/src/span/item.rs | 1 -
 crates/hir/src/visitor.rs | 8 ---
 crates/parser2/src/ast/item.rs | 1 -
 crates/parser2/src/parser/func.rs | 4 +-
 crates/parser2/src/parser/item.rs | 8 +--
 crates/parser2/src/parser/param.rs | 19 ++++---
 crates/parser2/src/parser/struct_.rs | 2 +-
 .../test_files/error_recovery/items/type_.fe | 5 +-
 .../error_recovery/items/type_.snap | 52 +++++++++++++++++--
 .../test_files/syntax_node/items/type.fe | 4 +-
 .../test_files/syntax_node/items/type.snap | 39 ++------------
 13 files changed, 76 insertions(+), 70 deletions(-)

diff --git a/crates/hir/src/hir_def/item.rs index 1efe8fb28b..ce75363dff 100644
--- a/crates/hir/src/hir_def/item.rs
+++ b/crates/hir/src/hir_def/item.rs
@@ -500,7 +500,6 @@ pub struct TypeAlias {
     pub attributes: AttrListId,
     pub vis: Visibility,
     pub generic_params: GenericParamListId,
-    pub where_clause: WhereClauseId,
     pub ty: Partial,
 
     pub top_mod: TopLevelMod,
diff --git a/crates/hir/src/lower/item.rs index 84a869e3bd..f8d53102fb 100644
--- a/crates/hir/src/lower/item.rs
+++ b/crates/hir/src/lower/item.rs
@@ -216,7 +216,6 @@ impl TypeAlias {
         let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list());
         let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility();
         let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params());
-        let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause());
         let ty = TypeId::lower_ast_partial(ctxt, ast.ty());
         let origin = HirOrigin::raw(&ast);
@@ -227,7 +226,6 @@ impl TypeAlias {
             attributes,
             vis,
             generic_params,
-            where_clause,
             ty,
             ctxt.top_mod(),
             origin,
diff --git a/crates/hir/src/span/item.rs index 88bf660239..7eb0b2591e 100644
--- a/crates/hir/src/span/item.rs
+++ b/crates/hir/src/span/item.rs
@@ -118,7 +118,6 @@ define_lazy_span_node!(
     @node {
         (attributes, attr_list, LazyAttrListSpan),
         (generic_params, generic_params, LazyGenericParamListSpan),
-        (where_clause, where_clause, LazyWhereClauseSpan),
         (modifier, modifier, LazyItemModifierSpan),
         (ty, ty, LazyTySpan),
     }
diff --git a/crates/hir/src/visitor.rs index aa20ad2600..9df61588d0 100644
--- a/crates/hir/src/visitor.rs
+++ b/crates/hir/src/visitor.rs
@@ -609,14 +609,6 @@ pub fn walk_type_alias(
         },
     );
 
-    ctxt.with_new_ctxt(
-        |span| span.where_clause_moved(),
-        |ctxt| {
-            let id = alias.where_clause(ctxt.db);
-            visitor.visit_where_clause(ctxt, id);
-        },
-    );
-
     if let Some(ty) = alias.ty(ctxt.db).to_opt() {
         ctxt.with_new_ctxt(
             |span| span.ty_moved(),
diff --git a/crates/parser2/src/ast/item.rs index dee9b4df37..1f0aa030ff 100644
--- a/crates/parser2/src/ast/item.rs
+++ b/crates/parser2/src/ast/item.rs
@@ -158,7 +158,6 @@ ast_node!
{ SK::TypeAlias, } impl super::GenericParamsOwner for TypeAlias {} -impl super::WhereClauseOwner for TypeAlias {} impl super::AttrListOwner for TypeAlias {} impl super::ItemModifierOwner for TypeAlias {} impl TypeAlias { diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs index 253a3b32aa..b90aafdcc9 100644 --- a/crates/parser2/src/parser/func.rs +++ b/crates/parser2/src/parser/func.rs @@ -54,7 +54,7 @@ fn parse_normal_fn_def_impl(parser: &mut Parser) { ); parser.with_next_expected_tokens( - |parser| parse_generic_params_opt(parser), + |parser| parse_generic_params_opt(parser, false), &[SyntaxKind::LParen, SyntaxKind::LBrace], ); @@ -99,7 +99,7 @@ fn parse_trait_fn_def_impl(parser: &mut Parser) { ); parser.with_next_expected_tokens( - |parser| parse_generic_params_opt(parser), + |parser| parse_generic_params_opt(parser, false), &[SyntaxKind::LParen], ); diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 2a53527c25..0ab4a7737b 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -248,7 +248,7 @@ impl super::Parse for EnumScope { ); parser.with_next_expected_tokens( - |parser| parse_generic_params_opt(parser), + |parser| parse_generic_params_opt(parser, false), &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); @@ -318,7 +318,7 @@ impl super::Parse for TraitScope { ); parser.with_next_expected_tokens( - |parser| parse_generic_params_opt(parser), + |parser| parse_generic_params_opt(parser, false), &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); @@ -345,7 +345,7 @@ impl super::Parse for ImplScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::ImplKw); parser.with_recovery_tokens( - |parser| parse_generic_params_opt(parser), + |parser| parse_generic_params_opt(parser, false), &[SyntaxKind::LBrace, SyntaxKind::WhereKw, SyntaxKind::ForKw], ); @@ -489,7 +489,7 @@ impl super::Parse for TypeAliasScope { parser.with_next_expected_tokens( |parser| { - parse_generic_params_opt(parser); + parse_generic_params_opt(parser, true); }, &[SyntaxKind::Eq], ); diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 61b291185f..4ef12fce99 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -81,7 +81,7 @@ impl super::Parse for FnParamScope { } define_scope! { - pub(crate) GenericParamListScope, + pub(crate) GenericParamListScope {disallow_type_bound: bool}, GenericParamList, Override(Gt) } @@ -92,9 +92,9 @@ impl super::Parse for GenericParamListScope { return; } - parser.parse(GenericParamScope::default(), None); + parser.parse(GenericParamScope::new(self.disallow_type_bound), None); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(GenericParamScope::default(), None); + parser.parse(GenericParamScope::new(self.disallow_type_bound), None); } parser.bump_or_recover(SyntaxKind::Gt, "expected closing `>`", None); @@ -102,7 +102,7 @@ impl super::Parse for GenericParamListScope { } define_scope! 
{ - GenericParamScope, + GenericParamScope {disallow_type_bound: bool}, TypeGenericParam, Inheritance(Comma) } @@ -138,7 +138,9 @@ impl super::Parse for GenericParamScope { } if parser.current_kind() == Some(SyntaxKind::Colon) { - { + if self.disallow_type_bound { + parser.error_and_recover("type bounds are not allowed here", None); + } else { parser.parse(TypeBoundListScope::default(), None); } } @@ -320,8 +322,11 @@ pub(crate) fn parse_where_clause_opt(parser: &mut Parser) { parser.set_newline_as_trivia(newline_as_trivia); } -pub(crate) fn parse_generic_params_opt(parser: &mut Parser) { +pub(crate) fn parse_generic_params_opt( + parser: &mut Parser, + disallow_type_bound: bool, +) { if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::default(), None); + parser.parse(GenericParamListScope::new(disallow_type_bound), None); } } diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs index 254771fc98..e1fe962695 100644 --- a/crates/parser2/src/parser/struct_.rs +++ b/crates/parser2/src/parser/struct_.rs @@ -29,7 +29,7 @@ impl super::Parse for StructScope { ); parser.with_next_expected_tokens( - |parser| parse_generic_params_opt(parser), + |parser| parse_generic_params_opt(parser, false), &[SyntaxKind::LBrace, SyntaxKind::WhereKw], ); diff --git a/crates/parser2/test_files/error_recovery/items/type_.fe b/crates/parser2/test_files/error_recovery/items/type_.fe index 1de5391495..e3b54f5ce3 100644 --- a/crates/parser2/test_files/error_recovery/items/type_.fe +++ b/crates/parser2/test_files/error_recovery/items/type_.fe @@ -1 +1,4 @@ -type Result \ No newline at end of file +type Result + +type Foo = Result + diff --git a/crates/parser2/test_files/error_recovery/items/type_.snap b/crates/parser2/test_files/error_recovery/items/type_.snap index c6e00fea09..37aaea379d 100644 --- a/crates/parser2/test_files/error_recovery/items/type_.snap +++ b/crates/parser2/test_files/error_recovery/items/type_.snap @@ -3,9 +3,9 @@ source: crates/parser2/tests/error_recovery.rs expression: node input_file: crates/parser2/test_files/error_recovery/items/type_.fe --- -Root@0..29 - ItemList@0..29 - Item@0..29 +Root@0..72 + ItemList@0..72 + Item@0..31 TypeAlias@0..29 TypeKw@0..4 "type" WhiteSpace@4..5 " " @@ -41,4 +41,50 @@ Root@0..29 PathSegment@27..28 Ident@27..28 "E" Gt@28..29 ">" + Newline@29..31 "\n\n" + Item@31..72 + TypeAlias@31..70 + TypeKw@31..35 "type" + WhiteSpace@35..36 " " + Ident@36..39 "Foo" + GenericParamList@39..55 + Lt@39..40 "<" + TypeGenericParam@40..46 + Ident@40..41 "T" + Error@41..46 + Colon@41..42 ":" + WhiteSpace@42..43 " " + Ident@43..46 "i32" + Comma@46..47 "," + WhiteSpace@47..48 " " + TypeGenericParam@48..54 + Ident@48..49 "U" + Error@49..54 + Colon@49..50 ":" + WhiteSpace@50..51 " " + Ident@51..54 "i32" + Gt@54..55 ">" + WhiteSpace@55..56 " " + Eq@56..57 "=" + WhiteSpace@57..58 " " + PathType@58..70 + Path@58..64 + PathSegment@58..64 + Ident@58..64 "Result" + GenericArgList@64..70 + Lt@64..65 "<" + TypeGenericArg@65..66 + PathType@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "T" + Comma@66..67 "," + WhiteSpace@67..68 " " + TypeGenericArg@68..69 + PathType@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "U" + Gt@69..70 ">" + Newline@70..72 "\n\n" diff --git a/crates/parser2/test_files/syntax_node/items/type.fe b/crates/parser2/test_files/syntax_node/items/type.fe index 8245e8d296..a0e1b67b43 100644 --- a/crates/parser2/test_files/syntax_node/items/type.fe +++ 
b/crates/parser2/test_files/syntax_node/items/type.fe @@ -1,5 +1,3 @@ pub type Int = i32 -type Result = Result - -type WithBound = NoBound \ No newline at end of file +type Result = Result \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/type.snap b/crates/parser2/test_files/syntax_node/items/type.snap index cf1e51d3ca..6148c44b09 100644 --- a/crates/parser2/test_files/syntax_node/items/type.snap +++ b/crates/parser2/test_files/syntax_node/items/type.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/type.fe --- -Root@0..98 - ItemList@0..98 +Root@0..54 + ItemList@0..54 Item@0..21 TypeAlias@0..18 ItemModifier@0..3 @@ -22,7 +22,7 @@ Root@0..98 Ident@15..18 "i32" WhiteSpace@18..19 " " Newline@19..21 "\n\n" - Item@21..56 + Item@21..54 TypeAlias@21..54 TypeKw@21..25 "type" WhiteSpace@25..26 " " @@ -54,37 +54,4 @@ Root@0..98 PathSegment@48..53 Ident@48..53 "Error" Gt@53..54 ">" - Newline@54..56 "\n\n" - Item@56..98 - TypeAlias@56..98 - TypeKw@56..60 "type" - WhiteSpace@60..61 " " - Ident@61..70 "WithBound" - GenericParamList@70..85 - Lt@70..71 "<" - TypeGenericParam@71..84 - Ident@71..72 "T" - TypeBoundList@72..84 - Colon@72..73 ":" - WhiteSpace@73..74 " " - TypeBound@74..84 - Path@74..84 - PathSegment@74..84 - Ident@74..84 "TraitBound" - Gt@84..85 ">" - WhiteSpace@85..86 " " - Eq@86..87 "=" - WhiteSpace@87..88 " " - PathType@88..98 - Path@88..95 - PathSegment@88..95 - Ident@88..95 "NoBound" - GenericArgList@95..98 - Lt@95..96 "<" - TypeGenericArg@96..97 - PathType@96..97 - Path@96..97 - PathSegment@96..97 - Ident@96..97 "T" - Gt@97..98 ">" From 0a8b3545c6af9bb39e007a4f2dcbf56b0edd1c08 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 14 Sep 2023 22:22:01 +0200 Subject: [PATCH 306/678] Refine `Self` type lwoering --- crates/hir-analysis/src/ty/lower.rs | 94 +++++++++++++++++++---------- crates/hir-analysis/src/ty/ty.rs | 17 +++++- crates/hir/src/hir_def/path.rs | 7 ++- 3 files changed, 84 insertions(+), 34 deletions(-) diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 1ebe53f0d1..b3a538b4f5 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -1,14 +1,12 @@ use either::Either; use hir::hir_def::{ - kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, + scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, GenericParam as HirGenericParam, GenericParamOwner, ItemKind, Partial, PathId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, }; use crate::{ - name_resolution::{ - resolve_path_early, resolve_segments_early, EarlyResolvedPath, NameDomain, NameResKind, - }, + name_resolution::{resolve_path_early, EarlyResolvedPath, NameDomain, NameResKind}, ty::{ diagnostics::{TyLowerDiag, TypeAliasDefDiagAccumulator}, visitor::TyDiagCollector, @@ -157,9 +155,9 @@ impl<'db> TyBuilder<'db> { pub(super) fn lower_path(&mut self, path: Partial, args: GenericArgListId) -> TyId { let path_ty = path .to_opt() - .map(|path| { - let res = resolve_path_early(self.db, path, self.scope); - self.lower_resolved_path(&res) + .map(|path| match self.resolve_path(path) { + Either::Left(res) => self.lower_resolved_path(res), + Either::Right(ty) => Either::Left(ty), }) .unwrap_or_else(|| Either::Left(TyId::invalid(self.db, InvalidCause::Other))); @@ -179,17 +177,44 @@ impl<'db> TyBuilder<'db> { } pub(super) fn 
lower_self_ty(&mut self, args: GenericArgListId) -> TyId { - let res = resolve_segments_early(self.db, &[Partial::Present(kw::SELF_TY)], self.scope); - let self_ty = self.lower_resolved_path(&res).unwrap_left(); - let arg_tys: Vec<_> = args - .data(self.db.as_hir_db()) + let res = self.resolve_path(PathId::self_ty(self.db.as_hir_db())); + let (scope, res) = match res { + Either::Left(res @ NameResKind::Scope(scope)) => (scope, res), + Either::Left(NameResKind::Prim(prim)) => return TyId::from_hir_prim_ty(self.db, prim), + Either::Right(ty) => return ty, + }; + + let (target_hir_ty, target_scope) = match scope { + ScopeId::Item(item) => match item { + ItemKind::Enum(_) | ItemKind::Struct(_) | ItemKind::Contract(_) => { + return self.lower_resolved_path(res).unwrap_left() + } + + ItemKind::Trait(_) => { + let self_param = TyParam::self_ty_param(Kind::Star); + return TyId::new(self.db, TyData::TyParam(self_param)); + } + + ItemKind::Impl(impl_) => (impl_.ty(self.db.as_hir_db()), impl_.scope()), + ItemKind::ImplTrait(impl_trait) => { + (impl_trait.ty(self.db.as_hir_db()), impl_trait.scope()) + } + _ => return TyId::invalid(self.db, InvalidCause::Other), + }, + + _ => unreachable!(), + }; + + let target_ty = target_hir_ty + .to_opt() + .map(|hir_ty| lower_hir_ty(self.db, hir_ty, target_scope)) + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); + + let db = self.db; + args.data(self.db.as_hir_db()) .iter() .map(|arg| self.lower_generic_arg(arg)) - .collect(); - - arg_tys - .into_iter() - .fold(self_ty, |acc, arg| TyId::app(self.db, acc, arg)) + .fold(target_ty, |acc, arg| TyId::app(db, acc, arg)) } fn lower_ptr(&mut self, pointee: Partial) -> TyId { @@ -218,21 +243,8 @@ impl<'db> TyBuilder<'db> { }) } - fn lower_resolved_path(&mut self, path: &EarlyResolvedPath) -> Either { - let res = match path { - EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { - Ok(res) => res, - - // This error is already handled by the name resolution. - Err(_) => return Either::Left(TyId::invalid(self.db, InvalidCause::Other)), - }, - - EarlyResolvedPath::Partial { .. } => { - return Either::Left(TyId::invalid(self.db, InvalidCause::AssocTy)); - } - }; - - let scope = match res.kind { + fn lower_resolved_path(&mut self, kind: NameResKind) -> Either { + let scope = match kind { NameResKind::Scope(scope) => scope, NameResKind::Prim(prim_ty) => { return Either::Left(TyId::from_hir_prim_ty(self.db, prim_ty)) @@ -284,6 +296,23 @@ impl<'db> TyBuilder<'db> { GenericArg::Const(_) => todo!(), } } + + /// If the path is resolved to a type, return the resolution. Otherwise, + /// returns the `TyId::Invalid` with proper `InvalidCause`. + fn resolve_path(&mut self, path: PathId) -> Either { + match resolve_path_early(self.db, path, self.scope) { + EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { + Ok(res) => Either::Left(res.kind), + + // This error is already handled by the name resolution. + Err(_) => Either::Right(TyId::invalid(self.db, InvalidCause::Other)), + }, + + EarlyResolvedPath::Partial { .. } => { + Either::Right(TyId::invalid(self.db, InvalidCause::AssocTy)) + } + } + } } struct AdtTyBuilder<'db> { @@ -375,11 +404,12 @@ fn lower_generic_param_list(db: &dyn HirAnalysisDb, item: ItemKind) -> Vec fn lower_generic_param(db: &dyn HirAnalysisDb, param: &HirGenericParam, idx: usize) -> TyId { match param { + // TODO: we need to handle kinds of generic params. 
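+        // `TyParam::idx` records a parameter's position in its owner's generic
+        // parameter list; it is `None` only for the implicit `Self` parameter of a
+        // trait, so ordinary parameters are always lowered with `Some(idx)` here.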
GenericParam::Type(param) => { if let Some(name) = param.name.to_opt() { let ty_param = TyParam { name, - idx, + idx: Some(idx), kind: Kind::Star, }; TyId::new(db, TyData::TyParam(ty_param)) diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 8f9e5abf53..464f7abd60 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -2,6 +2,7 @@ use std::fmt; use hir::{ hir_def::{ + kw, prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, scope_graph::ScopeId, Contract, Enum, IdentId, ItemKind, Partial, Struct, TypeAlias as HirTypeAlias, @@ -320,13 +321,27 @@ pub struct TyVar { pub kind: Kind, } +/// Type generics parameter. We also treat `Self` type in a trait definition as +/// a special type parameter. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TyParam { pub name: IdentId, - pub idx: usize, + /// If the type parameter is not a`Self` type in a trait definition, this + /// field is always `Some`. + pub idx: Option, pub kind: Kind, } +impl TyParam { + pub fn self_ty_param(kind: Kind) -> Self { + Self { + name: kw::SELF_TY, + idx: None, + kind, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyConcrete { Prim(PrimTy), diff --git a/crates/hir/src/hir_def/path.rs b/crates/hir/src/hir_def/path.rs index fc2444abd2..363b0a6817 100644 --- a/crates/hir/src/hir_def/path.rs +++ b/crates/hir/src/hir_def/path.rs @@ -1,6 +1,6 @@ use crate::{hir_def::Partial, HirDb}; -use super::IdentId; +use super::{kw, IdentId}; #[salsa::interned] pub struct PathId { @@ -16,4 +16,9 @@ impl PathId { pub fn len(self, db: &dyn HirDb) -> usize { self.segments(db).len() } + + pub fn self_ty(db: &dyn HirDb) -> Self { + let self_ty = Partial::Present(kw::SELF_TY); + Self::new(db, vec![self_ty]) + } } From 86fed97fdf54d4d43bf28608e9496dfc686d142e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 15 Sep 2023 15:29:33 +0200 Subject: [PATCH 307/678] Implement type kind bound parsing --- crates/parser2/src/ast/param.rs | 50 +++++- crates/parser2/src/parser/param.rs | 70 ++++++-- crates/parser2/src/syntax_kind.rs | 6 +- .../test_files/error_recovery/items/func.snap | 21 ++- .../error_recovery/items/impl_.snap | 7 +- .../error_recovery/items/impl_trait.snap | 14 +- .../error_recovery/items/struct_.snap | 7 +- .../error_recovery/items/trait_.snap | 21 ++- .../error_recovery/items/type_.snap | 12 +- .../test_files/syntax_node/items/enums.fe | 9 +- .../test_files/syntax_node/items/enums.snap | 129 +++++++++++--- .../test_files/syntax_node/items/func.snap | 21 ++- .../test_files/syntax_node/items/impl.snap | 44 ++--- .../syntax_node/items/impl_trait.snap | 74 +++++---- .../test_files/syntax_node/items/trait.fe | 2 +- .../test_files/syntax_node/items/trait.snap | 70 ++++---- .../syntax_node/structs/generics.snap | 157 ++++++++++-------- 17 files changed, 475 insertions(+), 239 deletions(-) diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 0ae5021eb7..b02ff3a0ca 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -277,16 +277,60 @@ ast_node! { /// A type bound. /// `Trait` /// `Trait` + /// `(* -> *) -> *` pub struct TypeBound, SK::TypeBound, } impl TypeBound { /// A path of the type bound. + pub fn trait_bound(&self) -> Option { + support::child(self.syntax()) + } + + pub fn kind_constraint(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! 
{ + pub struct KindBound, + SK::KindBound +} +impl KindBound { + pub fn variant(&self) -> Option { + if let Some(tok) = support::token(self.syntax(), SK::Arrow) { + let mut children = support::children(self.syntax()); + let lhs = children.next(); + let rhs = children.next(); + Some(KindBoundVariant::Abs(lhs, tok, rhs)) + } else if let Some(tok) = support::token(self.syntax(), SK::Star) { + Some(KindBoundVariant::Mono(tok)) + } else { + // Case where kind is wrapped in parens, we need to unwrap the outer parens. + let child: Option = support::child(self.syntax()); + child.map(|child| child.variant()).flatten() + } + } +} + +pub enum KindBoundVariant { + /// `*` + Mono(SyntaxToken), + /// `KindBound -> KindBound` + Abs(Option, SyntaxToken, Option), +} + +ast_node! { + pub struct TraitBound, + SK::TraitBound +} +impl TraitBound { + /// A path to the trait. pub fn path(&self) -> Option { support::child(self.syntax()) } - /// A generic argument list of the type bound. + /// A generic argument list for the trait. pub fn generic_args(&self) -> Option { support::child(self.syntax()) } @@ -421,6 +465,8 @@ mod tests { p1_bounds .next() .unwrap() + .trait_bound() + .unwrap() .path() .unwrap() .segments() @@ -435,6 +481,8 @@ mod tests { assert_eq!( p1_bounds_trait2 + .trait_bound() + .unwrap() .path() .unwrap() .segments() diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 4ef12fce99..5ba11e19fd 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -81,7 +81,7 @@ impl super::Parse for FnParamScope { } define_scope! { - pub(crate) GenericParamListScope {disallow_type_bound: bool}, + pub(crate) GenericParamListScope {disallow_trait_bound: bool}, GenericParamList, Override(Gt) } @@ -92,9 +92,9 @@ impl super::Parse for GenericParamListScope { return; } - parser.parse(GenericParamScope::new(self.disallow_type_bound), None); + parser.parse(GenericParamScope::new(self.disallow_trait_bound), None); while parser.bump_if(SyntaxKind::Comma) { - parser.parse(GenericParamScope::new(self.disallow_type_bound), None); + parser.parse(GenericParamScope::new(self.disallow_trait_bound), None); } parser.bump_or_recover(SyntaxKind::Gt, "expected closing `>`", None); @@ -102,7 +102,7 @@ impl super::Parse for GenericParamListScope { } define_scope! { - GenericParamScope {disallow_type_bound: bool}, + GenericParamScope {disallow_trait_bound: bool}, TypeGenericParam, Inheritance(Comma) } @@ -138,11 +138,7 @@ impl super::Parse for GenericParamScope { } if parser.current_kind() == Some(SyntaxKind::Colon) { - if self.disallow_type_bound { - parser.error_and_recover("type bounds are not allowed here", None); - } else { - parser.parse(TypeBoundListScope::default(), None); - } + parser.parse(TypeBoundListScope::new(self.disallow_trait_bound), None); } parser.set_newline_as_trivia(true); @@ -154,7 +150,7 @@ impl super::Parse for GenericParamScope { } define_scope! 
{ - TypeBoundListScope, + TypeBoundListScope{disallow_trait_bound: bool}, TypeBoundList, Inheritance(Plus) } @@ -162,20 +158,64 @@ impl super::Parse for TypeBoundListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::Colon); - parser.parse(TypeBoundScope::default(), None); + parser.parse(TypeBoundScope::new(self.disallow_trait_bound), None); while parser.current_kind() == Some(SyntaxKind::Plus) { parser.bump_expected(SyntaxKind::Plus); - parser.parse(TypeBoundScope::default(), None); + parser.parse(TypeBoundScope::new(self.disallow_trait_bound), None); } } } define_scope! { - TypeBoundScope, + TypeBoundScope{disallow_trait_bound: bool}, TypeBound, Inheritance } impl super::Parse for TypeBoundScope { + fn parse(&mut self, parser: &mut Parser) { + let is_type_kind = matches!( + parser.current_kind(), + Some(SyntaxKind::LParen | SyntaxKind::Star) + ); + + if is_type_kind { + parser.parse(KindBoundScope::default(), None); + } else { + if self.disallow_trait_bound { + parser.error_and_recover("trait bounds are not allowed here", None); + return; + } + parser.parse(TraitBoundScope::default(), None); + } + } +} +define_scope! { + KindBoundScope, + KindBound, + Inheritance +} +impl super::Parse for KindBoundScope { + fn parse(&mut self, parser: &mut Parser) { + if parser.bump_if(SyntaxKind::Star) { + } else if parser.bump_if(SyntaxKind::LParen) { + parser.parse(KindBoundScope::default(), None); + parser.bump_or_recover(SyntaxKind::RParen, "expected closing `)`", None); + } else { + parser.error_and_recover("expected `*` or `(`", None); + } + + if parser.bump_if(SyntaxKind::Arrow) { + parser.parse(KindBoundScope::default(), None); + } + } +} + +define_scope! { + TraitBoundScope, + TraitBound, + Inheritance +} +impl super::Parse for TraitBoundScope { fn parse(&mut self, parser: &mut Parser) { parser.parse(PathScope::default(), None); if parser.current_kind() == Some(SyntaxKind::Lt) { @@ -324,9 +364,9 @@ pub(crate) fn parse_where_clause_opt(parser: &mut Parser) { pub(crate) fn parse_generic_params_opt( parser: &mut Parser, - disallow_type_bound: bool, + disallow_trait_bound: bool, ) { if parser.current_kind() == Some(SyntaxKind::Lt) { - parser.parse(GenericParamListScope::new(disallow_type_bound), None); + parser.parse(GenericParamListScope::new(disallow_trait_bound), None); } } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index 44d6e36856..e93e077582 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -441,8 +441,12 @@ pub enum SyntaxKind { /// `foo::Trait1 + Trait2` TypeBoundList, - /// `Trait1` + /// `TraitBound` or `TypeKind`. 
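+    /// e.g. `Trait1` for a `TraitBound`, or `* -> *` for a `KindBound`.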
TypeBound, + /// `Trait1` + TraitBound, + /// `*` or `(* -> *) -> *` + KindBound, /// `where Option: Trait1 + Trait2` WhereClause, /// `Option: Trait1 + Trait2` diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index 28095d3eba..1fb7840d0c 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -18,9 +18,10 @@ Root@0..133 Colon@8..9 ":" WhiteSpace@9..10 " " TypeBound@10..15 - Path@10..15 - PathSegment@10..15 - Ident@10..15 "Trait" + TraitBound@10..15 + Path@10..15 + PathSegment@10..15 + Ident@10..15 "Trait" Gt@15..16 ">" Error@16..17 Gt@16..17 ">" @@ -84,9 +85,10 @@ Root@0..133 Colon@64..65 ":" WhiteSpace@65..66 " " TypeBound@66..72 - Path@66..72 - PathSegment@66..72 - Ident@66..72 "Trait2" + TraitBound@66..72 + Path@66..72 + PathSegment@66..72 + Ident@66..72 "Trait2" WhiteSpace@72..73 " " Newline@73..74 "\n" BlockExpr@74..78 @@ -137,9 +139,10 @@ Root@0..133 Colon@118..119 ":" WhiteSpace@119..120 " " TypeBound@120..126 - Path@120..126 - PathSegment@120..126 - Ident@120..126 "Trait2" + TraitBound@120..126 + Path@120..126 + PathSegment@120..126 + Ident@120..126 "Trait2" WhiteSpace@126..127 " " Newline@127..128 "\n" BlockExpr@128..132 diff --git a/crates/parser2/test_files/error_recovery/items/impl_.snap b/crates/parser2/test_files/error_recovery/items/impl_.snap index 786cdb94da..16100031a3 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_.snap @@ -43,9 +43,10 @@ Root@0..56 Colon@24..25 ":" WhiteSpace@25..26 " " TypeBound@26..33 - Path@26..33 - PathSegment@26..33 - Ident@26..33 "Integer" + TraitBound@26..33 + Path@26..33 + PathSegment@26..33 + Ident@26..33 "Integer" Newline@33..34 "\n" ImplItemList@34..37 LBrace@34..35 "{" diff --git a/crates/parser2/test_files/error_recovery/items/impl_trait.snap b/crates/parser2/test_files/error_recovery/items/impl_trait.snap index 4bc909b1e6..1222dab101 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_trait.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_trait.snap @@ -60,9 +60,10 @@ Root@0..90 Colon@30..31 ":" WhiteSpace@31..32 " " TypeBound@32..33 - Path@32..33 - PathSegment@32..33 - Ident@32..33 "X" + TraitBound@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "X" WhiteSpace@33..34 " " Error@34..34 ImplTraitItemList@34..36 @@ -122,9 +123,10 @@ Root@0..90 Colon@65..66 ":" WhiteSpace@66..67 " " TypeBound@67..68 - Path@67..68 - PathSegment@67..68 - Ident@67..68 "X" + TraitBound@67..68 + Path@67..68 + PathSegment@67..68 + Ident@67..68 "X" WhiteSpace@68..69 " " Error@69..69 ImplTraitItemList@69..71 diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index a5a2033dad..986c495346 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -44,9 +44,10 @@ Root@0..160 Colon@32..33 ":" WhiteSpace@33..34 " " TypeBound@34..39 - Path@34..39 - PathSegment@34..39 - Ident@34..39 "Trait" + TraitBound@34..39 + Path@34..39 + PathSegment@34..39 + Ident@34..39 "Trait" Newline@39..40 "\n" WhiteSpace@40..44 " " Newline@44..45 "\n" diff --git a/crates/parser2/test_files/error_recovery/items/trait_.snap b/crates/parser2/test_files/error_recovery/items/trait_.snap index 82d40f797e..1a0b56157d 100644 --- 
a/crates/parser2/test_files/error_recovery/items/trait_.snap +++ b/crates/parser2/test_files/error_recovery/items/trait_.snap @@ -94,9 +94,10 @@ Root@0..133 Colon@77..78 ":" WhiteSpace@78..79 " " TypeBound@79..82 - Path@79..82 - PathSegment@79..82 - Ident@79..82 "Add" + TraitBound@79..82 + Path@79..82 + PathSegment@79..82 + Ident@79..82 "Add" WhiteSpace@82..83 " " Error@83..83 TraitItemList@83..85 @@ -122,9 +123,10 @@ Root@0..133 Colon@102..103 ":" WhiteSpace@103..104 " " TypeBound@104..107 - Path@104..107 - PathSegment@104..107 - Ident@104..107 "Sub" + TraitBound@104..107 + Path@104..107 + PathSegment@104..107 + Ident@104..107 "Sub" Gt@107..108 ">" WhiteSpace@108..110 " " Newline@110..111 "\n" @@ -141,9 +143,10 @@ Root@0..133 Colon@122..123 ":" WhiteSpace@123..124 " " TypeBound@124..127 - Path@124..127 - PathSegment@124..127 - Ident@124..127 "Add" + TraitBound@124..127 + Path@124..127 + PathSegment@124..127 + Ident@124..127 "Add" WhiteSpace@127..128 " " Newline@128..129 "\n" TraitItemList@129..133 diff --git a/crates/parser2/test_files/error_recovery/items/type_.snap b/crates/parser2/test_files/error_recovery/items/type_.snap index 37aaea379d..38febb6a43 100644 --- a/crates/parser2/test_files/error_recovery/items/type_.snap +++ b/crates/parser2/test_files/error_recovery/items/type_.snap @@ -51,18 +51,22 @@ Root@0..72 Lt@39..40 "<" TypeGenericParam@40..46 Ident@40..41 "T" - Error@41..46 + TypeBoundList@41..46 Colon@41..42 ":" WhiteSpace@42..43 " " - Ident@43..46 "i32" + TypeBound@43..46 + Error@43..46 + Ident@43..46 "i32" Comma@46..47 "," WhiteSpace@47..48 " " TypeGenericParam@48..54 Ident@48..49 "U" - Error@49..54 + TypeBoundList@49..54 Colon@49..50 ":" WhiteSpace@50..51 " " - Ident@51..54 "i32" + TypeBound@51..54 + Error@51..54 + Ident@51..54 "i32" Gt@54..55 ">" WhiteSpace@55..56 " " Eq@56..57 "=" diff --git a/crates/parser2/test_files/syntax_node/items/enums.fe b/crates/parser2/test_files/syntax_node/items/enums.fe index e57ded1cb1..a644d756ec 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.fe +++ b/crates/parser2/test_files/syntax_node/items/enums.fe @@ -22,4 +22,11 @@ where Foo::Bar: Trait { AddMul(T) SubDiv(U) -} \ No newline at end of file +} + +enum HKTEnum *, U> +where + U: (* -> *) -> * +{ + Foo(U) +} diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 87b42882dc..6ddc2f3087 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/enums.fe --- -Root@0..300 - ItemList@0..300 +Root@0..374 + ItemList@0..374 Item@0..15 Enum@0..13 EnumKw@0..4 "enum" @@ -128,9 +128,10 @@ Root@0..300 Colon@166..167 ":" WhiteSpace@167..168 " " TypeBound@168..173 - Path@168..173 - PathSegment@168..173 - Ident@168..173 "Clone" + TraitBound@168..173 + Path@168..173 + PathSegment@168..173 + Ident@168..173 "Clone" Newline@173..174 "\n" VariantDefList@174..198 LBrace@174..175 "{" @@ -152,7 +153,7 @@ Root@0..300 Newline@196..197 "\n" RBrace@197..198 "}" Newline@198..200 "\n\n" - Item@200..300 + Item@200..302 Enum@200..300 EnumKw@200..204 "enum" WhiteSpace@204..205 " " @@ -165,16 +166,18 @@ Root@0..300 Colon@216..217 ":" WhiteSpace@217..218 " " TypeBound@218..221 - Path@218..221 - PathSegment@218..221 - Ident@218..221 "Add" + TraitBound@218..221 + Path@218..221 + PathSegment@218..221 + Ident@218..221 "Add" 
WhiteSpace@221..222 " " Plus@222..223 "+" WhiteSpace@223..224 " " TypeBound@224..227 - Path@224..227 - PathSegment@224..227 - Ident@224..227 "Mul" + TraitBound@224..227 + Path@224..227 + PathSegment@224..227 + Ident@224..227 "Mul" WhiteSpace@227..228 " " Comma@228..229 "," WhiteSpace@229..230 " " @@ -184,16 +187,18 @@ Root@0..300 Colon@231..232 ":" WhiteSpace@232..233 " " TypeBound@233..236 - Path@233..236 - PathSegment@233..236 - Ident@233..236 "Sub" + TraitBound@233..236 + Path@233..236 + PathSegment@233..236 + Ident@233..236 "Sub" WhiteSpace@236..237 " " Plus@237..238 "+" WhiteSpace@238..239 " " TypeBound@239..242 - Path@239..242 - PathSegment@239..242 - Ident@239..242 "Div" + TraitBound@239..242 + Path@239..242 + PathSegment@239..242 + Ident@239..242 "Div" Gt@242..243 ">" Newline@243..244 "\n" WhereClause@244..269 @@ -219,9 +224,10 @@ Root@0..300 Colon@261..262 ":" WhiteSpace@262..263 " " TypeBound@263..268 - Path@263..268 - PathSegment@263..268 - Ident@263..268 "Trait" + TraitBound@263..268 + Path@263..268 + PathSegment@263..268 + Ident@263..268 "Trait" Newline@268..269 "\n" VariantDefList@269..300 LBrace@269..270 "{" @@ -249,4 +255,85 @@ Root@0..300 RParen@297..298 ")" Newline@298..299 "\n" RBrace@299..300 "}" + Newline@300..302 "\n\n" + Item@302..374 + Enum@302..373 + EnumKw@302..306 "enum" + WhiteSpace@306..307 " " + Ident@307..314 "HKTEnum" + GenericParamList@314..328 + Lt@314..315 "<" + TypeGenericParam@315..324 + Ident@315..316 "T" + TypeBoundList@316..324 + Colon@316..317 ":" + WhiteSpace@317..318 " " + TypeBound@318..324 + KindBound@318..324 + Star@318..319 "*" + WhiteSpace@319..320 " " + Arrow@320..322 "->" + WhiteSpace@322..323 " " + KindBound@323..324 + Star@323..324 "*" + Comma@324..325 "," + WhiteSpace@325..326 " " + TypeGenericParam@326..327 + Ident@326..327 "U" + Gt@327..328 ">" + Newline@328..329 "\n" + WhereClause@329..356 + WhereKw@329..334 "where" + WhiteSpace@334..335 " " + Newline@335..336 "\n" + WhiteSpace@336..339 " " + WherePredicate@339..356 + PathType@339..340 + Path@339..340 + PathSegment@339..340 + Ident@339..340 "U" + TypeBoundList@340..355 + Colon@340..341 ":" + WhiteSpace@341..342 " " + TypeBound@342..355 + KindBound@342..355 + LParen@342..343 "(" + KindBound@343..349 + Star@343..344 "*" + WhiteSpace@344..345 " " + Arrow@345..347 "->" + WhiteSpace@347..348 " " + KindBound@348..349 + Star@348..349 "*" + RParen@349..350 ")" + WhiteSpace@350..351 " " + Arrow@351..353 "->" + WhiteSpace@353..354 " " + KindBound@354..355 + Star@354..355 "*" + Newline@355..356 "\n" + VariantDefList@356..373 + LBrace@356..357 "{" + Newline@357..358 "\n" + WhiteSpace@358..362 " " + VariantDef@362..371 + Ident@362..365 "Foo" + TupleType@365..371 + LParen@365..366 "(" + PathType@366..370 + Path@366..367 + PathSegment@366..367 + Ident@366..367 "U" + GenericArgList@367..370 + Lt@367..368 "<" + TypeGenericArg@368..369 + PathType@368..369 + Path@368..369 + PathSegment@368..369 + Ident@368..369 "T" + Gt@369..370 ">" + RParen@370..371 ")" + Newline@371..372 "\n" + RBrace@372..373 "}" + Newline@373..374 "\n" diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 8d3658b37f..1fa2050503 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -172,9 +172,10 @@ Root@0..361 Colon@194..195 ":" WhiteSpace@195..196 " " TypeBound@196..201 - Path@196..201 - PathSegment@196..201 - Ident@196..201 "Trait" + TraitBound@196..201 + Path@196..201 + 
PathSegment@196..201 + Ident@196..201 "Trait" Comma@201..202 "," WhiteSpace@202..203 " " TypeGenericParam@203..204 @@ -238,9 +239,10 @@ Root@0..361 Colon@250..251 ":" WhiteSpace@251..252 " " TypeBound@252..257 - Path@252..257 - PathSegment@252..257 - Ident@252..257 "Trait" + TraitBound@252..257 + Path@252..257 + PathSegment@252..257 + Ident@252..257 "Trait" WhiteSpace@257..258 " " Newline@258..259 "\n" WhiteSpace@259..269 " " @@ -261,9 +263,10 @@ Root@0..361 Colon@278..279 ":" WhiteSpace@279..280 " " TypeBound@280..285 - Path@280..285 - PathSegment@280..285 - Ident@280..285 "Clone" + TraitBound@280..285 + Path@280..285 + PathSegment@280..285 + Ident@280..285 "Clone" Newline@285..286 "\n" WhiteSpace@286..296 " " Newline@296..297 "\n" diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index b9b5f92d81..208c2f3980 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -16,9 +16,10 @@ Root@0..272 Colon@6..7 ":" WhiteSpace@7..8 " " TypeBound@8..11 - Path@8..11 - PathSegment@8..11 - Ident@8..11 "Add" + TraitBound@8..11 + Path@8..11 + PathSegment@8..11 + Ident@8..11 "Add" Gt@11..12 ">" WhiteSpace@12..13 " " PathType@13..24 @@ -155,9 +156,10 @@ Root@0..272 Colon@167..168 ":" WhiteSpace@168..169 " " TypeBound@169..174 - Path@169..174 - PathSegment@169..174 - Ident@169..174 "Clone" + TraitBound@169..174 + Path@169..174 + PathSegment@169..174 + Ident@169..174 "Clone" Newline@174..175 "\n" ImplItemList@175..272 LBrace@175..176 "{" @@ -175,17 +177,18 @@ Root@0..272 Colon@189..190 ":" WhiteSpace@190..191 " " TypeBound@191..197 - Path@191..194 - PathSegment@191..194 - Ident@191..194 "Add" - GenericArgList@194..197 - Lt@194..195 "<" - TypeGenericArg@195..196 - PathType@195..196 - Path@195..196 - PathSegment@195..196 - Ident@195..196 "T" - Gt@196..197 ">" + TraitBound@191..197 + Path@191..194 + PathSegment@191..194 + Ident@191..194 "Add" + GenericArgList@194..197 + Lt@194..195 "<" + TypeGenericArg@195..196 + PathType@195..196 + Path@195..196 + PathSegment@195..196 + Ident@195..196 "T" + Gt@196..197 ">" Gt@197..198 ">" FuncParamList@198..212 LParen@198..199 "(" @@ -217,9 +220,10 @@ Root@0..272 Colon@229..230 ":" WhiteSpace@230..231 " " TypeBound@231..235 - Path@231..235 - PathSegment@231..235 - Ident@231..235 "Copy" + TraitBound@231..235 + Path@231..235 + PathSegment@231..235 + Ident@231..235 "Copy" Newline@235..236 "\n" WhiteSpace@236..240 " " BlockExpr@240..270 diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index 6df8df7c7a..787cd3371a 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -131,9 +131,10 @@ Root@0..335 Colon@109..110 ":" WhiteSpace@110..111 " " TypeBound@111..116 - Path@111..116 - PathSegment@111..116 - Ident@111..116 "Clone" + TraitBound@111..116 + Path@111..116 + PathSegment@111..116 + Ident@111..116 "Clone" Newline@116..117 "\n" WhiteSpace@117..123 " " WherePredicate@123..130 @@ -145,9 +146,10 @@ Root@0..335 Colon@124..125 ":" WhiteSpace@125..126 " " TypeBound@126..129 - Path@126..129 - PathSegment@126..129 - Ident@126..129 "Bar" + TraitBound@126..129 + Path@126..129 + PathSegment@126..129 + Ident@126..129 "Bar" Newline@129..130 "\n" ImplTraitItemList@130..205 LBrace@130..131 "{" @@ -165,17 +167,18 @@ Root@0..335 Colon@144..145 ":" WhiteSpace@145..146 " " 
TypeBound@146..159 - Path@146..156 - PathSegment@146..156 - Ident@146..156 "OtherTrait" - GenericArgList@156..159 - Lt@156..157 "<" - TypeGenericArg@157..158 - PathType@157..158 - Path@157..158 - PathSegment@157..158 - Ident@157..158 "U" - Gt@158..159 ">" + TraitBound@146..159 + Path@146..156 + PathSegment@146..156 + Ident@146..156 "OtherTrait" + GenericArgList@156..159 + Lt@156..157 "<" + TypeGenericArg@157..158 + PathType@157..158 + Path@157..158 + PathSegment@157..158 + Ident@157..158 "U" + Gt@158..159 ">" Gt@159..160 ">" FuncParamList@160..166 LParen@160..161 "(" @@ -232,9 +235,10 @@ Root@0..335 Colon@213..214 ":" WhiteSpace@214..215 " " TypeBound@215..220 - Path@215..220 - PathSegment@215..220 - Ident@215..220 "Clone" + TraitBound@215..220 + Path@215..220 + PathSegment@215..220 + Ident@215..220 "Clone" Comma@220..221 "," WhiteSpace@221..222 " " TypeGenericParam@222..223 @@ -289,9 +293,10 @@ Root@0..335 Colon@254..255 ":" WhiteSpace@255..256 " " TypeBound@256..259 - Path@256..259 - PathSegment@256..259 - Ident@256..259 "Bar" + TraitBound@256..259 + Path@256..259 + PathSegment@256..259 + Ident@256..259 "Bar" Newline@259..260 "\n" ImplTraitItemList@260..335 LBrace@260..261 "{" @@ -309,17 +314,18 @@ Root@0..335 Colon@274..275 ":" WhiteSpace@275..276 " " TypeBound@276..289 - Path@276..286 - PathSegment@276..286 - Ident@276..286 "OtherTrait" - GenericArgList@286..289 - Lt@286..287 "<" - TypeGenericArg@287..288 - PathType@287..288 - Path@287..288 - PathSegment@287..288 - Ident@287..288 "U" - Gt@288..289 ">" + TraitBound@276..289 + Path@276..286 + PathSegment@276..286 + Ident@276..286 "OtherTrait" + GenericArgList@286..289 + Lt@286..287 "<" + TypeGenericArg@287..288 + PathType@287..288 + Path@287..288 + PathSegment@287..288 + Ident@287..288 "U" + Gt@288..289 ">" Gt@289..290 ">" FuncParamList@290..296 LParen@290..291 "(" diff --git a/crates/parser2/test_files/syntax_node/items/trait.fe b/crates/parser2/test_files/syntax_node/items/trait.fe index 23fa735198..85f9cb17ec 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.fe +++ b/crates/parser2/test_files/syntax_node/items/trait.fe @@ -25,4 +25,4 @@ impl Parser pub fn parse(mut self, mut scope: T, checkpoint: Option) -> (bool, Checkpoint) { (SyntaxNode::new_root(self.builder.finish()), self.errors) } -} \ No newline at end of file +} diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index 7e6d9685e5..f8155c9450 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/trait.fe --- -Root@0..592 - ItemList@0..592 +Root@0..593 + ItemList@0..593 Item@0..17 Trait@0..15 TraitKw@0..5 "trait" @@ -40,9 +40,10 @@ Root@0..592 Colon@45..46 ":" WhiteSpace@46..47 " " TypeBound@47..52 - Path@47..52 - PathSegment@47..52 - Ident@47..52 "Trait" + TraitBound@47..52 + Path@47..52 + PathSegment@47..52 + Ident@47..52 "Trait" Comma@52..53 "," WhiteSpace@53..54 " " ConstGenericParam@54..66 @@ -91,16 +92,18 @@ Root@0..592 Colon@104..105 ":" WhiteSpace@105..106 " " TypeBound@106..109 - Path@106..109 - PathSegment@106..109 - Ident@106..109 "Add" + TraitBound@106..109 + Path@106..109 + PathSegment@106..109 + Ident@106..109 "Add" WhiteSpace@109..110 " " Plus@110..111 "+" WhiteSpace@111..112 " " TypeBound@112..115 - Path@112..115 - PathSegment@112..115 - Ident@112..115 "Sub" + 
TraitBound@112..115 + Path@112..115 + PathSegment@112..115 + Ident@112..115 "Sub" Gt@115..116 ">" FuncParamList@116..132 LParen@116..117 "(" @@ -189,9 +192,10 @@ Root@0..592 Colon@201..202 ":" WhiteSpace@202..203 " " TypeBound@203..206 - Path@203..206 - PathSegment@203..206 - Ident@203..206 "Add" + TraitBound@203..206 + Path@203..206 + PathSegment@203..206 + Ident@203..206 "Add" Gt@206..207 ">" WhiteSpace@207..208 " " Newline@208..209 "\n" @@ -238,9 +242,10 @@ Root@0..592 Colon@264..265 ":" WhiteSpace@265..266 " " TypeBound@266..269 - Path@266..269 - PathSegment@266..269 - Ident@266..269 "Sub" + TraitBound@266..269 + Path@266..269 + PathSegment@266..269 + Ident@266..269 "Sub" Newline@269..270 "\n" RBrace@270..271 "}" Newline@271..274 "\n\n\n" @@ -269,9 +274,10 @@ Root@0..592 Colon@306..307 ":" WhiteSpace@307..308 " " TypeBound@308..319 - Path@308..319 - PathSegment@308..319 - Ident@308..319 "TokenStream" + TraitBound@308..319 + Path@308..319 + PathSegment@308..319 + Ident@308..319 "TokenStream" Gt@319..320 ">" FuncParamList@320..353 LParen@320..321 "(" @@ -303,7 +309,7 @@ Root@0..592 Newline@353..354 "\n" RBrace@354..355 "}" Newline@355..357 "\n\n" - Item@357..592 + Item@357..593 Impl@357..592 ImplKw@357..361 "impl" GenericParamList@361..364 @@ -339,16 +345,18 @@ Root@0..592 Colon@387..388 ":" WhiteSpace@388..389 " " TypeBound@389..400 - Path@389..400 - PathSegment@389..400 - Ident@389..400 "TokenStream" + TraitBound@389..400 + Path@389..400 + PathSegment@389..400 + Ident@389..400 "TokenStream" WhiteSpace@400..401 " " Plus@401..402 "+" WhiteSpace@402..403 " " TypeBound@403..408 - Path@403..408 - PathSegment@403..408 - Ident@403..408 "Clone" + TraitBound@403..408 + Path@403..408 + PathSegment@403..408 + Ident@403..408 "Clone" Newline@408..409 "\n" ImplItemList@409..592 LBrace@409..410 "{" @@ -369,9 +377,10 @@ Root@0..592 Colon@429..430 ":" WhiteSpace@430..431 " " TypeBound@431..436 - Path@431..436 - PathSegment@431..436 - Ident@431..436 "Parse" + TraitBound@431..436 + Path@431..436 + PathSegment@431..436 + Ident@431..436 "Parse" Gt@436..437 ">" FuncParamList@437..493 LParen@437..438 "(" @@ -474,4 +483,5 @@ Root@0..592 RBrace@589..590 "}" Newline@590..591 "\n" RBrace@591..592 "}" + Newline@592..593 "\n" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index c8b58976b4..af44340318 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -88,12 +88,13 @@ Root@0..560 Colon@126..127 ":" WhiteSpace@127..128 " " TypeBound@128..138 - Path@128..138 - PathSegment@128..131 - Ident@128..131 "foo" - Colon2@131..133 "::" - PathSegment@133..138 - Ident@133..138 "Trait" + TraitBound@128..138 + Path@128..138 + PathSegment@128..131 + Ident@128..131 "foo" + Colon2@131..133 "::" + PathSegment@133..138 + Ident@133..138 "Trait" Comma@138..139 "," Newline@139..140 "\n" WhiteSpace@140..144 " " @@ -171,22 +172,24 @@ Root@0..560 Colon@233..234 ":" WhiteSpace@234..235 " " TypeBound@235..245 - Path@235..245 - PathSegment@235..238 - Ident@235..238 "foo" - Colon2@238..240 "::" - PathSegment@240..245 - Ident@240..245 "Trait" + TraitBound@235..245 + Path@235..245 + PathSegment@235..238 + Ident@235..238 "foo" + Colon2@238..240 "::" + PathSegment@240..245 + Ident@240..245 "Trait" WhiteSpace@245..246 " " Plus@246..247 "+" WhiteSpace@247..248 " " TypeBound@248..258 - Path@248..258 - PathSegment@248..251 - Ident@248..251 "bar" - Colon2@251..253 "::" - 
PathSegment@253..258 - Ident@253..258 "Trait" + TraitBound@248..258 + Path@248..258 + PathSegment@248..251 + Ident@248..251 "bar" + Colon2@251..253 "::" + PathSegment@253..258 + Ident@253..258 "Trait" Comma@258..259 "," Newline@259..260 "\n" WhiteSpace@260..264 " " @@ -201,12 +204,13 @@ Root@0..560 Colon@272..273 ":" WhiteSpace@273..274 " " TypeBound@274..284 - Path@274..284 - PathSegment@274..277 - Ident@274..277 "bar" - Colon2@277..279 "::" - PathSegment@279..284 - Ident@279..284 "Trait" + TraitBound@274..284 + Path@274..284 + PathSegment@274..277 + Ident@274..277 "bar" + Colon2@277..279 "::" + PathSegment@279..284 + Ident@279..284 "Trait" Newline@284..285 "\n" Gt@285..286 ">" WhiteSpace@286..287 " " @@ -223,16 +227,18 @@ Root@0..560 Colon@298..299 ":" WhiteSpace@299..300 " " TypeBound@300..306 - Path@300..306 - PathSegment@300..306 - Ident@300..306 "Trait1" + TraitBound@300..306 + Path@300..306 + PathSegment@300..306 + Ident@300..306 "Trait1" WhiteSpace@306..307 " " Plus@307..308 "+" WhiteSpace@308..309 " " TypeBound@309..315 - Path@309..315 - PathSegment@309..315 - Ident@309..315 "Trait2" + TraitBound@309..315 + Path@309..315 + PathSegment@309..315 + Ident@309..315 "Trait2" Newline@315..316 "\n" WhiteSpace@316..320 " " WherePredicate@320..347 @@ -252,16 +258,18 @@ Root@0..560 Colon@329..330 ":" WhiteSpace@330..331 " " TypeBound@331..337 - Path@331..337 - PathSegment@331..337 - Ident@331..337 "Trait1" + TraitBound@331..337 + Path@331..337 + PathSegment@331..337 + Ident@331..337 "Trait1" WhiteSpace@337..338 " " Plus@338..339 "+" WhiteSpace@339..340 " " TypeBound@340..346 - Path@340..346 - PathSegment@340..346 - Ident@340..346 "Trait2" + TraitBound@340..346 + Path@340..346 + PathSegment@340..346 + Ident@340..346 "Trait2" Newline@346..347 "\n" WhiteSpace@347..351 " " WherePredicate@351..378 @@ -281,16 +289,18 @@ Root@0..560 Colon@360..361 ":" WhiteSpace@361..362 " " TypeBound@362..368 - Path@362..368 - PathSegment@362..368 - Ident@362..368 "Trait2" + TraitBound@362..368 + Path@362..368 + PathSegment@362..368 + Ident@362..368 "Trait2" WhiteSpace@368..369 " " Plus@369..370 "+" WhiteSpace@370..371 " " TypeBound@371..377 - Path@371..377 - PathSegment@371..377 - Ident@371..377 "Trait3" + TraitBound@371..377 + Path@371..377 + PathSegment@371..377 + Ident@371..377 "Trait3" Newline@377..378 "\n" RecordFieldDefList@378..411 LBrace@378..379 "{" @@ -346,15 +356,16 @@ Root@0..560 Colon@431..432 ":" WhiteSpace@432..433 " " TypeBound@433..446 - Path@433..446 - PathSegment@433..436 - Ident@433..436 "std" - Colon2@436..438 "::" - PathSegment@438..441 - Ident@438..441 "ops" - Colon2@441..443 "::" - PathSegment@443..446 - Ident@443..446 "Add" + TraitBound@433..446 + Path@433..446 + PathSegment@433..436 + Ident@433..436 "std" + Colon2@436..438 "::" + PathSegment@438..441 + Ident@438..441 "ops" + Colon2@441..443 "::" + PathSegment@443..446 + Ident@443..446 "Add" Comma@446..447 "," WhiteSpace@447..448 " " TypeGenericParam@448..449 @@ -396,31 +407,33 @@ Root@0..560 Colon@491..492 ":" WhiteSpace@492..493 " " TypeBound@493..498 - Path@493..498 - PathSegment@493..498 - Ident@493..498 "Trait" + TraitBound@493..498 + Path@493..498 + PathSegment@493..498 + Ident@493..498 "Trait" WhiteSpace@498..499 " " Plus@499..500 "+" WhiteSpace@500..501 " " TypeBound@501..514 - Path@501..506 - PathSegment@501..506 - Ident@501..506 "Trait" - GenericArgList@506..514 - Lt@506..507 "<" - TypeGenericArg@507..510 - PathType@507..510 - Path@507..510 - PathSegment@507..510 - Ident@507..510 "i32" - Comma@510..511 "," - WhiteSpace@511..512 
" " - TypeGenericArg@512..513 - PathType@512..513 - Path@512..513 - PathSegment@512..513 - Ident@512..513 "Y" - Gt@513..514 ">" + TraitBound@501..514 + Path@501..506 + PathSegment@501..506 + Ident@501..506 "Trait" + GenericArgList@506..514 + Lt@506..507 "<" + TypeGenericArg@507..510 + PathType@507..510 + Path@507..510 + PathSegment@507..510 + Ident@507..510 "i32" + Comma@510..511 "," + WhiteSpace@511..512 " " + TypeGenericArg@512..513 + PathType@512..513 + Path@512..513 + PathSegment@512..513 + Ident@512..513 "Y" + Gt@513..514 ">" Newline@514..515 "\n" RecordFieldDefList@515..560 LBrace@515..516 "{" From 0bc0e6384ec534af1c97dc6f2ef5341e636d871c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 15 Sep 2023 17:55:03 +0200 Subject: [PATCH 308/678] Implement type kind bound hir lowering --- crates/hir/src/hir_def/params.rs | 17 ++++++- crates/hir/src/lower/params.rs | 39 +++++++++++++++- crates/hir/src/span/mod.rs | 4 +- crates/hir/src/span/params.rs | 21 +++++++++ crates/hir/src/visitor.rs | 79 ++++++++++++++++++++++++++++++-- crates/parser2/src/ast/param.rs | 55 +++++++++++++++------- 6 files changed, 188 insertions(+), 27 deletions(-) diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 3f6069d235..92d154f350 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -37,6 +37,7 @@ pub enum GenericParam { Type(TypeGenericParam), Const(ConstGenericParam), } + impl GenericParam { pub fn name(&self) -> Partial { match self { @@ -120,9 +121,23 @@ impl FuncParamName { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TypeBound { +pub enum TypeBound { + Trait(TraitBound), + Kind(Partial), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TraitBound { /// The path to the trait. pub path: Partial, /// The type arguments of the trait. 
pub generic_args: Option, } + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum KindBound { + /// `*` + Mono, + /// `* -> *` + Abs(Partial>, Partial>), +} diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index e99ace950a..8d46b40fd4 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -1,6 +1,6 @@ -use parser::ast::{self}; +use parser::ast::{self, KindBoundVariant}; -use crate::hir_def::{kw, params::*, Body, IdentId, PathId, TypeId}; +use crate::hir_def::{kw, params::*, Body, IdentId, Partial, PathId, TypeId}; use super::FileLowerCtxt; @@ -174,6 +174,16 @@ impl WherePredicate { impl TypeBound { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeBound) -> Self { + if let Some(trait_bound) = ast.trait_bound() { + Self::Trait(TraitBound::lower_ast(ctxt, trait_bound)) + } else { + Self::Kind(KindBound::lower_ast_opt(ctxt, ast.kind_bound())) + } + } +} + +impl TraitBound { + fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TraitBound) -> Self { let path = ast.path().map(|ast| PathId::lower_ast(ctxt, ast)).into(); let generic_args = ast .generic_args() @@ -182,6 +192,31 @@ impl TypeBound { } } +impl KindBound { + fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Partial { + let Some(kind_variant) = ast.map(|ast| ast.variant()).flatten() else { + return Partial::Absent; + }; + + match kind_variant { + KindBoundVariant::Mono(_) => Partial::Present(KindBound::Mono), + KindBoundVariant::Abs(lhs, _, rhs) => { + let lhs = KindBound::lower_ast_opt(ctxt, lhs) + .to_opt() + .map(|kind| Box::new(kind)) + .into(); + + let rhs = KindBound::lower_ast_opt(ctxt, rhs) + .to_opt() + .map(|kind| Box::new(kind)) + .into(); + + Partial::Present(KindBound::Abs(lhs, rhs)) + } + } + } +} + impl FuncParamName { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::FuncParamName) -> Self { match ast { diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 5b8f7e5197..5b195539f1 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -49,8 +49,8 @@ pub mod lazy_spans { pub use super::params::{ LazyConstGenericParamSpan, LazyFuncParamListSpan, LazyFuncParamSpan, LazyGenericArgListSpan, LazyGenericArgSpan, LazyGenericParamListSpan, LazyGenericParamSpan, - LazyTypeBoundListSpan, LazyTypeBoundSpan, LazyTypeGenericArgSpan, LazyWhereClauseSpan, - LazyWherePredicateSpan, + LazyKindBoundSpan, LazyTraitBoundSpan, LazyTypeBoundListSpan, LazyTypeBoundSpan, + LazyTypeGenericArgSpan, LazyWhereClauseSpan, LazyWherePredicateSpan, }; pub use super::pat::{ diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index b7b43f0da0..82efa8d6cf 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -121,8 +121,29 @@ define_lazy_span_node! 
{ define_lazy_span_node!( LazyTypeBoundSpan, ast::TypeBound, + @node { + (trait_bound, trait_bound, LazyTraitBoundSpan), + (kind_bound, kind_bound, LazyKindBoundSpan), + } +); + +define_lazy_span_node!( + LazyTraitBoundSpan, + ast::TraitBound, @node { (path, path, LazyPathSpan), (generic_args, generic_args, LazyGenericArgListSpan), } ); + +define_lazy_span_node!( + LazyKindBoundSpan, + ast::KindBound, + @token { + (arrow, arrow), + } + @node { + (lhs, lhs, LazyKindBoundSpan), + (rhs, rhs, LazyKindBoundSpan ), + } +); diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 9df61588d0..eed9a37340 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -7,11 +7,12 @@ use crate::{ Body, CallArg, Const, Contract, Enum, Expr, ExprId, Field, FieldDef, FieldDefListId, FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, FuncParamName, GenericArg, GenericArgListId, GenericParam, GenericParamListId, IdentId, Impl, ImplTrait, ItemKind, - LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, StmtId, Struct, TopLevelMod, - Trait, TupleTypeId, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, UsePathId, - UsePathSegment, VariantDef, VariantDefListId, VariantKind, WhereClauseId, WherePredicate, + KindBound, LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, StmtId, Struct, + TopLevelMod, Trait, TraitBound, TupleTypeId, TypeAlias, TypeBound, TypeId, TypeKind, Use, + UseAlias, UsePathId, UsePathSegment, VariantDef, VariantDefListId, VariantKind, + WhereClauseId, WherePredicate, }, - span::{lazy_spans::*, transition::ChainRoot, SpanDowncast}, + span::{lazy_spans::*, params::LazyTraitBoundSpan, transition::ChainRoot, SpanDowncast}, HirDb, }; @@ -168,6 +169,22 @@ pub trait Visitor { walk_type_bound(self, ctxt, bound); } + fn visit_trait_bound( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyTraitBoundSpan>, + bound: &TraitBound, + ) { + walk_trait_bound(self, ctxt, bound); + } + + fn visit_kind_bound( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyKindBoundSpan>, + bound: &KindBound, + ) { + walk_kind_bound(self, ctxt, bound); + } + fn visit_where_clause( &mut self, ctxt: &mut VisitorCtxt<'_, LazyWhereClauseSpan>, @@ -1646,7 +1663,7 @@ where TypeKind::Tuple(t) => ctxt.with_new_ctxt( |span| span.into_tuple_type(), - |ctxt| walk_tuple_type(visitor, ctxt, t), + |ctxt| walk_tuple_type(visitor, ctxt, *t), ), TypeKind::Array(elem, body) => ctxt.with_new_ctxt( @@ -1723,6 +1740,28 @@ pub fn walk_type_bound( bound: &TypeBound, ) where V: Visitor + ?Sized, +{ + match bound { + TypeBound::Trait(trait_bound) => ctxt.with_new_ctxt( + |span| span.trait_bound_moved(), + |ctxt| visitor.visit_trait_bound(ctxt, trait_bound), + ), + TypeBound::Kind(Partial::Present(kind_bound)) => ctxt.with_new_ctxt( + |span| span.kind_bound_moved(), + |ctxt| { + visitor.visit_kind_bound(ctxt, kind_bound); + }, + ), + _ => {} + } +} + +pub fn walk_trait_bound( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyTraitBoundSpan>, + bound: &TraitBound, +) where + V: Visitor + ?Sized, { if let Some(path) = bound.path.to_opt() { ctxt.with_new_ctxt( @@ -1743,6 +1782,36 @@ pub fn walk_type_bound( } } +pub fn walk_kind_bound( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'_, LazyKindBoundSpan>, + bound: &KindBound, +) where + V: Visitor + ?Sized, +{ + let KindBound::Abs(lhs, rhs) = bound else { + return; + }; + + if let Partial::Present(lhs) = lhs { + ctxt.with_new_ctxt( + |span| span.lhs_moved(), + |ctxt| { + visitor.visit_kind_bound(ctxt, lhs.as_ref()); + }, + ) + } + + if let 
Partial::Present(rhs) = rhs { + ctxt.with_new_ctxt( + |span| span.lhs_moved(), + |ctxt| { + visitor.visit_kind_bound(ctxt, rhs.as_ref()); + }, + ) + } +} + pub fn walk_where_clause( visitor: &mut V, ctxt: &mut VisitorCtxt<'_, LazyWhereClauseSpan>, diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index b02ff3a0ca..e2e9274151 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -287,7 +287,23 @@ impl TypeBound { support::child(self.syntax()) } - pub fn kind_constraint(&self) -> Option { + pub fn kind_bound(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct TraitBound, + SK::TraitBound +} +impl TraitBound { + /// A path to the trait. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// A generic argument list for the trait. + pub fn generic_args(&self) -> Option { support::child(self.syntax()) } } @@ -311,6 +327,27 @@ impl KindBound { child.map(|child| child.variant()).flatten() } } + + pub fn arrow(&self) -> Option { + match self.variant()? { + KindBoundVariant::Abs(_, tok, _) => Some(tok), + KindBoundVariant::Mono(_) => None, + } + } + + pub fn lhs(&self) -> Option { + match self.variant()? { + KindBoundVariant::Abs(lhs, _, _) => lhs, + KindBoundVariant::Mono(_) => None, + } + } + + pub fn rhs(&self) -> Option { + match self.variant()? { + KindBoundVariant::Abs(_, _, rhs) => rhs, + KindBoundVariant::Mono(_) => None, + } + } } pub enum KindBoundVariant { @@ -320,22 +357,6 @@ pub enum KindBoundVariant { Abs(Option, SyntaxToken, Option), } -ast_node! { - pub struct TraitBound, - SK::TraitBound -} -impl TraitBound { - /// A path to the trait. - pub fn path(&self) -> Option { - support::child(self.syntax()) - } - - /// A generic argument list for the trait. - pub fn generic_args(&self) -> Option { - support::child(self.syntax()) - } -} - /// A trait for AST nodes that can have generic parameters. pub trait GenericParamsOwner: AstNode { /// Returns the generic parameter list of the node. From aece9be444bf73b50935887e0319b8b91036638e Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Fri, 15 Sep 2023 23:18:13 +0200 Subject: [PATCH 309/678] Implement type kind bound ty lowering --- crates/hir-analysis/src/lib.rs | 3 + crates/hir-analysis/src/ty/diagnostics.rs | 33 +- crates/hir-analysis/src/ty/lower.rs | 324 +++++++++++++++--- crates/hir-analysis/src/ty/mod.rs | 19 +- crates/hir-analysis/src/ty/visitor.rs | 2 +- crates/hir/src/hir_def/item.rs | 126 +++++-- crates/hir/src/hir_def/params.rs | 10 + crates/hir/src/visitor.rs | 13 +- .../syntax_node/items/enums.snap.new | 285 +++++++++++++++ crates/uitest/fixtures/ty/kind_bound.fe | 14 + crates/uitest/fixtures/ty/kind_bound.snap.new | 13 + 11 files changed, 769 insertions(+), 73 deletions(-) create mode 100644 crates/parser2/test_files/syntax_node/items/enums.snap.new create mode 100644 crates/uitest/fixtures/ty/kind_bound.fe create mode 100644 crates/uitest/fixtures/ty/kind_bound.snap.new diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 16acfde2e7..c62225867c 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -16,6 +16,8 @@ pub struct Jar( ty::lower::lower_hir_ty, ty::lower::lower_adt, ty::lower::lower_type_alias, + ty::lower::collect_generic_params, + ty::lower::GenericParamOwnerId, /// ADT analysis. 
ty::adt_analysis::check_recursive_adt, ty::adt_analysis::analyze_adt, @@ -24,6 +26,7 @@ pub struct Jar( ty::trait_::TraitInstId, ty::diagnostics::AdtDefDiagAccumulator, ty::diagnostics::TypeAliasDefDiagAccumulator, + ty::diagnostics::GenericParamDiagAccumulator, ); pub trait HirAnalysisDb: salsa::DbWithJar + HirDb { diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 440bb224e2..9c75c8f6be 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -14,6 +14,8 @@ use super::ty::Kind; pub struct AdtDefDiagAccumulator(pub(super) TyLowerDiag); #[salsa::accumulator] pub struct TypeAliasDefDiagAccumulator(pub(super) TyLowerDiag); +#[salsa::accumulator] +pub struct GenericParamDiagAccumulator(pub(super) TyLowerDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyLowerDiag { @@ -31,6 +33,10 @@ pub enum TyLowerDiag { }, TypeAliasCycle(DynLazySpan), + DuplicateKindBound(DynLazySpan, DynLazySpan), + + KindBoundNotAllowed(DynLazySpan), + AssocTy(DynLazySpan), } @@ -85,7 +91,9 @@ impl TyLowerDiag { Self::RecursiveType { .. } => 2, Self::TypeAliasArgumentMismatch { .. } => 3, Self::TypeAliasCycle(_) => 4, - Self::AssocTy(_) => 5, + Self::DuplicateKindBound(_, _) => 5, + Self::KindBoundNotAllowed(_) => 6, + Self::AssocTy(_) => 7, } } @@ -106,6 +114,9 @@ impl TyLowerDiag { ), Self::TypeAliasCycle(_) => "recursive type alias cycle is detected".to_string(), + Self::DuplicateKindBound(_, _) => "duplicate type bound is not allowed.".to_string(), + Self::KindBoundNotAllowed(_) => "kind bound is not allowed".to_string(), + Self::AssocTy(_) => "associated type is not supported ".to_string(), } } @@ -172,6 +183,26 @@ impl TyLowerDiag { span.resolve(db), )], + Self::DuplicateKindBound(primary, first_defined) => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "duplicate type bound here".to_string(), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "first defined here".to_string(), + first_defined.resolve(db), + ), + ] + } + Self::KindBoundNotAllowed(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "kind bound is not allowed here".to_string(), + span.resolve(db), + )], + Self::AssocTy(span) => vec![SubDiagnostic::new( LabelStyle::Primary, "associated type is not implemented".to_string(), diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index b3a538b4f5..ba6932748a 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -1,14 +1,18 @@ use either::Either; -use hir::hir_def::{ - scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, - GenericParam as HirGenericParam, GenericParamOwner, ItemKind, Partial, PathId, - TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, +use hir::{ + hir_def::{ + scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, + GenericParamOwner, IdentId, ItemKind, KindBound as HirKindBound, Partial, PathId, + TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, + WherePredicate, + }, + visitor::prelude::*, }; use crate::{ name_resolution::{resolve_path_early, EarlyResolvedPath, NameDomain, NameResKind}, ty::{ - diagnostics::{TyLowerDiag, TypeAliasDefDiagAccumulator}, + diagnostics::{GenericParamDiagAccumulator, TyLowerDiag, TypeAliasDefDiagAccumulator}, visitor::TyDiagCollector, }, HirAnalysisDb, @@ -28,15 +32,28 @@ pub fn lower_adt(db: &dyn 
HirAnalysisDb, adt: AdtRefId) -> AdtDef { AdtTyBuilder::new(db, adt).build() } +#[salsa::tracked] +pub(crate) fn collect_generic_params( + db: &dyn HirAnalysisDb, + owner: GenericParamOwnerId, +) -> GenericParamTypeSet { + let (set, diags) = GenericParamCollector::new(db, owner.data(db)).finalize(); + diags.into_iter().for_each(|diag| { + GenericParamDiagAccumulator::push(db, diag); + }); + + set +} + #[salsa::tracked(return_ref, recovery_fn = recover_lower_type_alias_cycle)] pub(crate) fn lower_type_alias(db: &dyn HirAnalysisDb, alias: HirTypeAlias) -> TyAlias { - let params = lower_generic_param_list(db, alias.into()); + let params = collect_generic_params(db, GenericParamOwnerId::new(db, alias.into())); let Some(hir_ty) = alias.ty(db.as_hir_db()).to_opt() else { return TyAlias { alias, alias_to: TyId::invalid(db, InvalidCause::Other), - params, + params: params.params, }; }; @@ -63,7 +80,7 @@ pub(crate) fn lower_type_alias(db: &dyn HirAnalysisDb, alias: HirTypeAlias) -> T TyAlias { alias, alias_to, - params, + params: params.params, } } @@ -76,11 +93,11 @@ fn recover_lower_type_alias_cycle( TypeAliasDefDiagAccumulator::push(db, diag); let alias_to = TyId::invalid(db, InvalidCause::Other); - let params = lower_generic_param_list(db, alias.into()); + let params = collect_generic_params(db, GenericParamOwnerId::new(db, alias.into())); TyAlias { alias, alias_to, - params, + params: params.params, } } @@ -190,9 +207,12 @@ impl<'db> TyBuilder<'db> { return self.lower_resolved_path(res).unwrap_left() } - ItemKind::Trait(_) => { - let self_param = TyParam::self_ty_param(Kind::Star); - return TyId::new(self.db, TyData::TyParam(self_param)); + ItemKind::Trait(trait_) => { + let params = collect_generic_params( + self.db, + GenericParamOwnerId::new(self.db, trait_.into()), + ); + return params.trait_self.unwrap(); } ItemKind::Impl(impl_) => (impl_.ty(self.db.as_hir_db()), impl_.scope()), @@ -254,11 +274,11 @@ impl<'db> TyBuilder<'db> { let item = match scope { ScopeId::Item(item) => item, ScopeId::GenericParam(item, idx) => { - let params = GenericParamOwner::from_item_opt(item) - .unwrap() - .params(self.db.as_hir_db()); - let ty = lower_generic_param(self.db, ¶ms.data(self.db.as_hir_db())[idx], idx); - return Either::Left(ty); + let owner = GenericParamOwner::from_item_opt(item).unwrap(); + let owner_id = GenericParamOwnerId::new(self.db, owner); + + let params = collect_generic_params(self.db, owner_id); + return Either::Left(params.params[idx]); } _ => unreachable!(), }; @@ -339,7 +359,14 @@ impl<'db> AdtTyBuilder<'db> { } fn collect_generic_params(&mut self) { - self.params = lower_generic_param_list(self.db, self.adt.as_item(self.db)); + let owner = match self.adt.data(self.db) { + AdtRef::Contract(_) => return, + AdtRef::Enum(enum_) => enum_.into(), + AdtRef::Struct(struct_) => struct_.into(), + }; + let owner_id = GenericParamOwnerId::new(self.db, owner); + + self.params = collect_generic_params(self.db, owner_id).params.clone(); } fn collect_variants(&mut self) { @@ -388,37 +415,248 @@ impl<'db> AdtTyBuilder<'db> { } } -fn lower_generic_param_list(db: &dyn HirAnalysisDb, item: ItemKind) -> Vec { - let Some(params_owner) = GenericParamOwner::from_item_opt(item) else { - return Vec::new(); - }; +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct GenericParamTypeSet { + pub(crate) params: Vec, + pub(crate) trait_self: Option, +} + +struct GenericParamCollector<'db> { + db: &'db dyn HirAnalysisDb, + parent: GenericParamOwner, + params: Vec, + /// The self type of the trait. 
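+    /// A kind bound written on `Self` in a trait definition (for example
+    /// `where Self: * -> *`) is recorded here rather than in `params`.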
+ trait_self: TyParamPrecursor, + current_idx: ParamLoc, + diags: Vec, +} + +impl<'db> GenericParamCollector<'db> { + fn new(db: &'db dyn HirAnalysisDb, parent: GenericParamOwner) -> Self { + let trait_self = TyParamPrecursor { + name: Partial::Absent, + idx: None, + kind: Kind::Star, + kind_span: None, + }; + + Self { + db, + parent, + params: Vec::new(), + trait_self: trait_self, + current_idx: ParamLoc::Idx(0), + diags: Vec::new(), + } + } + + fn finalize(mut self) -> (GenericParamTypeSet, Vec) { + let param_list = self.parent.params(self.db.as_hir_db()); + let param_list_span = self.parent.params_span(); + self.visit_generic_param_list( + &mut VisitorCtxt::new(self.db.as_hir_db(), self.parent.scope(), param_list_span), + param_list, + ); + + if let Some(where_clause_owner) = self.parent.where_clause_owner() { + let where_clause = where_clause_owner.where_clause(self.db.as_hir_db()); + let where_clause_span = where_clause_owner.where_clause_span(); + self.visit_where_clause( + &mut VisitorCtxt::new(self.db.as_hir_db(), self.parent.scope(), where_clause_span), + where_clause, + ); + }; + + let params = self + .params + .into_iter() + .map(|param| param.into_ty(self.db)) + .collect(); + let trait_self = matches!(self.parent, GenericParamOwner::Trait(_)) + .then(|| self.trait_self.into_ty(self.db)); + let params_set = GenericParamTypeSet { params, trait_self }; + + (params_set, self.diags) + } + + fn param_idx_from_ty(&self, ty: Option) -> ParamLoc { + let Some(ty) = ty else { + return ParamLoc::NotParam; + }; + + let path = match ty.data(self.db.as_hir_db()) { + HirTyKind::Path(Partial::Present(path), args) => { + if args.is_empty(self.db.as_hir_db()) { + *path + } else { + return ParamLoc::NotParam; + } + } + + HirTyKind::SelfType(args) => { + return if matches!(self.parent.into(), ItemKind::Trait(_)) + && args.is_empty(self.db.as_hir_db()) + { + ParamLoc::TraitSelf + } else { + ParamLoc::NotParam + }; + } + + _ => return ParamLoc::NotParam, + }; + + match resolve_path_early(self.db, path, self.parent.scope()) { + EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { + Ok(res) => match res.kind { + NameResKind::Scope(ScopeId::GenericParam(item, idx)) => { + debug_assert_eq!(item, ItemKind::from(self.parent)); + ParamLoc::Idx(idx) + } + _ => ParamLoc::NotParam, + }, + Err(_) => ParamLoc::NotParam, + }, - params_owner - .params(db.as_hir_db()) - .data(db.as_hir_db()) - .iter() - .enumerate() - .map(|(idx, param)| lower_generic_param(db, param, idx)) - .collect() + EarlyResolvedPath::Partial { .. } => ParamLoc::NotParam, + } + } } -fn lower_generic_param(db: &dyn HirAnalysisDb, param: &HirGenericParam, idx: usize) -> TyId { - match param { - // TODO: we need to handle kinds of generic params. 
- GenericParam::Type(param) => { - if let Some(name) = param.name.to_opt() { - let ty_param = TyParam { +impl<'db> Visitor for GenericParamCollector<'db> { + fn visit_generic_param( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyGenericParamSpan>, + param: &GenericParam, + ) { + match param { + GenericParam::Type(param) => { + let name = param.name; + let idx = self.current_idx.unwrap_idx(); + + let kind = Kind::Star; + self.params.push(TyParamPrecursor { name, idx: Some(idx), - kind: Kind::Star, - }; - TyId::new(db, TyData::TyParam(ty_param)) - } else { - TyId::invalid(db, InvalidCause::Other) + kind, + kind_span: None, + }); + } + + GenericParam::Const(_) => { + todo!() } } - GenericParam::Const(_) => { - todo!() + + walk_generic_param(self, ctxt, param); + self.current_idx = ParamLoc::Idx(self.current_idx.unwrap_idx() + 1); + } + + fn visit_where_predicate( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyWherePredicateSpan>, + pred: &WherePredicate, + ) { + self.current_idx = self.param_idx_from_ty(pred.ty.to_opt()); + walk_where_predicate(self, ctxt, pred) + } + + fn visit_ty(&mut self, _: &mut VisitorCtxt<'_, LazyTySpan>, _: HirTyId) { + // Remove `walk_ty` because 1. we don't need to visit the type and 2. we + // want to avoid unnecessary overhead of visiting type. + } + + fn visit_kind_bound( + &mut self, + ctxt: &mut VisitorCtxt<'_, LazyKindBoundSpan>, + bound: &HirKindBound, + ) { + let kind = lower_kind(bound); + let param = match self.current_idx { + ParamLoc::Idx(idx) => &mut self.params[idx], + ParamLoc::TraitSelf => &mut self.trait_self, + ParamLoc::NotParam => { + self.diags.push(TyLowerDiag::KindBoundNotAllowed( + ctxt.span().unwrap().into(), + )); + return; + } + }; + + if let Some(first_defined_span) = ¶m.kind_span { + if param.kind != kind { + self.diags.push(TyLowerDiag::DuplicateKindBound( + ctxt.span().unwrap().into(), + first_defined_span.clone().into(), + )); + } + } else { + param.kind = kind; + param.kind_span = Some(ctxt.span().unwrap().into()); } } } + +struct TyParamPrecursor { + name: Partial, + idx: Option, + kind: Kind, + kind_span: Option, +} + +impl TyParamPrecursor { + pub fn into_ty(self, db: &dyn HirAnalysisDb) -> TyId { + let Partial::Present(name) = self.name else { + return TyId::invalid(db, InvalidCause::Other); + }; + + let param = TyParam { + name, + idx: self.idx, + kind: self.kind, + }; + + TyId::new(db, TyData::TyParam(param)) + } +} + +fn lower_kind(kind: &HirKindBound) -> Kind { + match kind { + HirKindBound::Mono => Kind::Star, + HirKindBound::Abs(lhs, rhs) => match (lhs, rhs) { + (Partial::Present(lhs), Partial::Present(rhs)) => { + Kind::Abs(Box::new(lower_kind(lhs)), Box::new(lower_kind(rhs))) + } + (Partial::Present(lhs), Partial::Absent) => { + Kind::Abs(Box::new(lower_kind(lhs)), Box::new(Kind::Any)) + } + (Partial::Absent, Partial::Present(rhs)) => { + Kind::Abs(Box::new(Kind::Any), Box::new(lower_kind(rhs))) + } + (Partial::Absent, Partial::Absent) => { + Kind::Abs(Box::new(Kind::Any), Box::new(Kind::Any)) + } + }, + } +} + +#[derive(Debug, Clone, Copy)] +enum ParamLoc { + Idx(usize), + TraitSelf, + NotParam, +} + +impl ParamLoc { + fn unwrap_idx(&self) -> usize { + match self { + ParamLoc::Idx(idx) => *idx, + _ => panic!(), + } + } +} + +#[salsa::tracked] +pub(crate) struct GenericParamOwnerId { + data: GenericParamOwner, +} diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 5f99ded813..8c82077512 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -3,7 +3,10 
@@ use hir::analysis_pass::ModuleAnalysisPass; use rustc_hash::FxHashSet; use self::{ - diagnostics::{AdtDefDiagAccumulator, TyLowerDiag, TypeAliasDefDiagAccumulator}, + diagnostics::{ + AdtDefDiagAccumulator, GenericParamDiagAccumulator, TyLowerDiag, + TypeAliasDefDiagAccumulator, + }, ty::AdtRefId, }; @@ -50,9 +53,15 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { adts.map(|adt| { adt_analysis::analyze_adt::accumulated::(self.db, adt) .into_iter() - .map(|diag| Box::new(diag) as _) + .chain( + adt_analysis::analyze_adt::accumulated::( + self.db, adt, + ) + .into_iter(), + ) }) .flatten() + .map(|diag| Box::new(diag) as _) .collect() } } @@ -77,6 +86,12 @@ impl<'db> ModuleAnalysisPass for TypeAliasAnalysisPass<'db> { .map(|&alias| { lower::lower_type_alias::accumulated::(self.db, alias) .into_iter() + .chain( + lower::lower_type_alias::accumulated::( + self.db, alias, + ) + .into_iter(), + ) }) .flatten() .collect(); diff --git a/crates/hir-analysis/src/ty/visitor.rs b/crates/hir-analysis/src/ty/visitor.rs index db24a541de..b9587eece0 100644 --- a/crates/hir-analysis/src/ty/visitor.rs +++ b/crates/hir-analysis/src/ty/visitor.rs @@ -87,7 +87,7 @@ impl<'db> TyDiagCollector<'db> { } pub(super) fn collect(mut self, hir_ty: HirTyId, span: LazyTySpan) -> Vec { - let mut ctxt = VisitorCtxt::new(self.db.as_hir_db(), span); + let mut ctxt = VisitorCtxt::new(self.db.as_hir_db(), self.scope, span); self.visit_ty(&mut ctxt, hir_ty); self.diags } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index ce75363dff..87fe044452 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -15,7 +15,7 @@ use crate::{ LazyImplTraitSpan, LazyItemSpan, LazyModSpan, LazyStructSpan, LazyTopModSpan, LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, }, - params::LazyGenericParamListSpan, + params::{LazyGenericParamListSpan, LazyWhereClauseSpan}, DynLazySpan, HirOrigin, }, HirDb, @@ -168,6 +168,33 @@ impl ItemKind { } } +impl From for ItemKind { + fn from(owner: GenericParamOwner) -> Self { + match owner { + GenericParamOwner::Func(func) => ItemKind::Func(func), + GenericParamOwner::Struct(struct_) => ItemKind::Struct(struct_), + GenericParamOwner::Enum(enum_) => ItemKind::Enum(enum_), + GenericParamOwner::TypeAlias(type_alias) => ItemKind::TypeAlias(type_alias), + GenericParamOwner::Impl(impl_) => ItemKind::Impl(impl_), + GenericParamOwner::Trait(trait_) => ItemKind::Trait(trait_), + GenericParamOwner::ImplTrait(impl_trait) => ItemKind::ImplTrait(impl_trait), + } + } +} + +impl From for ItemKind { + fn from(owner: WhereClauseOwner) -> Self { + match owner { + WhereClauseOwner::Func(func) => ItemKind::Func(func), + WhereClauseOwner::Struct(struct_) => ItemKind::Struct(struct_), + WhereClauseOwner::Enum(enum_) => ItemKind::Enum(enum_), + WhereClauseOwner::Impl(impl_) => ItemKind::Impl(impl_), + WhereClauseOwner::Trait(trait_) => ItemKind::Trait(trait_), + WhereClauseOwner::ImplTrait(impl_trait) => ItemKind::ImplTrait(impl_trait), + } + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] pub enum GenericParamOwner { Func(Func), @@ -180,19 +207,11 @@ pub enum GenericParamOwner { } impl GenericParamOwner { - pub fn top_mod(&self, db: &dyn HirDb) -> TopLevelMod { - match self { - GenericParamOwner::Func(func) => func.top_mod(db), - GenericParamOwner::Struct(struct_) => struct_.top_mod(db), - GenericParamOwner::Enum(enum_) => enum_.top_mod(db), - GenericParamOwner::TypeAlias(type_alias) => type_alias.top_mod(db), - 
GenericParamOwner::Impl(impl_) => impl_.top_mod(db), - GenericParamOwner::Trait(trait_) => trait_.top_mod(db), - GenericParamOwner::ImplTrait(impl_trait) => impl_trait.top_mod(db), - } + pub fn top_mod(self, db: &dyn HirDb) -> TopLevelMod { + ItemKind::from(self).top_mod(db) } - pub fn params(&self, db: &dyn HirDb) -> GenericParamListId { + pub fn params(self, db: &dyn HirDb) -> GenericParamListId { match self { GenericParamOwner::Func(func) => func.generic_params(db), GenericParamOwner::Struct(struct_) => struct_.generic_params(db), @@ -204,18 +223,26 @@ impl GenericParamOwner { } } - pub fn params_span(&self) -> LazyGenericParamListSpan { + pub fn params_span(self) -> LazyGenericParamListSpan { match self { - GenericParamOwner::Func(func) => func.lazy_span().generic_params(), - GenericParamOwner::Struct(struct_) => struct_.lazy_span().generic_params(), - GenericParamOwner::Enum(enum_) => enum_.lazy_span().generic_params(), - GenericParamOwner::TypeAlias(type_alias) => type_alias.lazy_span().generic_params(), - GenericParamOwner::Impl(impl_) => impl_.lazy_span().generic_params(), - GenericParamOwner::Trait(trait_) => trait_.lazy_span().generic_params(), - GenericParamOwner::ImplTrait(impl_trait) => impl_trait.lazy_span().generic_params(), + GenericParamOwner::Func(func) => func.lazy_span().generic_params_moved(), + GenericParamOwner::Struct(struct_) => struct_.lazy_span().generic_params_moved(), + GenericParamOwner::Enum(enum_) => enum_.lazy_span().generic_params_moved(), + GenericParamOwner::TypeAlias(type_alias) => { + type_alias.lazy_span().generic_params_moved() + } + GenericParamOwner::Impl(impl_) => impl_.lazy_span().generic_params_moved(), + GenericParamOwner::Trait(trait_) => trait_.lazy_span().generic_params_moved(), + GenericParamOwner::ImplTrait(impl_trait) => { + impl_trait.lazy_span().generic_params_moved() + } } } + pub fn scope(self) -> ScopeId { + ItemKind::from(self).scope() + } + pub fn from_item_opt(item: ItemKind) -> Option { match item { ItemKind::Func(func) => Some(GenericParamOwner::Func(func)), @@ -228,6 +255,65 @@ impl GenericParamOwner { _ => None, } } + + pub fn where_clause_owner(self) -> Option { + let item = ItemKind::from(self); + WhereClauseOwner::from_item_opt(item) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] +pub enum WhereClauseOwner { + Func(Func), + Struct(Struct), + Enum(Enum), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), +} + +impl WhereClauseOwner { + pub fn top_mod(self, db: &dyn HirDb) -> TopLevelMod { + ItemKind::from(self).top_mod(db) + } + + pub fn where_clause(self, db: &dyn HirDb) -> WhereClauseId { + match self { + Self::Func(func) => func.where_clause(db), + Self::Struct(struct_) => struct_.where_clause(db), + Self::Enum(enum_) => enum_.where_clause(db), + Self::Impl(impl_) => impl_.where_clause(db), + Self::Trait(trait_) => trait_.where_clause(db), + Self::ImplTrait(impl_trait) => impl_trait.where_clause(db), + } + } + + pub fn where_clause_span(self) -> LazyWhereClauseSpan { + match self { + Self::Func(func) => func.lazy_span().where_clause_moved(), + Self::Struct(struct_) => struct_.lazy_span().where_clause_moved(), + Self::Enum(enum_) => enum_.lazy_span().where_clause_moved(), + Self::Impl(impl_) => impl_.lazy_span().where_clause_moved(), + Self::Trait(trait_) => trait_.lazy_span().where_clause_moved(), + Self::ImplTrait(impl_trait) => impl_trait.lazy_span().where_clause_moved(), + } + } + + pub fn scope(self) -> ScopeId { + ItemKind::from(self).scope() + } + + pub fn 
from_item_opt(item: ItemKind) -> Option { + match item { + ItemKind::Func(func) => Some(Self::Func(func)), + ItemKind::Struct(struct_) => Some(Self::Struct(struct_)), + ItemKind::Enum(enum_) => Some(Self::Enum(enum_)), + ItemKind::Impl(impl_) => Some(Self::Impl(impl_)), + ItemKind::Trait(trait_) => Some(Self::Trait(trait_)), + ItemKind::ImplTrait(impl_trait) => Some(Self::ImplTrait(impl_trait)), + _ => None, + } + } } #[salsa::tracked] diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 92d154f350..8452a26fb0 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -8,6 +8,16 @@ pub struct GenericArgListId { pub data: Vec, } +impl GenericArgListId { + pub fn len(self, db: &dyn HirDb) -> usize { + self.data(db).len() + } + + pub fn is_empty(self, db: &dyn HirDb) -> bool { + self.data(db).is_empty() + } +} + #[salsa::interned] pub struct GenericParamListId { #[return_ref] diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index eed9a37340..4a14ccfc1c 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -22,10 +22,11 @@ pub mod prelude { walk_call_arg_list, walk_const, walk_contract, walk_enum, walk_expr, walk_field, walk_field_def, walk_field_def_list, walk_field_list, walk_func, walk_func_param, walk_func_param_list, walk_generic_arg, walk_generic_arg_list, walk_generic_param, - walk_generic_param_list, walk_impl, walk_impl_trait, walk_item, walk_mod, walk_pat, - walk_path, walk_stmt, walk_struct, walk_top_mod, walk_trait, walk_ty, walk_type_alias, - walk_type_bound, walk_type_bound_list, walk_use, walk_use_path, walk_variant_def, - walk_variant_def_list, walk_where_clause, walk_where_predicate, Visitor, VisitorCtxt, + walk_generic_param_list, walk_impl, walk_impl_trait, walk_item, walk_kind_bound, walk_mod, + walk_pat, walk_path, walk_stmt, walk_struct, walk_top_mod, walk_trait, walk_trait_bound, + walk_ty, walk_type_alias, walk_type_bound, walk_type_bound_list, walk_use, walk_use_path, + walk_variant_def, walk_variant_def_list, walk_where_clause, walk_where_predicate, Visitor, + VisitorCtxt, }; pub use crate::span::lazy_spans::*; @@ -1874,14 +1875,14 @@ impl<'db, T> VisitorCtxt<'db, T> where T: LazySpan, { - pub fn new(db: &'db dyn HirDb, span: T) -> Self + pub fn new(db: &'db dyn HirDb, scope: ScopeId, span: T) -> Self where T: Into, { Self { db, span: span.into(), - scope_stack: Vec::new(), + scope_stack: vec![scope], _t: PhantomData, } } diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap.new b/crates/parser2/test_files/syntax_node/items/enums.snap.new new file mode 100644 index 0000000000..9d8973fa2b --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/enums.snap.new @@ -0,0 +1,285 @@ +--- +source: crates/parser2/tests/syntax_node.rs +assertion_line: 15 +expression: node +input_file: crates/parser2/test_files/syntax_node/items/enums.fe +--- +Root@0..294 + ItemList@0..294 + Item@0..15 + Enum@0..13 + EnumKw@0..4 "enum" + WhiteSpace@4..5 " " + Ident@5..10 "Empty" + WhiteSpace@10..11 " " + VariantDefList@11..13 + LBrace@11..12 "{" + RBrace@12..13 "}" + Newline@13..15 "\n\n" + Item@15..58 + Enum@15..56 + EnumKw@15..19 "enum" + WhiteSpace@19..20 " " + Ident@20..25 "Basic" + WhiteSpace@25..26 " " + VariantDefList@26..56 + LBrace@26..27 "{" + Newline@27..28 "\n" + WhiteSpace@28..32 " " + VariantDef@32..36 + Ident@32..36 "Unit" + Newline@36..37 "\n" + WhiteSpace@37..41 " " + VariantDef@41..54 + Ident@41..44 "Tup" + TupleType@44..54 + LParen@44..45 "(" + 
PathType@45..48 + Path@45..48 + PathSegment@45..48 + Ident@45..48 "i32" + Comma@48..49 "," + WhiteSpace@49..50 " " + PathType@50..53 + Path@50..53 + PathSegment@50..53 + Ident@50..53 "u32" + RParen@53..54 ")" + Newline@54..55 "\n" + RBrace@55..56 "}" + Newline@56..58 "\n\n" + Item@58..119 + Enum@58..117 + EnumKw@58..62 "enum" + WhiteSpace@62..63 " " + Ident@63..69 "Option" + GenericParamList@69..72 + Lt@69..70 "<" + TypeGenericParam@70..71 + Ident@70..71 "T" + Gt@71..72 ">" + WhiteSpace@72..73 " " + Newline@73..74 "\n" + WhiteSpace@74..78 " " + WhereClause@78..93 + WhereKw@78..83 "where" + WhiteSpace@83..84 " " + WherePredicate@84..93 + PathType@84..85 + Path@84..85 + PathSegment@84..85 + Ident@84..85 "T" + TypeBoundList@85..92 + Colon@85..86 ":" + WhiteSpace@86..87 " " + TypeBound@87..92 + TraitBound@87..92 + Path@87..92 + PathSegment@87..92 + Ident@87..92 "Clone" + Newline@92..93 "\n" + VariantDefList@93..117 + LBrace@93..94 "{" + Newline@94..95 "\n" + WhiteSpace@95..99 " " + VariantDef@99..106 + Ident@99..103 "Some" + TupleType@103..106 + LParen@103..104 "(" + PathType@104..105 + Path@104..105 + PathSegment@104..105 + Ident@104..105 "T" + RParen@105..106 ")" + Newline@106..107 "\n" + WhiteSpace@107..111 " " + VariantDef@111..115 + Ident@111..115 "None" + Newline@115..116 "\n" + RBrace@116..117 "}" + Newline@117..119 "\n\n" + Item@119..222 + Enum@119..220 + EnumKw@119..123 "enum" + WhiteSpace@123..124 " " + Ident@124..133 "BoundEnum" + GenericParamList@133..162 + Lt@133..134 "<" + TypeGenericParam@134..146 + Ident@134..135 "T" + TypeBoundList@135..146 + Colon@135..136 ":" + WhiteSpace@136..137 " " + TypeBound@137..140 + TraitBound@137..140 + Path@137..140 + PathSegment@137..140 + Ident@137..140 "Add" + WhiteSpace@140..141 " " + Plus@141..142 "+" + WhiteSpace@142..143 " " + TypeBound@143..146 + TraitBound@143..146 + Path@143..146 + PathSegment@143..146 + Ident@143..146 "Mul" + WhiteSpace@146..147 " " + Comma@147..148 "," + WhiteSpace@148..149 " " + TypeGenericParam@149..161 + Ident@149..150 "U" + TypeBoundList@150..161 + Colon@150..151 ":" + WhiteSpace@151..152 " " + TypeBound@152..155 + TraitBound@152..155 + Path@152..155 + PathSegment@152..155 + Ident@152..155 "Sub" + WhiteSpace@155..156 " " + Plus@156..157 "+" + WhiteSpace@157..158 " " + TypeBound@158..161 + TraitBound@158..161 + Path@158..161 + PathSegment@158..161 + Ident@158..161 "Div" + Gt@161..162 ">" + WhiteSpace@162..163 " " + Newline@163..164 "\n" + WhereClause@164..189 + WhereKw@164..169 "where" + WhiteSpace@169..170 " " + WherePredicate@170..189 + PathType@170..181 + Path@170..178 + PathSegment@170..173 + Ident@170..173 "Foo" + Colon2@173..175 "::" + PathSegment@175..178 + Ident@175..178 "Bar" + GenericArgList@178..181 + Lt@178..179 "<" + TypeGenericArg@179..180 + PathType@179..180 + Path@179..180 + PathSegment@179..180 + Ident@179..180 "T" + Gt@180..181 ">" + TypeBoundList@181..188 + Colon@181..182 ":" + WhiteSpace@182..183 " " + TypeBound@183..188 + TraitBound@183..188 + Path@183..188 + PathSegment@183..188 + Ident@183..188 "Trait" + Newline@188..189 "\n" + VariantDefList@189..220 + LBrace@189..190 "{" + Newline@190..191 "\n" + WhiteSpace@191..195 " " + VariantDef@195..204 + Ident@195..201 "AddMul" + TupleType@201..204 + LParen@201..202 "(" + PathType@202..203 + Path@202..203 + PathSegment@202..203 + Ident@202..203 "T" + RParen@203..204 ")" + Newline@204..205 "\n" + WhiteSpace@205..209 " " + VariantDef@209..218 + Ident@209..215 "SubDiv" + TupleType@215..218 + LParen@215..216 "(" + PathType@216..217 + Path@216..217 + 
PathSegment@216..217 + Ident@216..217 "U" + RParen@217..218 ")" + Newline@218..219 "\n" + RBrace@219..220 "}" + Newline@220..222 "\n\n" + Item@222..294 + Enum@222..293 + EnumKw@222..226 "enum" + WhiteSpace@226..227 " " + Ident@227..234 "HKTEnum" + GenericParamList@234..248 + Lt@234..235 "<" + TypeGenericParam@235..244 + Ident@235..236 "T" + TypeBoundList@236..244 + Colon@236..237 ":" + WhiteSpace@237..238 " " + TypeBound@238..244 + KindBound@238..244 + KindBound@238..244 + Star@238..239 "*" + WhiteSpace@239..240 " " + Arrow@240..242 "->" + WhiteSpace@242..243 " " + Star@243..244 "*" + Comma@244..245 "," + WhiteSpace@245..246 " " + TypeGenericParam@246..247 + Ident@246..247 "U" + Gt@247..248 ">" + Newline@248..249 "\n" + WhereClause@249..276 + WhereKw@249..254 "where" + WhiteSpace@254..255 " " + Newline@255..256 "\n" + WhiteSpace@256..259 " " + WherePredicate@259..276 + PathType@259..260 + Path@259..260 + PathSegment@259..260 + Ident@259..260 "U" + TypeBoundList@260..275 + Colon@260..261 ":" + WhiteSpace@261..262 " " + TypeBound@262..275 + KindBound@262..275 + KindBound@262..275 + LParen@262..263 "(" + KindBound@263..269 + KindBound@263..269 + Star@263..264 "*" + WhiteSpace@264..265 " " + Arrow@265..267 "->" + WhiteSpace@267..268 " " + Star@268..269 "*" + RParen@269..270 ")" + WhiteSpace@270..271 " " + Arrow@271..273 "->" + WhiteSpace@273..274 " " + Star@274..275 "*" + Newline@275..276 "\n" + VariantDefList@276..293 + LBrace@276..277 "{" + Newline@277..278 "\n" + WhiteSpace@278..282 " " + VariantDef@282..291 + Ident@282..285 "Foo" + TupleType@285..291 + LParen@285..286 "(" + PathType@286..290 + Path@286..287 + PathSegment@286..287 + Ident@286..287 "U" + GenericArgList@287..290 + Lt@287..288 "<" + TypeGenericArg@288..289 + PathType@288..289 + Path@288..289 + PathSegment@288..289 + Ident@288..289 "T" + Gt@289..290 ">" + RParen@290..291 ")" + Newline@291..292 "\n" + RBrace@292..293 "}" + Newline@293..294 "\n" + diff --git a/crates/uitest/fixtures/ty/kind_bound.fe b/crates/uitest/fixtures/ty/kind_bound.fe new file mode 100644 index 0000000000..d8562d9e13 --- /dev/null +++ b/crates/uitest/fixtures/ty/kind_bound.fe @@ -0,0 +1,14 @@ +pub struct PtrWrapper +where T: * -> * +{ + ptr: T +} + +pub struct Foo { + x: PtrWrapper + y: PtrWrapper +} + +pub struct Arc { + inner: T +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/kind_bound.snap.new b/crates/uitest/fixtures/ty/kind_bound.snap.new new file mode 100644 index 0000000000..8f365c30bc --- /dev/null +++ b/crates/uitest/fixtures/ty/kind_bound.snap.new @@ -0,0 +1,13 @@ +--- +source: crates/uitest/tests/ty.rs +assertion_line: 17 +expression: diags +input_file: crates/uitest/fixtures/ty/kind_bound.fe +--- +error[3-0001]: kind mismatch between two types + ┌─ kind_bound.fe:9:19 + │ +9 │ y: PtrWrapper + │ ^^^ expected `(* -> Any)` kind, but found `*` kind + + From 376e9892dff0bc9c23f1132387b291e13c506121 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 16 Sep 2023 14:37:57 +0200 Subject: [PATCH 310/678] Fix kind bound parsing --- crates/hir/src/lower/params.rs | 31 +- crates/hir/src/span/params.rs | 15 +- crates/hir/src/visitor.rs | 4 +- crates/parser2/src/ast/param.rs | 56 ++-- crates/parser2/src/parser/param.rs | 50 ++- crates/parser2/src/syntax_kind.rs | 6 +- .../test_files/error_recovery/items/enum_.fe | 11 +- .../error_recovery/items/enum_.snap | 102 ++++++- .../test_files/syntax_node/items/enums.fe | 4 +- .../test_files/syntax_node/items/enums.snap | 202 +++++++++---- .../syntax_node/items/enums.snap.new | 285 
------------------ 11 files changed, 350 insertions(+), 416 deletions(-) delete mode 100644 crates/parser2/test_files/syntax_node/items/enums.snap.new diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index 8d46b40fd4..af6cb159b4 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -1,4 +1,4 @@ -use parser::ast::{self, KindBoundVariant}; +use parser::ast::{self}; use crate::hir_def::{kw, params::*, Body, IdentId, Partial, PathId, TypeId}; @@ -194,25 +194,26 @@ impl TraitBound { impl KindBound { fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Partial { - let Some(kind_variant) = ast.map(|ast| ast.variant()).flatten() else { + let Some(ast) = ast else { return Partial::Absent; }; - match kind_variant { - KindBoundVariant::Mono(_) => Partial::Present(KindBound::Mono), - KindBoundVariant::Abs(lhs, _, rhs) => { - let lhs = KindBound::lower_ast_opt(ctxt, lhs) - .to_opt() - .map(|kind| Box::new(kind)) - .into(); + if let Some(abs) = ast.abs() { + let lhs = KindBound::lower_ast_opt(ctxt, abs.lhs()) + .to_opt() + .map(|kind| Box::new(kind)) + .into(); - let rhs = KindBound::lower_ast_opt(ctxt, rhs) - .to_opt() - .map(|kind| Box::new(kind)) - .into(); + let rhs = KindBound::lower_ast_opt(ctxt, abs.rhs()) + .to_opt() + .map(|kind| Box::new(kind)) + .into(); - Partial::Present(KindBound::Abs(lhs, rhs)) - } + Partial::Present(KindBound::Abs(lhs, rhs)) + } else if let Some(_) = ast.mono() { + Partial::Present(KindBound::Mono) + } else { + Partial::Absent } } } diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 82efa8d6cf..680379810b 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -139,11 +139,22 @@ define_lazy_span_node!( define_lazy_span_node!( LazyKindBoundSpan, ast::KindBound, + @node { + (abs, abs, LazyKindBoundAbsSpan), + (mono, mono, LazyKindBoundMonoSpan), + } +); + +define_lazy_span_node!( + LazyKindBoundAbsSpan, + ast::KindBoundAbs, @token { (arrow, arrow), } @node { - (lhs, lhs, LazyKindBoundSpan), - (rhs, rhs, LazyKindBoundSpan ), + (lhs, lhs, LazyKindBoundSpan), + (rhs, rhs, LazyKindBoundSpan), } ); + +define_lazy_span_node! {LazyKindBoundMonoSpan, ast::LazyKindBoundMono} diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 4a14ccfc1c..129b36eee0 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -1796,7 +1796,7 @@ pub fn walk_kind_bound( if let Partial::Present(lhs) = lhs { ctxt.with_new_ctxt( - |span| span.lhs_moved(), + |span| span.abs_moved().lhs_moved(), |ctxt| { visitor.visit_kind_bound(ctxt, lhs.as_ref()); }, @@ -1805,7 +1805,7 @@ pub fn walk_kind_bound( if let Partial::Present(rhs) = rhs { ctxt.with_new_ctxt( - |span| span.lhs_moved(), + |span| span.abs_moved().rhs_moved(), |ctxt| { visitor.visit_kind_bound(ctxt, rhs.as_ref()); }, diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index e2e9274151..3bd1f17a24 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -310,51 +310,53 @@ impl TraitBound { ast_node! 
{ pub struct KindBound, - SK::KindBound + SK::KindBoundAbs | SK::KindBoundMono } impl KindBound { - pub fn variant(&self) -> Option { - if let Some(tok) = support::token(self.syntax(), SK::Arrow) { - let mut children = support::children(self.syntax()); - let lhs = children.next(); - let rhs = children.next(); - Some(KindBoundVariant::Abs(lhs, tok, rhs)) - } else if let Some(tok) = support::token(self.syntax(), SK::Star) { - Some(KindBoundVariant::Mono(tok)) - } else { - // Case where kind is wrapped in parens, we need to unwrap the outer parens. - let child: Option = support::child(self.syntax()); - child.map(|child| child.variant()).flatten() + pub fn mono(&self) -> Option { + match self.syntax().kind() { + SK::KindBoundMono => Some(KindBoundMono::cast(self.syntax().clone()).unwrap()), + _ => None, } } - pub fn arrow(&self) -> Option { - match self.variant()? { - KindBoundVariant::Abs(_, tok, _) => Some(tok), - KindBoundVariant::Mono(_) => None, + pub fn abs(&self) -> Option { + match self.syntax().kind() { + SK::KindBoundAbs => Some(KindBoundAbs::cast(self.syntax().clone()).unwrap()), + _ => None, } } +} + +ast_node! { + pub struct KindBoundMono, + SK::KindBoundMono, +} +ast_node! { + pub struct KindBoundAbs, + SK::KindBoundAbs, +} +impl KindBoundAbs { pub fn lhs(&self) -> Option { - match self.variant()? { - KindBoundVariant::Abs(lhs, _, _) => lhs, - KindBoundVariant::Mono(_) => None, - } + support::child(self.syntax()) } pub fn rhs(&self) -> Option { - match self.variant()? { - KindBoundVariant::Abs(_, _, rhs) => rhs, - KindBoundVariant::Mono(_) => None, - } + support::children(self.syntax()).nth(1) + } + + pub fn arrow(&self) -> Option { + support::token(self.syntax(), SK::Arrow) } } +#[derive(Debug, Clone)] pub enum KindBoundVariant { /// `*` - Mono(SyntaxToken), + Mono(KindBoundMono), /// `KindBound -> KindBound` - Abs(Option, SyntaxToken, Option), + Abs(KindBoundAbs), } /// A trait for AST nodes that can have generic parameters. diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 5ba11e19fd..8e800823b9 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -179,7 +179,7 @@ impl super::Parse for TypeBoundScope { ); if is_type_kind { - parser.parse(KindBoundScope::default(), None); + parse_kind_bound(parser); } else { if self.disallow_trait_bound { parser.error_and_recover("trait bounds are not allowed here", None); @@ -189,24 +189,46 @@ impl super::Parse for TypeBoundScope { } } } + +fn parse_kind_bound(parser: &mut Parser) { + let checkpoint = parser.checkpoint(); + let is_newline_trivia = parser.set_newline_as_trivia(false); + + if parser.bump_if(SyntaxKind::LParen) { + parse_kind_bound(parser); + parser.bump_or_recover(SyntaxKind::RParen, "expected closing `)`", None); + } else if parser.current_kind() == Some(SyntaxKind::Star) { + parser.parse(KindBoundMonoScope::default(), None); + } else { + parser.error_and_recover("expected `*` or `(`", None); + } + + if parser.current_kind() == Some(SyntaxKind::Arrow) { + parser.parse(KindBoundAbsScope::default(), checkpoint.into()); + } + parser.set_newline_as_trivia(is_newline_trivia); +} + define_scope! 
{ - KindBoundScope, - KindBound, + KindBoundMonoScope, + KindBoundMono, Inheritance } -impl super::Parse for KindBoundScope { +impl super::Parse for KindBoundMonoScope { fn parse(&mut self, parser: &mut Parser) { - if parser.bump_if(SyntaxKind::Star) { - } else if parser.bump_if(SyntaxKind::LParen) { - parser.parse(KindBoundScope::default(), None); - parser.bump_or_recover(SyntaxKind::RParen, "expected closing `)`", None); - } else { - parser.error_and_recover("expected `*` or `(`", None); - } + parser.bump_expected(SyntaxKind::Star); + } +} - if parser.bump_if(SyntaxKind::Arrow) { - parser.parse(KindBoundScope::default(), None); - } +define_scope! { + KindBoundAbsScope, + KindBoundAbs, + Inheritance +} +impl super::Parse for KindBoundAbsScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Arrow); + parse_kind_bound(parser); } } diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index e93e077582..a98e4f6462 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -445,8 +445,10 @@ pub enum SyntaxKind { TypeBound, /// `Trait1` TraitBound, - /// `*` or `(* -> *) -> *` - KindBound, + /// `* -> *` or `(*-> *) -> *` + KindBoundAbs, + /// `*`. + KindBoundMono, /// `where Option: Trait1 + Trait2` WhereClause, /// `Option: Trait1 + Trait2` diff --git a/crates/parser2/test_files/error_recovery/items/enum_.fe b/crates/parser2/test_files/error_recovery/items/enum_.fe index 753c314d28..3d2ff26e76 100644 --- a/crates/parser2/test_files/error_recovery/items/enum_.fe +++ b/crates/parser2/test_files/error_recovery/items/enum_.fe @@ -3,4 +3,13 @@ pub enum MyEnum { A Y(T, u32) A Z -} \ No newline at end of file +} + +pub enum MyEnum2 +where + T: * -> (* -> * + U: * -> * +{ + T(t) + U(U) +} diff --git a/crates/parser2/test_files/error_recovery/items/enum_.snap b/crates/parser2/test_files/error_recovery/items/enum_.snap index 8d85dfc88f..aed06a61cf 100644 --- a/crates/parser2/test_files/error_recovery/items/enum_.snap +++ b/crates/parser2/test_files/error_recovery/items/enum_.snap @@ -3,9 +3,9 @@ source: crates/parser2/tests/error_recovery.rs expression: node input_file: crates/parser2/test_files/error_recovery/items/enum_.fe --- -Root@0..63 - ItemList@0..63 - Item@0..63 +Root@0..151 + ItemList@0..151 + Item@0..65 Enum@0..63 ItemModifier@0..3 PubKw@0..3 "pub" @@ -67,4 +67,100 @@ Root@0..63 Ident@60..61 "Z" Newline@61..62 "\n" RBrace@62..63 "}" + Newline@63..65 "\n\n" + Item@65..151 + Enum@65..150 + ItemModifier@65..68 + PubKw@65..68 "pub" + WhiteSpace@68..69 " " + EnumKw@69..73 "enum" + WhiteSpace@73..74 " " + Ident@74..81 "MyEnum2" + GenericParamList@81..87 + Lt@81..82 "<" + TypeGenericParam@82..83 + Ident@82..83 "T" + Comma@83..84 "," + WhiteSpace@84..85 " " + TypeGenericParam@85..86 + Ident@85..86 "U" + Gt@86..87 ">" + WhiteSpace@87..88 " " + Newline@88..89 "\n" + WhereClause@89..129 + WhereKw@89..94 "where" + Newline@94..95 "\n" + WhiteSpace@95..99 " " + WherePredicate@99..115 + PathType@99..100 + Path@99..100 + PathSegment@99..100 + Ident@99..100 "T" + TypeBoundList@100..114 + Colon@100..101 ":" + WhiteSpace@101..102 " " + TypeBound@102..114 + KindBoundAbs@102..114 + KindBoundMono@102..103 + Star@102..103 "*" + WhiteSpace@103..104 " " + Arrow@104..106 "->" + WhiteSpace@106..107 " " + LParen@107..108 "(" + KindBoundAbs@108..114 + KindBoundMono@108..109 + Star@108..109 "*" + WhiteSpace@109..110 " " + Arrow@110..112 "->" + WhiteSpace@112..113 " " + KindBoundMono@113..114 + Star@113..114 "*" + Error@114..114 + 
Newline@114..115 "\n" + WhiteSpace@115..119 " " + WherePredicate@119..129 + PathType@119..120 + Path@119..120 + PathSegment@119..120 + Ident@119..120 "U" + TypeBoundList@120..128 + Colon@120..121 ":" + WhiteSpace@121..122 " " + TypeBound@122..128 + KindBoundAbs@122..128 + KindBoundMono@122..123 + Star@122..123 "*" + WhiteSpace@123..124 " " + Arrow@124..126 "->" + WhiteSpace@126..127 " " + KindBoundMono@127..128 + Star@127..128 "*" + Newline@128..129 "\n" + VariantDefList@129..150 + LBrace@129..130 "{" + Newline@130..131 "\n" + WhiteSpace@131..135 " " + VariantDef@135..139 + Ident@135..136 "T" + TupleType@136..139 + LParen@136..137 "(" + PathType@137..138 + Path@137..138 + PathSegment@137..138 + Ident@137..138 "t" + RParen@138..139 ")" + Newline@139..140 "\n" + WhiteSpace@140..144 " " + VariantDef@144..148 + Ident@144..145 "U" + TupleType@145..148 + LParen@145..146 "(" + PathType@146..147 + Path@146..147 + PathSegment@146..147 + Ident@146..147 "U" + RParen@147..148 ")" + Newline@148..149 "\n" + RBrace@149..150 "}" + Newline@150..151 "\n" diff --git a/crates/parser2/test_files/syntax_node/items/enums.fe b/crates/parser2/test_files/syntax_node/items/enums.fe index a644d756ec..144f1b9cee 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.fe +++ b/crates/parser2/test_files/syntax_node/items/enums.fe @@ -24,9 +24,11 @@ where Foo::Bar: Trait SubDiv(U) } -enum HKTEnum *, U> +enum HKTEnum *, U, V, W> where U: (* -> *) -> * + V: * -> * -> (* -> *) + W: * -> * -> * -> * { Foo(U) } diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index 6ddc2f3087..d04226ba96 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/enums.fe --- -Root@0..374 - ItemList@0..374 +Root@0..428 + ItemList@0..428 Item@0..15 Enum@0..13 EnumKw@0..4 "enum" @@ -256,12 +256,12 @@ Root@0..374 Newline@298..299 "\n" RBrace@299..300 "}" Newline@300..302 "\n\n" - Item@302..374 - Enum@302..373 + Item@302..428 + Enum@302..427 EnumKw@302..306 "enum" WhiteSpace@306..307 " " Ident@307..314 "HKTEnum" - GenericParamList@314..328 + GenericParamList@314..334 Lt@314..315 "<" TypeGenericParam@315..324 Ident@315..316 "T" @@ -269,71 +269,145 @@ Root@0..374 Colon@316..317 ":" WhiteSpace@317..318 " " TypeBound@318..324 - KindBound@318..324 - Star@318..319 "*" + KindBoundAbs@318..324 + KindBoundMono@318..319 + Star@318..319 "*" WhiteSpace@319..320 " " Arrow@320..322 "->" WhiteSpace@322..323 " " - KindBound@323..324 + KindBoundMono@323..324 Star@323..324 "*" Comma@324..325 "," WhiteSpace@325..326 " " TypeGenericParam@326..327 Ident@326..327 "U" - Gt@327..328 ">" - Newline@328..329 "\n" - WhereClause@329..356 - WhereKw@329..334 "where" - WhiteSpace@334..335 " " - Newline@335..336 "\n" - WhiteSpace@336..339 " " - WherePredicate@339..356 - PathType@339..340 - Path@339..340 - PathSegment@339..340 - Ident@339..340 "U" - TypeBoundList@340..355 - Colon@340..341 ":" - WhiteSpace@341..342 " " - TypeBound@342..355 - KindBound@342..355 - LParen@342..343 "(" - KindBound@343..349 - Star@343..344 "*" - WhiteSpace@344..345 " " - Arrow@345..347 "->" - WhiteSpace@347..348 " " - KindBound@348..349 - Star@348..349 "*" - RParen@349..350 ")" - WhiteSpace@350..351 " " - Arrow@351..353 "->" - WhiteSpace@353..354 " " - KindBound@354..355 - Star@354..355 "*" - Newline@355..356 "\n" - 
VariantDefList@356..373 - LBrace@356..357 "{" - Newline@357..358 "\n" - WhiteSpace@358..362 " " - VariantDef@362..371 - Ident@362..365 "Foo" - TupleType@365..371 - LParen@365..366 "(" - PathType@366..370 - Path@366..367 - PathSegment@366..367 - Ident@366..367 "U" - GenericArgList@367..370 - Lt@367..368 "<" - TypeGenericArg@368..369 - PathType@368..369 - Path@368..369 - PathSegment@368..369 - Ident@368..369 "T" - Gt@369..370 ">" - RParen@370..371 ")" - Newline@371..372 "\n" - RBrace@372..373 "}" - Newline@373..374 "\n" + Comma@327..328 "," + WhiteSpace@328..329 " " + TypeGenericParam@329..330 + Ident@329..330 "V" + Comma@330..331 "," + WhiteSpace@331..332 " " + TypeGenericParam@332..333 + Ident@332..333 "W" + Gt@333..334 ">" + Newline@334..335 "\n" + WhereClause@335..410 + WhereKw@335..340 "where" + WhiteSpace@340..341 " " + Newline@341..342 "\n" + WhiteSpace@342..345 " " + WherePredicate@345..362 + PathType@345..346 + Path@345..346 + PathSegment@345..346 + Ident@345..346 "U" + TypeBoundList@346..361 + Colon@346..347 ":" + WhiteSpace@347..348 " " + TypeBound@348..361 + KindBoundAbs@348..361 + LParen@348..349 "(" + KindBoundAbs@349..355 + KindBoundMono@349..350 + Star@349..350 "*" + WhiteSpace@350..351 " " + Arrow@351..353 "->" + WhiteSpace@353..354 " " + KindBoundMono@354..355 + Star@354..355 "*" + RParen@355..356 ")" + WhiteSpace@356..357 " " + Arrow@357..359 "->" + WhiteSpace@359..360 " " + KindBoundMono@360..361 + Star@360..361 "*" + Newline@361..362 "\n" + WhiteSpace@362..365 " " + WherePredicate@365..387 + PathType@365..366 + Path@365..366 + PathSegment@365..366 + Ident@365..366 "V" + TypeBoundList@366..386 + Colon@366..367 ":" + WhiteSpace@367..368 " " + TypeBound@368..386 + KindBoundAbs@368..386 + KindBoundMono@368..369 + Star@368..369 "*" + WhiteSpace@369..370 " " + Arrow@370..372 "->" + KindBoundAbs@372..386 + WhiteSpace@372..373 " " + KindBoundMono@373..374 + Star@373..374 "*" + WhiteSpace@374..375 " " + Arrow@375..377 "->" + WhiteSpace@377..378 " " + LParen@378..379 "(" + KindBoundAbs@379..385 + KindBoundMono@379..380 + Star@379..380 "*" + WhiteSpace@380..381 " " + Arrow@381..383 "->" + WhiteSpace@383..384 " " + KindBoundMono@384..385 + Star@384..385 "*" + RParen@385..386 ")" + Newline@386..387 "\n" + WhiteSpace@387..390 " " + WherePredicate@390..410 + PathType@390..391 + Path@390..391 + PathSegment@390..391 + Ident@390..391 "W" + TypeBoundList@391..409 + Colon@391..392 ":" + WhiteSpace@392..393 " " + TypeBound@393..409 + KindBoundAbs@393..409 + KindBoundMono@393..394 + Star@393..394 "*" + WhiteSpace@394..395 " " + Arrow@395..397 "->" + KindBoundAbs@397..409 + WhiteSpace@397..398 " " + KindBoundMono@398..399 + Star@398..399 "*" + WhiteSpace@399..400 " " + Arrow@400..402 "->" + KindBoundAbs@402..409 + WhiteSpace@402..403 " " + KindBoundMono@403..404 + Star@403..404 "*" + WhiteSpace@404..405 " " + Arrow@405..407 "->" + WhiteSpace@407..408 " " + KindBoundMono@408..409 + Star@408..409 "*" + Newline@409..410 "\n" + VariantDefList@410..427 + LBrace@410..411 "{" + Newline@411..412 "\n" + WhiteSpace@412..416 " " + VariantDef@416..425 + Ident@416..419 "Foo" + TupleType@419..425 + LParen@419..420 "(" + PathType@420..424 + Path@420..421 + PathSegment@420..421 + Ident@420..421 "U" + GenericArgList@421..424 + Lt@421..422 "<" + TypeGenericArg@422..423 + PathType@422..423 + Path@422..423 + PathSegment@422..423 + Ident@422..423 "T" + Gt@423..424 ">" + RParen@424..425 ")" + Newline@425..426 "\n" + RBrace@426..427 "}" + Newline@427..428 "\n" diff --git 
a/crates/parser2/test_files/syntax_node/items/enums.snap.new b/crates/parser2/test_files/syntax_node/items/enums.snap.new deleted file mode 100644 index 9d8973fa2b..0000000000 --- a/crates/parser2/test_files/syntax_node/items/enums.snap.new +++ /dev/null @@ -1,285 +0,0 @@ ---- -source: crates/parser2/tests/syntax_node.rs -assertion_line: 15 -expression: node -input_file: crates/parser2/test_files/syntax_node/items/enums.fe ---- -Root@0..294 - ItemList@0..294 - Item@0..15 - Enum@0..13 - EnumKw@0..4 "enum" - WhiteSpace@4..5 " " - Ident@5..10 "Empty" - WhiteSpace@10..11 " " - VariantDefList@11..13 - LBrace@11..12 "{" - RBrace@12..13 "}" - Newline@13..15 "\n\n" - Item@15..58 - Enum@15..56 - EnumKw@15..19 "enum" - WhiteSpace@19..20 " " - Ident@20..25 "Basic" - WhiteSpace@25..26 " " - VariantDefList@26..56 - LBrace@26..27 "{" - Newline@27..28 "\n" - WhiteSpace@28..32 " " - VariantDef@32..36 - Ident@32..36 "Unit" - Newline@36..37 "\n" - WhiteSpace@37..41 " " - VariantDef@41..54 - Ident@41..44 "Tup" - TupleType@44..54 - LParen@44..45 "(" - PathType@45..48 - Path@45..48 - PathSegment@45..48 - Ident@45..48 "i32" - Comma@48..49 "," - WhiteSpace@49..50 " " - PathType@50..53 - Path@50..53 - PathSegment@50..53 - Ident@50..53 "u32" - RParen@53..54 ")" - Newline@54..55 "\n" - RBrace@55..56 "}" - Newline@56..58 "\n\n" - Item@58..119 - Enum@58..117 - EnumKw@58..62 "enum" - WhiteSpace@62..63 " " - Ident@63..69 "Option" - GenericParamList@69..72 - Lt@69..70 "<" - TypeGenericParam@70..71 - Ident@70..71 "T" - Gt@71..72 ">" - WhiteSpace@72..73 " " - Newline@73..74 "\n" - WhiteSpace@74..78 " " - WhereClause@78..93 - WhereKw@78..83 "where" - WhiteSpace@83..84 " " - WherePredicate@84..93 - PathType@84..85 - Path@84..85 - PathSegment@84..85 - Ident@84..85 "T" - TypeBoundList@85..92 - Colon@85..86 ":" - WhiteSpace@86..87 " " - TypeBound@87..92 - TraitBound@87..92 - Path@87..92 - PathSegment@87..92 - Ident@87..92 "Clone" - Newline@92..93 "\n" - VariantDefList@93..117 - LBrace@93..94 "{" - Newline@94..95 "\n" - WhiteSpace@95..99 " " - VariantDef@99..106 - Ident@99..103 "Some" - TupleType@103..106 - LParen@103..104 "(" - PathType@104..105 - Path@104..105 - PathSegment@104..105 - Ident@104..105 "T" - RParen@105..106 ")" - Newline@106..107 "\n" - WhiteSpace@107..111 " " - VariantDef@111..115 - Ident@111..115 "None" - Newline@115..116 "\n" - RBrace@116..117 "}" - Newline@117..119 "\n\n" - Item@119..222 - Enum@119..220 - EnumKw@119..123 "enum" - WhiteSpace@123..124 " " - Ident@124..133 "BoundEnum" - GenericParamList@133..162 - Lt@133..134 "<" - TypeGenericParam@134..146 - Ident@134..135 "T" - TypeBoundList@135..146 - Colon@135..136 ":" - WhiteSpace@136..137 " " - TypeBound@137..140 - TraitBound@137..140 - Path@137..140 - PathSegment@137..140 - Ident@137..140 "Add" - WhiteSpace@140..141 " " - Plus@141..142 "+" - WhiteSpace@142..143 " " - TypeBound@143..146 - TraitBound@143..146 - Path@143..146 - PathSegment@143..146 - Ident@143..146 "Mul" - WhiteSpace@146..147 " " - Comma@147..148 "," - WhiteSpace@148..149 " " - TypeGenericParam@149..161 - Ident@149..150 "U" - TypeBoundList@150..161 - Colon@150..151 ":" - WhiteSpace@151..152 " " - TypeBound@152..155 - TraitBound@152..155 - Path@152..155 - PathSegment@152..155 - Ident@152..155 "Sub" - WhiteSpace@155..156 " " - Plus@156..157 "+" - WhiteSpace@157..158 " " - TypeBound@158..161 - TraitBound@158..161 - Path@158..161 - PathSegment@158..161 - Ident@158..161 "Div" - Gt@161..162 ">" - WhiteSpace@162..163 " " - Newline@163..164 "\n" - WhereClause@164..189 - WhereKw@164..169 "where" - 
WhiteSpace@169..170 " " - WherePredicate@170..189 - PathType@170..181 - Path@170..178 - PathSegment@170..173 - Ident@170..173 "Foo" - Colon2@173..175 "::" - PathSegment@175..178 - Ident@175..178 "Bar" - GenericArgList@178..181 - Lt@178..179 "<" - TypeGenericArg@179..180 - PathType@179..180 - Path@179..180 - PathSegment@179..180 - Ident@179..180 "T" - Gt@180..181 ">" - TypeBoundList@181..188 - Colon@181..182 ":" - WhiteSpace@182..183 " " - TypeBound@183..188 - TraitBound@183..188 - Path@183..188 - PathSegment@183..188 - Ident@183..188 "Trait" - Newline@188..189 "\n" - VariantDefList@189..220 - LBrace@189..190 "{" - Newline@190..191 "\n" - WhiteSpace@191..195 " " - VariantDef@195..204 - Ident@195..201 "AddMul" - TupleType@201..204 - LParen@201..202 "(" - PathType@202..203 - Path@202..203 - PathSegment@202..203 - Ident@202..203 "T" - RParen@203..204 ")" - Newline@204..205 "\n" - WhiteSpace@205..209 " " - VariantDef@209..218 - Ident@209..215 "SubDiv" - TupleType@215..218 - LParen@215..216 "(" - PathType@216..217 - Path@216..217 - PathSegment@216..217 - Ident@216..217 "U" - RParen@217..218 ")" - Newline@218..219 "\n" - RBrace@219..220 "}" - Newline@220..222 "\n\n" - Item@222..294 - Enum@222..293 - EnumKw@222..226 "enum" - WhiteSpace@226..227 " " - Ident@227..234 "HKTEnum" - GenericParamList@234..248 - Lt@234..235 "<" - TypeGenericParam@235..244 - Ident@235..236 "T" - TypeBoundList@236..244 - Colon@236..237 ":" - WhiteSpace@237..238 " " - TypeBound@238..244 - KindBound@238..244 - KindBound@238..244 - Star@238..239 "*" - WhiteSpace@239..240 " " - Arrow@240..242 "->" - WhiteSpace@242..243 " " - Star@243..244 "*" - Comma@244..245 "," - WhiteSpace@245..246 " " - TypeGenericParam@246..247 - Ident@246..247 "U" - Gt@247..248 ">" - Newline@248..249 "\n" - WhereClause@249..276 - WhereKw@249..254 "where" - WhiteSpace@254..255 " " - Newline@255..256 "\n" - WhiteSpace@256..259 " " - WherePredicate@259..276 - PathType@259..260 - Path@259..260 - PathSegment@259..260 - Ident@259..260 "U" - TypeBoundList@260..275 - Colon@260..261 ":" - WhiteSpace@261..262 " " - TypeBound@262..275 - KindBound@262..275 - KindBound@262..275 - LParen@262..263 "(" - KindBound@263..269 - KindBound@263..269 - Star@263..264 "*" - WhiteSpace@264..265 " " - Arrow@265..267 "->" - WhiteSpace@267..268 " " - Star@268..269 "*" - RParen@269..270 ")" - WhiteSpace@270..271 " " - Arrow@271..273 "->" - WhiteSpace@273..274 " " - Star@274..275 "*" - Newline@275..276 "\n" - VariantDefList@276..293 - LBrace@276..277 "{" - Newline@277..278 "\n" - WhiteSpace@278..282 " " - VariantDef@282..291 - Ident@282..285 "Foo" - TupleType@285..291 - LParen@285..286 "(" - PathType@286..290 - Path@286..287 - PathSegment@286..287 - Ident@286..287 "U" - GenericArgList@287..290 - Lt@287..288 "<" - TypeGenericArg@288..289 - PathType@288..289 - Path@288..289 - PathSegment@288..289 - Ident@288..289 "T" - Gt@289..290 ">" - RParen@290..291 ")" - Newline@291..292 "\n" - RBrace@292..293 "}" - Newline@293..294 "\n" - From 5f413f9a059c4830c1f68494c423758c21a8d0fd Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 16 Sep 2023 15:29:06 +0200 Subject: [PATCH 311/678] Add tests for kind bound --- crates/hir-analysis/src/ty/lower.rs | 2 +- crates/hir-analysis/src/ty/mod.rs | 10 ++-- crates/hir-analysis/src/ty/ty.rs | 10 +++- crates/uitest/fixtures/ty/kind_bound.fe | 51 ++++++++++++++++--- crates/uitest/fixtures/ty/kind_bound.snap | 50 ++++++++++++++++++ crates/uitest/fixtures/ty/kind_bound.snap.new | 13 ----- 6 files changed, 112 insertions(+), 24 deletions(-) create mode 
100644 crates/uitest/fixtures/ty/kind_bound.snap delete mode 100644 crates/uitest/fixtures/ty/kind_bound.snap.new diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index ba6932748a..344ab1116e 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -656,7 +656,7 @@ impl ParamLoc { } } -#[salsa::tracked] +#[salsa::interned] pub(crate) struct GenericParamOwnerId { data: GenericParamOwner, } diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 8c82077512..5625487502 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -54,9 +54,13 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { adt_analysis::analyze_adt::accumulated::(self.db, adt) .into_iter() .chain( - adt_analysis::analyze_adt::accumulated::( - self.db, adt, - ) + if let Some(owner_id) = adt.generic_owner_id(self.db) { + lower::collect_generic_params::accumulated::( + self.db, owner_id, + ) + } else { + Vec::new() + } .into_iter(), ) }) diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 464f7abd60..db99e6a44f 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -14,7 +14,7 @@ use rustc_hash::FxHashMap; use crate::HirAnalysisDb; -use super::lower::lower_hir_ty; +use super::lower::{lower_hir_ty, GenericParamOwnerId}; #[salsa::interned] pub struct TyId { @@ -422,6 +422,14 @@ impl AdtRefId { pub fn from_contract(db: &dyn HirAnalysisDb, contract: Contract) -> Self { Self::new(db, AdtRef::Contract(contract)) } + + pub(crate) fn generic_owner_id(self, db: &dyn HirAnalysisDb) -> Option { + match self.data(db) { + AdtRef::Enum(e) => Some(GenericParamOwnerId::new(db, e.into())), + AdtRef::Struct(s) => Some(GenericParamOwnerId::new(db, s.into())), + AdtRef::Contract(_) => None, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/uitest/fixtures/ty/kind_bound.fe b/crates/uitest/fixtures/ty/kind_bound.fe index d8562d9e13..0980312a52 100644 --- a/crates/uitest/fixtures/ty/kind_bound.fe +++ b/crates/uitest/fixtures/ty/kind_bound.fe @@ -1,14 +1,53 @@ -pub struct PtrWrapper +// * -> * +pub struct Wrapper1 +{ + value: T +} + +// (* -> *) -> * -> * +pub struct Wrapper2 where T: * -> * { - ptr: T + val: T +} + + +// ((* -> *) -> *) -> (* -> *) -> * +pub struct Wrapper3 +where T: (* -> *) -> * -> * + U: * -> * +{ + value: T } pub struct Foo { - x: PtrWrapper - y: PtrWrapper + foo_x: Wrapper2 + foo_err: Wrapper2 } -pub struct Arc { - inner: T + +pub struct Bar { + bar_x: Wrapper3 + bar_y: Wrapper1> + bar_err1: Wrapper3 + bar_err2: Wrapper3 + bar_err3: wrapper3 +} + +pub struct InvalidBound *> +where T: (* -> *) -> * +{ + val: T +} + +pub struct InvalidBound2 +where Self: * +{ + val: i32 +} + +pub struct InvalidBound3 +where Wrapper1: * +{ + val: Wrapper1 } \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/kind_bound.snap b/crates/uitest/fixtures/ty/kind_bound.snap new file mode 100644 index 0000000000..79f759265e --- /dev/null +++ b/crates/uitest/fixtures/ty/kind_bound.snap @@ -0,0 +1,50 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/kind_bound.fe +--- +error[2-0002]: `wrapper3` is not found + ┌─ kind_bound.fe:34:15 + │ +34 │ bar_err3: wrapper3 + │ ^^^^^^^^ `wrapper3` is not found + +error[3-0001]: kind mismatch between two types + ┌─ kind_bound.fe:25:23 + │ +25 │ foo_err: Wrapper2 + │ ^^^ expected `(* -> *)` kind, but found `*` kind + 
+error[3-0001]: kind mismatch between two types + ┌─ kind_bound.fe:32:24 + │ +32 │ bar_err1: Wrapper3 + │ ^^^ expected `((* -> *) -> (* -> *))` kind, but found `*` kind + +error[3-0001]: kind mismatch between two types + ┌─ kind_bound.fe:33:34 + │ +33 │ bar_err2: Wrapper3 + │ ^^^ expected `(* -> *)` kind, but found `*` kind + +error[3-0005]: duplicate type bound is not allowed. + ┌─ kind_bound.fe:38:10 + │ +37 │ pub struct InvalidBound *> + │ ------ first defined here +38 │ where T: (* -> *) -> * + │ ^^^^^^^^^^^^^ duplicate type bound here + +error[3-0006]: kind bound is not allowed + ┌─ kind_bound.fe:44:13 + │ +44 │ where Self: * + │ ^ kind bound is not allowed here + +error[3-0006]: kind bound is not allowed + ┌─ kind_bound.fe:50:22 + │ +50 │ where Wrapper1: * + │ ^ kind bound is not allowed here + + diff --git a/crates/uitest/fixtures/ty/kind_bound.snap.new b/crates/uitest/fixtures/ty/kind_bound.snap.new deleted file mode 100644 index 8f365c30bc..0000000000 --- a/crates/uitest/fixtures/ty/kind_bound.snap.new +++ /dev/null @@ -1,13 +0,0 @@ ---- -source: crates/uitest/tests/ty.rs -assertion_line: 17 -expression: diags -input_file: crates/uitest/fixtures/ty/kind_bound.fe ---- -error[3-0001]: kind mismatch between two types - ┌─ kind_bound.fe:9:19 - │ -9 │ y: PtrWrapper - │ ^^^ expected `(* -> Any)` kind, but found `*` kind - - From 32b709fbe17669168a346ec02e214916516a805d Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 16 Sep 2023 18:03:27 +0200 Subject: [PATCH 312/678] Implement super trait parsing --- crates/parser2/src/ast/item.rs | 17 ++++- crates/parser2/src/parser/item.rs | 22 +++++- crates/parser2/src/syntax_kind.rs | 2 + .../test_files/syntax_node/items/trait.fe | 5 ++ .../test_files/syntax_node/items/trait.snap | 73 ++++++++++++++++++- 5 files changed, 113 insertions(+), 6 deletions(-) diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 1f0aa030ff..3d1d0196e1 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -1,4 +1,4 @@ -use super::{ast_node, TupleType}; +use super::{ast_node, PathType, TupleType}; use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; use rowan::ast::{support, AstNode}; @@ -196,6 +196,21 @@ impl Trait { pub fn item_list(&self) -> Option { support::child(self.syntax()) } + + pub fn super_trait_list(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct SuperTraitList, + SK::SuperTraitList, + IntoIterator +} +impl SuperTraitList { + pub fn colon(&self) -> Option { + support::token(self.syntax(), SK::Colon) + } } ast_node! { diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index 0ab4a7737b..bbbf9a9df3 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -319,9 +319,18 @@ impl super::Parse for TraitScope { parser.with_next_expected_tokens( |parser| parse_generic_params_opt(parser, false), - &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + &[SyntaxKind::LBrace, SyntaxKind::WhereKw, SyntaxKind::Colon], ); + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.with_next_expected_tokens( + |parser| { + parser.parse(SuperTraitListScope::default(), None); + }, + &[SyntaxKind::LBrace, SyntaxKind::WhereKw], + ); + } + parser.with_next_expected_tokens(parse_where_clause_opt, &[SyntaxKind::LBrace]); if parser.current_kind() != Some(SyntaxKind::LBrace) { @@ -333,6 +342,17 @@ impl super::Parse for TraitScope { } } +define_scope! 
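Aside: a minimal standalone sketch of the kind check these diagnostics exercise, not part of the snapshot or patch above. The `Kind` shape mirrors what `lower_kind` produces earlier in this series (`Star`, `Abs`, `Any`); `expects_arg_of` and `compatible` are hypothetical helpers for illustration, not the compiler's actual API.

#[derive(Debug, Clone, PartialEq, Eq)]
enum Kind {
    Star,                      // `*`
    Abs(Box<Kind>, Box<Kind>), // `lhs -> rhs`
    Any,                       // unconstrained; treated as compatible with anything
}

impl Kind {
    // Kind a type constructor expects from its next argument, if it takes one.
    fn expects_arg_of(&self) -> Option<&Kind> {
        match self {
            Kind::Abs(arg, _) => Some(arg.as_ref()),
            _ => None,
        }
    }

    // `Any` unifies with every kind; otherwise kinds must match exactly.
    fn compatible(&self, other: &Kind) -> bool {
        matches!(self, Kind::Any) || matches!(other, Kind::Any) || self == other
    }
}

fn main() {
    // `pub struct Wrapper2<T> where T: * -> *` gives `Wrapper2` the kind
    // `(* -> *) -> *`, so its argument must itself have kind `* -> *`.
    let wrapper2 = Kind::Abs(
        Box::new(Kind::Abs(Box::new(Kind::Star), Box::new(Kind::Star))),
        Box::new(Kind::Star),
    );
    let i32_kind = Kind::Star; // `i32` is a plain type: kind `*`

    let expected = wrapper2.expects_arg_of().unwrap();
    // Mirrors `foo_err: Wrapper2<i32>` above: expected `(* -> *)`, found `*`.
    assert!(!expected.compatible(&i32_kind));
}
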
{SuperTraitListScope, SuperTraitList, Inheritance(Plus)} +impl super::Parse for SuperTraitListScope { + fn parse(&mut self, parser: &mut Parser) { + parser.bump_expected(SyntaxKind::Colon); + parser.parse(PathTypeScope::default(), None); + while parser.bump_if(SyntaxKind::Plus) { + parser.parse(PathTypeScope::default(), None); + } + } +} + define_scope! { TraitItemListScope, TraitItemList, Override(RBrace, Newline, FnKw) } impl super::Parse for TraitItemListScope { fn parse(&mut self, parser: &mut Parser) { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index a98e4f6462..b3f879ac6f 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -357,6 +357,8 @@ pub enum SyntaxKind { ImplItemList, /// `trait Foo {..}` Trait, + /// `: Trait + Trait2` + SuperTraitList, /// `{ fn foo() {..} }` TraitItemList, /// `impl Trait for Foo { .. }` diff --git a/crates/parser2/test_files/syntax_node/items/trait.fe b/crates/parser2/test_files/syntax_node/items/trait.fe index 85f9cb17ec..938bb0940f 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.fe +++ b/crates/parser2/test_files/syntax_node/items/trait.fe @@ -26,3 +26,8 @@ impl Parser (SyntaxNode::new_root(self.builder.finish()), self.errors) } } + + +pub trait SubTrait: Parse + Add +where T: Add +{} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index f8155c9450..3a3197f5ca 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -3,8 +3,8 @@ source: crates/parser2/tests/syntax_node.rs expression: node input_file: crates/parser2/test_files/syntax_node/items/trait.fe --- -Root@0..593 - ItemList@0..593 +Root@0..652 + ItemList@0..652 Item@0..17 Trait@0..15 TraitKw@0..5 "trait" @@ -309,7 +309,7 @@ Root@0..593 Newline@353..354 "\n" RBrace@354..355 "}" Newline@355..357 "\n\n" - Item@357..593 + Item@357..595 Impl@357..592 ImplKw@357..361 "impl" GenericParamList@361..364 @@ -483,5 +483,70 @@ Root@0..593 RBrace@589..590 "}" Newline@590..591 "\n" RBrace@591..592 "}" - Newline@592..593 "\n" + Newline@592..595 "\n\n\n" + Item@595..652 + Trait@595..652 + ItemModifier@595..598 + PubKw@595..598 "pub" + WhiteSpace@598..599 " " + TraitKw@599..604 "trait" + WhiteSpace@604..605 " " + Ident@605..613 "SubTrait" + GenericParamList@613..616 + Lt@613..614 "<" + TypeGenericParam@614..615 + Ident@614..615 "T" + Gt@615..616 ">" + SuperTraitList@616..632 + Colon@616..617 ":" + WhiteSpace@617..618 " " + PathType@618..623 + Path@618..623 + PathSegment@618..623 + Ident@618..623 "Parse" + WhiteSpace@623..624 " " + Plus@624..625 "+" + WhiteSpace@625..626 " " + PathType@626..632 + Path@626..629 + PathSegment@626..629 + Ident@626..629 "Add" + GenericArgList@629..632 + Lt@629..630 "<" + TypeGenericArg@630..631 + PathType@630..631 + Path@630..631 + PathSegment@630..631 + Ident@630..631 "T" + Gt@631..632 ">" + WhiteSpace@632..633 " " + Newline@633..634 "\n" + WhereClause@634..650 + WhereKw@634..639 "where" + WhiteSpace@639..640 " " + WherePredicate@640..650 + PathType@640..641 + Path@640..641 + PathSegment@640..641 + Ident@640..641 "T" + TypeBoundList@641..649 + Colon@641..642 ":" + WhiteSpace@642..643 " " + TypeBound@643..649 + TraitBound@643..649 + Path@643..646 + PathSegment@643..646 + Ident@643..646 "Add" + GenericArgList@646..649 + Lt@646..647 "<" + TypeGenericArg@647..648 + PathType@647..648 + Path@647..648 + PathSegment@647..648 + 
Ident@647..648 "T" + Gt@648..649 ">" + Newline@649..650 "\n" + TraitItemList@650..652 + LBrace@650..651 "{" + RBrace@651..652 "}" From 3bd23252b169f7d21c5db7aef7bd5ab4c7ec7aae Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 16 Sep 2023 19:07:41 +0200 Subject: [PATCH 313/678] Implement super type hir lowering --- crates/hir/src/hir_def/item.rs | 1 + crates/hir/src/lower/item.rs | 9 +++++++++ crates/hir/src/span/item.rs | 9 +++++++++ crates/hir/src/visitor.rs | 28 ++++++++++++++++++++++++++++ 4 files changed, 47 insertions(+) diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 87fe044452..883f07ccc0 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -642,6 +642,7 @@ pub struct Trait { pub attributes: AttrListId, pub vis: Visibility, pub generic_params: GenericParamListId, + pub super_traits: Vec, pub where_clause: WhereClauseId, pub top_mod: TopLevelMod, diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs index f8d53102fb..8aa8b91708 100644 --- a/crates/hir/src/lower/item.rs +++ b/crates/hir/src/lower/item.rs @@ -275,6 +275,14 @@ impl Trait { let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let super_traits = if let Some(super_traits) = ast.super_trait_list() { + super_traits + .into_iter() + .map(|trait_ref| TraitRef::lower_ast(ctxt, trait_ref)) + .collect() + } else { + vec![] + }; let origin = HirOrigin::raw(&ast); if let Some(item_list) = ast.item_list() { @@ -290,6 +298,7 @@ impl Trait { attributes, vis, generic_params, + super_traits, where_clause, ctxt.top_mod(), origin, diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index 7eb0b2591e..eb7ada4583 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -144,11 +144,20 @@ define_lazy_span_node!( @node { (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), + (super_traits, super_trait_list, LazySuperTraitListSpan), (where_clause, where_clause, LazyWhereClauseSpan), (modifier, modifier, LazyItemModifierSpan), } ); +define_lazy_span_node!( + LazySuperTraitListSpan, + ast::SuperTraitList, + @idx { + (super_trait, LazyPathTypeSpan), + } +); + define_lazy_span_node!( LazyImplTraitSpan, ast::ImplTrait, diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 129b36eee0..3be7ecae7c 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -708,6 +708,34 @@ where }, ); + ctxt.with_new_ctxt( + |span| span.super_traits(), + |ctxt| { + for (i, trait_ref) in trait_.super_traits(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.super_trait(i), + |ctxt| { + if let Some(path) = trait_ref.path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + }; + + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| { + visitor.visit_generic_arg_list(ctxt, trait_ref.generic_args); + }, + ); + }, + ); + } + }, + ); + ctxt.with_new_ctxt( |span| span.where_clause_moved(), |ctxt| { From 6895cbbd27d8dce34506c5a245c67cc6d5fea5f9 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 16 Sep 2023 21:11:11 +0200 Subject: [PATCH 314/678] Implement trait def lowering --- crates/driver2/src/lib.rs | 3 +- crates/hir-analysis/src/lib.rs | 1 + crates/hir-analysis/src/ty/lower.rs | 44 
++++++++++++++++--- crates/hir-analysis/src/ty/mod.rs | 31 +++++++++++++ crates/hir-analysis/src/ty/trait_.rs | 4 +- crates/hir/src/hir_def/item.rs | 55 ++++++++++++++++++------ crates/hir/src/lib.rs | 2 + crates/uitest/fixtures/ty/trait_def.fe | 7 +++ crates/uitest/fixtures/ty/trait_def.snap | 14 ++++++ 9 files changed, 140 insertions(+), 21 deletions(-) create mode 100644 crates/uitest/fixtures/ty/trait_def.fe create mode 100644 crates/uitest/fixtures/ty/trait_def.snap diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index 50de2f3d03..3695a72d4b 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -17,7 +17,7 @@ use hir::{ }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, - ty::{TypeAliasAnalysisPass, TypeDefAnalysisPass}, + ty::{TraitAnalysisPass, TypeAliasAnalysisPass, TypeDefAnalysisPass}, HirAnalysisDb, }; @@ -147,5 +147,6 @@ fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(TypeDefAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(TypeAliasAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TraitAnalysisPass::new(db))); pass_manager } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index c62225867c..37b899995d 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -15,6 +15,7 @@ pub struct Jar( /// Type lowering. ty::lower::lower_hir_ty, ty::lower::lower_adt, + ty::lower::lower_trait, ty::lower::lower_type_alias, ty::lower::collect_generic_params, ty::lower::GenericParamOwnerId, diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index 344ab1116e..e87223567b 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -2,7 +2,7 @@ use either::Either; use hir::{ hir_def::{ scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, - GenericParamOwner, IdentId, ItemKind, KindBound as HirKindBound, Partial, PathId, + GenericParamOwner, IdentId, ItemKind, KindBound as HirKindBound, Partial, PathId, Trait, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, WherePredicate, }, @@ -18,8 +18,9 @@ use crate::{ HirAnalysisDb, }; -use super::ty::{ - AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, Subst, TyData, TyId, TyParam, +use super::{ + trait_::TraitDef, + ty::{AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, Subst, TyData, TyId, TyParam}, }; #[salsa::tracked] @@ -33,6 +34,11 @@ pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> AdtDef { } #[salsa::tracked] +pub fn lower_trait(db: &dyn HirAnalysisDb, trait_: Trait) -> TraitDef { + TraitBuilder::new(db, trait_).build() +} + +#[salsa::tracked(return_ref)] pub(crate) fn collect_generic_params( db: &dyn HirAnalysisDb, owner: GenericParamOwnerId, @@ -53,7 +59,7 @@ pub(crate) fn lower_type_alias(db: &dyn HirAnalysisDb, alias: HirTypeAlias) -> T return TyAlias { alias, alias_to: TyId::invalid(db, InvalidCause::Other), - params: params.params, + params: params.params.clone(), }; }; @@ -80,7 +86,7 @@ pub(crate) fn lower_type_alias(db: &dyn HirAnalysisDb, alias: HirTypeAlias) -> T TyAlias { alias, alias_to, - params: params.params, + params: params.params.clone(), } } @@ -97,7 +103,7 @@ fn recover_lower_type_alias_cycle( TyAlias { alias, alias_to, - params: params.params, + params: 
params.params.clone(), } } @@ -415,6 +421,32 @@ impl<'db> AdtTyBuilder<'db> { } } +struct TraitBuilder<'db> { + db: &'db dyn HirAnalysisDb, + trait_: Trait, + params: Vec, + self_args: TyId, + // TODO: We need to lower associated methods here. + // methods: Vec +} + +impl<'db> TraitBuilder<'db> { + fn new(db: &'db dyn HirAnalysisDb, trait_: Trait) -> Self { + let params_owner_id = GenericParamOwnerId::new(db, trait_.into()); + let params_set = collect_generic_params(db, params_owner_id); + Self { + db, + trait_, + params: params_set.params.clone(), + self_args: params_set.trait_self.unwrap(), + } + } + + fn build(self) -> TraitDef { + TraitDef::new(self.db, self.trait_, self.params, self.self_args) + } +} + #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct GenericParamTypeSet { pub(crate) params: Vec, diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 5625487502..072cdd8ff2 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -7,6 +7,7 @@ use self::{ AdtDefDiagAccumulator, GenericParamDiagAccumulator, TyLowerDiag, TypeAliasDefDiagAccumulator, }, + lower::GenericParamOwnerId, ty::AdtRefId, }; @@ -79,6 +80,7 @@ impl<'db> TypeAliasAnalysisPass<'db> { Self { db } } } + impl<'db> ModuleAnalysisPass for TypeAliasAnalysisPass<'db> { fn run_on_module( &mut self, @@ -103,3 +105,32 @@ impl<'db> ModuleAnalysisPass for TypeAliasAnalysisPass<'db> { diags.into_iter().map(|diag| Box::new(diag) as _).collect() } } + +pub struct TraitAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} +impl<'db> TraitAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass for TraitAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: hir::hir_def::TopLevelMod, + ) -> Vec> { + top_mod + .all_traits(self.db.as_hir_db()) + .iter() + .map(|&trait_| { + let owner_id = GenericParamOwnerId::new(self.db, trait_.into()); + lower::collect_generic_params::accumulated::( + self.db, owner_id, + ) + }) + .flatten() + .map(|diag| Box::new(diag) as _) + .collect() + } +} diff --git a/crates/hir-analysis/src/ty/trait_.rs b/crates/hir-analysis/src/ty/trait_.rs index 7f3ad46d17..62c65de930 100644 --- a/crates/hir-analysis/src/ty/trait_.rs +++ b/crates/hir-analysis/src/ty/trait_.rs @@ -37,5 +37,7 @@ pub struct TraitInstId { pub struct TraitDef { pub trait_: Trait, pub args: Vec, - pub super_traits: Vec, + pub self_arg: TyId, + // TODO: we need to collect associated method types here. + // methods: Vec } diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 883f07ccc0..ffbe23329b 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -370,6 +370,12 @@ impl TopLevelMod { Visibility::Public } + /// Returns all items in the top level module including ones in nested + /// modules. + pub fn all_items<'db>(self, db: &'db dyn HirDb) -> &'db Vec { + all_items_in_top_mod(db, self) + } + /// Returns all structs in the top level module including ones in nested /// modules. pub fn all_structs<'db>(self, db: &'db dyn HirDb) -> &'db Vec { @@ -389,18 +395,29 @@ impl TopLevelMod { } /// Returns all type aliases in the top level module including ones in - /// nested + /// nested modules. pub fn all_type_aliases<'db>(self, db: &'db dyn HirDb) -> &'db Vec { all_type_aliases_in_top_mod(db, self) } + + /// Returns all traits in the top level module including ones in nested + /// modules. 
+ pub fn all_traits<'db>(self, db: &'db dyn HirDb) -> &'db Vec { + all_traits_in_top_mod(db, self) + } +} + +#[salsa::tracked(return_ref)] +pub fn all_items_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { + top_mod.children_nested(db).collect() } #[salsa::tracked(return_ref)] pub fn all_structs_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { - top_mod - .children_nested(db) + all_items_in_top_mod(db, top_mod) + .iter() .filter_map(|item| match item { - ItemKind::Struct(struct_) => Some(struct_), + ItemKind::Struct(struct_) => Some(*struct_), _ => None, }) .collect() @@ -408,10 +425,10 @@ pub fn all_structs_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec Vec { - top_mod - .children_non_nested(db) + all_items_in_top_mod(db, top_mod) + .iter() .filter_map(|item| match item { - ItemKind::Enum(enum_) => Some(enum_), + ItemKind::Enum(enum_) => Some(*enum_), _ => None, }) .collect() @@ -419,20 +436,32 @@ pub fn all_enums_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { #[salsa::tracked(return_ref)] pub fn all_type_aliases_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { - top_mod - .children_non_nested(db) + all_items_in_top_mod(db, top_mod) + .iter() .filter_map(|item| match item { - ItemKind::TypeAlias(alias) => Some(alias), + ItemKind::TypeAlias(alias) => Some(*alias), _ => None, }) .collect() } + #[salsa::tracked(return_ref)] pub fn all_contracts_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { - top_mod - .children_non_nested(db) + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::Contract(contract) => Some(*contract), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_traits_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { + all_items_in_top_mod(db, top_mod) + .iter() .filter_map(|item| match item { - ItemKind::Contract(contract) => Some(contract), + ItemKind::Trait(trait_) => Some(*trait_), _ => None, }) .collect() diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index bfa63bb91f..66d9370042 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -50,8 +50,10 @@ pub struct Jar( hir_def::TupleTypeId, hir_def::UsePathId, /// Utility methods for analysis. + hir_def::all_items_in_top_mod, hir_def::all_structs_in_top_mod, hir_def::all_enums_in_top_mod, + hir_def::all_traits_in_top_mod, hir_def::all_contracts_in_top_mod, hir_def::all_type_aliases_in_top_mod, /// Accumulated diagnostics. diff --git a/crates/uitest/fixtures/ty/trait_def.fe b/crates/uitest/fixtures/ty/trait_def.fe new file mode 100644 index 0000000000..8150bbd239 --- /dev/null +++ b/crates/uitest/fixtures/ty/trait_def.fe @@ -0,0 +1,7 @@ +trait Clone {} +pub trait Trait +where T: * -> * + T: (* -> *) -> * +{ + +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/trait_def.snap b/crates/uitest/fixtures/ty/trait_def.snap new file mode 100644 index 0000000000..767649ee5a --- /dev/null +++ b/crates/uitest/fixtures/ty/trait_def.snap @@ -0,0 +1,14 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/trait_def.fe +--- +error[3-0005]: duplicate type bound is not allowed. 
+ ┌─ trait_def.fe:4:10 + │ +3 │ where T: * -> * + │ ------ first defined here +4 │ T: (* -> *) -> * + │ ^^^^^^^^^^^^^ duplicate type bound here + + From 1968c41689bb7d1b022852f83977a4216b5d513c Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sat, 16 Sep 2023 21:13:35 +0200 Subject: [PATCH 315/678] Bumpup workspace resolver to version 2 --- Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.toml b/Cargo.toml index daafdfa420..82d38453cd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,6 @@ [workspace] members = ["crates/*"] +resolver = "2" [profile.dev.package.solc] opt-level = 3 From 6e4431bb2d7bb98721187f1d3d14edef7736db41 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 17 Sep 2023 16:15:40 +0200 Subject: [PATCH 316/678] Implement `UnificationTable` --- Cargo.lock | 10 ++ crates/hir-analysis/Cargo.toml | 1 + crates/hir-analysis/src/lib.rs | 1 + crates/hir-analysis/src/ty/lower.rs | 9 +- crates/hir-analysis/src/ty/mod.rs | 2 + crates/hir-analysis/src/ty/trait_.rs | 1 - crates/hir-analysis/src/ty/ty.rs | 63 +++++++----- crates/hir-analysis/src/ty/unify.rs | 137 +++++++++++++++++++++++++++ 8 files changed, 196 insertions(+), 28 deletions(-) create mode 100644 crates/hir-analysis/src/ty/unify.rs diff --git a/Cargo.lock b/Cargo.lock index 436e7e603e..fc11dc0e6e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -701,6 +701,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ena" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" +dependencies = [ + "log", +] + [[package]] name = "enumn" version = "0.1.8" @@ -1073,6 +1082,7 @@ dependencies = [ "derive_more", "dir-test", "either", + "ena", "fe-common2", "fe-compiler-test-utils", "fe-hir", diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml index 96d1144ff2..31002b9d27 100644 --- a/crates/hir-analysis/Cargo.toml +++ b/crates/hir-analysis/Cargo.toml @@ -14,6 +14,7 @@ rustc-hash = "1.1.0" either = "1.8" derive_more = "0.99" itertools = "0.10" +ena = "0.14" hir = { path = "../hir", package = "fe-hir" } common = { path = "../common2", package = "fe-common2" } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index 37b899995d..db9f7681c5 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -10,6 +10,7 @@ pub struct Jar( /// Type inference. ty::ty::TyId, ty::ty::ty_kind, + ty::ty::free_inference_keys, ty::ty::AdtDef, ty::ty::AdtRefId, /// Type lowering. 
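The `UnificationTable` introduced by this patch (in crates/hir-analysis/src/ty/unify.rs below) is a thin wrapper over `ena`'s union-find table. As a reading aid, here is a minimal, self-contained sketch of the same pattern with deliberately simplified types; `IntKey` and `IntValue` are hypothetical stand-ins for `InferenceKey` and `InferenceValue`, and the only assumption is the `ena = "0.14"` dependency added in the Cargo.toml hunk above.

use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};

// A key identifying one inference variable in the union-find table.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
struct IntKey(u32);

// The value attached to an equivalence class; `None` means "still unbound".
#[derive(Debug, Clone, PartialEq, Eq)]
struct IntValue(Option<u32>);

impl UnifyKey for IntKey {
    type Value = IntValue;
    fn index(&self) -> u32 {
        self.0
    }
    fn from_index(idx: u32) -> Self {
        IntKey(idx)
    }
    fn tag() -> &'static str {
        "IntKey"
    }
}

impl UnifyValue for IntValue {
    type Error = NoError;
    fn unify_values(v1: &Self, v2: &Self) -> Result<Self, Self::Error> {
        // A bound value wins over an unbound one; two bound values must agree.
        Ok(match (v1.0, v2.0) {
            (Some(a), Some(b)) => {
                assert_eq!(a, b);
                IntValue(Some(a))
            }
            (Some(a), None) | (None, Some(a)) => IntValue(Some(a)),
            (None, None) => IntValue(None),
        })
    }
}

fn main() {
    let mut table: InPlaceUnificationTable<IntKey> = InPlaceUnificationTable::new();
    let a = table.new_key(IntValue(None));
    let b = table.new_key(IntValue(None));

    // Merging two unbound keys puts them in one equivalence class.
    table.union(a, b);

    // Binding one key binds every key in its class.
    table.union_value(a, IntValue(Some(42)));
    assert_eq!(table.probe_value(b), IntValue(Some(42)));
}

The real `InferenceValue` additionally stores the `Kind` of an unbound variable, which is what lets `unify_values` assert that two merged variables expect the same kind.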
diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/lower.rs index e87223567b..f03d78416f 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/lower.rs @@ -8,6 +8,7 @@ use hir::{ }, visitor::prelude::*, }; +use rustc_hash::FxHashMap; use crate::{ name_resolution::{resolve_path_early, EarlyResolvedPath, NameDomain, NameResKind}, @@ -20,7 +21,7 @@ use crate::{ use super::{ trait_::TraitDef, - ty::{AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, Subst, TyData, TyId, TyParam}, + ty::{AdtDef, AdtField, AdtRef, AdtRefId, InvalidCause, Kind, TyData, TyId, TyParam}, }; #[salsa::tracked] @@ -131,7 +132,7 @@ impl TyAlias { }, ); } - let mut subst = Subst::new(); + let mut subst = FxHashMap::default(); for (¶m, &arg) in self.params.iter().zip(arg_tys.iter()) { let arg = if param.kind(db) != arg.kind(db) { @@ -142,10 +143,10 @@ impl TyAlias { } else { arg }; - subst.insert(db, param, arg); + subst.insert(param, arg); } - self.alias_to.apply_subst(db, &subst) + self.alias_to.apply_subst(db, &mut subst) } } diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 072cdd8ff2..5dced8da5d 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -18,6 +18,8 @@ pub mod trait_; pub mod ty; pub mod visitor; +mod unify; + pub struct TypeDefAnalysisPass<'db> { db: &'db dyn HirAnalysisDb, } diff --git a/crates/hir-analysis/src/ty/trait_.rs b/crates/hir-analysis/src/ty/trait_.rs index 62c65de930..94888248cf 100644 --- a/crates/hir-analysis/src/ty/trait_.rs +++ b/crates/hir-analysis/src/ty/trait_.rs @@ -9,7 +9,6 @@ use super::ty::TyId; pub struct Predicate { pub trait_: TraitInstId, pub ty: TyId, - pub trait_args: Vec, } /// Represents an each type which implements a trait. diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index db99e6a44f..85b082ba52 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -1,4 +1,4 @@ -use std::fmt; +use std::{collections::BTreeSet, fmt}; use hir::{ hir_def::{ @@ -14,7 +14,11 @@ use rustc_hash::FxHashMap; use crate::HirAnalysisDb; -use super::lower::{lower_hir_ty, GenericParamOwnerId}; +use super::{ + lower::{lower_hir_ty, GenericParamOwnerId}, + unify::InferenceKey, + visitor::TyVisitor, +}; #[salsa::interned] pub struct TyId { @@ -69,6 +73,13 @@ impl TyId { matches!(self.data(db), TyData::TyVar(_)) } + pub(super) fn free_inference_keys<'db>( + self, + db: &'db dyn HirAnalysisDb, + ) -> &'db BTreeSet { + free_inference_keys(db, self) + } + /// Perform type level application. /// If the kind is mismatched, return `TyData::Invalid`. pub(super) fn app(db: &dyn HirAnalysisDb, abs: Self, arg: Self) -> TyId { @@ -88,7 +99,10 @@ impl TyId { Self::new(db, TyData::TyApp(abs, arg)) } - pub(crate) fn apply_subst(self, db: &dyn HirAnalysisDb, subst: &Subst) -> TyId { + pub(crate) fn apply_subst(self, db: &dyn HirAnalysisDb, subst: &mut S) -> TyId + where + S: Subst + ?Sized, + { if let Some(to) = subst.get(self) { return to; } @@ -317,8 +331,8 @@ impl fmt::Display for Kind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TyVar { - pub id: u32, pub kind: Kind, + pub(super) key: InferenceKey, } /// Type generics parameter. 
We also treat `Self` type in a trait definition as @@ -449,28 +463,17 @@ impl AdtRef { } } -#[derive(Default, Clone)] -pub(crate) struct Subst { - inner: FxHashMap, -} +pub trait Subst { + fn get(&mut self, from: TyId) -> Option; -impl Subst { - pub(crate) fn new() -> Self { - Self::default() - } - - /// Insert a substitution mapping. - /// This method panics when - /// 1. `from` and `to` have different kinds. - /// 2. `from` is not a `TyVar` or `TyParam`. - pub(crate) fn insert(&mut self, db: &dyn HirAnalysisDb, from: TyId, to: TyId) { - debug_assert!(from.kind(db) == to.kind(db)); - debug_assert!(from.is_ty_var(db,) || from.is_ty_param(db)); - self.inner.insert(from, to); + fn apply(&mut self, db: &dyn HirAnalysisDb, ty: TyId) -> TyId { + ty.apply_subst(db, self) } +} - pub(crate) fn get(&self, from: TyId) -> Option { - self.inner.get(&from).copied() +impl Subst for FxHashMap { + fn get(&mut self, from: TyId) -> Option { + FxHashMap::get(self, &from).copied() } } @@ -536,3 +539,17 @@ impl HasKind for AdtDef { kind } } + +#[salsa::tracked(return_ref)] +pub(crate) fn free_inference_keys(db: &dyn HirAnalysisDb, ty: TyId) -> BTreeSet { + struct FreeInferenceKeyCollector(BTreeSet); + impl TyVisitor for FreeInferenceKeyCollector { + fn visit_var(&mut self, _db: &dyn HirAnalysisDb, var: &TyVar) { + self.0.insert(var.key); + } + } + + let mut collector = FreeInferenceKeyCollector(BTreeSet::new()); + collector.visit_ty(db, ty); + collector.0 +} diff --git a/crates/hir-analysis/src/ty/unify.rs b/crates/hir-analysis/src/ty/unify.rs new file mode 100644 index 0000000000..9e36ff2626 --- /dev/null +++ b/crates/hir-analysis/src/ty/unify.rs @@ -0,0 +1,137 @@ +use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue}; + +use crate::HirAnalysisDb; + +use super::ty::{Kind, Subst, TyData, TyId}; + +pub struct UnificationTable<'db> { + db: &'db dyn HirAnalysisDb, + table: InPlaceUnificationTable, +} + +impl<'db> UnificationTable<'db> { + /// Returns true if the two types were unified. 
+    pub fn unify(&mut self, ty1: TyId, ty2: TyId) -> bool {
+        let snapshot = self.table.snapshot();
+
+        if self.unify_impl(ty1, ty2) {
+            self.table.commit(snapshot);
+            true
+        } else {
+            self.table.rollback_to(snapshot);
+            false
+        }
+    }
+
+    pub fn new_key(&mut self, kind: Kind) -> InferenceKey {
+        self.table.new_key(InferenceValue::Unbounded(kind.clone()))
+    }
+
+    pub fn probe(&mut self, key: InferenceKey) -> Option<TyId> {
+        match self.table.probe_value(key) {
+            InferenceValue::Bounded(ty) => Some(ty),
+            InferenceValue::Unbounded(_) => None,
+        }
+    }
+
+    fn unify_impl(&mut self, ty1: TyId, ty2: TyId) -> bool {
+        if ty1.kind(self.db) != ty2.kind(self.db) {
+            return false;
+        }
+        let ty1 = self.apply(self.db, ty1);
+        let ty2 = self.apply(self.db, ty2);
+
+        match (ty1.data(self.db), ty2.data(self.db)) {
+            (TyData::TyVar(var), _) if !ty2.free_inference_keys(self.db).contains(&var.key) => {
+                self.table
+                    .union_value(var.key, InferenceValue::Bounded(ty2));
+                true
+            }
+
+            (_, TyData::TyVar(var)) if !ty1.free_inference_keys(self.db).contains(&var.key) => {
+                self.table
+                    .union_value(var.key, InferenceValue::Bounded(ty1)); // bind the variable to the non-variable side
+                true
+            }
+            (TyData::TyVar(var1), TyData::TyVar(var2)) => {
+                self.table.union(var1.key, var2.key);
+                true
+            }
+
+            (TyData::TyApp(ty1_1, ty1_2), TyData::TyApp(ty2_1, ty2_2)) => {
+                let ok = self.unify_impl(ty1_1, ty2_1);
+                if ok {
+                    let ty1_2 = self.apply(self.db, ty1_2);
+                    let ty2_2 = self.apply(self.db, ty2_2);
+                    self.unify_impl(ty1_2, ty2_2)
+                } else {
+                    false
+                }
+            }
+
+            (TyData::TyParam(_), TyData::TyParam(_)) | (TyData::TyCon(_), TyData::TyCon(_)) => {
+                ty1 == ty2
+            }
+
+            (TyData::Invalid(_), _) | (_, TyData::Invalid(_)) => true,
+
+            _ => false,
+        }
+    }
+}
+
+impl<'db> Subst for UnificationTable<'db> {
+    fn get(&mut self, ty: TyId) -> Option<TyId> {
+        match ty.data(self.db) {
+            TyData::TyVar(var) => self.probe(var.key),
+            _ => None,
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct InferenceKey(u32);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum InferenceValue {
+    Bounded(TyId),
+    Unbounded(Kind),
+}
+
+impl UnifyKey for InferenceKey {
+    type Value = InferenceValue;
+
+    fn index(&self) -> u32 {
+        self.0
+    }
+
+    fn from_index(idx: u32) -> Self {
+        Self(idx)
+    }
+
+    fn tag() -> &'static str {
+        "InferenceKey"
+    }
+}
+
+impl UnifyValue for InferenceValue {
+    type Error = NoError;
+
+    fn unify_values(v1: &Self, v2: &Self) -> Result<Self, Self::Error> {
+        match (v1, v2) {
+            (InferenceValue::Unbounded(k1), InferenceValue::Unbounded(k2)) => {
+                assert!(k1 == k2);
+                Ok(InferenceValue::Unbounded(k1.clone()))
+            }
+
+            (InferenceValue::Unbounded(_), InferenceValue::Bounded(ty))
+            | (InferenceValue::Bounded(ty), InferenceValue::Unbounded(_)) => {
+                Ok(InferenceValue::Bounded(*ty))
+            }
+
+            (InferenceValue::Bounded(ty1), InferenceValue::Bounded(ty2)) => {
+                panic!("trying to unify two bounded types {:?} and {:?}", ty1, ty2)
+            }
+        }
+    }
+}

From ecece678b55b730921d68a81638e68c327cc25d0 Mon Sep 17 00:00:00 2001
From: Yoshitomo Nakanishi
Date: Sun, 17 Sep 2023 23:53:15 +0200
Subject: [PATCH 317/678] Define `TraitLowerDiag`

---
 crates/hir-analysis/src/ty/diagnostics.rs | 131 +++++++++++++++++++++-
 1 file changed, 130 insertions(+), 1 deletion(-)

diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs
index 9c75c8f6be..2d877416b9 100644
--- a/crates/hir-analysis/src/ty/diagnostics.rs
+++ b/crates/hir-analysis/src/ty/diagnostics.rs
@@ -3,7 +3,7 @@ use common::diagnostics::{
 };
 use hir::{
     diagnostics::DiagnosticVoucher,
-
hir_def::TypeAlias as HirTypeAlias, + hir_def::{Trait, TypeAlias as HirTypeAlias}, span::{DynLazySpan, LazySpan}, HirDb, }; @@ -230,3 +230,132 @@ impl DiagnosticVoucher for TyLowerDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } } + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TraitLowerDiag { + ExternalTraitForExternalType(DynLazySpan), + + ConflictTraitImpl { + primary: DynLazySpan, + conflict_with: DynLazySpan, + }, + + KindMismatch { + primary: DynLazySpan, + trait_def: Trait, + }, + + TraitArgumentMismatch { + span: DynLazySpan, + trait_: Trait, + n_given_arg: usize, + }, +} + +impl TraitLowerDiag { + fn local_code(&self) -> u16 { + match self { + Self::ExternalTraitForExternalType(_) => 0, + Self::ConflictTraitImpl { .. } => 1, + Self::KindMismatch { .. } => 2, + Self::TraitArgumentMismatch { .. } => 3, + } + } + + fn message(&self, db: &dyn HirDb) -> String { + match self { + Self::ExternalTraitForExternalType(_) => { + "external trait cannot be implemented for external type".to_string() + } + + Self::ConflictTraitImpl { .. } => "conflict trait implementation".to_string(), + + Self::KindMismatch { .. } => "type doesn't satisfy required kind bound".to_string(), + + Self::TraitArgumentMismatch { .. } => { + "given trait argument number mismatch".to_string() + } + } + } + + fn sub_diags(&self, db: &dyn hir::SpannedHirDb) -> Vec { + match self { + Self::ExternalTraitForExternalType(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "external trait cannot be implemented for external type".to_string(), + span.resolve(db), + )], + + Self::ConflictTraitImpl { + primary, + conflict_with, + } => vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "conflict trait implementation".to_string(), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "conflict with this trait implementation".to_string(), + conflict_with.resolve(db), + ), + ], + + Self::KindMismatch { primary, trait_def } => vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "type doesn't satisfy required kind bound here".to_string(), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "trait is defined here".to_string(), + trait_def.lazy_span().name().resolve(db), + ), + ], + + Self::TraitArgumentMismatch { + span, + trait_, + n_given_arg, + } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected {} arguments here, but {} given", + trait_.generic_params(db.as_hir_db()).len(db.as_hir_db()), + n_given_arg, + ), + span.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + format!("trait defined here"), + trait_.lazy_span().name().resolve(db), + ), + ] + } + } + } + + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl DiagnosticVoucher for TraitLowerDiag { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::TypeDefinition, self.local_code()) + } + + fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(db.as_hir_db()); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } +} From 60c29bbe5a93f1e96e762b2f3fa30d0a5a86cafd Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Sun, 17 Sep 2023 23:53:37 +0200 Subject: [PATCH 318/678] Implement trait implementation conflict analysis --- crates/hir-analysis/src/lib.rs | 17 +-- crates/hir-analysis/src/ty/adt_analysis.rs | 
2 +- crates/hir-analysis/src/ty/mod.rs | 25 ++-- crates/hir-analysis/src/ty/trait_.rs | 62 +++++---- crates/hir-analysis/src/ty/trait_lower.rs | 118 ++++++++++++++++++ crates/hir-analysis/src/ty/ty.rs | 2 +- .../src/ty/{lower.rs => ty_lower.rs} | 31 ----- crates/hir-analysis/src/ty/unify.rs | 21 +++- crates/hir-analysis/src/ty/visitor.rs | 2 +- 9 files changed, 205 insertions(+), 75 deletions(-) create mode 100644 crates/hir-analysis/src/ty/trait_lower.rs rename crates/hir-analysis/src/ty/{lower.rs => ty_lower.rs} (96%) diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index db9f7681c5..bae9843f8e 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -14,18 +14,21 @@ pub struct Jar( ty::ty::AdtDef, ty::ty::AdtRefId, /// Type lowering. - ty::lower::lower_hir_ty, - ty::lower::lower_adt, - ty::lower::lower_trait, - ty::lower::lower_type_alias, - ty::lower::collect_generic_params, - ty::lower::GenericParamOwnerId, + ty::ty_lower::lower_hir_ty, + ty::ty_lower::lower_adt, + ty::ty_lower::lower_type_alias, + ty::ty_lower::collect_generic_params, + ty::ty_lower::GenericParamOwnerId, + /// Trait lowering. + ty::trait_lower::lower_trait, /// ADT analysis. ty::adt_analysis::check_recursive_adt, ty::adt_analysis::analyze_adt, - // Trait resolution. + /// Trait resolution. ty::trait_::TraitDef, ty::trait_::TraitInstId, + ty::trait_::Implementor, + /// Diagnostic accumulators. ty::diagnostics::AdtDefDiagAccumulator, ty::diagnostics::TypeAliasDefDiagAccumulator, ty::diagnostics::GenericParamDiagAccumulator, diff --git a/crates/hir-analysis/src/ty/adt_analysis.rs b/crates/hir-analysis/src/ty/adt_analysis.rs index 574f2dd745..4b3ba25c40 100644 --- a/crates/hir-analysis/src/ty/adt_analysis.rs +++ b/crates/hir-analysis/src/ty/adt_analysis.rs @@ -9,8 +9,8 @@ use crate::{ty::diagnostics::AdtDefDiagAccumulator, HirAnalysisDb}; use super::{ diagnostics::TyLowerDiag, - lower::{lower_adt, lower_hir_ty}, ty::{AdtDef, AdtRefId, TyId}, + ty_lower::{lower_adt, lower_hir_ty}, visitor::{walk_ty, TyDiagCollector, TyVisitor}, }; diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index 5dced8da5d..b650eca97a 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -7,15 +7,16 @@ use self::{ AdtDefDiagAccumulator, GenericParamDiagAccumulator, TyLowerDiag, TypeAliasDefDiagAccumulator, }, - lower::GenericParamOwnerId, ty::AdtRefId, + ty_lower::GenericParamOwnerId, }; pub mod adt_analysis; pub mod diagnostics; -pub mod lower; pub mod trait_; +pub mod trait_lower; pub mod ty; +pub mod ty_lower; pub mod visitor; mod unify; @@ -58,7 +59,7 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { .into_iter() .chain( if let Some(owner_id) = adt.generic_owner_id(self.db) { - lower::collect_generic_params::accumulated::( + ty_lower::collect_generic_params::accumulated::( self.db, owner_id, ) } else { @@ -92,14 +93,16 @@ impl<'db> ModuleAnalysisPass for TypeAliasAnalysisPass<'db> { .all_type_aliases(self.db.as_hir_db()) .iter() .map(|&alias| { - lower::lower_type_alias::accumulated::(self.db, alias) - .into_iter() - .chain( - lower::lower_type_alias::accumulated::( - self.db, alias, - ) - .into_iter(), + ty_lower::lower_type_alias::accumulated::( + self.db, alias, + ) + .into_iter() + .chain( + ty_lower::lower_type_alias::accumulated::( + self.db, alias, ) + .into_iter(), + ) }) .flatten() .collect(); @@ -127,7 +130,7 @@ impl<'db> ModuleAnalysisPass for TraitAnalysisPass<'db> { .iter() .map(|&trait_| { let 
owner_id = GenericParamOwnerId::new(self.db, trait_.into()); - lower::collect_generic_params::accumulated::( + ty_lower::collect_generic_params::accumulated::( self.db, owner_id, ) }) diff --git a/crates/hir-analysis/src/ty/trait_.rs b/crates/hir-analysis/src/ty/trait_.rs index 94888248cf..c66aaeefe6 100644 --- a/crates/hir-analysis/src/ty/trait_.rs +++ b/crates/hir-analysis/src/ty/trait_.rs @@ -1,40 +1,60 @@ /// This module contains the logic for solving trait bounds. -use hir::hir_def::{Func, Trait}; -use rustc_hash::{FxHashMap, FxHashSet}; +use hir::hir_def::{ImplTrait, Trait}; +use rustc_hash::FxHashMap; -use super::ty::TyId; +use crate::HirAnalysisDb; -/// `Ty` implements `Trait` with the given type arguments. -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct Predicate { - pub trait_: TraitInstId, - pub ty: TyId, +use super::ty::{Subst, TyId}; + +#[salsa::interned] +pub(crate) struct Implementor { + pub(crate) impl_def: ImplTrait, + pub(crate) trait_: TraitInstId, + pub(crate) ty: TyId, + #[return_ref] + pub(crate) params: Vec, } -/// Represents an each type which implements a trait. -/// Whenever `predicates` are satisfied, `impl_` is satisfied. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct Implementor { - predicates: Vec, - impl_: Predicate, +impl Implementor { + pub(crate) fn trait_def(self, db: &dyn HirAnalysisDb) -> TraitDef { + self.trait_(db).def(db) + } } -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct TraitImplTable { - pub impls: FxHashMap, - pub method_table: FxHashMap>, +#[derive(Clone, Debug, PartialEq, Eq, Default)] +pub(crate) struct TraitImplTable { + pub(crate) impls: FxHashMap>, } /// Represents an instantiated trait which is implemented to types. #[salsa::interned] -pub struct TraitInstId { - pub trait_: TraitDef, +pub(crate) struct TraitInstId { + pub def: TraitDef, + #[return_ref] pub substs: Vec, } +impl TraitInstId { + pub(crate) fn apply_subst( + self, + db: &dyn HirAnalysisDb, + subst: &mut S, + ) -> TraitInstId { + TraitInstId::new( + db, + self.def(db), + self.substs(db) + .iter() + .map(|ty| ty.apply_subst(db, subst)) + .collect(), + ) + } +} + #[salsa::tracked] -pub struct TraitDef { +pub(crate) struct TraitDef { pub trait_: Trait, + #[return_ref] pub args: Vec, pub self_arg: TyId, // TODO: we need to collect associated method types here. diff --git a/crates/hir-analysis/src/ty/trait_lower.rs b/crates/hir-analysis/src/ty/trait_lower.rs new file mode 100644 index 0000000000..3b2b196705 --- /dev/null +++ b/crates/hir-analysis/src/ty/trait_lower.rs @@ -0,0 +1,118 @@ +use std::collections::BTreeMap; + +use hir::hir_def::{ImplTrait, TopLevelMod, Trait}; +use rustc_hash::FxHashMap; + +use crate::HirAnalysisDb; + +use super::{ + diagnostics::{TraitLowerDiag, TyLowerDiag}, + trait_::{Implementor, TraitDef, TraitImplTable}, + ty::TyId, + ty_lower::{collect_generic_params, GenericParamOwnerId}, + unify::UnificationTable, +}; + +#[salsa::tracked] +pub(crate) fn lower_trait(db: &dyn HirAnalysisDb, trait_: Trait) -> TraitDef { + TraitBuilder::new(db, trait_).build() +} + +struct TraitBuilder<'db> { + db: &'db dyn HirAnalysisDb, + trait_: Trait, + params: Vec, + self_args: TyId, + // TODO: We need to lower associated methods here. 
+ // methods: Vec +} + +impl<'db> TraitBuilder<'db> { + fn new(db: &'db dyn HirAnalysisDb, trait_: Trait) -> Self { + let params_owner_id = GenericParamOwnerId::new(db, trait_.into()); + let params_set = collect_generic_params(db, params_owner_id); + Self { + db, + trait_, + params: params_set.params.clone(), + self_args: params_set.trait_self.unwrap(), + } + } + + fn build(self) -> TraitDef { + TraitDef::new(self.db, self.trait_, self.params, self.self_args) + } +} + +/// Collect all implementors in an ingot. +struct ImplementorCollector<'db> { + db: &'db dyn HirAnalysisDb, + impl_table: TraitImplTable, + diags: BTreeMap, +} + +impl<'db> ImplementorCollector<'db> { + fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { + db, + impl_table: TraitImplTable::default(), + diags: BTreeMap::new(), + } + } + + fn collect_impls(&mut self, impls: &[ImplTrait]) { + todo!() + } + + fn push_diag(&mut self, impl_: ImplTrait, diag: impl Into) { + let top_mod = impl_.top_mod(self.db.as_hir_db()); + self.diags + .entry(top_mod) + .or_insert_with(|| TraitImplDiag::from(diag.into())); + } +} + +impl Implementor { + fn generalize<'db>(self, db: &'db dyn HirAnalysisDb) -> (Self, UnificationTable<'db>) { + let mut subst = FxHashMap::default(); + let mut table = UnificationTable::new(db); + for param in self.params(db) { + let var = table.new_var(param.kind(db)); + subst.insert(*param, var); + } + + let impl_def = self.impl_def(db); + let trait_ = self.trait_(db).apply_subst(db, &mut subst); + let ty = self.ty(db).apply_subst(db, &mut subst); + let params = self + .params(db) + .iter() + .map(|param| subst[param]) + .collect::>(); + + let implementor = Implementor::new(db, impl_def, trait_, ty, params); + + (implementor, table) + } + + fn does_conflict(self, db: &dyn HirAnalysisDb, other: &Self) -> bool { + if self.trait_def(db) != other.trait_def(db) { + return false; + } + + let (self_, mut table) = self.generalize(db); + for (&self_param, &other_param) in self_.params(db).iter().zip(other.params(db)) { + if !table.unify(self_param, other_param) { + return false; + } + } + + table.unify(self_.ty(db), other.ty(db)) + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::From)] +enum TraitImplDiag { + Ty(TyLowerDiag), + Trait(TraitLowerDiag), +} diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index 85b082ba52..e993bae1cb 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -15,7 +15,7 @@ use rustc_hash::FxHashMap; use crate::HirAnalysisDb; use super::{ - lower::{lower_hir_ty, GenericParamOwnerId}, + ty_lower::{lower_hir_ty, GenericParamOwnerId}, unify::InferenceKey, visitor::TyVisitor, }; diff --git a/crates/hir-analysis/src/ty/lower.rs b/crates/hir-analysis/src/ty/ty_lower.rs similarity index 96% rename from crates/hir-analysis/src/ty/lower.rs rename to crates/hir-analysis/src/ty/ty_lower.rs index f03d78416f..698ba0fa93 100644 --- a/crates/hir-analysis/src/ty/lower.rs +++ b/crates/hir-analysis/src/ty/ty_lower.rs @@ -34,11 +34,6 @@ pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> AdtDef { AdtTyBuilder::new(db, adt).build() } -#[salsa::tracked] -pub fn lower_trait(db: &dyn HirAnalysisDb, trait_: Trait) -> TraitDef { - TraitBuilder::new(db, trait_).build() -} - #[salsa::tracked(return_ref)] pub(crate) fn collect_generic_params( db: &dyn HirAnalysisDb, @@ -422,32 +417,6 @@ impl<'db> AdtTyBuilder<'db> { } } -struct TraitBuilder<'db> { - db: &'db dyn HirAnalysisDb, - trait_: Trait, - params: Vec, - self_args: TyId, - // TODO: 
We need to lower associated methods here. - // methods: Vec -} - -impl<'db> TraitBuilder<'db> { - fn new(db: &'db dyn HirAnalysisDb, trait_: Trait) -> Self { - let params_owner_id = GenericParamOwnerId::new(db, trait_.into()); - let params_set = collect_generic_params(db, params_owner_id); - Self { - db, - trait_, - params: params_set.params.clone(), - self_args: params_set.trait_self.unwrap(), - } - } - - fn build(self) -> TraitDef { - TraitDef::new(self.db, self.trait_, self.params, self.self_args) - } -} - #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct GenericParamTypeSet { pub(crate) params: Vec, diff --git a/crates/hir-analysis/src/ty/unify.rs b/crates/hir-analysis/src/ty/unify.rs index 9e36ff2626..1196003cc4 100644 --- a/crates/hir-analysis/src/ty/unify.rs +++ b/crates/hir-analysis/src/ty/unify.rs @@ -2,7 +2,7 @@ use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue}; use crate::HirAnalysisDb; -use super::ty::{Kind, Subst, TyData, TyId}; +use super::ty::{Kind, Subst, TyData, TyId, TyVar}; pub struct UnificationTable<'db> { db: &'db dyn HirAnalysisDb, @@ -10,6 +10,13 @@ pub struct UnificationTable<'db> { } impl<'db> UnificationTable<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { + db, + table: InPlaceUnificationTable::new(), + } + } + /// Returns true if the two types were unified. pub fn unify(&mut self, ty1: TyId, ty2: TyId) -> bool { let snapshot = self.table.snapshot(); @@ -23,7 +30,17 @@ impl<'db> UnificationTable<'db> { } } - pub fn new_key(&mut self, kind: Kind) -> InferenceKey { + pub fn new_var(&mut self, kind: &Kind) -> TyId { + let key = self.new_key(kind); + let ty_var = TyVar { + kind: kind.clone(), + key, + }; + + TyId::new(self.db, TyData::TyVar(ty_var)) + } + + pub fn new_key(&mut self, kind: &Kind) -> InferenceKey { self.table.new_key(InferenceValue::Unbounded(kind.clone())) } diff --git a/crates/hir-analysis/src/ty/visitor.rs b/crates/hir-analysis/src/ty/visitor.rs index b9587eece0..200d5f45d1 100644 --- a/crates/hir-analysis/src/ty/visitor.rs +++ b/crates/hir-analysis/src/ty/visitor.rs @@ -8,8 +8,8 @@ use crate::HirAnalysisDb; use super::{ diagnostics::TyLowerDiag, - lower::lower_hir_ty, ty::{AdtDef, InvalidCause, PrimTy, TyConcrete, TyData, TyId, TyParam, TyVar}, + ty_lower::lower_hir_ty, }; pub trait TyVisitor { From 97c7ccc8de486515d29a80a6d93806df5dee2c4a Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Mon, 18 Sep 2023 00:03:17 +0200 Subject: [PATCH 319/678] Implement impl trait lowering --- crates/driver2/src/lib.rs | 3 +- crates/hir-analysis/src/lib.rs | 1 + .../src/name_resolution/name_resolver.rs | 13 +- crates/hir-analysis/src/ty/adt_analysis.rs | 13 +- crates/hir-analysis/src/ty/diagnostics.rs | 108 +++++--- crates/hir-analysis/src/ty/mod.rs | 69 ++++-- crates/hir-analysis/src/ty/trait_.rs | 31 ++- crates/hir-analysis/src/ty/trait_lower.rs | 233 ++++++++++++++++-- crates/hir-analysis/src/ty/ty.rs | 18 +- crates/hir-analysis/src/ty/ty_lower.rs | 73 ++++-- crates/hir/src/hir_def/item.rs | 32 +++ crates/hir/src/hir_def/mod.rs | 10 +- crates/hir/src/lib.rs | 17 +- 13 files changed, 511 insertions(+), 110 deletions(-) diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs index 3695a72d4b..a0863c385a 100644 --- a/crates/driver2/src/lib.rs +++ b/crates/driver2/src/lib.rs @@ -17,7 +17,7 @@ use hir::{ }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, - ty::{TraitAnalysisPass, TypeAliasAnalysisPass, TypeDefAnalysisPass}, + 
ty::{ImplTraitAnalysisPass, TraitAnalysisPass, TypeAliasAnalysisPass, TypeDefAnalysisPass}, HirAnalysisDb, }; @@ -148,5 +148,6 @@ fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { pass_manager.add_module_pass(Box::new(TypeDefAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(TypeAliasAnalysisPass::new(db))); pass_manager.add_module_pass(Box::new(TraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImplTraitAnalysisPass::new(db))); pass_manager } diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs index bae9843f8e..6a612ee7bc 100644 --- a/crates/hir-analysis/src/lib.rs +++ b/crates/hir-analysis/src/lib.rs @@ -21,6 +21,7 @@ pub struct Jar( ty::ty_lower::GenericParamOwnerId, /// Trait lowering. ty::trait_lower::lower_trait, + ty::trait_lower::collect_trait_impl, /// ADT analysis. ty::adt_analysis::check_recursive_adt, ty::adt_analysis::analyze_adt, diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs index 043ceaa709..a3bfbc73d6 100644 --- a/crates/hir-analysis/src/name_resolution/name_resolver.rs +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -504,6 +504,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { } pub(crate) fn resolve_query(&mut self, query: NameQuery) -> NameResBucket { + let hir_db = self.db.as_hir_db(); // If the query is already resolved, return the cached result. if let Some(resolved) = self.cache_store.get(query) { return resolved.clone(); @@ -519,7 +520,7 @@ impl<'db, 'a> NameResolver<'db, 'a> { // 1. Look for the name in the current scope. let mut found_scopes = FxHashSet::default(); - for edge in query.scope.edges(self.db.as_hir_db()) { + for edge in query.scope.edges(hir_db) { match edge.kind.propagate(&query) { PropagationResult::Terminated => { if found_scopes.insert(edge.dest) { @@ -577,16 +578,16 @@ impl<'db, 'a> NameResolver<'db, 'a> { // 5. Look for the name in the external ingots. query .scope - .top_mod(self.db.as_hir_db()) - .ingot(self.db.as_hir_db()) - .external_ingots(self.db.as_hir_db()) + .top_mod(hir_db) + .ingot(hir_db) + .external_ingots(hir_db) .iter() - .for_each(|(name, root_mod)| { + .for_each(|(name, ingot)| { if *name == query.name { // We don't care about the result of `push` because we assume ingots are // guaranteed to be unique. bucket.push(&NameRes::new_from_scope( - ScopeId::from_item((*root_mod).into()), + ScopeId::from_item((ingot.root_mod(hir_db)).into()), NameDomain::Type, NameDerivation::External, )) diff --git a/crates/hir-analysis/src/ty/adt_analysis.rs b/crates/hir-analysis/src/ty/adt_analysis.rs index 4b3ba25c40..50f3a1e1d9 100644 --- a/crates/hir-analysis/src/ty/adt_analysis.rs +++ b/crates/hir-analysis/src/ty/adt_analysis.rs @@ -10,7 +10,7 @@ use crate::{ty::diagnostics::AdtDefDiagAccumulator, HirAnalysisDb}; use super::{ diagnostics::TyLowerDiag, ty::{AdtDef, AdtRefId, TyId}, - ty_lower::{lower_adt, lower_hir_ty}, + ty_lower::{lower_adt, lower_hir_ty, lower_hir_ty_with_diag}, visitor::{walk_ty, TyDiagCollector, TyVisitor}, }; @@ -68,15 +68,8 @@ impl<'db> AdtDefAnalysisVisitor<'db> { impl<'db> Visitor for AdtDefAnalysisVisitor<'db> { fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, hir_ty: HirTyId) { - self.accumulated.extend(collect_ty_lower_diags( - self.db, - hir_ty, - ctxt.span().unwrap(), - self.scope, - )); - - // We don't call `walk_ty` to make sure that we don't visit ty - // recursively, which is visited by `collect_ty_lower_diags`. 
+ self.accumulated + .extend(lower_hir_ty_with_diag(self.db, hir_ty, ctxt.span().unwrap(), self.scope).1); } fn visit_field_def(&mut self, ctxt: &mut VisitorCtxt<'_, LazyFieldDefSpan>, field: &FieldDef) { diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 2d877416b9..9f431329a4 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -3,9 +3,9 @@ use common::diagnostics::{ }; use hir::{ diagnostics::DiagnosticVoucher, - hir_def::{Trait, TypeAlias as HirTypeAlias}, + hir_def::{ImplTrait, Trait, TypeAlias as HirTypeAlias}, span::{DynLazySpan, LazySpan}, - HirDb, + HirDb, SpannedHirDb, }; use super::ty::Kind; @@ -121,7 +121,7 @@ impl TyLowerDiag { } } - fn sub_diags(&self, db: &dyn hir::SpannedHirDb) -> Vec { + fn sub_diags(&self, db: &dyn SpannedHirDb) -> Vec { match self { Self::NotFullyAppliedType(span) => vec![SubDiagnostic::new( LabelStyle::Primary, @@ -221,7 +221,7 @@ impl DiagnosticVoucher for TyLowerDiag { GlobalErrorCode::new(DiagnosticPass::TypeDefinition, self.local_code()) } - fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic { let severity = self.severity(); let error_code = self.error_code(); let message = self.message(db.as_hir_db()); @@ -232,33 +232,31 @@ impl DiagnosticVoucher for TyLowerDiag { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TraitLowerDiag { +pub enum ImplTraitLowerDiag { ExternalTraitForExternalType(DynLazySpan), ConflictTraitImpl { - primary: DynLazySpan, - conflict_with: DynLazySpan, + primary: ImplTrait, + conflict_with: ImplTrait, }, +} - KindMismatch { - primary: DynLazySpan, - trait_def: Trait, - }, +impl ImplTraitLowerDiag { + pub fn external_trait_for_external_type(impl_trait: ImplTrait) -> Self { + Self::ExternalTraitForExternalType(impl_trait.lazy_span().trait_ref().into()) + } - TraitArgumentMismatch { - span: DynLazySpan, - trait_: Trait, - n_given_arg: usize, - }, -} + pub(super) fn conflict_impl(primary: ImplTrait, conflict_with: ImplTrait) -> Self { + Self::ConflictTraitImpl { + primary, + conflict_with, + } + } -impl TraitLowerDiag { fn local_code(&self) -> u16 { match self { Self::ExternalTraitForExternalType(_) => 0, Self::ConflictTraitImpl { .. } => 1, - Self::KindMismatch { .. } => 2, - Self::TraitArgumentMismatch { .. } => 3, } } @@ -269,12 +267,6 @@ impl TraitLowerDiag { } Self::ConflictTraitImpl { .. } => "conflict trait implementation".to_string(), - - Self::KindMismatch { .. } => "type doesn't satisfy required kind bound".to_string(), - - Self::TraitArgumentMismatch { .. 
} => { - "given trait argument number mismatch".to_string() - } } } @@ -293,15 +285,71 @@ impl TraitLowerDiag { SubDiagnostic::new( LabelStyle::Primary, "conflict trait implementation".to_string(), - primary.resolve(db), + primary.lazy_span().ty().resolve(db), ), SubDiagnostic::new( LabelStyle::Secondary, "conflict with this trait implementation".to_string(), - conflict_with.resolve(db), + conflict_with.lazy_span().ty().resolve(db), ), ], + } + } + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl DiagnosticVoucher for ImplTraitLowerDiag { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::TypeDefinition, self.local_code()) + } + + fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(db.as_hir_db()); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TraitSatisfactionDiag { + KindMismatch { + primary: DynLazySpan, + trait_def: Trait, + }, + + TraitArgumentMismatch { + span: DynLazySpan, + trait_: Trait, + n_given_arg: usize, + }, +} + +impl TraitSatisfactionDiag { + fn local_code(&self) -> u16 { + match self { + Self::KindMismatch { .. } => 0, + Self::TraitArgumentMismatch { .. } => 1, + } + } + + fn message(&self, db: &dyn HirDb) -> String { + match self { + Self::KindMismatch { .. } => "type doesn't satisfy required kind bound".to_string(), + + Self::TraitArgumentMismatch { .. } => { + "given trait argument number mismatch".to_string() + } + } + } + + fn sub_diags(&self, db: &dyn SpannedHirDb) -> Vec { + match self { Self::KindMismatch { primary, trait_def } => vec![ SubDiagnostic::new( LabelStyle::Primary, @@ -345,12 +393,12 @@ impl TraitLowerDiag { } } -impl DiagnosticVoucher for TraitLowerDiag { +impl DiagnosticVoucher for TraitSatisfactionDiag { fn error_code(&self) -> GlobalErrorCode { GlobalErrorCode::new(DiagnosticPass::TypeDefinition, self.local_code()) } - fn to_complete(&self, db: &dyn hir::SpannedHirDb) -> CompleteDiagnostic { + fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic { let severity = self.severity(); let error_code = self.error_code(); let message = self.message(db.as_hir_db()); diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index b650eca97a..e53c571842 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -1,14 +1,17 @@ +#![allow(unused)] use crate::HirAnalysisDb; -use hir::analysis_pass::ModuleAnalysisPass; +use hir::{analysis_pass::ModuleAnalysisPass, hir_def::TopLevelMod}; use rustc_hash::FxHashSet; use self::{ + adt_analysis::analyze_adt, diagnostics::{ - AdtDefDiagAccumulator, GenericParamDiagAccumulator, TyLowerDiag, + AdtDefDiagAccumulator, GenericParamDiagAccumulator, TraitSatisfactionDiag, TyLowerDiag, TypeAliasDefDiagAccumulator, }, + trait_lower::{collect_trait_impl, TraitImplDiag}, ty::AdtRefId, - ty_lower::GenericParamOwnerId, + ty_lower::{collect_generic_params, lower_type_alias, GenericParamOwnerId}, }; pub mod adt_analysis; @@ -34,7 +37,7 @@ impl<'db> TypeDefAnalysisPass<'db> { impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { fn run_on_module( &mut self, - top_mod: hir::hir_def::TopLevelMod, + top_mod: TopLevelMod, ) -> Vec> { let hir_db = self.db.as_hir_db(); let adts = top_mod @@ -55,11 +58,11 @@ impl<'db> ModuleAnalysisPass for TypeDefAnalysisPass<'db> { 
); adts.map(|adt| { - adt_analysis::analyze_adt::accumulated::(self.db, adt) + analyze_adt::accumulated::(self.db, adt) .into_iter() .chain( if let Some(owner_id) = adt.generic_owner_id(self.db) { - ty_lower::collect_generic_params::accumulated::( + collect_generic_params::accumulated::( self.db, owner_id, ) } else { @@ -87,22 +90,20 @@ impl<'db> TypeAliasAnalysisPass<'db> { impl<'db> ModuleAnalysisPass for TypeAliasAnalysisPass<'db> { fn run_on_module( &mut self, - top_mod: hir::hir_def::TopLevelMod, + top_mod: TopLevelMod, ) -> Vec> { let diags: FxHashSet = top_mod .all_type_aliases(self.db.as_hir_db()) .iter() .map(|&alias| { - ty_lower::lower_type_alias::accumulated::( - self.db, alias, - ) - .into_iter() - .chain( - ty_lower::lower_type_alias::accumulated::( - self.db, alias, + lower_type_alias::accumulated::(self.db, alias) + .into_iter() + .chain( + lower_type_alias::accumulated::( + self.db, alias, + ) + .into_iter(), ) - .into_iter(), - ) }) .flatten() .collect(); @@ -123,14 +124,14 @@ impl<'db> TraitAnalysisPass<'db> { impl<'db> ModuleAnalysisPass for TraitAnalysisPass<'db> { fn run_on_module( &mut self, - top_mod: hir::hir_def::TopLevelMod, + top_mod: TopLevelMod, ) -> Vec> { top_mod .all_traits(self.db.as_hir_db()) .iter() .map(|&trait_| { let owner_id = GenericParamOwnerId::new(self.db, trait_.into()); - ty_lower::collect_generic_params::accumulated::( + collect_generic_params::accumulated::( self.db, owner_id, ) }) @@ -139,3 +140,35 @@ impl<'db> ModuleAnalysisPass for TraitAnalysisPass<'db> { .collect() } } + +pub struct ImplTraitAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> ImplTraitAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass for ImplTraitAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod, + ) -> Vec> { + let ingot = top_mod.ingot(self.db.as_hir_db()); + let (_, diags) = collect_trait_impl(self.db, ingot); + let Some(diags) = diags.get(&top_mod) else { + return Vec::new(); + }; + + diags + .iter() + .map(|diag| match diag { + TraitImplDiag::Ty(diag) => Box::new(diag.clone()) as _, + TraitImplDiag::Satisfaction(diag) => Box::new(diag.clone()) as _, + TraitImplDiag::TraitImplLower(diag) => Box::new(diag.clone()) as _, + }) + .collect() + } +} diff --git a/crates/hir-analysis/src/ty/trait_.rs b/crates/hir-analysis/src/ty/trait_.rs index c66aaeefe6..323b7b351e 100644 --- a/crates/hir-analysis/src/ty/trait_.rs +++ b/crates/hir-analysis/src/ty/trait_.rs @@ -1,10 +1,13 @@ /// This module contains the logic for solving trait bounds. -use hir::hir_def::{ImplTrait, Trait}; +use hir::hir_def::{ImplTrait, IngotId, Trait}; use rustc_hash::FxHashMap; use crate::HirAnalysisDb; -use super::ty::{Subst, TyId}; +use super::{ + diagnostics::TraitSatisfactionDiag, + ty::{Kind, Subst, TyId}, +}; #[salsa::interned] pub(crate) struct Implementor { @@ -26,6 +29,19 @@ pub(crate) struct TraitImplTable { pub(crate) impls: FxHashMap>, } +impl TraitImplTable { + pub(crate) fn insert(&mut self, db: &dyn HirAnalysisDb, implementor: Implementor) { + self.impls + .entry(implementor.trait_def(db)) + .or_default() + .push(implementor); + } + + pub(crate) fn get(&self, trait_def: TraitDef) -> Option<&Vec> { + self.impls.get(&trait_def) + } +} + /// Represents an instantiated trait which is implemented to types. #[salsa::interned] pub(crate) struct TraitInstId { @@ -60,3 +76,14 @@ pub(crate) struct TraitDef { // TODO: we need to collect associated method types here. 
// methods: Vec } + +impl TraitDef { + pub(crate) fn expected_implementor_kind(self, db: &dyn HirAnalysisDb) -> &Kind { + self.self_arg(db).kind(db) + } + + pub(crate) fn ingot(self, db: &dyn HirAnalysisDb) -> IngotId { + let hir_db = db.as_hir_db(); + self.trait_(db).top_mod(hir_db).ingot(hir_db) + } +} diff --git a/crates/hir-analysis/src/ty/trait_lower.rs b/crates/hir-analysis/src/ty/trait_lower.rs index 3b2b196705..872af7e3f2 100644 --- a/crates/hir-analysis/src/ty/trait_lower.rs +++ b/crates/hir-analysis/src/ty/trait_lower.rs @@ -1,16 +1,26 @@ use std::collections::BTreeMap; -use hir::hir_def::{ImplTrait, TopLevelMod, Trait}; +use hir::{ + hir_def::{ + scope_graph::ScopeId, ImplTrait, IngotId, ItemKind, Partial, TopLevelMod, Trait, TraitRef, + }, + visitor::prelude::LazyPathTypeSpan, +}; use rustc_hash::FxHashMap; -use crate::HirAnalysisDb; +use crate::{ + name_resolution::{resolve_path_early, EarlyResolvedPath, NameDomain, NameResKind}, + ty::ty_lower::{lower_generic_arg_list_with_diag, lower_generic_arg_with_diag, lower_hir_ty}, + HirAnalysisDb, +}; use super::{ - diagnostics::{TraitLowerDiag, TyLowerDiag}, - trait_::{Implementor, TraitDef, TraitImplTable}, + diagnostics::{ImplTraitLowerDiag, TraitSatisfactionDiag, TyLowerDiag}, + trait_::{Implementor, TraitDef, TraitImplTable, TraitInstId}, ty::TyId, - ty_lower::{collect_generic_params, GenericParamOwnerId}, + ty_lower::{collect_generic_params, lower_hir_ty_with_diag, GenericParamOwnerId}, unify::UnificationTable, + visitor::TyDiagCollector, }; #[salsa::tracked] @@ -18,6 +28,60 @@ pub(crate) fn lower_trait(db: &dyn HirAnalysisDb, trait_: Trait) -> TraitDef { TraitBuilder::new(db, trait_).build() } +#[salsa::tracked(return_ref)] +pub(crate) fn collect_trait_impl( + db: &dyn HirAnalysisDb, + ingot: IngotId, +) -> (TraitImplTable, BTreeMap>) { + let dependent_impls = ingot + .external_ingots(db.as_hir_db()) + .iter() + .map(|(_, external)| &collect_trait_impl(db, *external).0) + .collect(); + + let mut collector = ImplementorCollector::new(db, dependent_impls); + collector.collect_impls(ingot.all_impl_trait(db.as_hir_db())); + collector.finalize() +} + +pub(super) fn lower_trait_ref( + db: &dyn HirAnalysisDb, + trait_ref: TraitRef, + ref_span: LazyPathTypeSpan, + scope: ScopeId, +) -> (Option, Vec) { + let hir_db = db.as_hir_db(); + let (args, mut diags) = lower_generic_arg_list_with_diag( + db, + trait_ref.generic_args, + ref_span.generic_args(), + scope, + ); + + let Partial::Present(path) = trait_ref.path else { + return (None, diags); + }; + + match resolve_path_early(db, path, scope) { + EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { + Ok(res) => { + let NameResKind::Scope(ScopeId::Item(ItemKind::Trait(trait_))) = res.kind else { + return (None, diags); + }; + let trait_def = lower_trait(db, trait_); + (Some(TraitInstId::new(db, trait_def, args)), diags) + } + + Err(_) => (None, diags), + }, + + EarlyResolvedPath::Partial { .. 
} => { + diags.push(TyLowerDiag::AssocTy(ref_span.path_moved().into()).into()); + (None, diags) + } + } +} + struct TraitBuilder<'db> { db: &'db dyn HirAnalysisDb, trait_: Trait, @@ -48,34 +112,142 @@ impl<'db> TraitBuilder<'db> { struct ImplementorCollector<'db> { db: &'db dyn HirAnalysisDb, impl_table: TraitImplTable, - diags: BTreeMap, + dependent_impl_tables: Vec<&'db TraitImplTable>, + diags: BTreeMap>, } impl<'db> ImplementorCollector<'db> { - fn new(db: &'db dyn HirAnalysisDb) -> Self { + fn new(db: &'db dyn HirAnalysisDb, dependent_impl_tables: Vec<&'db TraitImplTable>) -> Self { Self { db, impl_table: TraitImplTable::default(), + dependent_impl_tables, diags: BTreeMap::new(), } } + fn finalize(self) -> (TraitImplTable, BTreeMap>) { + (self.impl_table, self.diags) + } + fn collect_impls(&mut self, impls: &[ImplTrait]) { - todo!() + for &impl_ in impls { + let Some(implementor) = self.lower_impl(impl_) else { + continue; + }; + + if let Some(conflict_with) = self.does_conflict(implementor) { + let diag = ImplTraitLowerDiag::conflict_impl( + implementor.impl_def(self.db), + conflict_with.impl_def(self.db), + ); + self.push_diag(impl_, diag); + } else { + self.impl_table.insert(self.db, implementor); + } + } + } + + fn lower_impl(&mut self, impl_: ImplTrait) -> Option { + let ty = self.lower_implementor_ty(impl_)?; + let trait_ = self.instantiate_trait(impl_, ty)?; + let impl_trait_ingot = impl_ + .top_mod(self.db.as_hir_db()) + .ingot(self.db.as_hir_db()); + + if Some(impl_trait_ingot) != ty.ingot(self.db) + && impl_trait_ingot != trait_.def(self.db).ingot(self.db) + { + let diag = ImplTraitLowerDiag::external_trait_for_external_type(impl_); + self.push_diag(impl_, diag); + return None; + } + + let param_owner = GenericParamOwnerId::new(self.db, impl_.into()); + let params = collect_generic_params(self.db, param_owner); + Some(Implementor::new( + self.db, + impl_, + trait_, + ty, + params.params.clone(), + )) + } + + fn lower_implementor_ty(&mut self, impl_: ImplTrait) -> Option { + let hir_ty = impl_.ty(self.db.as_hir_db()).to_opt()?; + let scope = impl_.scope(); + let (ty, diags) = lower_hir_ty_with_diag(self.db, hir_ty, impl_.lazy_span().ty(), scope); + if diags.is_empty() { + Some(ty) + } else { + for diag in diags { + self.push_diag(impl_, diag); + } + None + } + } + + fn instantiate_trait( + &mut self, + impl_trait: ImplTrait, + implementor_ty: TyId, + ) -> Option { + let trait_ref = impl_trait.trait_ref(self.db.as_hir_db()).to_opt()?; + let (trait_inst, diags) = lower_trait_ref( + self.db, + trait_ref, + impl_trait.lazy_span().trait_ref_moved(), + impl_trait.scope(), + ); + for diag in diags { + self.push_diag(impl_trait, diag); + } + + let trait_inst = trait_inst?; + if implementor_ty.kind(self.db) + == trait_inst.def(self.db).expected_implementor_kind(self.db) + { + Some(trait_inst) + } else { + let diag = TraitSatisfactionDiag::KindMismatch { + primary: impl_trait.lazy_span().ty_moved().into(), + trait_def: trait_inst.def(self.db).trait_(self.db), + }; + self.push_diag(impl_trait, diag); + None + } + } + + fn does_conflict(&mut self, implementor: Implementor) -> Option { + let def = implementor.trait_def(self.db); + for &already_implemented in self.impl_table.get(def)? 
{ + let mut table = UnificationTable::new(self.db); + if already_implemented.does_conflict(self.db, implementor, &mut table) { + return Some(already_implemented); + } + } + + None + } + + fn get_implementors_for(&mut self, def: TraitDef) -> impl Iterator + '_ { + self.dependent_impl_tables + .iter() + .filter_map(move |table| table.get(def).map(|impls| impls.iter().copied())) + .flatten() + .chain(self.impl_table.get(def).into_iter().flatten().copied()) } fn push_diag(&mut self, impl_: ImplTrait, diag: impl Into) { let top_mod = impl_.top_mod(self.db.as_hir_db()); - self.diags - .entry(top_mod) - .or_insert_with(|| TraitImplDiag::from(diag.into())); + self.diags.entry(top_mod).or_default().push(diag.into()); } } impl Implementor { - fn generalize<'db>(self, db: &'db dyn HirAnalysisDb) -> (Self, UnificationTable<'db>) { + fn generalize<'db>(self, db: &'db dyn HirAnalysisDb, table: &mut UnificationTable) -> Self { let mut subst = FxHashMap::default(); - let mut table = UnificationTable::new(db); for param in self.params(db) { let var = table.new_var(param.kind(db)); subst.insert(*param, var); @@ -92,27 +264,48 @@ impl Implementor { let implementor = Implementor::new(db, impl_def, trait_, ty, params); - (implementor, table) + implementor } - fn does_conflict(self, db: &dyn HirAnalysisDb, other: &Self) -> bool { + fn does_conflict( + self, + db: &dyn HirAnalysisDb, + other: Self, + table: &mut UnificationTable, + ) -> bool { if self.trait_def(db) != other.trait_def(db) { return false; } - let (self_, mut table) = self.generalize(db); - for (&self_param, &other_param) in self_.params(db).iter().zip(other.params(db)) { + let generalized = self.generalize(db, table); + for (&self_param, &other_param) in generalized.params(db).iter().zip(other.params(db)) { if !table.unify(self_param, other_param) { return false; } } - table.unify(self_.ty(db), other.ty(db)) + table.unify(generalized.ty(db), other.ty(db)) + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::From)] +pub(crate) enum TraitImplDiag { + Ty(TyLowerDiag), + Satisfaction(TraitSatisfactionDiag), + TraitImplLower(ImplTraitLowerDiag), +} + +impl From for TraitImplDiag { + fn from(diag: TraitRefLowerDiag) -> Self { + match diag { + TraitRefLowerDiag::Ty(diag) => TraitImplDiag::Ty(diag), + TraitRefLowerDiag::TraitSatisfactionDiag(diag) => TraitImplDiag::Satisfaction(diag), + } } } #[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::From)] -enum TraitImplDiag { +pub(super) enum TraitRefLowerDiag { Ty(TyLowerDiag), - Trait(TraitLowerDiag), + TraitSatisfactionDiag(TraitSatisfactionDiag), } diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index e993bae1cb..e39ba9e139 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -5,7 +5,7 @@ use hir::{ kw, prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, scope_graph::ScopeId, - Contract, Enum, IdentId, ItemKind, Partial, Struct, TypeAlias as HirTypeAlias, + Contract, Enum, IdentId, IngotId, ItemKind, Partial, Struct, TypeAlias as HirTypeAlias, TypeId as HirTyId, }, span::DynLazySpan, @@ -37,6 +37,13 @@ impl TyId { } } + pub fn ingot(self, db: &dyn HirAnalysisDb) -> Option { + match self.data(db) { + TyData::TyCon(TyConcrete::Adt(adt)) => adt.ingot(db).into(), + _ => None, + } + } + pub fn invalid_cause(self, db: &dyn HirAnalysisDb) -> Option { match self.data(db) { TyData::Invalid(cause) => Some(cause), @@ -194,6 +201,15 @@ impl AdtDef { .into(), } } + + pub fn ingot(self, db: &dyn HirAnalysisDb) -> 
IngotId { + let hir_db = db.as_hir_db(); + match self.adt_ref(db).data(db) { + AdtRef::Enum(e) => e.top_mod(hir_db).ingot(hir_db), + AdtRef::Struct(s) => s.top_mod(hir_db).ingot(hir_db), + AdtRef::Contract(c) => c.top_mod(hir_db).ingot(hir_db), + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir-analysis/src/ty/ty_lower.rs b/crates/hir-analysis/src/ty/ty_lower.rs index 698ba0fa93..b77113af9a 100644 --- a/crates/hir-analysis/src/ty/ty_lower.rs +++ b/crates/hir-analysis/src/ty/ty_lower.rs @@ -29,6 +29,17 @@ pub fn lower_hir_ty(db: &dyn HirAnalysisDb, ty: HirTyId, scope: ScopeId) -> TyId TyBuilder::new(db, scope).lower_ty(ty) } +pub(crate) fn lower_hir_ty_with_diag( + db: &dyn HirAnalysisDb, + hir_ty: HirTyId, + hir_ty_span: LazyTySpan, + scope: ScopeId, +) -> (TyId, Vec) { + let mut collector = TyDiagCollector::new(db, scope); + let diags = collector.collect(hir_ty, hir_ty_span); + (lower_hir_ty(db, hir_ty, scope), diags) +} + #[salsa::tracked] pub fn lower_adt(db: &dyn HirAnalysisDb, adt: AdtRefId) -> AdtDef { AdtTyBuilder::new(db, adt).build() @@ -183,7 +194,7 @@ impl<'db> TyBuilder<'db> { let arg_tys: Vec<_> = args .data(self.db.as_hir_db()) .iter() - .map(|arg| self.lower_generic_arg(arg)) + .map(|arg| lower_generic_arg(self.db, arg, self.scope)) .collect(); match path_ty { @@ -235,7 +246,7 @@ impl<'db> TyBuilder<'db> { let db = self.db; args.data(self.db.as_hir_db()) .iter() - .map(|arg| self.lower_generic_arg(arg)) + .map(|arg| lower_generic_arg(self.db, arg, self.scope)) .fold(target_ty, |acc, arg| TyId::app(db, acc, arg)) } @@ -307,18 +318,6 @@ impl<'db> TyBuilder<'db> { } } - fn lower_generic_arg(&mut self, arg: &GenericArg) -> TyId { - match arg { - GenericArg::Type(ty_arg) => ty_arg - .ty - .to_opt() - .map(|ty| lower_hir_ty(self.db, ty, self.scope)) - .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)), - - GenericArg::Const(_) => todo!(), - } - } - /// If the path is resolved to a type, return the resolution. Otherwise, /// returns the `TyId::Invalid` with proper `InvalidCause`. 
fn resolve_path(&mut self, path: PathId) -> Either { @@ -337,6 +336,52 @@ impl<'db> TyBuilder<'db> { } } +pub(super) fn lower_generic_arg(db: &dyn HirAnalysisDb, arg: &GenericArg, scope: ScopeId) -> TyId { + match arg { + GenericArg::Type(ty_arg) => ty_arg + .ty + .to_opt() + .map(|ty| lower_hir_ty(db, ty, scope)) + .unwrap_or_else(|| TyId::invalid(db, InvalidCause::Other)), + + GenericArg::Const(_) => todo!(), + } +} + +pub(super) fn lower_generic_arg_list_with_diag( + db: &dyn HirAnalysisDb, + generic_args: GenericArgListId, + args_span: LazyGenericArgListSpan, + scope: ScopeId, +) -> (Vec, Vec) { + let mut diags = vec![]; + let mut args = vec![]; + for (i, arg) in generic_args.data(db.as_hir_db()).iter().enumerate() { + let (ty, arg_diags) = lower_generic_arg_with_diag(db, arg, args_span.arg(i), scope); + args.push(ty); + diags.extend(arg_diags); + } + + (args, diags) +} + +pub(super) fn lower_generic_arg_with_diag( + db: &dyn HirAnalysisDb, + arg: &GenericArg, + arg_span: LazyGenericArgSpan, + scope: ScopeId, +) -> (TyId, Vec) { + match arg { + GenericArg::Type(ty_arg) => ty_arg + .ty + .to_opt() + .map(|ty| lower_hir_ty_with_diag(db, ty, arg_span.into_type_arg().ty_moved(), scope)) + .unwrap_or_else(|| (TyId::invalid(db, InvalidCause::Other), vec![])), + + GenericArg::Const(_) => todo!(), + } +} + struct AdtTyBuilder<'db> { db: &'db dyn HirAnalysisDb, adt: AdtRefId, diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index ffbe23329b..1ef778ef25 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -405,6 +405,27 @@ impl TopLevelMod { pub fn all_traits<'db>(self, db: &'db dyn HirDb) -> &'db Vec { all_traits_in_top_mod(db, self) } + + /// Returns all traits in the top level module including ones in nested + /// modules. 
+ pub fn all_impl_traits<'db>(self, db: &'db dyn HirDb) -> &'db Vec { + all_impl_trait_in_top_mod(db, self) + } +} + +#[salsa::tracked(return_ref)] +pub fn all_top_mod_in_ingot(db: &dyn HirDb, ingot: IngotId) -> Vec { + let tree = ingot.module_tree(db); + tree.all_modules().collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_impl_trait_in_ingot(db: &dyn HirDb, ingot: IngotId) -> Vec { + ingot + .all_modules(db) + .iter() + .flat_map(|top_mod| top_mod.all_impl_traits(db).iter().copied()) + .collect() } #[salsa::tracked(return_ref)] @@ -467,6 +488,17 @@ pub fn all_traits_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec .collect() } +#[salsa::tracked(return_ref)] +pub fn all_impl_trait_in_top_mod(db: &dyn HirDb, top_mod: TopLevelMod) -> Vec { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::ImplTrait(impl_trait) => Some(*impl_trait), + _ => None, + }) + .collect() +} + #[salsa::tracked] pub struct Mod { #[id] diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs index 633f8dbb9b..a344a0418b 100644 --- a/crates/hir/src/hir_def/mod.rs +++ b/crates/hir/src/hir_def/mod.rs @@ -43,17 +43,25 @@ impl IngotId { module_tree_impl(db, self.inner(db)) } + pub fn all_modules(self, db: &dyn HirDb) -> &Vec { + all_top_mod_in_ingot(db, self) + } + pub fn root_mod(self, db: &dyn HirDb) -> TopLevelMod { self.module_tree(db).root_data().top_mod } - pub fn external_ingots(self, db: &dyn HirDb) -> &[(IdentId, TopLevelMod)] { + pub fn external_ingots(self, db: &dyn HirDb) -> &[(IdentId, IngotId)] { external_ingots_impl(db, self.inner(db)).as_slice() } pub fn kind(self, db: &dyn HirDb) -> IngotKind { self.inner(db).kind(db.as_input_db()) } + + pub fn all_impl_trait(self, db: &dyn HirDb) -> &Vec { + all_impl_trait_in_ingot(db, self) + } } #[salsa::interned] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 66d9370042..888ed3af06 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1,6 +1,6 @@ use analysis_pass::ModuleAnalysisPass; use common::{InputDb, InputIngot}; -use hir_def::{module_tree_impl, IdentId, TopLevelMod}; +use hir_def::{module_tree_impl, IdentId, IngotId, TopLevelMod}; pub use lower::parse::ParserError; use lower::{ map_file_to_mod_impl, @@ -50,12 +50,15 @@ pub struct Jar( hir_def::TupleTypeId, hir_def::UsePathId, /// Utility methods for analysis. + hir_def::all_top_mod_in_ingot, + hir_def::all_impl_trait_in_ingot, hir_def::all_items_in_top_mod, hir_def::all_structs_in_top_mod, hir_def::all_enums_in_top_mod, hir_def::all_traits_in_top_mod, hir_def::all_contracts_in_top_mod, hir_def::all_type_aliases_in_top_mod, + hir_def::all_impl_trait_in_top_mod, /// Accumulated diagnostics. ParseErrorAccumulator, /// Private tracked functions. These are not part of the public API, and @@ -108,15 +111,15 @@ impl<'db> ModuleAnalysisPass for ParsingPass<'db> { // The reason why this function is not a public API is that we want to prohibit users of `HirDb` to // access `InputIngot` directly. 
#[salsa::tracked(return_ref)] -pub(crate) fn external_ingots_impl( - db: &dyn HirDb, - ingot: InputIngot, -) -> Vec<(IdentId, TopLevelMod)> { +pub(crate) fn external_ingots_impl(db: &dyn HirDb, ingot: InputIngot) -> Vec<(IdentId, IngotId)> { let mut res = Vec::new(); for dep in ingot.external_ingots(db.as_input_db()) { let name = IdentId::new(db, dep.name.to_string()); - let root = module_tree_impl(db, dep.ingot).root_data().top_mod; - res.push((name, root)) + let ingot = module_tree_impl(db, dep.ingot) + .root_data() + .top_mod + .ingot(db); + res.push((name, ingot)) } res } From d4b28f8540124d23efce318f5038bf2065d89cb6 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Wed, 20 Sep 2023 22:46:35 +0200 Subject: [PATCH 320/678] Remove `TraitBound` syntax kind and generalize them into `TraitRef` --- .../hir-analysis/src/name_resolution/mod.rs | 22 +++++-- crates/hir-analysis/src/ty/adt_analysis.rs | 13 ++-- crates/hir-analysis/src/ty/trait_lower.rs | 15 +++-- crates/hir-analysis/src/ty/ty.rs | 32 +++++++--- crates/hir-analysis/src/ty/ty_lower.rs | 24 ++++--- crates/hir/src/hir_def/item.rs | 2 +- crates/hir/src/hir_def/params.rs | 6 +- crates/hir/src/hir_def/types.rs | 14 +++-- crates/hir/src/lower/params.rs | 14 +---- crates/hir/src/lower/types.rs | 10 +-- crates/hir/src/span/item.rs | 7 ++- crates/hir/src/span/mod.rs | 2 +- crates/hir/src/span/params.rs | 6 +- crates/hir/src/visitor.rs | 62 +++++-------------- crates/parser2/src/ast/item.rs | 8 +-- crates/parser2/src/ast/param.rs | 8 +-- crates/parser2/src/parser/item.rs | 10 +-- crates/parser2/src/parser/param.rs | 8 +-- crates/parser2/src/syntax_kind.rs | 4 +- .../test_files/error_recovery/items/func.snap | 6 +- .../error_recovery/items/impl_.snap | 2 +- .../error_recovery/items/impl_trait.snap | 10 +-- .../error_recovery/items/struct_.snap | 2 +- .../error_recovery/items/trait_.snap | 6 +- .../test_files/syntax_node/items/enums.snap | 12 ++-- .../test_files/syntax_node/items/func.snap | 6 +- .../test_files/syntax_node/items/impl.snap | 8 +-- .../syntax_node/items/impl_trait.snap | 18 +++--- .../test_files/syntax_node/items/trait.snap | 24 +++---- .../syntax_node/structs/generics.snap | 26 ++++---- 30 files changed, 192 insertions(+), 195 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs index 38b4fd270f..f650080d53 100644 --- a/crates/hir-analysis/src/name_resolution/mod.rs +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -16,8 +16,8 @@ use hir::{ analysis_pass::ModuleAnalysisPass, diagnostics::DiagnosticVoucher, hir_def::{ - scope_graph::ScopeId, Expr, ExprId, IdentId, IngotId, ItemKind, Partial, Pat, PatId, - PathId, TopLevelMod, TypeBound, TypeId, + scope_graph::ScopeId, Expr, ExprId, GenericArgListId, IdentId, IngotId, ItemKind, Partial, + Pat, PatId, PathId, TopLevelMod, TraitRef, TypeId, }, visitor::prelude::*, }; @@ -361,13 +361,13 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { self.path_ctxt.pop(); } - fn visit_type_bound( + fn visit_trait_ref( &mut self, - ctxt: &mut VisitorCtxt<'_, LazyTypeBoundSpan>, - bound: &TypeBound, + ctxt: &mut VisitorCtxt<'_, LazyTraitRefSpan>, + trait_ref: TraitRef, ) { self.path_ctxt.push(ExpectedPathKind::Trait); - walk_type_bound(self, ctxt, bound); + walk_trait_ref(self, ctxt, trait_ref); self.path_ctxt.pop(); } @@ -401,6 +401,16 @@ impl<'db, 'a> Visitor for EarlyPathVisitor<'db, 'a> { walk_generic_param(self, ctxt, param); } + fn visit_generic_arg_list( + &mut self, + ctxt: &mut VisitorCtxt<'_, 
LazyGenericArgListSpan>, + args: GenericArgListId, + ) { + self.path_ctxt.push(ExpectedPathKind::Type); + walk_generic_arg_list(self, ctxt, args); + self.path_ctxt.pop(); + } + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'_, LazyTySpan>, ty: TypeId) { self.path_ctxt.push(ExpectedPathKind::Type); walk_ty(self, ctxt, ty); diff --git a/crates/hir-analysis/src/ty/adt_analysis.rs b/crates/hir-analysis/src/ty/adt_analysis.rs index 50f3a1e1d9..43ce4dbc88 100644 --- a/crates/hir-analysis/src/ty/adt_analysis.rs +++ b/crates/hir-analysis/src/ty/adt_analysis.rs @@ -1,5 +1,5 @@ use hir::{ - hir_def::{scope_graph::ScopeId, FieldDef, TypeId as HirTyId}, + hir_def::{scope_graph::ScopeId, FieldDef, TypeId as HirTyId, VariantKind}, visitor::prelude::*, }; use rustc_hash::FxHashSet; @@ -85,8 +85,9 @@ impl<'db> Visitor for AdtDefAnalysisVisitor<'db> { ctxt: &mut VisitorCtxt<'_, LazyVariantDefSpan>, variant: &hir::hir_def::VariantDef, ) { - if let Some(ty) = variant.ty { - self.verify_fully_applied(ty, ctxt.span().unwrap().ty().into()); + if let VariantKind::Tuple(tuple_id) = variant.kind { + let ty = tuple_id.to_ty(self.db.as_hir_db()); + self.verify_fully_applied(ty, ctxt.span().unwrap().tuple_type().into()); } walk_variant_def(self, ctxt, variant); @@ -114,13 +115,13 @@ fn check_recursive_adt_impl( .collect(); let adt_def = lower_adt(db, adt); - for (i, field) in adt_def.fields(db).iter().enumerate() { - for ty in field.iter_types(db) { + for (field_idx, field) in adt_def.fields(db).iter().enumerate() { + for (ty_idx, ty) in field.iter_types(db).enumerate() { for field_adt_ref in ty.collect_direct_adts(db) { if participants.contains(&field_adt_ref) && participants.contains(&adt) { let diag = TyLowerDiag::recursive_type( adt.name_span(db), - adt_def.variant_ty_span(db, i), + adt_def.variant_ty_span(db, field_idx, ty_idx), ); return Some(diag); } diff --git a/crates/hir-analysis/src/ty/trait_lower.rs b/crates/hir-analysis/src/ty/trait_lower.rs index 872af7e3f2..6e17619d07 100644 --- a/crates/hir-analysis/src/ty/trait_lower.rs +++ b/crates/hir-analysis/src/ty/trait_lower.rs @@ -4,7 +4,7 @@ use hir::{ hir_def::{ scope_graph::ScopeId, ImplTrait, IngotId, ItemKind, Partial, TopLevelMod, Trait, TraitRef, }, - visitor::prelude::LazyPathTypeSpan, + visitor::prelude::{LazyPathTypeSpan, LazyTraitRefSpan}, }; use rustc_hash::FxHashMap; @@ -47,16 +47,15 @@ pub(crate) fn collect_trait_impl( pub(super) fn lower_trait_ref( db: &dyn HirAnalysisDb, trait_ref: TraitRef, - ref_span: LazyPathTypeSpan, + ref_span: LazyTraitRefSpan, scope: ScopeId, ) -> (Option, Vec) { let hir_db = db.as_hir_db(); - let (args, mut diags) = lower_generic_arg_list_with_diag( - db, - trait_ref.generic_args, - ref_span.generic_args(), - scope, - ); + let (args, mut diags) = if let Some(args) = trait_ref.generic_args { + lower_generic_arg_list_with_diag(db, args, ref_span.generic_args(), scope) + } else { + (vec![], vec![]) + }; let Partial::Present(path) = trait_ref.path else { return (None, diags); diff --git a/crates/hir-analysis/src/ty/ty.rs b/crates/hir-analysis/src/ty/ty.rs index e39ba9e139..5115284575 100644 --- a/crates/hir-analysis/src/ty/ty.rs +++ b/crates/hir-analysis/src/ty/ty.rs @@ -6,7 +6,7 @@ use hir::{ prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, scope_graph::ScopeId, Contract, Enum, IdentId, IngotId, ItemKind, Partial, Struct, TypeAlias as HirTypeAlias, - TypeId as HirTyId, + TypeId as HirTyId, VariantKind, }, span::DynLazySpan, }; @@ -177,26 +177,38 @@ pub struct AdtDef { } impl AdtDef { - pub fn 
variant_ty_span(self, db: &dyn HirAnalysisDb, idx: usize) -> DynLazySpan { + pub fn variant_ty_span( + self, + db: &dyn HirAnalysisDb, + field_idx: usize, + ty_idx: usize, + ) -> DynLazySpan { match self.adt_ref(db).data(db) { - AdtRef::Enum(e) => e - .lazy_span() - .variants_moved() - .variant_moved(idx) - .ty_moved() - .into(), + AdtRef::Enum(e) => { + let span = e.lazy_span().variants_moved().variant_moved(field_idx); + match e.variants(db.as_hir_db()).data(db.as_hir_db())[field_idx].kind { + VariantKind::Tuple(_) => { + debug_assert!(ty_idx == 0); + span.tuple_type_moved().into() + } + VariantKind::Record(_) => { + span.fields_moved().field_moved(ty_idx).ty_moved().into() + } + VariantKind::Unit => unreachable!(), + } + } AdtRef::Struct(s) => s .lazy_span() .fields_moved() - .field_moved(idx) + .field_moved(field_idx) .ty_moved() .into(), AdtRef::Contract(c) => c .lazy_span() .fields_moved() - .field_moved(idx) + .field_moved(field_idx) .ty_moved() .into(), } diff --git a/crates/hir-analysis/src/ty/ty_lower.rs b/crates/hir-analysis/src/ty/ty_lower.rs index b77113af9a..22ccdfd918 100644 --- a/crates/hir-analysis/src/ty/ty_lower.rs +++ b/crates/hir-analysis/src/ty/ty_lower.rs @@ -3,8 +3,8 @@ use hir::{ hir_def::{ scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, GenericParamOwner, IdentId, ItemKind, KindBound as HirKindBound, Partial, PathId, Trait, - TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, - WherePredicate, + TupleTypeId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, + VariantDefListId, VariantKind, WherePredicate, }, visitor::prelude::*, }; @@ -174,7 +174,7 @@ impl<'db> TyBuilder<'db> { HirTyKind::SelfType(args) => self.lower_self_ty(*args), - HirTyKind::Tuple(elems) => self.lower_tuple(elems), + HirTyKind::Tuple(tuple_id) => self.lower_tuple(*tuple_id), HirTyKind::Array(_, _) => { todo!() @@ -260,7 +260,8 @@ impl<'db> TyBuilder<'db> { TyId::app(self.db, ptr, pointee) } - fn lower_tuple(&mut self, elems: &[Partial]) -> TyId { + fn lower_tuple(&mut self, tuple_id: TupleTypeId) -> TyId { + let elems = tuple_id.data(self.db.as_hir_db()); let len = elems.len(); let tuple = TyId::tuple(self.db, len); elems.iter().fold(tuple, |acc, elem| { @@ -449,11 +450,18 @@ impl<'db> AdtTyBuilder<'db> { .iter() .for_each(|variant| { // TODO: FIX here when record variant is introduced. 
- let tys = match variant.ty { - Some(ty) => { - vec![Some(ty).into()] + let tys = match variant.kind { + VariantKind::Tuple(tuple_id) => { + vec![Partial::Present(tuple_id.to_ty(self.db.as_hir_db()))] } - None => vec![], + + VariantKind::Record(fields) => fields + .data(self.db.as_hir_db()) + .iter() + .map(|field| field.ty) + .collect(), + + VariantKind::Unit => vec![], }; let variant = AdtField::new(variant.name, tys, scope); diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs index 1ef778ef25..deeb91ba69 100644 --- a/crates/hir/src/hir_def/item.rs +++ b/crates/hir/src/hir_def/item.rs @@ -904,7 +904,7 @@ pub struct VariantDef { pub kind: VariantKind, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum VariantKind { Unit, Tuple(TupleTypeId), diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs index 8452a26fb0..4dc5199f46 100644 --- a/crates/hir/src/hir_def/params.rs +++ b/crates/hir/src/hir_def/params.rs @@ -132,12 +132,12 @@ impl FuncParamName { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TypeBound { - Trait(TraitBound), + Trait(TraitRef), Kind(Partial), } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct TraitBound { +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct TraitRef { /// The path to the trait. pub path: Partial, /// The type arguments of the trait. diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs index 2ce79bd1fa..32a4e3b44f 100644 --- a/crates/hir/src/hir_def/types.rs +++ b/crates/hir/src/hir_def/types.rs @@ -1,3 +1,5 @@ +use crate::HirDb; + use super::{Body, GenericArgListId, Partial, PathId}; #[salsa::interned] @@ -19,14 +21,14 @@ pub enum TypeKind { Array(Partial, Partial), } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct TraitRef { - pub path: Partial, - pub generic_args: GenericArgListId, -} - #[salsa::interned] pub struct TupleTypeId { #[return_ref] pub data: Vec>, } + +impl TupleTypeId { + pub fn to_ty(self, db: &dyn HirDb) -> TypeId { + TypeId::new(db, TypeKind::Tuple(self)) + } +} diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs index af6cb159b4..0ff0470603 100644 --- a/crates/hir/src/lower/params.rs +++ b/crates/hir/src/lower/params.rs @@ -1,6 +1,6 @@ use parser::ast::{self}; -use crate::hir_def::{kw, params::*, Body, IdentId, Partial, PathId, TypeId}; +use crate::hir_def::{kw, params::*, Body, IdentId, Partial, TypeId}; use super::FileLowerCtxt; @@ -175,23 +175,13 @@ impl WherePredicate { impl TypeBound { fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TypeBound) -> Self { if let Some(trait_bound) = ast.trait_bound() { - Self::Trait(TraitBound::lower_ast(ctxt, trait_bound)) + Self::Trait(TraitRef::lower_ast(ctxt, trait_bound)) } else { Self::Kind(KindBound::lower_ast_opt(ctxt, ast.kind_bound())) } } } -impl TraitBound { - fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TraitBound) -> Self { - let path = ast.path().map(|ast| PathId::lower_ast(ctxt, ast)).into(); - let generic_args = ast - .generic_args() - .map(|args| GenericArgListId::lower_ast(ctxt, args)); - Self { path, generic_args } - } -} - impl KindBound { fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Partial { let Some(ast) = ast else { diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs index e39b30cee9..4a2a4c3e7d 100644 --- a/crates/hir/src/lower/types.rs +++ b/crates/hir/src/lower/types.rs @@ -59,15 +59,17 @@ impl TupleTypeId { } impl 
TraitRef { - pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::PathType) -> Self { - let path = PathId::lower_ast_partial(ctxt, ast.path()); - let generic_args = GenericArgListId::lower_ast_opt(ctxt, ast.generic_args()); + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'_>, ast: ast::TraitRef) -> Self { + let path = ast.path().map(|ast| PathId::lower_ast(ctxt, ast)).into(); + let generic_args = ast + .generic_args() + .map(|args| GenericArgListId::lower_ast(ctxt, args)); Self { path, generic_args } } pub(super) fn lower_ast_partial( ctxt: &mut FileLowerCtxt<'_>, - ast: Option, + ast: Option, ) -> Partial { ast.map(|ast| Self::lower_ast(ctxt, ast)).into() } diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs index eb7ada4583..cabc0aafe0 100644 --- a/crates/hir/src/span/item.rs +++ b/crates/hir/src/span/item.rs @@ -6,6 +6,7 @@ use crate::{ Trait, TypeAlias, Use, }, span::{ + params::LazyTraitRefSpan, transition::{LazyArg, LazyTransitionFn, ResolvedOrigin, ResolvedOriginKind}, use_tree::LazyUsePathSpan, DesugaredOrigin, DesugaredUseFocus, @@ -17,7 +18,7 @@ use super::{ define_lazy_span_node, params::{LazyFuncParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, transition::SpanTransitionChain, - types::{LazyPathTypeSpan, LazyTupleTypeSpan, LazyTySpan}, + types::{LazyTupleTypeSpan, LazyTySpan}, use_tree::LazyUseAliasSpan, }; @@ -154,7 +155,7 @@ define_lazy_span_node!( LazySuperTraitListSpan, ast::SuperTraitList, @idx { - (super_trait, LazyPathTypeSpan), + (super_trait, LazyTraitRefSpan), } ); @@ -166,7 +167,7 @@ define_lazy_span_node!( (attributes, attr_list, LazyAttrListSpan), (generic_params, generic_params, LazyGenericParamListSpan), (where_clause, where_clause, LazyWhereClauseSpan), - (trait_ref, trait_ref, LazyPathTypeSpan), + (trait_ref, trait_ref, LazyTraitRefSpan), (ty, ty, LazyTySpan), } ); diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs index 5b195539f1..5f7e8d722a 100644 --- a/crates/hir/src/span/mod.rs +++ b/crates/hir/src/span/mod.rs @@ -49,7 +49,7 @@ pub mod lazy_spans { pub use super::params::{ LazyConstGenericParamSpan, LazyFuncParamListSpan, LazyFuncParamSpan, LazyGenericArgListSpan, LazyGenericArgSpan, LazyGenericParamListSpan, LazyGenericParamSpan, - LazyKindBoundSpan, LazyTraitBoundSpan, LazyTypeBoundListSpan, LazyTypeBoundSpan, + LazyKindBoundSpan, LazyTraitRefSpan, LazyTypeBoundListSpan, LazyTypeBoundSpan, LazyTypeGenericArgSpan, LazyWhereClauseSpan, LazyWherePredicateSpan, }; diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs index 680379810b..e8794be91b 100644 --- a/crates/hir/src/span/params.rs +++ b/crates/hir/src/span/params.rs @@ -122,14 +122,14 @@ define_lazy_span_node!( LazyTypeBoundSpan, ast::TypeBound, @node { - (trait_bound, trait_bound, LazyTraitBoundSpan), + (trait_bound, trait_bound, LazyTraitRefSpan), (kind_bound, kind_bound, LazyKindBoundSpan), } ); define_lazy_span_node!( - LazyTraitBoundSpan, - ast::TraitBound, + LazyTraitRefSpan, + ast::TraitRef, @node { (path, path, LazyPathSpan), (generic_args, generic_args, LazyGenericArgListSpan), diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs index 3be7ecae7c..b98e97c9f8 100644 --- a/crates/hir/src/visitor.rs +++ b/crates/hir/src/visitor.rs @@ -8,11 +8,11 @@ use crate::{ FieldIndex, Func, FuncParam, FuncParamLabel, FuncParamListId, FuncParamName, GenericArg, GenericArgListId, GenericParam, GenericParamListId, IdentId, Impl, ImplTrait, ItemKind, KindBound, LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, 
Stmt, StmtId, Struct, - TopLevelMod, Trait, TraitBound, TupleTypeId, TypeAlias, TypeBound, TypeId, TypeKind, Use, + TopLevelMod, Trait, TraitRef, TupleTypeId, TypeAlias, TypeBound, TypeId, TypeKind, Use, UseAlias, UsePathId, UsePathSegment, VariantDef, VariantDefListId, VariantKind, WhereClauseId, WherePredicate, }, - span::{lazy_spans::*, params::LazyTraitBoundSpan, transition::ChainRoot, SpanDowncast}, + span::{lazy_spans::*, params::LazyTraitRefSpan, transition::ChainRoot, SpanDowncast}, HirDb, }; @@ -23,7 +23,7 @@ pub mod prelude { walk_field_def, walk_field_def_list, walk_field_list, walk_func, walk_func_param, walk_func_param_list, walk_generic_arg, walk_generic_arg_list, walk_generic_param, walk_generic_param_list, walk_impl, walk_impl_trait, walk_item, walk_kind_bound, walk_mod, - walk_pat, walk_path, walk_stmt, walk_struct, walk_top_mod, walk_trait, walk_trait_bound, + walk_pat, walk_path, walk_stmt, walk_struct, walk_top_mod, walk_trait, walk_trait_ref, walk_ty, walk_type_alias, walk_type_bound, walk_type_bound_list, walk_use, walk_use_path, walk_variant_def, walk_variant_def_list, walk_where_clause, walk_where_predicate, Visitor, VisitorCtxt, @@ -170,12 +170,12 @@ pub trait Visitor { walk_type_bound(self, ctxt, bound); } - fn visit_trait_bound( + fn visit_trait_ref( &mut self, - ctxt: &mut VisitorCtxt<'_, LazyTraitBoundSpan>, - bound: &TraitBound, + ctxt: &mut VisitorCtxt<'_, LazyTraitRefSpan>, + trait_ref: TraitRef, ) { - walk_trait_bound(self, ctxt, bound); + walk_trait_ref(self, ctxt, trait_ref); } fn visit_kind_bound( @@ -711,25 +711,11 @@ where ctxt.with_new_ctxt( |span| span.super_traits(), |ctxt| { - for (i, trait_ref) in trait_.super_traits(ctxt.db).iter().enumerate() { + for (i, &trait_ref) in trait_.super_traits(ctxt.db).iter().enumerate() { ctxt.with_new_ctxt( |span| span.super_trait(i), |ctxt| { - if let Some(path) = trait_ref.path.to_opt() { - ctxt.with_new_ctxt( - |span| span.path_moved(), - |ctxt| { - visitor.visit_path(ctxt, path); - }, - ) - }; - - ctxt.with_new_ctxt( - |span| span.generic_args_moved(), - |ctxt| { - visitor.visit_generic_arg_list(ctxt, trait_ref.generic_args); - }, - ); + visitor.visit_trait_ref(ctxt, trait_ref); }, ); } @@ -760,21 +746,7 @@ pub fn walk_impl_trait( ctxt.with_new_ctxt( |span| span.trait_ref_moved(), |ctxt| { - if let Some(path) = trait_ref.path.to_opt() { - ctxt.with_new_ctxt( - |span| span.path_moved(), - |ctxt| { - visitor.visit_path(ctxt, path); - }, - ) - }; - - ctxt.with_new_ctxt( - |span| span.generic_args_moved(), - |ctxt| { - visitor.visit_generic_arg_list(ctxt, trait_ref.generic_args); - }, - ); + visitor.visit_trait_ref(ctxt, trait_ref); }, ) } @@ -1771,9 +1743,9 @@ pub fn walk_type_bound( V: Visitor + ?Sized, { match bound { - TypeBound::Trait(trait_bound) => ctxt.with_new_ctxt( + TypeBound::Trait(trait_ref) => ctxt.with_new_ctxt( |span| span.trait_bound_moved(), - |ctxt| visitor.visit_trait_bound(ctxt, trait_bound), + |ctxt| visitor.visit_trait_ref(ctxt, *trait_ref), ), TypeBound::Kind(Partial::Present(kind_bound)) => ctxt.with_new_ctxt( |span| span.kind_bound_moved(), @@ -1785,14 +1757,14 @@ pub fn walk_type_bound( } } -pub fn walk_trait_bound( +pub fn walk_trait_ref( visitor: &mut V, - ctxt: &mut VisitorCtxt<'_, LazyTraitBoundSpan>, - bound: &TraitBound, + ctxt: &mut VisitorCtxt<'_, LazyTraitRefSpan>, + trait_ref: TraitRef, ) where V: Visitor + ?Sized, { - if let Some(path) = bound.path.to_opt() { + if let Some(path) = trait_ref.path.to_opt() { ctxt.with_new_ctxt( |span| span.path_moved(), |ctxt| { @@ -1801,7 
+1773,7 @@ pub fn walk_trait_bound( ) } - if let Some(args) = bound.generic_args { + if let Some(args) = trait_ref.generic_args { ctxt.with_new_ctxt( |span| span.generic_args_moved(), |ctxt| { diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs index 3d1d0196e1..7d578cf451 100644 --- a/crates/parser2/src/ast/item.rs +++ b/crates/parser2/src/ast/item.rs @@ -1,4 +1,4 @@ -use super::{ast_node, PathType, TupleType}; +use super::{ast_node, TraitRef, TupleType}; use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; use rowan::ast::{support, AstNode}; @@ -205,7 +205,7 @@ impl Trait { ast_node! { pub struct SuperTraitList, SK::SuperTraitList, - IntoIterator + IntoIterator } impl SuperTraitList { pub fn colon(&self) -> Option { @@ -247,14 +247,14 @@ impl super::AttrListOwner for ImplTrait {} impl ImplTrait { /// Returns the trait of the impl. /// `Foo` in `impl Foo for Bar { .. }` - pub fn trait_ref(&self) -> Option { + pub fn trait_ref(&self) -> Option { support::child(self.syntax()) } /// Returns the type of the impl. /// `Bar` in `impl Foo for Bar { .. }` pub fn ty(&self) -> Option { - support::children(self.syntax()).nth(1) + support::child(self.syntax()) } /// Returns the trait impl item list. diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs index 3bd1f17a24..936d27bbd1 100644 --- a/crates/parser2/src/ast/param.rs +++ b/crates/parser2/src/ast/param.rs @@ -283,7 +283,7 @@ ast_node! { } impl TypeBound { /// A path of the type bound. - pub fn trait_bound(&self) -> Option { + pub fn trait_bound(&self) -> Option { support::child(self.syntax()) } @@ -293,10 +293,10 @@ impl TypeBound { } ast_node! { - pub struct TraitBound, - SK::TraitBound + pub struct TraitRef, + SK::TraitRef } -impl TraitBound { +impl TraitRef { /// A path to the trait. pub fn path(&self) -> Option { support::child(self.syntax()) diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs index bbbf9a9df3..0913e99ef6 100644 --- a/crates/parser2/src/parser/item.rs +++ b/crates/parser2/src/parser/item.rs @@ -6,10 +6,10 @@ use super::{ attr, define_scope, expr::parse_expr, func::FuncDefScope, - param::{parse_generic_params_opt, parse_where_clause_opt}, + param::{parse_generic_params_opt, parse_where_clause_opt, TraitRefScope}, struct_::RecordFieldDefListScope, token_stream::{LexicalToken, TokenStream}, - type_::{parse_type, PathTypeScope, TupleTypeScope}, + type_::{parse_type, TupleTypeScope}, use_tree::UseTreeScope, Parser, }; @@ -346,9 +346,9 @@ define_scope! 
{SuperTraitListScope, SuperTraitList, Inheritance(Plus)} impl super::Parse for SuperTraitListScope { fn parse(&mut self, parser: &mut Parser) { parser.bump_expected(SyntaxKind::Colon); - parser.parse(PathTypeScope::default(), None); + parser.parse(TraitRefScope::default(), None); while parser.bump_if(SyntaxKind::Plus) { - parser.parse(PathTypeScope::default(), None); + parser.parse(TraitRefScope::default(), None); } } } @@ -381,7 +381,7 @@ impl super::Parse for ImplScope { self.set_kind(SyntaxKind::ImplTrait); parser.with_next_expected_tokens( |parser| { - parser.parse(PathTypeScope::default(), None); + parser.parse(TraitRefScope::default(), None); }, &[SyntaxKind::ForKw], ); diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs index 8e800823b9..96abb2644d 100644 --- a/crates/parser2/src/parser/param.rs +++ b/crates/parser2/src/parser/param.rs @@ -185,7 +185,7 @@ impl super::Parse for TypeBoundScope { parser.error_and_recover("trait bounds are not allowed here", None); return; } - parser.parse(TraitBoundScope::default(), None); + parser.parse(TraitRefScope::default(), None); } } } @@ -233,11 +233,11 @@ impl super::Parse for KindBoundAbsScope { } define_scope! { - TraitBoundScope, - TraitBound, + pub(super) TraitRefScope, + TraitRef, Inheritance } -impl super::Parse for TraitBoundScope { +impl super::Parse for TraitRefScope { fn parse(&mut self, parser: &mut Parser) { parser.parse(PathScope::default(), None); if parser.current_kind() == Some(SyntaxKind::Lt) { diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs index b3f879ac6f..774acb49e0 100644 --- a/crates/parser2/src/syntax_kind.rs +++ b/crates/parser2/src/syntax_kind.rs @@ -445,8 +445,8 @@ pub enum SyntaxKind { TypeBoundList, /// `TraitBound` or `TypeKind`. TypeBound, - /// `Trait1` - TraitBound, + /// `Trait1` + TraitRef, /// `* -> *` or `(*-> *) -> *` KindBoundAbs, /// `*`. 
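The snapshot updates that follow are all driven by the same grammar change: the trait reference that appears in a super-trait list, in an `impl ... for ...` header, and in a generic or `where`-clause bound is now parsed as a single `TraitRef` node instead of the old `PathType`/`TraitBound` pair. A rough Fe sketch of the three positions, only to make the snapshot diffs easier to read (the names `SubTrait`, `S`, `Foo`, `Trait1`, `Trait2`, and the field `t` are illustrative and not taken from the test files):

// Each entry in a super-trait list is now a `TraitRef` (previously `PathType`).
trait SubTrait: Parse + Add {}

// The trait in an `impl ... for ...` header is now a `TraitRef` (previously `PathType`).
impl<T> Trait<T> for Foo<T> {}

// Bounds on generic parameters and in `where` clauses are now `TraitRef`s (previously `TraitBound`).
struct S<T: Trait1 + Trait2>
where T: Clone
{
    t: T
}

Folding these positions into one node is what lets `SuperTraitListScope` and `ImplScope` above reuse `TraitRefScope` instead of `PathTypeScope`.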
diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap index 1fb7840d0c..efd0618d1e 100644 --- a/crates/parser2/test_files/error_recovery/items/func.snap +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -18,7 +18,7 @@ Root@0..133 Colon@8..9 ":" WhiteSpace@9..10 " " TypeBound@10..15 - TraitBound@10..15 + TraitRef@10..15 Path@10..15 PathSegment@10..15 Ident@10..15 "Trait" @@ -85,7 +85,7 @@ Root@0..133 Colon@64..65 ":" WhiteSpace@65..66 " " TypeBound@66..72 - TraitBound@66..72 + TraitRef@66..72 Path@66..72 PathSegment@66..72 Ident@66..72 "Trait2" @@ -139,7 +139,7 @@ Root@0..133 Colon@118..119 ":" WhiteSpace@119..120 " " TypeBound@120..126 - TraitBound@120..126 + TraitRef@120..126 Path@120..126 PathSegment@120..126 Ident@120..126 "Trait2" diff --git a/crates/parser2/test_files/error_recovery/items/impl_.snap b/crates/parser2/test_files/error_recovery/items/impl_.snap index 16100031a3..cc199736ec 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_.snap @@ -43,7 +43,7 @@ Root@0..56 Colon@24..25 ":" WhiteSpace@25..26 " " TypeBound@26..33 - TraitBound@26..33 + TraitRef@26..33 Path@26..33 PathSegment@26..33 Ident@26..33 "Integer" diff --git a/crates/parser2/test_files/error_recovery/items/impl_trait.snap b/crates/parser2/test_files/error_recovery/items/impl_trait.snap index 1222dab101..d5e0333cd0 100644 --- a/crates/parser2/test_files/error_recovery/items/impl_trait.snap +++ b/crates/parser2/test_files/error_recovery/items/impl_trait.snap @@ -9,7 +9,7 @@ Root@0..90 ImplTrait@0..36 ImplKw@0..4 "impl" WhiteSpace@4..5 " " - PathType@5..14 + TraitRef@5..14 Path@5..6 PathSegment@5..6 Ident@5..6 "X" @@ -60,7 +60,7 @@ Root@0..90 Colon@30..31 ":" WhiteSpace@31..32 " " TypeBound@32..33 - TraitBound@32..33 + TraitRef@32..33 Path@32..33 PathSegment@32..33 Ident@32..33 "X" @@ -74,7 +74,7 @@ Root@0..90 ImplTrait@38..71 ImplKw@38..42 "impl" WhiteSpace@42..43 " " - PathType@43..50 + TraitRef@43..50 Path@43..44 PathSegment@43..44 Ident@43..44 "X" @@ -123,7 +123,7 @@ Root@0..90 Colon@65..66 ":" WhiteSpace@66..67 " " TypeBound@67..68 - TraitBound@67..68 + TraitRef@67..68 Path@67..68 PathSegment@67..68 Ident@67..68 "X" @@ -137,7 +137,7 @@ Root@0..90 ImplTrait@73..90 ImplKw@73..77 "impl" WhiteSpace@77..78 " " - PathType@78..79 + TraitRef@78..79 Path@78..79 PathSegment@78..79 Ident@78..79 "X" diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap index 986c495346..84290b8a94 100644 --- a/crates/parser2/test_files/error_recovery/items/struct_.snap +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -44,7 +44,7 @@ Root@0..160 Colon@32..33 ":" WhiteSpace@33..34 " " TypeBound@34..39 - TraitBound@34..39 + TraitRef@34..39 Path@34..39 PathSegment@34..39 Ident@34..39 "Trait" diff --git a/crates/parser2/test_files/error_recovery/items/trait_.snap b/crates/parser2/test_files/error_recovery/items/trait_.snap index 1a0b56157d..74f5cac32f 100644 --- a/crates/parser2/test_files/error_recovery/items/trait_.snap +++ b/crates/parser2/test_files/error_recovery/items/trait_.snap @@ -94,7 +94,7 @@ Root@0..133 Colon@77..78 ":" WhiteSpace@78..79 " " TypeBound@79..82 - TraitBound@79..82 + TraitRef@79..82 Path@79..82 PathSegment@79..82 Ident@79..82 "Add" @@ -123,7 +123,7 @@ Root@0..133 Colon@102..103 ":" WhiteSpace@103..104 " " TypeBound@104..107 - TraitBound@104..107 + 
TraitRef@104..107 Path@104..107 PathSegment@104..107 Ident@104..107 "Sub" @@ -143,7 +143,7 @@ Root@0..133 Colon@122..123 ":" WhiteSpace@123..124 " " TypeBound@124..127 - TraitBound@124..127 + TraitRef@124..127 Path@124..127 PathSegment@124..127 Ident@124..127 "Add" diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap index d04226ba96..e81f8281dd 100644 --- a/crates/parser2/test_files/syntax_node/items/enums.snap +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -128,7 +128,7 @@ Root@0..428 Colon@166..167 ":" WhiteSpace@167..168 " " TypeBound@168..173 - TraitBound@168..173 + TraitRef@168..173 Path@168..173 PathSegment@168..173 Ident@168..173 "Clone" @@ -166,7 +166,7 @@ Root@0..428 Colon@216..217 ":" WhiteSpace@217..218 " " TypeBound@218..221 - TraitBound@218..221 + TraitRef@218..221 Path@218..221 PathSegment@218..221 Ident@218..221 "Add" @@ -174,7 +174,7 @@ Root@0..428 Plus@222..223 "+" WhiteSpace@223..224 " " TypeBound@224..227 - TraitBound@224..227 + TraitRef@224..227 Path@224..227 PathSegment@224..227 Ident@224..227 "Mul" @@ -187,7 +187,7 @@ Root@0..428 Colon@231..232 ":" WhiteSpace@232..233 " " TypeBound@233..236 - TraitBound@233..236 + TraitRef@233..236 Path@233..236 PathSegment@233..236 Ident@233..236 "Sub" @@ -195,7 +195,7 @@ Root@0..428 Plus@237..238 "+" WhiteSpace@238..239 " " TypeBound@239..242 - TraitBound@239..242 + TraitRef@239..242 Path@239..242 PathSegment@239..242 Ident@239..242 "Div" @@ -224,7 +224,7 @@ Root@0..428 Colon@261..262 ":" WhiteSpace@262..263 " " TypeBound@263..268 - TraitBound@263..268 + TraitRef@263..268 Path@263..268 PathSegment@263..268 Ident@263..268 "Trait" diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap index 1fa2050503..88833e5dfb 100644 --- a/crates/parser2/test_files/syntax_node/items/func.snap +++ b/crates/parser2/test_files/syntax_node/items/func.snap @@ -172,7 +172,7 @@ Root@0..361 Colon@194..195 ":" WhiteSpace@195..196 " " TypeBound@196..201 - TraitBound@196..201 + TraitRef@196..201 Path@196..201 PathSegment@196..201 Ident@196..201 "Trait" @@ -239,7 +239,7 @@ Root@0..361 Colon@250..251 ":" WhiteSpace@251..252 " " TypeBound@252..257 - TraitBound@252..257 + TraitRef@252..257 Path@252..257 PathSegment@252..257 Ident@252..257 "Trait" @@ -263,7 +263,7 @@ Root@0..361 Colon@278..279 ":" WhiteSpace@279..280 " " TypeBound@280..285 - TraitBound@280..285 + TraitRef@280..285 Path@280..285 PathSegment@280..285 Ident@280..285 "Clone" diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap index 208c2f3980..002a709caa 100644 --- a/crates/parser2/test_files/syntax_node/items/impl.snap +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -16,7 +16,7 @@ Root@0..272 Colon@6..7 ":" WhiteSpace@7..8 " " TypeBound@8..11 - TraitBound@8..11 + TraitRef@8..11 Path@8..11 PathSegment@8..11 Ident@8..11 "Add" @@ -156,7 +156,7 @@ Root@0..272 Colon@167..168 ":" WhiteSpace@168..169 " " TypeBound@169..174 - TraitBound@169..174 + TraitRef@169..174 Path@169..174 PathSegment@169..174 Ident@169..174 "Clone" @@ -177,7 +177,7 @@ Root@0..272 Colon@189..190 ":" WhiteSpace@190..191 " " TypeBound@191..197 - TraitBound@191..197 + TraitRef@191..197 Path@191..194 PathSegment@191..194 Ident@191..194 "Add" @@ -220,7 +220,7 @@ Root@0..272 Colon@229..230 ":" WhiteSpace@230..231 " " TypeBound@231..235 - TraitBound@231..235 + TraitRef@231..235 Path@231..235 
PathSegment@231..235 Ident@231..235 "Copy" diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap index 787cd3371a..b792987ec3 100644 --- a/crates/parser2/test_files/syntax_node/items/impl_trait.snap +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -14,7 +14,7 @@ Root@0..335 Ident@5..6 "T" Gt@6..7 ">" WhiteSpace@7..8 " " - PathType@8..16 + TraitRef@8..16 Path@8..13 PathSegment@8..13 Ident@8..13 "Trait" @@ -83,7 +83,7 @@ Root@0..335 Ident@77..78 "U" Gt@78..79 ">" WhiteSpace@79..80 " " - PathType@80..91 + TraitRef@80..91 Path@80..85 PathSegment@80..85 Ident@80..85 "Trait" @@ -131,7 +131,7 @@ Root@0..335 Colon@109..110 ":" WhiteSpace@110..111 " " TypeBound@111..116 - TraitBound@111..116 + TraitRef@111..116 Path@111..116 PathSegment@111..116 Ident@111..116 "Clone" @@ -146,7 +146,7 @@ Root@0..335 Colon@124..125 ":" WhiteSpace@125..126 " " TypeBound@126..129 - TraitBound@126..129 + TraitRef@126..129 Path@126..129 PathSegment@126..129 Ident@126..129 "Bar" @@ -167,7 +167,7 @@ Root@0..335 Colon@144..145 ":" WhiteSpace@145..146 " " TypeBound@146..159 - TraitBound@146..159 + TraitRef@146..159 Path@146..156 PathSegment@146..156 Ident@146..156 "OtherTrait" @@ -235,7 +235,7 @@ Root@0..335 Colon@213..214 ":" WhiteSpace@214..215 " " TypeBound@215..220 - TraitBound@215..220 + TraitRef@215..220 Path@215..220 PathSegment@215..220 Ident@215..220 "Clone" @@ -245,7 +245,7 @@ Root@0..335 Ident@222..223 "U" Gt@223..224 ">" WhiteSpace@224..225 " " - PathType@225..236 + TraitRef@225..236 Path@225..230 PathSegment@225..230 Ident@225..230 "Trait" @@ -293,7 +293,7 @@ Root@0..335 Colon@254..255 ":" WhiteSpace@255..256 " " TypeBound@256..259 - TraitBound@256..259 + TraitRef@256..259 Path@256..259 PathSegment@256..259 Ident@256..259 "Bar" @@ -314,7 +314,7 @@ Root@0..335 Colon@274..275 ":" WhiteSpace@275..276 " " TypeBound@276..289 - TraitBound@276..289 + TraitRef@276..289 Path@276..286 PathSegment@276..286 Ident@276..286 "OtherTrait" diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap index 3a3197f5ca..3e14ddc27e 100644 --- a/crates/parser2/test_files/syntax_node/items/trait.snap +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -40,7 +40,7 @@ Root@0..652 Colon@45..46 ":" WhiteSpace@46..47 " " TypeBound@47..52 - TraitBound@47..52 + TraitRef@47..52 Path@47..52 PathSegment@47..52 Ident@47..52 "Trait" @@ -92,7 +92,7 @@ Root@0..652 Colon@104..105 ":" WhiteSpace@105..106 " " TypeBound@106..109 - TraitBound@106..109 + TraitRef@106..109 Path@106..109 PathSegment@106..109 Ident@106..109 "Add" @@ -100,7 +100,7 @@ Root@0..652 Plus@110..111 "+" WhiteSpace@111..112 " " TypeBound@112..115 - TraitBound@112..115 + TraitRef@112..115 Path@112..115 PathSegment@112..115 Ident@112..115 "Sub" @@ -192,7 +192,7 @@ Root@0..652 Colon@201..202 ":" WhiteSpace@202..203 " " TypeBound@203..206 - TraitBound@203..206 + TraitRef@203..206 Path@203..206 PathSegment@203..206 Ident@203..206 "Add" @@ -242,7 +242,7 @@ Root@0..652 Colon@264..265 ":" WhiteSpace@265..266 " " TypeBound@266..269 - TraitBound@266..269 + TraitRef@266..269 Path@266..269 PathSegment@266..269 Ident@266..269 "Sub" @@ -274,7 +274,7 @@ Root@0..652 Colon@306..307 ":" WhiteSpace@307..308 " " TypeBound@308..319 - TraitBound@308..319 + TraitRef@308..319 Path@308..319 PathSegment@308..319 Ident@308..319 "TokenStream" @@ -345,7 +345,7 @@ Root@0..652 Colon@387..388 ":" WhiteSpace@388..389 " " TypeBound@389..400 
- TraitBound@389..400 + TraitRef@389..400 Path@389..400 PathSegment@389..400 Ident@389..400 "TokenStream" @@ -353,7 +353,7 @@ Root@0..652 Plus@401..402 "+" WhiteSpace@402..403 " " TypeBound@403..408 - TraitBound@403..408 + TraitRef@403..408 Path@403..408 PathSegment@403..408 Ident@403..408 "Clone" @@ -377,7 +377,7 @@ Root@0..652 Colon@429..430 ":" WhiteSpace@430..431 " " TypeBound@431..436 - TraitBound@431..436 + TraitRef@431..436 Path@431..436 PathSegment@431..436 Ident@431..436 "Parse" @@ -500,14 +500,14 @@ Root@0..652 SuperTraitList@616..632 Colon@616..617 ":" WhiteSpace@617..618 " " - PathType@618..623 + TraitRef@618..623 Path@618..623 PathSegment@618..623 Ident@618..623 "Parse" WhiteSpace@623..624 " " Plus@624..625 "+" WhiteSpace@625..626 " " - PathType@626..632 + TraitRef@626..632 Path@626..629 PathSegment@626..629 Ident@626..629 "Add" @@ -533,7 +533,7 @@ Root@0..652 Colon@641..642 ":" WhiteSpace@642..643 " " TypeBound@643..649 - TraitBound@643..649 + TraitRef@643..649 Path@643..646 PathSegment@643..646 Ident@643..646 "Add" diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap index af44340318..eae9111356 100644 --- a/crates/parser2/test_files/syntax_node/structs/generics.snap +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -88,7 +88,7 @@ Root@0..560 Colon@126..127 ":" WhiteSpace@127..128 " " TypeBound@128..138 - TraitBound@128..138 + TraitRef@128..138 Path@128..138 PathSegment@128..131 Ident@128..131 "foo" @@ -172,7 +172,7 @@ Root@0..560 Colon@233..234 ":" WhiteSpace@234..235 " " TypeBound@235..245 - TraitBound@235..245 + TraitRef@235..245 Path@235..245 PathSegment@235..238 Ident@235..238 "foo" @@ -183,7 +183,7 @@ Root@0..560 Plus@246..247 "+" WhiteSpace@247..248 " " TypeBound@248..258 - TraitBound@248..258 + TraitRef@248..258 Path@248..258 PathSegment@248..251 Ident@248..251 "bar" @@ -204,7 +204,7 @@ Root@0..560 Colon@272..273 ":" WhiteSpace@273..274 " " TypeBound@274..284 - TraitBound@274..284 + TraitRef@274..284 Path@274..284 PathSegment@274..277 Ident@274..277 "bar" @@ -227,7 +227,7 @@ Root@0..560 Colon@298..299 ":" WhiteSpace@299..300 " " TypeBound@300..306 - TraitBound@300..306 + TraitRef@300..306 Path@300..306 PathSegment@300..306 Ident@300..306 "Trait1" @@ -235,7 +235,7 @@ Root@0..560 Plus@307..308 "+" WhiteSpace@308..309 " " TypeBound@309..315 - TraitBound@309..315 + TraitRef@309..315 Path@309..315 PathSegment@309..315 Ident@309..315 "Trait2" @@ -258,7 +258,7 @@ Root@0..560 Colon@329..330 ":" WhiteSpace@330..331 " " TypeBound@331..337 - TraitBound@331..337 + TraitRef@331..337 Path@331..337 PathSegment@331..337 Ident@331..337 "Trait1" @@ -266,7 +266,7 @@ Root@0..560 Plus@338..339 "+" WhiteSpace@339..340 " " TypeBound@340..346 - TraitBound@340..346 + TraitRef@340..346 Path@340..346 PathSegment@340..346 Ident@340..346 "Trait2" @@ -289,7 +289,7 @@ Root@0..560 Colon@360..361 ":" WhiteSpace@361..362 " " TypeBound@362..368 - TraitBound@362..368 + TraitRef@362..368 Path@362..368 PathSegment@362..368 Ident@362..368 "Trait2" @@ -297,7 +297,7 @@ Root@0..560 Plus@369..370 "+" WhiteSpace@370..371 " " TypeBound@371..377 - TraitBound@371..377 + TraitRef@371..377 Path@371..377 PathSegment@371..377 Ident@371..377 "Trait3" @@ -356,7 +356,7 @@ Root@0..560 Colon@431..432 ":" WhiteSpace@432..433 " " TypeBound@433..446 - TraitBound@433..446 + TraitRef@433..446 Path@433..446 PathSegment@433..436 Ident@433..436 "std" @@ -407,7 +407,7 @@ Root@0..560 Colon@491..492 ":" WhiteSpace@492..493 " " 
TypeBound@493..498 - TraitBound@493..498 + TraitRef@493..498 Path@493..498 PathSegment@493..498 Ident@493..498 "Trait" @@ -415,7 +415,7 @@ Root@0..560 Plus@499..500 "+" WhiteSpace@500..501 " " TypeBound@501..514 - TraitBound@501..514 + TraitRef@501..514 Path@501..506 PathSegment@501..506 Ident@501..506 "Trait" From fa50af7337c6cdd533b9f921adf81410641680bf Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 21 Sep 2023 00:27:20 +0200 Subject: [PATCH 321/678] Fix a bug in trait implementation conflict check --- crates/hir-analysis/src/ty/diagnostics.rs | 38 ++++++---- crates/hir-analysis/src/ty/mod.rs | 14 ++++ crates/hir-analysis/src/ty/trait_.rs | 4 ++ crates/hir-analysis/src/ty/trait_lower.rs | 71 +++++++++++++++---- crates/hir-analysis/src/ty/ty_lower.rs | 5 +- crates/hir-analysis/src/ty/unify.rs | 32 +++++---- crates/hir-analysis/src/ty/visitor.rs | 2 +- .../fixtures/ty/alias_kind_mismatch.snap | 6 +- crates/uitest/fixtures/ty/kind_bound.snap | 6 +- crates/uitest/fixtures/ty/kind_mismatch.snap | 8 +-- 10 files changed, 131 insertions(+), 55 deletions(-) diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 9f431329a4..546dfce30a 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -20,7 +20,7 @@ pub struct GenericParamDiagAccumulator(pub(super) TyLowerDiag); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TyLowerDiag { NotFullyAppliedType(DynLazySpan), - KindMismatch(DynLazySpan, String), + InvalidTypeArg(DynLazySpan, String), RecursiveType { primary_span: DynLazySpan, field_span: DynLazySpan, @@ -45,7 +45,7 @@ impl TyLowerDiag { Self::NotFullyAppliedType(span) } - pub fn kind_mismatch(span: DynLazySpan, expected: Option, actual: Kind) -> Self { + pub fn invalid_type_arg(span: DynLazySpan, expected: Option, actual: Kind) -> Self { let msg = if let Some(expected) = expected { debug_assert!(expected != actual); @@ -54,7 +54,7 @@ impl TyLowerDiag { "too many generic arguments".to_string() }; - Self::KindMismatch(span, msg) + Self::InvalidTypeArg(span, msg) } pub(super) fn recursive_type(primary_span: DynLazySpan, field_span: DynLazySpan) -> Self { @@ -87,7 +87,7 @@ impl TyLowerDiag { fn local_code(&self) -> u16 { match self { Self::NotFullyAppliedType(_) => 0, - Self::KindMismatch(_, _) => 1, + Self::InvalidTypeArg(_, _) => 1, Self::RecursiveType { .. } => 2, Self::TypeAliasArgumentMismatch { .. } => 3, Self::TypeAliasCycle(_) => 4, @@ -100,7 +100,7 @@ impl TyLowerDiag { fn message(&self, db: &dyn HirDb) -> String { match self { Self::NotFullyAppliedType(_) => "expected fully applied type".to_string(), - Self::KindMismatch(_, _) => "kind mismatch between two types".to_string(), + Self::InvalidTypeArg(_, _) => "invalid type argument given".to_string(), Self::RecursiveType { .. 
} => "recursive type is not allowed".to_string(), Self::TypeAliasArgumentMismatch { @@ -129,7 +129,7 @@ impl TyLowerDiag { span.resolve(db), )], - Self::KindMismatch(span, msg) => vec![SubDiagnostic::new( + Self::InvalidTypeArg(span, msg) => vec![SubDiagnostic::new( LabelStyle::Primary, msg.clone(), span.resolve(db), @@ -323,18 +323,26 @@ pub enum TraitSatisfactionDiag { trait_def: Trait, }, - TraitArgumentMismatch { + TraitArgNumMismatch { span: DynLazySpan, trait_: Trait, n_given_arg: usize, }, + + TraitArgKindMismatch(DynLazySpan, String), } impl TraitSatisfactionDiag { + pub fn trait_arg_kind_mismatch(span: DynLazySpan, expected: &Kind, actual: &Kind) -> Self { + let msg = format!("expected `{}` kind, but found `{}` kind", expected, actual); + Self::TraitArgKindMismatch(span, msg) + } + fn local_code(&self) -> u16 { match self { Self::KindMismatch { .. } => 0, - Self::TraitArgumentMismatch { .. } => 1, + Self::TraitArgNumMismatch { .. } => 1, + Self::TraitArgKindMismatch(_, _) => 2, } } @@ -342,9 +350,9 @@ impl TraitSatisfactionDiag { match self { Self::KindMismatch { .. } => "type doesn't satisfy required kind bound".to_string(), - Self::TraitArgumentMismatch { .. } => { - "given trait argument number mismatch".to_string() - } + Self::TraitArgNumMismatch { .. } => "given trait argument number mismatch".to_string(), + + Self::TraitArgKindMismatch(_, _) => "given trait argument kind mismatch".to_string(), } } @@ -363,7 +371,7 @@ impl TraitSatisfactionDiag { ), ], - Self::TraitArgumentMismatch { + Self::TraitArgNumMismatch { span, trait_, n_given_arg, @@ -385,6 +393,12 @@ impl TraitSatisfactionDiag { ), ] } + + Self::TraitArgKindMismatch(span, msg) => vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], } } diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs index e53c571842..9c5b24ddb3 100644 --- a/crates/hir-analysis/src/ty/mod.rs +++ b/crates/hir-analysis/src/ty/mod.rs @@ -169,6 +169,20 @@ impl<'db> ModuleAnalysisPass for ImplTraitAnalysisPass<'db> { TraitImplDiag::Satisfaction(diag) => Box::new(diag.clone()) as _, TraitImplDiag::TraitImplLower(diag) => Box::new(diag.clone()) as _, }) + .chain( + top_mod + .all_impl_traits(self.db.as_hir_db()) + .iter() + .copied() + .map(|impl_trait| { + let owner_id = GenericParamOwnerId::new(self.db, impl_trait.into()); + collect_generic_params::accumulated::( + self.db, owner_id, + ) + }) + .flatten() + .map(|diag| Box::new(diag) as _), + ) .collect() } } diff --git a/crates/hir-analysis/src/ty/trait_.rs b/crates/hir-analysis/src/ty/trait_.rs index 323b7b351e..6842a18f8a 100644 --- a/crates/hir-analysis/src/ty/trait_.rs +++ b/crates/hir-analysis/src/ty/trait_.rs @@ -22,6 +22,10 @@ impl Implementor { pub(crate) fn trait_def(self, db: &dyn HirAnalysisDb) -> TraitDef { self.trait_(db).def(db) } + + pub(crate) fn substs(self, db: &dyn HirAnalysisDb) -> &Vec { + self.trait_(db).substs(db) + } } #[derive(Clone, Debug, PartialEq, Eq, Default)] diff --git a/crates/hir-analysis/src/ty/trait_lower.rs b/crates/hir-analysis/src/ty/trait_lower.rs index 6e17619d07..dcec06ee8f 100644 --- a/crates/hir-analysis/src/ty/trait_lower.rs +++ b/crates/hir-analysis/src/ty/trait_lower.rs @@ -49,43 +49,79 @@ pub(super) fn lower_trait_ref( trait_ref: TraitRef, ref_span: LazyTraitRefSpan, scope: ScopeId, -) -> (Option, Vec) { +) -> (Option, Vec) { let hir_db = db.as_hir_db(); - let (args, mut diags) = if let Some(args) = trait_ref.generic_args { + let (args, diags) = if let Some(args) = trait_ref.generic_args 
{ lower_generic_arg_list_with_diag(db, args, ref_span.generic_args(), scope) } else { (vec![], vec![]) }; + let mut diags = diags + .into_iter() + .map(TraitRefLowerDiag::Ty) + .collect::>(); + let Partial::Present(path) = trait_ref.path else { return (None, diags); }; - match resolve_path_early(db, path, scope) { + let trait_def = match resolve_path_early(db, path, scope) { EarlyResolvedPath::Full(bucket) => match bucket.pick(NameDomain::Type) { Ok(res) => { let NameResKind::Scope(ScopeId::Item(ItemKind::Trait(trait_))) = res.kind else { return (None, diags); }; - let trait_def = lower_trait(db, trait_); - (Some(TraitInstId::new(db, trait_def, args)), diags) + lower_trait(db, trait_) } - Err(_) => (None, diags), + Err(_) => return (None, diags), }, EarlyResolvedPath::Partial { .. } => { - diags.push(TyLowerDiag::AssocTy(ref_span.path_moved().into()).into()); - (None, diags) + diags.push(TyLowerDiag::AssocTy(ref_span.path().into()).into()); + return (None, diags); + } + }; + + if trait_def.args(db).len() != args.len() { + diags.push( + TraitSatisfactionDiag::TraitArgNumMismatch { + span: ref_span.into(), + trait_: trait_def.trait_(db), + n_given_arg: args.len(), + } + .into(), + ); + return (None, diags); + } + + let mut has_error = false; + for (i, (expected, given)) in trait_def.args(db).iter().zip(&args).enumerate() { + if expected.kind(db) != given.kind(db) { + let span = ref_span.generic_args().arg_moved(i).into(); + let diag = TraitSatisfactionDiag::trait_arg_kind_mismatch( + span, + expected.kind(db), + given.kind(db), + ); + diags.push(diag.into()); + has_error = true; } } + + if !has_error { + (Some(TraitInstId::new(db, trait_def, args)), diags) + } else { + (None, diags) + } } struct TraitBuilder<'db> { db: &'db dyn HirAnalysisDb, trait_: Trait, params: Vec, - self_args: TyId, + self_arg: TyId, // TODO: We need to lower associated methods here. 
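// ---------------------------------------------------------------------------
// [Editorial aside; not part of the patch] A minimal, self-contained sketch of
// the argument-validation pattern that `lower_trait_ref` adopts above: check
// the number of trait arguments first, then compare the kind of every given
// argument against the declared parameter kind, accumulating one diagnostic
// per mismatch rather than stopping at the first error. `Kind` and `ArgDiag`
// are simplified stand-ins, not the actual `hir-analysis` types.
// ---------------------------------------------------------------------------
#[derive(Debug, Clone, PartialEq, Eq)]
enum Kind {
    Star,
    Abs(Box<Kind>, Box<Kind>), // `(k1 -> k2)`, e.g. `(* -> *)`
}

#[derive(Debug)]
enum ArgDiag {
    NumMismatch { expected: usize, given: usize },
    KindMismatch { idx: usize, expected: Kind, given: Kind },
}

fn check_trait_args(expected: &[Kind], given: &[Kind]) -> Result<(), Vec<ArgDiag>> {
    // Arity is checked first; with the wrong argument count a per-argument
    // kind comparison is meaningless.
    if expected.len() != given.len() {
        return Err(vec![ArgDiag::NumMismatch {
            expected: expected.len(),
            given: given.len(),
        }]);
    }

    // Collect a diagnostic for every mismatching position, mirroring the
    // `has_error` loop in `lower_trait_ref`.
    let diags: Vec<ArgDiag> = expected
        .iter()
        .zip(given)
        .enumerate()
        .filter(|(_, (e, g))| e != g)
        .map(|(idx, (e, g))| ArgDiag::KindMismatch {
            idx,
            expected: e.clone(),
            given: g.clone(),
        })
        .collect();

    if diags.is_empty() {
        Ok(())
    } else {
        Err(diags)
    }
}
// Usage: `check_trait_args(&[Kind::Star], &[Kind::Abs(..)])` yields a
// `KindMismatch` at index 0; a wrong count yields a single `NumMismatch`.
// ---------------------------------------------------------------------------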
// methods: Vec } @@ -98,12 +134,12 @@ impl<'db> TraitBuilder<'db> { db, trait_, params: params_set.params.clone(), - self_args: params_set.trait_self.unwrap(), + self_arg: params_set.trait_self.unwrap(), } } fn build(self) -> TraitDef { - TraitDef::new(self.db, self.trait_, self.params, self.self_args) + TraitDef::new(self.db, self.trait_, self.params, self.self_arg) } } @@ -276,14 +312,19 @@ impl Implementor { return false; } - let generalized = self.generalize(db, table); - for (&self_param, &other_param) in generalized.params(db).iter().zip(other.params(db)) { - if !table.unify(self_param, other_param) { + let generalized_self = self.generalize(db, table); + let generalized_other = other.generalize(db, table); + for (&self_arg, &other_arg) in generalized_self + .substs(db) + .iter() + .zip(generalized_other.substs(db)) + { + if !table.unify(self_arg, other_arg) { return false; } } - table.unify(generalized.ty(db), other.ty(db)) + table.unify(generalized_self.ty(db), generalized_other.ty(db)) } } diff --git a/crates/hir-analysis/src/ty/ty_lower.rs b/crates/hir-analysis/src/ty/ty_lower.rs index 22ccdfd918..927b10d4de 100644 --- a/crates/hir-analysis/src/ty/ty_lower.rs +++ b/crates/hir-analysis/src/ty/ty_lower.rs @@ -1,7 +1,7 @@ use either::Either; use hir::{ hir_def::{ - scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, + kw, scope_graph::ScopeId, FieldDefListId, GenericArg, GenericArgListId, GenericParam, GenericParamOwner, IdentId, ItemKind, KindBound as HirKindBound, Partial, PathId, Trait, TupleTypeId, TypeAlias as HirTypeAlias, TypeId as HirTyId, TypeKind as HirTyKind, VariantDefListId, VariantKind, WherePredicate, @@ -489,7 +489,7 @@ struct GenericParamCollector<'db> { impl<'db> GenericParamCollector<'db> { fn new(db: &'db dyn HirAnalysisDb, parent: GenericParamOwner) -> Self { let trait_self = TyParamPrecursor { - name: Partial::Absent, + name: Partial::Present(kw::SELF_TY), idx: None, kind: Kind::Star, kind_span: None, @@ -652,6 +652,7 @@ impl<'db> Visitor for GenericParamCollector<'db> { } } +#[derive(Debug)] struct TyParamPrecursor { name: Partial, idx: Option, diff --git a/crates/hir-analysis/src/ty/unify.rs b/crates/hir-analysis/src/ty/unify.rs index 1196003cc4..576ff4a47d 100644 --- a/crates/hir-analysis/src/ty/unify.rs +++ b/crates/hir-analysis/src/ty/unify.rs @@ -59,22 +59,20 @@ impl<'db> UnificationTable<'db> { let ty2 = self.apply(self.db, ty2); match (ty1.data(self.db), ty2.data(self.db)) { - (TyData::TyVar(var), _) if !ty2.free_inference_keys(self.db).contains(&var.key) => { - self.table - .union_value(var.key, InferenceValue::Bounded(ty2)); - true - } - - (_, TyData::TyVar(var)) if !ty1.free_inference_keys(self.db).contains(&var.key) => { - self.table - .union_value(var.key, InferenceValue::Bounded(ty2)); - true - } (TyData::TyVar(var1), TyData::TyVar(var2)) => { - self.table.union(var1.key, var2.key); - true + self.table.unify_var_var(var1.key, var2.key).is_ok() } + (TyData::TyVar(var), _) if !ty2.free_inference_keys(self.db).contains(&var.key) => self + .table + .unify_var_value(var.key, InferenceValue::Bounded(ty2)) + .is_ok(), + + (_, TyData::TyVar(var)) if !ty1.free_inference_keys(self.db).contains(&var.key) => self + .table + .unify_var_value(var.key, InferenceValue::Bounded(ty1)) + .is_ok(), + (TyData::TyApp(ty1_1, ty1_2), TyData::TyApp(ty2_1, ty2_2)) => { let ok = self.unify_impl(ty1_1, ty2_1); if ok { @@ -132,7 +130,7 @@ impl UnifyKey for InferenceKey { } impl UnifyValue for InferenceValue { - type Error = NoError; + type Error 
= (); fn unify_values(v1: &Self, v2: &Self) -> Result { match (v1, v2) { @@ -147,7 +145,11 @@ impl UnifyValue for InferenceValue { } (InferenceValue::Bounded(ty1), InferenceValue::Bounded(ty2)) => { - panic!("trying to unify two bounded types {:?} and {:?}", ty1, ty2) + if ty1 != ty2 { + Err(()) + } else { + Ok(InferenceValue::Bounded(*ty1)) + } } } } diff --git a/crates/hir-analysis/src/ty/visitor.rs b/crates/hir-analysis/src/ty/visitor.rs index 200d5f45d1..50865941cb 100644 --- a/crates/hir-analysis/src/ty/visitor.rs +++ b/crates/hir-analysis/src/ty/visitor.rs @@ -100,7 +100,7 @@ impl<'db> TyDiagCollector<'db> { } InvalidCause::KindMismatch { expected, given } => { - let diag = TyLowerDiag::kind_mismatch(span, expected, given); + let diag = TyLowerDiag::invalid_type_arg(span, expected, given); self.diags.push(diag); } diff --git a/crates/uitest/fixtures/ty/alias_kind_mismatch.snap b/crates/uitest/fixtures/ty/alias_kind_mismatch.snap index 26e031e4ee..7b45a8d70e 100644 --- a/crates/uitest/fixtures/ty/alias_kind_mismatch.snap +++ b/crates/uitest/fixtures/ty/alias_kind_mismatch.snap @@ -3,19 +3,19 @@ source: crates/uitest/tests/ty.rs expression: diags input_file: crates/uitest/fixtures/ty/alias_kind_mismatch.fe --- -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ alias_kind_mismatch.fe:6:24 │ 6 │ type T1 = S0 │ ^^^ too many generic arguments -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ alias_kind_mismatch.fe:7:14 │ 7 │ type T2 = S0 │ ^^ expected `*` kind, but found `(* -> (* -> *))` kind -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ alias_kind_mismatch.fe:12:16 │ 12 │ t: T3 diff --git a/crates/uitest/fixtures/ty/kind_bound.snap b/crates/uitest/fixtures/ty/kind_bound.snap index 79f759265e..7e29ad0beb 100644 --- a/crates/uitest/fixtures/ty/kind_bound.snap +++ b/crates/uitest/fixtures/ty/kind_bound.snap @@ -9,19 +9,19 @@ error[2-0002]: `wrapper3` is not found 34 │ bar_err3: wrapper3 │ ^^^^^^^^ `wrapper3` is not found -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ kind_bound.fe:25:23 │ 25 │ foo_err: Wrapper2 │ ^^^ expected `(* -> *)` kind, but found `*` kind -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ kind_bound.fe:32:24 │ 32 │ bar_err1: Wrapper3 │ ^^^ expected `((* -> *) -> (* -> *))` kind, but found `*` kind -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ kind_bound.fe:33:34 │ 33 │ bar_err2: Wrapper3 diff --git a/crates/uitest/fixtures/ty/kind_mismatch.snap b/crates/uitest/fixtures/ty/kind_mismatch.snap index 390db2011b..b3b4955e82 100644 --- a/crates/uitest/fixtures/ty/kind_mismatch.snap +++ b/crates/uitest/fixtures/ty/kind_mismatch.snap @@ -11,25 +11,25 @@ error[2-0001]: `baz` conflicts with other definitions 10 │ baz: Foo> │ --- `baz` is redefined here -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ kind_mismatch.fe:7:19 │ 7 │ foo: Foo │ ^^^ expected `*` kind, but found `(* -> (* -> *))` kind -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ kind_mismatch.fe:8:14 │ 8 │ bar: Foo │ ^^^ expected `*` kind, but found `(* -> (* -> *))` kind -error[3-0001]: kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ kind_mismatch.fe:9:24 │ 9 │ baz: Foo │ ^^^^ too many generic arguments -error[3-0001]: 
kind mismatch between two types +error[3-0001]: invalid type argument given ┌─ kind_mismatch.fe:10:33 │ 10 │ baz: Foo> From 685dc0adeda5e0325ca6a9b31ca6a220bf2d5e45 Mon Sep 17 00:00:00 2001 From: Yoshitomo Nakanishi Date: Thu, 21 Sep 2023 00:55:25 +0200 Subject: [PATCH 322/678] Add uitest fixtures for trait impl --- .../uitest/fixtures/ty/trait_arg_mismatch.fe | 11 ++++++++ .../fixtures/ty/trait_arg_mismatch.snap | 27 +++++++++++++++++++ .../uitest/fixtures/ty/trait_impl_conflict.fe | 17 ++++++++++++ .../fixtures/ty/trait_impl_conflict.snap | 23 ++++++++++++++++ .../fixtures/ty/trait_impl_kind_mismatch.fe | 12 +++++++++ .../fixtures/ty/trait_impl_kind_mismatch.snap | 24 +++++++++++++++++ 6 files changed, 114 insertions(+) create mode 100644 crates/uitest/fixtures/ty/trait_arg_mismatch.fe create mode 100644 crates/uitest/fixtures/ty/trait_arg_mismatch.snap create mode 100644 crates/uitest/fixtures/ty/trait_impl_conflict.fe create mode 100644 crates/uitest/fixtures/ty/trait_impl_conflict.snap create mode 100644 crates/uitest/fixtures/ty/trait_impl_kind_mismatch.fe create mode 100644 crates/uitest/fixtures/ty/trait_impl_kind_mismatch.snap diff --git a/crates/uitest/fixtures/ty/trait_arg_mismatch.fe b/crates/uitest/fixtures/ty/trait_arg_mismatch.fe new file mode 100644 index 0000000000..99dca4bc39 --- /dev/null +++ b/crates/uitest/fixtures/ty/trait_arg_mismatch.fe @@ -0,0 +1,11 @@ +pub trait Foo *> {} + +enum Option { + Some(T) + None +} + +impl Foo for i32 {} +impl Foo> for i32 {} +impl Foo
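// ---------------------------------------------------------------------------
// [Editorial aside; not part of the patch] A condensed model of the
// `UnifyValue` change made in `crates/hir-analysis/src/ty/unify.rs` earlier in
// this series: unifying two already-bound inference slots now returns a
// recoverable `Err` when the bound types differ, instead of panicking, so
// callers can simply test `is_ok()`. The enum below is a simplified stand-in;
// `Ty` is a placeholder for the real interned `TyId`.
// ---------------------------------------------------------------------------
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum InferenceValue<Ty> {
    Unbound,
    Bounded(Ty),
}

fn unify_values<Ty: Copy + Eq>(
    v1: InferenceValue<Ty>,
    v2: InferenceValue<Ty>,
) -> Result<InferenceValue<Ty>, ()> {
    use InferenceValue::*;
    match (v1, v2) {
        // An unbound slot adopts the other side's binding (or stays unbound).
        (Unbound, v) | (v, Unbound) => Ok(v),
        // Two identical bindings unify trivially.
        (Bounded(t1), Bounded(t2)) if t1 == t2 => Ok(Bounded(t1)),
        // Conflicting bindings: surface an error rather than panicking, so a
        // caller's `unify_var_value(..).is_ok()` check fails gracefully.
        _ => Err(()),
    }
}
// ---------------------------------------------------------------------------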