Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
Add basic support for @decorators, and use them to implement the @Stub
decorator, removing the need for the separate `stub_mode` parsing path in the parser.
  • Loading branch information
kengorab committed Oct 12, 2023
1 parent bb9d2c5 commit 08ffde0
Show file tree
Hide file tree
Showing 10 changed files with 520 additions and 243 deletions.
4 changes: 3 additions & 1 deletion abra_core/src/lexer/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -470,6 +470,7 @@ impl<'a> Lexer<'a> {
',' => Ok(Some(Token::Comma(pos))),
':' => Ok(Some(Token::Colon(pos))),
'.' => Ok(Some(Token::Dot(pos))),
'@' => Ok(Some(Token::At(pos))),
_ => Ok(None)
}
}
Expand Down Expand Up @@ -602,7 +603,7 @@ mod tests {

#[test]
fn test_tokenize_separators() {
let input = "( ) [ ] { } | , : ? #{";
let input = "( ) [ ] { } | , : ? #{ @";
let tokens = tokenize(input).unwrap();
let expected = vec![
Token::LParen(Position::new(1, 1), false),
Expand All @@ -616,6 +617,7 @@ mod tests {
Token::Colon(Position::new(1, 17)),
Token::Question(Position::new(1, 19)),
Token::LBraceHash(Position::new(1, 21)),
Token::At(Position::new(1, 24)),
];
assert_eq!(expected, tokens);
}
Expand Down
5 changes: 4 additions & 1 deletion abra_core/src/lexer/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@ pub enum Token {
#[strum(to_string = ".", serialize = "Dot")] Dot(Position),
#[strum(to_string = "?.", serialize = "QuestionDot")] QuestionDot(Position),
#[strum(to_string = "=>", serialize = "Arrow")] Arrow(Position),
#[strum(to_string = "@", serialize = "At")] At(Position),
}

impl Token {
Expand Down Expand Up @@ -212,7 +213,8 @@ impl Token {
Token::Question(pos) |
Token::Dot(pos) |
Token::QuestionDot(pos) |
Token::Arrow(pos) => pos
Token::Arrow(pos) |
Token::At(pos) => pos
};
pos.clone()
}
Expand Down Expand Up @@ -296,6 +298,7 @@ impl Token {
Token::Dot(pos) => Range::with_length(pos, 0),
Token::QuestionDot(pos) => Range::with_length(pos, 1),
Token::Arrow(pos) => Range::with_length(pos, 1),
Token::At(pos) => Range::with_length(pos, 0),
}
}

Expand Down
10 changes: 0 additions & 10 deletions abra_core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,16 +74,6 @@ fn tokenize_and_parse(module_id: &ModuleId, input: &String) -> Result<ParseResul
}
}

fn tokenize_and_parse_stub(module_id: &ModuleId, input: &String) -> Result<ParseResult, Error> {
match lexer::lexer::tokenize(module_id, input) {
Err(e) => Err(Error::LexerError(e)),
Ok(tokens) => match parser::parser::parse_stub(module_id.clone(), tokens) {
Err(e) => Err(Error::ParseError(e)),
Ok(nodes) => Ok(nodes)
}
}
}

pub fn typecheck<R>(module_id: ModuleId, input: &String, loader: &mut ModuleLoader<R>) -> Result<TypedModule, Error>
where R: ModuleReader
{
Expand Down
12 changes: 12 additions & 0 deletions abra_core/src/parser/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,7 @@ impl BindingPattern {

#[derive(Clone, Debug, PartialEq)]
pub struct BindingDeclNode {
pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
pub binding: BindingPattern,
pub type_ann: Option<TypeIdentifier>,
Expand All @@ -253,6 +254,7 @@ pub fn args_to_parameters(raw_arg_tuple: &(Token, Option<TypeIdentifier>, bool,

#[derive(Clone, Debug, PartialEq)]
pub struct FunctionDeclNode {
pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
// Must be a Token::Ident
pub name: Token,
Expand Down Expand Up @@ -284,6 +286,7 @@ impl LambdaNode {

#[derive(Clone, Debug, PartialEq)]
pub struct TypeDeclNode {
pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
// Must be a Token::Ident
pub name: Token,
Expand All @@ -303,6 +306,7 @@ pub struct TypeDeclField {

#[derive(Clone, Debug, PartialEq)]
pub struct EnumDeclNode {
pub decorators: Vec<DecoratorNode>,
pub export_token: Option<Token>,
// Must be a Token::Ident
pub name: Token,
Expand Down Expand Up @@ -440,6 +444,14 @@ pub struct ImportNode {
pub module_id: ModuleId,
}

#[derive(Clone, Debug, PartialEq)]
/// AST node for a `@decorator` annotation attached to a declaration
/// (e.g. `@Stub` on a function, type, enum, or binding declaration).
pub struct DecoratorNode {
    /// The `@` token that introduced the decorator; retained for position/range reporting.
    pub at_token: Token,
    // Must be a Token::Ident
    /// The decorator's name (e.g. the `Stub` in `@Stub`).
    pub name: Token,
    /// Decorator arguments as (optional label, value) pairs; the label token is
    /// `Some` for named arguments (e.g. `@Dec(key: expr)`) and `None` for positional ones.
    pub args: Vec<(Option<Token>, AstNode)>,
}

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum ModulePathSegment {
CurrentDir,
Expand Down
Loading

0 comments on commit 08ffde0

Please sign in to comment.