
Commit

order precedence
Selyss committed Dec 13, 2023
1 parent 4e6455d commit 71ddfbf
Showing 2 changed files with 83 additions and 1 deletion.
56 changes: 55 additions & 1 deletion parser/parser.go
@@ -8,12 +8,26 @@ import (
"github.com/Selyss/AutoCal/token"
)

const (
	_ int = iota
	LOWEST
	EQUALS      // ==
	LESSGREATER // > or <
	SUM         // +
	PRODUCT     // *
	PREFIX      // -X or !X
	CALL        // myFunction(X)
)
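
// iota makes these ascending integers, so the names double as a precedence
// ordering: LOWEST < EQUALS < LESSGREATER < SUM < PRODUCT < PREFIX < CALL.
// Only the relative order matters; for example PRODUCT > SUM is what would make
// "2 + 3 * 4" group as 2 + (3 * 4) once infix parsing starts comparing these values
// (that comparison is not part of this commit yet).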

type Parser struct {
	l *lexer.Lexer

	errors    []string
	curToken  token.Token
	peekToken token.Token

	prefixParseFns map[token.TokenType]prefixParseFn
	infixParseFns  map[token.TokenType]infixParseFn
}

func New(l *lexer.Lexer) *Parser {
@@ -22,13 +36,33 @@ func New(l *lexer.Lexer) *Parser {
		errors: []string{},
	}

	p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
	p.registerPrefix(token.IDENT, p.parseIdentifier)

	// Read two tokens, so curToken and peekToken are both set
	p.nextToken()
	p.nextToken()

	return p
}

func (p *Parser) parseIdentifier() ast.Expression {
	return &ast.Identifier{Token: p.curToken, Value: p.curToken.Literal}
}
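
// Note that parseIdentifier does not advance any tokens: parseExpression calls
// it with curToken already sitting on the IDENT token and leaves curToken there.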

type (
	prefixParseFn func() ast.Expression
	infixParseFn  func(ast.Expression) ast.Expression
)

func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
	p.prefixParseFns[tokenType] = fn
}

func (p *Parser) registerInfix(tokenType token.TokenType, fn infixParseFn) {
	p.infixParseFns[tokenType] = fn
}
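
// Nothing is registered through registerInfix yet in this commit; a later change
// might add entries such as p.registerInfix(token.PLUS, p.parseInfixExpression)
// (hypothetical names) once infix expressions are parsed.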

func (p *Parser) Errors() []string {
	return p.errors
}
@@ -57,12 +91,32 @@ func (p *Parser) ParseProgram() *ast.Program {
	return program
}

func (p *Parser) parseExpression(precedence int) ast.Expression {
	prefix := p.prefixParseFns[p.curToken.Type]
	if prefix == nil {
		return nil
	}
	leftExp := prefix()
	return leftExp
}
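
// The precedence parameter is not consumed yet. A full Pratt parser would keep
// folding infix operators into leftExp while the next token binds more tightly —
// a rough sketch, assuming a peekPrecedence helper and registered infix functions,
// neither of which exists in this commit:
//
//	for !p.peekTokenIs(token.PERIOD) && precedence < p.peekPrecedence() {
//		infix := p.infixParseFns[p.peekToken.Type]
//		if infix == nil {
//			return leftExp
//		}
//		p.nextToken()
//		leftExp = infix(leftExp)
//	}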

func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
	stmt := &ast.ExpressionStatement{Token: p.curToken}
	stmt.Expression = p.parseExpression(LOWEST)

	if p.peekTokenIs(token.PERIOD) {
		p.nextToken()
	}

	return stmt
}
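
// The trailing PERIOD acts as an optional statement terminator: if it is the next
// token it is consumed so parsing resumes at the following statement, and if it is
// absent the expression statement still parses.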

func (p *Parser) parseStatement() ast.Statement {
	switch p.curToken.Type {
	case token.LET:
		return p.parseLetStatement()
	default:
-		return nil
+		return p.parseExpressionStatement()
	}
}

28 changes: 28 additions & 0 deletions parser/parser_test.go
@@ -62,6 +62,34 @@ func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
	return true
}

func TestIdentifierExpression(t *testing.T) {
	input := "foobar."
	l := lexer.New(input)
	p := New(l)
	program := p.ParseProgram()
	checkParserErrors(t, p)
	if len(program.Statements) != 1 {
		t.Fatalf("program has not enough statements. got=%d",
			len(program.Statements))
	}
	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
	if !ok {
		t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. got=%T",
			program.Statements[0])
	}
	ident, ok := stmt.Expression.(*ast.Identifier)
	if !ok {
		t.Fatalf("exp not *ast.Identifier. got=%T", stmt.Expression)
	}
	if ident.Value != "foobar" {
		t.Errorf("ident.Value not %s. got=%s", "foobar", ident.Value)
	}
	if ident.TokenLiteral() != "foobar" {
		t.Errorf("ident.TokenLiteral not %s. got=%s", "foobar",
			ident.TokenLiteral())
	}
}
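
// The "foobar." input should produce exactly one ExpressionStatement whose
// Expression is an *ast.Identifier; the trailing '.' is the terminator that
// parseExpressionStatement consumes.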

func checkParserErrors(t *testing.T, p *Parser) {
	errors := p.Errors()
	if len(errors) == 0 {
