Skip to content

Commit

Permalink
format tokenType with location and offset cleanups
Browse files Browse the repository at this point in the history
  • Loading branch information
gravataLonga committed Jul 1, 2022
1 parent 6a423c6 commit d5b3d10
Show file tree
Hide file tree
Showing 9 changed files with 68 additions and 128 deletions.
26 changes: 15 additions & 11 deletions evaluator/import_test.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package evaluator

import (
"fmt"
"ninja/object"
"testing"
)
Expand Down Expand Up @@ -32,25 +33,28 @@ func TestErrorImportHandling(t *testing.T) {
},
{
`import "../fixtures/stub-with-error.nj"`,
"../fixtures/stub-with-error.nj: expected next token to be (, got EOF instead. [line: 1, character: 14]",
"../fixtures/stub-with-error.nj: expected next token to be (, got EOF (\x00) at [Line: 1, Offset: 13] instead.",
},
{
`import "../fixtures/stub-with-error-in-function.nj"`,
"../fixtures/stub-with-error-in-function.nj: Function expected 2 arguments, got 3",
},
}

for _, tt := range tests {
evaluated := testEval(tt.input, t)
for i, tt := range tests {

t.Run(fmt.Sprintf("TestErrorImportHandling_%d", i), func(t *testing.T) {
evaluated := testEval(tt.input, t)

errObj, ok := evaluated.(*object.Error)
if !ok {
t.Fatalf("no error object returned. got=%T(%+v)", evaluated, evaluated)
}

errObj, ok := evaluated.(*object.Error)
if !ok {
t.Errorf("no error object returned. got=%T(%+v)", evaluated, evaluated)
continue
}
if errObj.Message != tt.expectedMessage {
t.Errorf("wrong error message. expected=%q, got=%q", tt.expectedMessage, errObj.Message)
}
})

if errObj.Message != tt.expectedMessage {
t.Errorf("wrong error message. expected=%q, got=%q", tt.expectedMessage, errObj.Message)
}
}
}
65 changes: 28 additions & 37 deletions lexer/lexer.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package lexer

import (
"fmt"
"io"
"ninja/token"
)
Expand Down Expand Up @@ -61,12 +60,11 @@ func (l *Lexer) NextToken() token.Token {
})

case ';':
tok = newToken(token.SEMICOLON, []byte{l.ch})
tok = l.newToken(token.SEMICOLON, []byte{l.ch})
case '"':
tok.Type = token.STRING
tok.Literal = l.readString()
tok = l.newToken(token.STRING, l.readString())
case '*':
tok = newToken(token.ASTERISK, []byte{l.ch})
tok = l.newToken(token.ASTERISK, []byte{l.ch})
case '/':
if l.peekChar() == '/' {
l.skipSingleLineComment()
Expand All @@ -77,27 +75,27 @@ func (l *Lexer) NextToken() token.Token {
l.skipMultiLineComment()
return l.NextToken()
}
tok = newToken(token.SLASH, []byte{l.ch})
tok = l.newToken(token.SLASH, []byte{l.ch})
case '&':
if l.peekChar() != '&' {
ch := l.ch
l.readChar()
tok = newToken(token.ILLEGAL, []byte{ch, l.ch})
tok = l.newToken(token.ILLEGAL, []byte{ch, l.ch})
} else {
ch := l.ch
l.readChar()
tok = newToken(token.AND, []byte{ch, l.ch})
tok = l.newToken(token.AND, []byte{ch, l.ch})
}

case '|':
if l.peekChar() != '|' {
ch := l.ch
l.readChar()
tok = newToken(token.ILLEGAL, []byte{ch, l.ch})
tok = l.newToken(token.ILLEGAL, []byte{ch, l.ch})
} else {
ch := l.ch
l.readChar()
tok = newToken(token.OR, []byte{ch, l.ch})
tok = l.newToken(token.OR, []byte{ch, l.ch})
}

case '-':
Expand Down Expand Up @@ -130,62 +128,55 @@ func (l *Lexer) NextToken() token.Token {
})

case ':':
tok = newToken(token.COLON, []byte{l.ch})
tok = l.newToken(token.COLON, []byte{l.ch})
case '(':
tok = newToken(token.LPAREN, []byte{l.ch})
tok = l.newToken(token.LPAREN, []byte{l.ch})
case ')':
tok = newToken(token.RPAREN, []byte{l.ch})
tok = l.newToken(token.RPAREN, []byte{l.ch})
case '{':
tok = newToken(token.LBRACE, []byte{l.ch})
tok = l.newToken(token.LBRACE, []byte{l.ch})
case '}':
tok = newToken(token.RBRACE, []byte{l.ch})
tok = l.newToken(token.RBRACE, []byte{l.ch})
case '[':
tok = newToken(token.LBRACKET, []byte{l.ch})
tok = l.newToken(token.LBRACKET, []byte{l.ch})
case ']':
tok = newToken(token.RBRACKET, []byte{l.ch})
tok = l.newToken(token.RBRACKET, []byte{l.ch})
case ',':
tok = newToken(token.COMMA, []byte{l.ch})
tok = l.newToken(token.COMMA, []byte{l.ch})
case '.':
tok = newToken(token.DOT, []byte{l.ch})
tok = l.newToken(token.DOT, []byte{l.ch})
case 0:
tok.Literal = []byte{0}
tok.Type = token.EOF

tok = l.newToken(token.EOF, []byte{0})
default:
if isLetter(l.ch) {
tok.Literal = l.readIdentifier()
tok.Type = token.LookupIdentifier(tok.Literal)
return tok
literal := l.readIdentifier()
return l.newToken(token.LookupIdentifier(literal), literal)
} else if isDigit(l.ch) {
tok.Literal = l.readDigit()
tok.Type = token.DigitType(tok.Literal)
return tok
literal := l.readDigit()
return l.newToken(token.DigitType(literal), literal)
} else {
tok = newToken(token.ILLEGAL, []byte{l.ch})
tok = l.newToken(token.ILLEGAL, []byte{l.ch})
}
}

l.readChar()
return tok
}

func (l *Lexer) FormatLineCharacter() string {
return fmt.Sprintf("[line: %d, character: %d]", l.lineNumber+1, l.characterPositionInLine)
}

func newToken(tokenType token.TokenType, ch []byte) token.Token {
return token.Token{Type: tokenType, Literal: ch}
func (l *Lexer) newToken(tokenType token.TokenType, ch []byte) token.Token {
location := token.Location{Line: l.lineNumber + 1, Offset: l.characterPositionInLine}
return token.Token{Type: tokenType, Literal: ch, Location: location}
}

// newTokenPeekOrDefault emits a two-character token when the peeked
// character matches an entry in expectedPeek (e.g. "==", "<="); otherwise
// it emits a single-character token of tokenType for the current char.
// When a match is found the lexer is advanced past the second character.
// NOTE(review): the diff rendering duplicated the old `newToken(...)` and
// new `l.newToken(...)` return lines — only the method calls survive.
func (l *Lexer) newTokenPeekOrDefault(tokenType token.TokenType, expectedPeek map[byte]token.TokenType) token.Token {
	peekToken, ok := expectedPeek[l.peekChar()]
	if !ok {
		return l.newToken(tokenType, []byte{l.ch})
	}

	ch := l.ch
	l.readChar()
	return l.newToken(peekToken, []byte{ch, l.ch})
}

func isLetter(ch byte) bool {
Expand Down
63 changes: 0 additions & 63 deletions lexer/lexer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -163,66 +163,3 @@ func TestStringAcceptUtf8Character(t *testing.T) {
}

}

// TestLexer_KeepTrackPosition verifies that the lexer tracks line number
// and in-line character position while scanning, by consuming tokens
// until the identifier "a" is about to be read and then checking the
// recorded position against expectations.
func TestLexer_KeepTrackPosition(t *testing.T) {
	// Note: we locate the probe point by peeking for the identifier "a".
	tests := []struct {
		input        string
		linePosition int
		charPosition int
		expected     string
	}{
		{
			`var a = 0;`,
			0,
			4,
			"[line: 1, character: 4]",
		},
		{
			`
var b = 0;
var a = 0;`,
			2,
			4,
			"[line: 3, character: 4]",
		}, {
			`
var b = 0;
/*
Hello World
*/
var a = 0;`,
			5,
			4,
			"[line: 6, character: 4]",
		},
	}

	for _, tt := range tests {
		l := New(strings.NewReader(tt.input))
		for {
			curToken := l.NextToken()
			if l.peekChar() == 'a' {
				break
			}

			// Fatalf stops the test, so no break is needed after it.
			if curToken.Type == token.EOF {
				t.Fatalf("Unable to find token a. Got: EOF")
			}
		}

		if tt.linePosition != l.lineNumber {
			t.Errorf("Wrong line, expected %d. Got: %d", tt.linePosition, l.lineNumber)
		}

		// Fixed copy-pasted message: this checks the character position,
		// not the line.
		if tt.charPosition != l.characterPositionInLine {
			t.Errorf("Wrong character position, expected %d. Got: %d", tt.charPosition, l.characterPositionInLine)
		}

		if tt.expected != l.FormatLineCharacter() {
			t.Errorf("l.FormatLineCharacter expected %s. Got: %s", tt.expected, l.FormatLineCharacter())
		}

	}
}
2 changes: 1 addition & 1 deletion parser/assign.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ func (p *Parser) parseVarStatement() *ast.VarStatement {
p.nextToken()

if p.curTokenIs(token.ASSIGN) {
p.newError("expected next token to be %s, got %s instead. %s", token.IDENT, p.curToken.Type, p.l.FormatLineCharacter())
p.newError("expected next token to be %s, got %s instead.", token.IDENT, p.curToken)
return nil
}

Expand Down
8 changes: 4 additions & 4 deletions parser/assign_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -121,10 +121,10 @@ var = =;
tests := []struct {
expectedError string
}{
{fmt.Sprintf("expected next token to be %s, got %s instead. [line: 2, character: 11]", token.ASSIGN, token.TRUE)},
{fmt.Sprintf("expected next token to be %s, got %s instead. [line: 3, character: 6]", token.IDENT, token.ASSIGN)},
{fmt.Sprintf("expected next token to be %s, got %s instead. [line: 4, character: 8]", token.IDENT, token.VAR)},
{fmt.Sprintf("expected next token to be %s, got %s instead. [line: 5, character: 6]", token.IDENT, token.ASSIGN)},
{fmt.Sprintf("expected next token to be %s, got %s (true) at [Line: 2, Offset: 11] instead.", token.ASSIGN, token.TRUE)},
{fmt.Sprintf("expected next token to be %s, got %s (=) at [Line: 3, Offset: 5] instead.", token.IDENT, token.ASSIGN)},
{fmt.Sprintf("expected next token to be %s, got %s (var) at [Line: 4, Offset: 8] instead.", token.IDENT, token.VAR)},
{fmt.Sprintf("expected next token to be %s, got %s (=) at [Line: 5, Offset: 5] instead.", token.IDENT, token.ASSIGN)},
}

errors := p.Errors()
Expand Down
4 changes: 2 additions & 2 deletions parser/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ func (p *Parser) expectPeek(tok token.TokenType) bool {
func (p *Parser) peekError(t ...token.TokenType) {

if len(t) == 1 {
p.newError("expected next token to be %s, got %s instead. %s", t[0], p.peekToken.Type, p.l.FormatLineCharacter())
p.newError("expected next token to be %s, got %s instead.", t[0], p.peekToken)
return
}

Expand All @@ -194,7 +194,7 @@ func (p *Parser) peekError(t ...token.TokenType) {
listTokens += listTokens + " " + fmt.Sprintf("%s", i)
}

p.newError("expected next token to be %s, got %s instead. %s", listTokens, p.peekToken.Type, p.l.FormatLineCharacter())
p.newError("expected next token to be %s, got %s instead.", listTokens, p.peekToken)
}

func (p *Parser) peekPrecedence() int {
Expand Down
2 changes: 1 addition & 1 deletion parser/return.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ func (p *Parser) parseReturnStatement() *ast.ReturnStatement {

// @todo probably i'm doing something wrong.
if p.peekTokenAny(token.VAR, token.RETURN, token.DECRE, token.INCRE, token.NEQ, token.PLUS, token.MINUS, token.LTE, token.LT, token.GT, token.GTE) {
p.newError("Next token expected to be nil or expression. Got: %s. %s", p.peekToken.Type, p.l.FormatLineCharacter())
p.newError("Next token expected to be nil or expression. Got: %s.", p.peekToken)
p.nextToken()
if p.peekTokenIs(token.SEMICOLON) {
p.nextToken()
Expand Down
17 changes: 10 additions & 7 deletions parser/return_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -71,17 +71,20 @@ return >=;
tests := []struct {
expectedError string
}{
{fmt.Sprintf("Next token expected to be nil or expression. Got: %s. [line: 2, character: 11]", token.VAR)},
{fmt.Sprintf("Next token expected to be nil or expression. Got: %s. [line: 3, character: 14]", token.RETURN)},
{fmt.Sprintf("Next token expected to be nil or expression. Got: %s. [line: 4, character: 10]", token.DECRE)},
{fmt.Sprintf("Next token expected to be nil or expression. Got: %s (var) at [Line: 2, Offset: 11].", token.VAR)},
{fmt.Sprintf("Next token expected to be nil or expression. Got: %s (return) at [Line: 3, Offset: 14].", token.RETURN)},
{fmt.Sprintf("Next token expected to be nil or expression. Got: %s (--) at [Line: 4, Offset: 9].", token.DECRE)},
}

errors := p.Errors()

for i, tt := range tests {
err := errors[i]
if err != tt.expectedError {
t.Errorf("Error \"%s\" got=%s", tt.expectedError, err)
}
t.Run(fmt.Sprintf("TestReturnStatementErrors_%d", i), func(t *testing.T) {
err := errors[i]
if err != tt.expectedError {
t.Errorf("Error \"%s\" got=%s", tt.expectedError, err)
}
})

}
}
9 changes: 7 additions & 2 deletions token/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,14 @@ package token

import (
"bytes"
"fmt"
)

type TokenType int8

// Location records where a token appears in the source input.
// Line is 1-based (the lexer stores lineNumber+1); Offset is the
// character position within that line.
// NOTE(review): the diff rendering duplicated the removed int32 fields
// alongside the added int fields — only the int fields survive the commit.
type Location struct {
	Line   int
	Offset int
}

type Token struct {
Expand Down Expand Up @@ -163,3 +164,7 @@ func DigitType(digit []byte) TokenType {
}
return INT
}

// String renders the token for diagnostics, e.g.
// "IDENT (foo) at [Line: 1, Offset: 4]".
// NOTE(review): t.Line/t.Offset imply token.Location is embedded in
// Token (its struct body is not visible here) — confirm against the
// Token definition.
func (t Token) String() string {
	position := fmt.Sprintf("[Line: %d, Offset: %d]", t.Line, t.Offset)
	return fmt.Sprintf("%s (%s) at %s", t.Type, t.Literal, position)
}

0 comments on commit d5b3d10

Please sign in to comment.