Skip to content

Commit

Permalink
Clean up some internal lexer functions
Browse files Browse the repository at this point in the history
  • Loading branch information
gravataLonga committed Jul 1, 2022
1 parent 6257940 commit 6a423c6
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 49 deletions.
79 changes: 37 additions & 42 deletions lexer/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,13 +55,10 @@ func (l *Lexer) NextToken() token.Token {
l.skipWhitespace()
switch l.ch {
case '=':
if l.peekChar() == '=' {
ch := l.ch
l.readChar()
tok = newToken(token.EQ, []byte{ch, l.ch})
} else {
tok = newToken(token.ASSIGN, []byte{l.ch})
}

tok = l.newTokenPeekOrDefault(token.ASSIGN, map[byte]token.TokenType{
'=': token.EQ,
})

case ';':
tok = newToken(token.SEMICOLON, []byte{l.ch})
Expand Down Expand Up @@ -104,47 +101,34 @@ func (l *Lexer) NextToken() token.Token {
}

case '-':
if l.peekChar() == '-' {
ch := l.ch
l.readChar()
tok = newToken(token.DECRE, []byte{ch, l.ch})
} else {
tok = newToken(token.MINUS, []byte{l.ch})
}

tok = l.newTokenPeekOrDefault(token.MINUS, map[byte]token.TokenType{
'-': token.DECRE,
})

case '+':
if l.peekChar() == '+' {
ch := l.ch
l.readChar()
tok = newToken(token.INCRE, []byte{ch, l.ch})
} else {
tok = newToken(token.PLUS, []byte{l.ch})
}

tok = l.newTokenPeekOrDefault(token.PLUS, map[byte]token.TokenType{
'+': token.INCRE,
})
case '>':
if l.peekChar() == '=' {
ch := l.ch
l.readChar()
tok = newToken(token.GTE, []byte{ch, l.ch})
} else {
tok = newToken(token.GT, []byte{l.ch})
}

tok = l.newTokenPeekOrDefault(token.GT, map[byte]token.TokenType{
'=': token.GTE,
})

case '<':
if l.peekChar() == '=' {
ch := l.ch
l.readChar()
tok = newToken(token.LTE, []byte{ch, l.ch})
} else {
tok = newToken(token.LT, []byte{l.ch})
}

tok = l.newTokenPeekOrDefault(token.LT, map[byte]token.TokenType{
'=': token.LTE,
})

case '!':
if l.peekChar() == '=' {
ch := l.ch
l.readChar()
tok = newToken(token.NEQ, []byte{ch, l.ch})
} else {
tok = newToken(token.BANG, []byte{l.ch})
}

tok = l.newTokenPeekOrDefault(token.BANG, map[byte]token.TokenType{
'=': token.NEQ,
})

case ':':
tok = newToken(token.COLON, []byte{l.ch})
case '(':
Expand Down Expand Up @@ -193,6 +177,17 @@ func newToken(tokenType token.TokenType, ch []byte) token.Token {
return token.Token{Type: tokenType, Literal: ch}
}

// newTokenPeekOrDefault builds the next token based on a one-character
// lookahead. If the upcoming character is a key in expectedPeek, that
// character is consumed and the mapped two-byte token is returned;
// otherwise a single-byte token of tokenType is returned and no input
// is consumed beyond the current character.
func (l *Lexer) newTokenPeekOrDefault(tokenType token.TokenType, expectedPeek map[byte]token.TokenType) token.Token {
	if twoByteType, found := expectedPeek[l.peekChar()]; found {
		first := l.ch
		l.readChar() // consume the peeked character so l.ch is the second byte
		return newToken(twoByteType, []byte{first, l.ch})
	}
	return newToken(tokenType, []byte{l.ch})
}

// isLetter reports whether ch is a byte permitted in an identifier:
// an ASCII letter (either case) or an underscore.
func isLetter(ch byte) bool {
	switch {
	case ch >= 'a' && ch <= 'z':
		return true
	case ch >= 'A' && ch <= 'Z':
		return true
	default:
		return ch == '_'
	}
}
Expand Down
6 changes: 3 additions & 3 deletions object/function.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ type Function struct {
func (f *Function) Type() ObjectType { return FUNCTION_OBJ }
func (f *Function) Inspect() string {
var out bytes.Buffer
params := []string{}
for _, p := range f.Parameters {
params = append(params, p.String())
params := make([]string, len(f.Parameters))
for i, p := range f.Parameters {
params[i] = p.String()
}
out.WriteString("function")
out.WriteString("(")
Expand Down
6 changes: 3 additions & 3 deletions object/function_literal.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ type FunctionLiteral struct {
func (f *FunctionLiteral) Type() ObjectType { return FUNCTION_OBJ }
func (f *FunctionLiteral) Inspect() string {
var out bytes.Buffer
params := []string{}
for _, p := range f.Parameters {
params = append(params, p.String())
params := make([]string, len(f.Parameters))
for i, p := range f.Parameters {
params[i] = p.String()
}
out.WriteString("function")
out.WriteString("(")
Expand Down
8 changes: 7 additions & 1 deletion token/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,15 @@ import (

type TokenType int8

// Location records the position of a token within the source input.
type Location struct {
// Line is the line number the token appears on.
// NOTE(review): whether this is 0- or 1-based is not visible here — confirm in the lexer.
Line int32
// Offset is the position of the token within the input.
// NOTE(review): presumably a byte offset (column or absolute) — confirm against where it is set.
Offset int32
}

type Token struct {
Type TokenType
Literal []byte // be a string don't have same performance as using int or byte
Literal []byte
Location
}

// String() is used to transform TokenType int8 in it is string format, for better
Expand Down

0 comments on commit 6a423c6

Please sign in to comment.