// Package lexer turns an input stream into a sequence of tokens.
package lexer
import (
"fmt"
"io"
"git.milar.in/milarin/bufr"
"git.milar.in/milarin/slices"
)
// Lexer splits an input stream into Tokens.
type Lexer struct {
	src *bufr.Reader // buffered source the lexer reads runes from
	// Indent is the indentation unit detected on the first indented
	// line (empty until then); later lines must repeat it exactly.
	Indent string
}
// New returns a Lexer that reads tokens from r through a buffered reader.
func New(r io.Reader) *Lexer {
	lex := new(Lexer)
	lex.src = bufr.New(r)
	return lex
}
// Pos reports the lexer's current position within the source stream.
func (t *Lexer) Pos() bufr.Position {
	idx, ln, col := t.src.Pos()
	return bufr.Position{
		Index:  idx,
		Line:   ln,
		Column: col,
	}
}
// Next reads and returns the next token from the source.
//
// It peeks at up to two runes to decide which token class starts here,
// rewinds, and dispatches to the matching parse helper. io.EOF is
// returned once the source is exhausted.
func (t *Lexer) Next() (*Token, error) {
	rn, err := t.src.Rune()
	if err != nil {
		return nil, err
	}
	// Second lookahead rune. EOF here must NOT discard the token that
	// starts at rn: a source whose final token is a single rune (e.g. a
	// file without a trailing newline) still has one token left.
	rn2, err := t.src.Rune()
	switch {
	case err == io.EOF:
		rn2 = 0 // no second rune; the zero rune matches no class below
		if err := t.src.UnreadRune(); err != nil {
			return nil, err
		}
	case err != nil:
		return nil, err
	default:
		if err := t.src.UnreadRunes(2); err != nil {
			return nil, err
		}
	}
	switch {
	case rn == '\t' || (rn == ' ' && rn2 == ' '):
		return t.parseIndent()
	case rn == '#':
		return t.parseComment()
	case slices.Contains(Separators, rn):
		return t.parseSeparator()
	case slices.Contains(StringSeparators, rn):
		return t.parseStringLiteral()
	case slices.Contains(Operators, rn):
		return t.parseOperator()
	}
	// Not a structural rune: peek the upcoming word without consuming it
	// to distinguish keywords from ordinary words.
	str, err := t.src.PeekStringUntil(bufr.OneOf(" \n"))
	if err != nil {
		return nil, err
	}
	if slices.Contains(Keywords, str) {
		return t.parseKeyword()
	}
	return t.parseWord()
}
// parseComment consumes a '#' comment running to the end of the line and
// returns it as a comment token; the newline itself is left in the stream.
func (t *Lexer) parseComment() (*Token, error) {
	start := t.Pos()
	text, err := t.src.StringUntil(bufr.IsNewLine)
	if err != nil {
		return nil, err
	}
	// Push the terminating newline back so the caller still sees the
	// line break (mirrors the other parse helpers in this file).
	if err = t.src.UnreadRune(); err != nil {
		return nil, err
	}
	tok := &Token{
		Type:  TokenTypeComment,
		Value: text,
		Start: start,
		End:   t.Pos(),
	}
	return tok, nil
}
// parseKeyword consumes a run of non-whitespace runes and returns it as a
// keyword token; the terminating whitespace rune is left in the stream.
func (t *Lexer) parseKeyword() (*Token, error) {
	start := t.Pos()
	kw, err := t.src.StringUntil(bufr.IsWhitespace)
	if err != nil {
		return nil, err
	}
	// Give the terminating whitespace back to the stream.
	if err = t.src.UnreadRune(); err != nil {
		return nil, err
	}
	tok := &Token{
		Type:  TokenTypeKeyword,
		Value: kw,
		Start: start,
		End:   t.Pos(),
	}
	return tok, nil
}
// parseWord consumes runes up to the next whitespace or '=' and returns
// them as a word token; the terminating rune is left in the stream.
func (t *Lexer) parseWord() (*Token, error) {
	start := t.Pos()
	word, err := t.src.StringUntil(bufr.IsWhitespace, bufr.Is('='))
	if err != nil {
		return nil, err
	}
	// Give the terminating rune back to the stream.
	if err = t.src.UnreadRune(); err != nil {
		return nil, err
	}
	tok := &Token{
		Type:  TokenTypeWord,
		Value: word,
		Start: start,
		End:   t.Pos(),
	}
	return tok, nil
}
// parseSeparator consumes exactly one rune and returns it as a separator
// token.
func (t *Lexer) parseSeparator() (*Token, error) {
	start := t.Pos()
	sep, err := t.src.Rune()
	if err != nil {
		return nil, err
	}
	tok := &Token{
		Type:  TokenTypeSeparator,
		Value: string(sep),
		Start: start,
		End:   t.Pos(),
	}
	return tok, nil
}
// parseOperator consumes exactly one rune and returns it as an operator
// token.
func (t *Lexer) parseOperator() (*Token, error) {
	start := t.Pos()
	op, err := t.src.Rune()
	if err != nil {
		return nil, err
	}
	tok := &Token{
		Type:  TokenTypeOperator,
		Value: string(op),
		Start: start,
		End:   t.Pos(),
	}
	return tok, nil
}
// parseStringLiteral consumes a quoted literal: the opening quote rune,
// the contents, and the matching closing quote. Escape sequences in the
// contents are expanded and the result is returned as a word token.
func (t *Lexer) parseStringLiteral() (*Token, error) {
	start := t.Pos()
	quote, err := t.src.Rune()
	if err != nil {
		return nil, err
	}
	raw, err := t.src.StringUntil(bufr.Is(quote))
	if err != nil {
		return nil, err
	}
	// NOTE(review): escapes are expanded only after scanning ends, so an
	// escaped quote (e.g. \") would still terminate the literal early —
	// confirm whether the grammar allows escaped quotes.
	value := EscSeqReplacer.Replace(raw)
	tok := &Token{
		Type:  TokenTypeWord, // string literals are emitted as plain words
		Value: value,
		Start: start,
		End:   t.Pos(),
	}
	return tok, nil
}
// parseIndent consumes one indentation unit at the start of a line.
//
// The first time any indentation is seen, the full run of tabs/spaces is
// captured and stored in t.Indent as the canonical unit; every later
// indented line must repeat exactly that rune sequence, otherwise a
// "mixed indentation styles" error is returned.
func (t *Lexer) parseIndent() (*Token, error) {
	start := t.Pos()
	// no indentation set yet
	if t.Indent == "" {
		str, err := t.src.StringWhile(bufr.OneOf("\t "))
		if err != nil {
			return nil, err
		}
		// NOTE(review): presumably StringWhile consumed one rune past the
		// run, hence the unread — mirrors the other helpers; confirm
		// against the bufr API.
		if err := t.src.UnreadRune(); err != nil {
			return nil, err
		}
		//fmt.Printf("indentation set to '%s'\n", EscSeqReplacer.Replace(str))
		t.Indent = str
		return &Token{
			Type:  TokenTypeIndent,
			Value: str,
			Start: start,
			End:   t.Pos(),
		}, nil
	}
	// Indentation already established: require the exact same rune
	// sequence again, one rune at a time.
	for _, rn := range t.Indent {
		ok, err := t.src.ExpectRune(bufr.Is(rn))
		if err != nil {
			return nil, err
		}
		if !ok {
			return nil, fmt.Errorf("mixed indentation styles at (%d:%d)", start.Line, start.Column)
		}
	}
	return &Token{
		Type:  TokenTypeIndent,
		Value: t.Indent,
		Start: start,
		End:   t.Pos(),
	}, nil
}