lexer/token.go

package lexer

import (
	"fmt"

	"git.milar.in/milarin/bufr"
)
// Token is a single lexical token: its type, its raw text value, and the
// source position range it was read from.
type Token struct {
	Type  TokenType
	Value string
	Start bufr.Position
	End   bufr.Position
}
// String returns a human-readable description of the token. Escape sequences
// in the value are made printable via EscSeqPrintReplacer, which is defined
// elsewhere in this package.
func (t Token) String() string {
	return fmt.Sprintf(
		"type: %s | value: '%s' | start: (%d:%d) | end: (%d:%d)",
		t.Type, EscSeqPrintReplacer.Replace(t.Value), t.Start.Line, t.Start.Column, t.End.Line, t.End.Column,
	)
}
// TokenType identifies the category a Token belongs to.
type TokenType uint8

const (
	TokenTypeComment TokenType = iota
	TokenTypeKeyword
	TokenTypeIndent
	TokenTypeSeparator
	TokenTypeWord
	TokenTypeOperator
)
// String returns the lower-case name of the token type. It panics if tt is
// not one of the defined TokenType constants.
func (tt TokenType) String() string {
	switch tt {
	case TokenTypeComment:
		return "comment"
	case TokenTypeKeyword:
		return "keyword"
	case TokenTypeIndent:
		return "indent"
	case TokenTypeSeparator:
		return "separator"
	case TokenTypeWord:
		return "word"
	case TokenTypeOperator:
		return "operator"
	default:
		panic(fmt.Sprintf("invalid token type: %d", tt))
	}
}
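
// The sketch below is not part of the original file; it only illustrates how a
// Token might be constructed and printed by hand. The literal value and the
// positions are made up, and bufr.Position is assumed to be a plain struct
// with exported Line and Column fields, as the String method above implies.
func exampleToken() Token {
	return Token{
		Type:  TokenTypeWord,
		Value: "hello",
		Start: bufr.Position{Line: 1, Column: 1},
		End:   bufr.Position{Line: 1, Column: 6},
	}
}

// Printing exampleToken() with fmt.Println would yield roughly:
//
//	type: word | value: 'hello' | start: (1:1) | end: (1:6)
//
// assuming EscSeqPrintReplacer leaves plain text unchanged.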