lexer: scanner is more Go idiomatic

commit 4b72853d41
parent 1b6ead19c2
Author: Fatih Arslan
Date:   2015-10-03 21:25:21 +03:00


@@ -11,53 +11,53 @@ import (
 const eof = rune(0)
 
 // Lexer defines a lexical scanner
-type Lexer struct {
+type Scanner struct {
     src *bufio.Reader // input
     ch  rune          // current character
 }
 
 // NewLexer returns a new instance of Lexer.
-func NewLexer(src io.Reader) *Lexer {
-    return &Lexer{
+func NewLexer(src io.Reader) *Scanner {
+    return &Scanner{
         src: bufio.NewReader(src),
     }
 }
 
 // next reads the next rune from the buffered reader. Returns rune(0) if
 // an error occurs (or io.EOF is returned).
-func (l *Lexer) next() rune {
+func (s *Scanner) next() rune {
     var err error
-    l.ch, _, err = l.src.ReadRune()
+    s.ch, _, err = s.src.ReadRune()
     if err != nil {
         return eof
     }
 
-    return l.ch
+    return s.ch
 }
 
 // unread places the previously read rune back on the reader.
-func (l *Lexer) unread() { _ = l.src.UnreadRune() }
+func (s *Scanner) unread() { _ = s.src.UnreadRune() }
 
-func (l *Lexer) peek() rune {
-    prev := l.ch
-    peekCh := l.next()
-    l.unread()
-    l.ch = prev
+func (s *Scanner) peek() rune {
+    prev := s.ch
+    peekCh := s.next()
+    s.unread()
+    s.ch = prev
     return peekCh
 }
 
 // Scan scans the next token and returns the token and its literal string.
-func (l *Lexer) Scan() (tok Token, lit string) {
-    ch := l.next()
+func (s *Scanner) Scan() (tok Token, lit string) {
+    ch := s.next()
 
     // skip white space
     for isWhitespace(ch) {
-        ch = l.next()
+        ch = s.next()
     }
 
     // identifier
     if isLetter(ch) {
-        return l.scanIdentifier()
+        return s.scanIdentifier()
     }
 
     switch ch {
@@ -68,13 +68,13 @@ func (l *Lexer) Scan() (tok Token, lit string) {
     return 0, ""
 }
 
-func (l *Lexer) scanIdentifier() (Token, string) {
+func (s *Scanner) scanIdentifier() (Token, string) {
     // Create a buffer and read the current character into it.
     var buf bytes.Buffer
 
-    for isLetter(l.ch) || isDigit(l.ch) {
-        buf.WriteRune(l.ch)
-        l.next()
+    for isLetter(s.ch) || isDigit(s.ch) {
+        buf.WriteRune(s.ch)
+        s.next()
     }
 
     return IDENT, buf.String()
@@ -82,7 +82,7 @@ func (l *Lexer) scanIdentifier() (Token, string) {
 
 // Pos returns the position of the character immediately after the character or
 // token returned by the last call to Next or Scan.
-func (l *Lexer) Pos() Position {
+func (s *Scanner) Pos() Position {
     return Position{}
 }
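
For reference, a minimal, hypothetical sketch of how the renamed type could be driven by a caller after this commit. Only NewLexer, Scan, Token, and IDENT appear in the diff above; the import path and the assumption that Scan reports a zero Token with an empty literal once input is exhausted are illustrative, not part of this change.

```go
package main

import (
	"fmt"
	"strings"

	"example.com/lexer" // hypothetical import path; the real one is not shown in this commit
)

func main() {
	// NewLexer keeps its name but now returns the renamed *Scanner.
	s := lexer.NewLexer(strings.NewReader("foo bar2 baz"))

	for {
		tok, lit := s.Scan()
		// Assumption: Scan's fallback of (0, "") doubles as an end-of-input
		// signal; adjust if the package defines an explicit EOF token.
		if lit == "" {
			break
		}
		if tok == lexer.IDENT {
			fmt.Printf("identifier: %q\n", lit)
		}
	}
}
```

Note that the constructor name NewLexer is unchanged in this commit, so callers only see the rename through the *Scanner return type and the new method receivers.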