add some docs and make token channel not be buffered

Brian Picciano 2014-10-18 12:32:54 -04:00
parent af6d4dc3c3
commit 377ea8edbb


@@ -1,3 +1,6 @@
+// The lexer package implements a lexical reader which can take in any
+// io.Reader. It does not care about the meaning or logical validity of the
+// tokens it parses out, it simply does its job.
 package lexer

 import (
@@ -30,7 +33,8 @@ var invalidBareStringRunes = map[rune]bool{
 	'}': true,
 }

-// Token represents a single set of characters that are a "thing" in the syntax
+// Token represents a single set of characters which *could* be a valid token of
+// the given type
 type Token struct {
 	Type TokenType
 	Val  string
@@ -48,7 +52,9 @@ type Lexer struct {
 }

 // NewLexer constructs a new Lexer struct and returns it. r is internally
-// wrapped with a bufio.Reader, unless it already is one.
+// wrapped with a bufio.Reader, unless it already is one. This will spawn a
+// go-routine which reads from r until it hits an error, at which point it will
+// end execution.
 func NewLexer(r io.Reader) *Lexer {
 	var br *bufio.Reader
 	var ok bool
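For context, a minimal sketch of what construction looks like from inside the lexer package (a hypothetical example, not part of this commit; it assumes the standard library "strings" package for the input):

func ExampleNewLexer() {
	// hypothetical usage sketch: strings.NewReader is not a *bufio.Reader,
	// so NewLexer wraps it in one before spawning the reader go-routine
	l := NewLexer(strings.NewReader(`input to tokenize`))
	_ = l // the go-routine now runs until the reader returns an error
}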
@@ -58,7 +64,7 @@ func NewLexer(r io.Reader) *Lexer {
 	l := Lexer{
 		r:      br,
-		ch:     make(chan *Token, 1),
+		ch:     make(chan *Token),
 		outbuf: bytes.NewBuffer(make([]byte, 0, 1024)),
 	}
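This make(chan *Token) change is the behavioral half of the commit: with the old one-slot buffer the reading go-routine could lex one token ahead of its consumer, while an unbuffered channel keeps the two in lock-step, each send blocking until Next receives it. A self-contained illustration of that send semantics (generic Go, not code from this repository):

package main

import "fmt"

func main() {
	ch := make(chan int) // unbuffered, as the Lexer's ch now is
	go func() {
		for i := 0; i < 3; i++ {
			ch <- i // blocks here until main is ready to receive
		}
		close(ch)
	}()
	for v := range ch {
		fmt.Println(v) // producer and consumer proceed in lock-step
	}
}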
@@ -77,6 +83,9 @@ func (l *Lexer) spin() {
 	}
 }

+// Returns the next available token, or nil if EOF has been reached. If an error
+// other than EOF has been reached it will be returned as the Err token type,
+// and this method should not be called again after that.
 func (l *Lexer) Next() *Token {
 	t := <-l.ch
 	if t.Type == eof {
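Following the documented contract of Next, a hypothetical consumption loop (the Err identifier is assumed from the doc comment above; fmt is from the standard library):

	for {
		t := l.Next()
		if t == nil {
			break // EOF: the lexer is exhausted
		}
		if t.Type == Err { // Err identifier assumed from the doc comment
			fmt.Println("lex error:", t.Val)
			break // per the comment, don't call Next again
		}
		fmt.Println(t.Type, t.Val)
	}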