go fmt ./...
parent b307273223
commit 4bd9c94f82

@@ -13,6 +13,7 @@ import (
 )

 type TokenType int

 const (
 	BareString TokenType = iota
 	QuotedString

@@ -58,9 +59,9 @@ var (

 // Lexer reads through an io.Reader and emits Tokens from it.
 type Lexer struct {
 	r      *bufio.Reader
 	outbuf *bytes.Buffer
 	ch     chan *Token
 }

 // NewLexer constructs a new Lexer struct and returns it. r is internally

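Most hunks in this commit show no visible character change: what go fmt actually did there is re-align struct fields and keyed literal values into columns, a whitespace-only difference this view does not render. A minimal, hypothetical sketch of that behavior (not code from this repository), driven through the same formatting engine gofmt uses:

package main

import (
	"fmt"
	"go/format"
)

func main() {
	// Struct fields written with single spaces, the kind of whitespace-only
	// difference hiding in the hunks above and below.
	src := []byte("package p\n\ntype lexer struct {\n\tr *int\n\toutbuf *int\n\tch chan int\n}\n")

	// format.Source applies the same canonical formatting as gofmt.
	out, err := format.Source(src)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // the field types come back column-aligned
}
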
@@ -75,8 +76,8 @@ func NewLexer(r io.Reader) *Lexer {
 	}

 	l := Lexer{
 		r:      br,
 		ch:     make(chan *Token),
 		outbuf: bytes.NewBuffer(make([]byte, 0, 1024)),
 	}

@@ -110,7 +111,7 @@ func (l *Lexer) emit(t TokenType) {
 	str := l.outbuf.String()
 	l.ch <- &Token{
 		Type: t,
 		Val:  str,
 	}
 	l.outbuf.Reset()
 }

@@ -166,7 +167,7 @@ func lexWhitespace(l *Lexer) lexerFunc {
 	}

 	l.outbuf.WriteRune(r)

 	switch r {
 	case '"':
 		return lexQuotedString

@@ -199,7 +200,7 @@ func lexQuotedString(l *Lexer) lexerFunc {
 	l.outbuf.WriteRune(r)
 	buf := l.outbuf.Bytes()

-	if r == '"' && buf[len(buf) - 2] != '\\' {
+	if r == '"' && buf[len(buf)-2] != '\\' {
 		l.emit(QuotedString)
 		return lexWhitespace
 	}

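The one visible change in this hunk is gofmt's operator-spacing rule: when a binary expression is nested inside another expression (an index here; make arguments and an if condition in the parser hunks further down), the spaces around the tighter-binding operator are dropped, while operators at the outer level keep or gain their spaces. A small stand-alone illustration of that rule (an assumed example, not repository code):

package main

import "fmt"

func main() {
	buf := []byte(`ab"`)

	// Inside an index expression gofmt drops the spaces around the
	// tighter-binding operator, writing buf[len(buf)-2] rather than
	// buf[len(buf) - 2]; the same rule produces len(series)%2 and
	// len(series)/2 in the parser hunks below.
	fmt.Println(string(buf[len(buf)-2])) // prints "b"

	// At the outermost level of an expression, binary operators keep their
	// spaces, which is why ":"+tok.Val gains them further down.
	fmt.Println(":" + "foo")
}
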
@@ -7,34 +7,34 @@ import (

 func TestLexer(t *T) {
 	m := map[string][]Token{
 		"":    {{eof, ""}},
 		" \t": {{eof, ""}},
 		"a b c": {{BareString, "a"},
 			{BareString, "b"},
 			{BareString, "c"},
 			{eof, ""}},
 		"\"foo\" bar": {{QuotedString, "\"foo\""},
 			{BareString, "bar"},
 			{eof, ""}},
 		"\"foo\nbar\" baz": {{QuotedString, "\"foo\nbar\""},
 			{BareString, "baz"},
 			{eof, ""}},
 		"( foo bar ) baz": {{Open, "("},
 			{BareString, "foo"},
 			{BareString, "bar"},
 			{Close, ")"},
 			{BareString, "baz"},
 			{eof, ""}},
 		"((foo-bar))": {{Open, "("},
 			{Open, "("},
 			{BareString, "foo-bar"},
 			{Close, ")"},
 			{Close, ")"},
 			{eof, ""}},
 		"(\"foo\nbar\")": {{Open, "("},
 			{QuotedString, "\"foo\nbar\""},
 			{Close, ")"},
 			{eof, ""}},
 	}

 	for input, output := range m {

@@ -5,8 +5,8 @@ package parse

 import (
 	"bytes"
-	"io"
 	"fmt"
+	"io"
 	"strconv"
 	"unsafe"

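The only change in this hunk is that go fmt sorts the import specs alphabetically within the block, so "fmt" now precedes "io". An illustrative stand-alone file (not from this repository) written in the post-gofmt order:

package main

// go fmt keeps each import group sorted; "fmt" sorts before "io",
// which is the whole content of the hunk above.
import (
	"bytes"
	"fmt"
	"io"
	"strconv"
)

func main() {
	var buf bytes.Buffer
	fmt.Fprint(&buf, strconv.Itoa(42))
	io.Copy(io.Discard, &buf) // use every import so the example compiles
}
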
@@ -45,7 +45,7 @@ func parseBareString(tok *lex.Token) types.Elem {
 	}

 	if tok.Val[0] != ':' {
-		return types.GoType{":"+tok.Val}
+		return types.GoType{":" + tok.Val}
 	}

 	return types.GoType{tok.Val}

@@ -100,10 +100,10 @@ func (p *Parser) parseToken(tok *lex.Token) (types.Elem, error) {
 		if tok.Val == "(" {
 			return seq.NewList(series...), nil
 		} else if tok.Val == "{" {
-			if len(series) % 2 != 0 {
+			if len(series)%2 != 0 {
 				return nil, fmt.Errorf("hash must have even number of elements")
 			}
-			kvs := make([]*seq.KV, 0, len(series) / 2)
+			kvs := make([]*seq.KV, 0, len(series)/2)
 			for i := 0; i < len(series); i += 2 {
 				kv := seq.KV{series[i], series[i+1]}
 				kvs = append(kvs, &kv)

@@ -112,13 +112,12 @@ func (p *Parser) parseToken(tok *lex.Token) (types.Elem, error) {
 		}

 		panic("should never get here")

 	default:
 		return nil, fmt.Errorf("Unexpected %q", tok.Val)
 	}
 }

-
 func (p *Parser) readUntil(closer string) ([]types.Elem, error) {
 	series := make([]types.Elem, 0, 4)
 	for {

@@ -11,13 +11,13 @@ import (

 func TestParse(t *T) {
 	m := map[string]types.Elem{
 		"1":  types.GoType{int(1)},
 		"-1": types.GoType{int(-1)},
 		"+1": types.GoType{int(1)},

 		"1.5":   types.GoType{float32(1.5)},
 		"-1.5":  types.GoType{float32(-1.5)},
 		"+1.5":  types.GoType{float32(1.5)},
 		"1.5e1": types.GoType{float32(15)},

 		"foo": types.GoType{":foo"},

@@ -89,7 +89,7 @@ func (hm *HashMap) Equal(e types.Elem) bool {
 	s := Seq(hm)
 	size := uint64(0)

 	for {
 		el, s, ok = s.FirstRest()
 		if !ok {
 			return size == hm2.Size()

@@ -43,7 +43,7 @@ func TestHashMapEqual(t *T) {
 	hm = NewHashMap(keyValV(1, "one"), keyValV(2, "two"))
 	assertValue(hm.Equal(hm2), false, t)
 	assertValue(hm2.Equal(hm), false, t)

 	hm2 = NewHashMap(keyValV(1, "one"))
 	assertValue(hm.Equal(hm2), false, t)
 	assertValue(hm2.Equal(hm), false, t)

@@ -288,7 +288,7 @@ func (set *Set) Equal(e types.Elem) bool {
 	}

 	var el types.Elem
 	s := Seq(set)
 	size := uint64(0)

 	for {

@@ -8,7 +8,7 @@ import (
 // Elem is a generic type which can be used as a wrapper type for all ginger
 // types, both base types and data structures
 type Elem interface {

 	// Returns whether one element is equal to another. Since all ginger values
 	// are immutable, this must be a deep-equals check.
 	Equal(Elem) bool
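
For reference, a minimal sketch of the deep-equals contract the comment above describes, assuming a hypothetical wrapper type and only the Equal method visible in this hunk (the real types.Elem may declare more methods; this is not repository code):

package main

import "fmt"

// elem mirrors just the Equal portion of the interface shown above.
type elem interface {
	Equal(elem) bool
}

// pair is a hypothetical immutable value with two fields; deep equality
// means comparing both fields, not comparing identity.
type pair struct{ a, b int }

func (p pair) Equal(other elem) bool {
	o, ok := other.(pair)
	return ok && p.a == o.a && p.b == o.b
}

func main() {
	fmt.Println(pair{1, 2}.Equal(pair{1, 2})) // true
	fmt.Println(pair{1, 2}.Equal(pair{1, 3})) // false
}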