diff --git a/parse/lex/lex.go b/parse/lex/lex.go
index a1e95e7..4a41c2e 100644
--- a/parse/lex/lex.go
+++ b/parse/lex/lex.go
@@ -13,6 +13,7 @@ import (
 )
 
 type TokenType int
+
 const (
 	BareString TokenType = iota
 	QuotedString
@@ -58,9 +59,9 @@ var (
 
 // Lexer reads through an io.Reader and emits Tokens from it.
 type Lexer struct {
-	r  *bufio.Reader
+	r      *bufio.Reader
 	outbuf *bytes.Buffer
-	ch chan *Token
+	ch     chan *Token
 }
 
 // NewLexer constructs a new Lexer struct and returns it. r is internally
@@ -75,8 +76,8 @@ func NewLexer(r io.Reader) *Lexer {
 	}
 
 	l := Lexer{
-		r:  br,
-		ch: make(chan *Token),
+		r:      br,
+		ch:     make(chan *Token),
 		outbuf: bytes.NewBuffer(make([]byte, 0, 1024)),
 	}
 
@@ -110,7 +111,7 @@ func (l *Lexer) emit(t TokenType) {
 	str := l.outbuf.String()
 	l.ch <- &Token{
 		Type: t,
-		Val: str,
+		Val:  str,
 	}
 	l.outbuf.Reset()
 }
@@ -166,7 +167,7 @@ func lexWhitespace(l *Lexer) lexerFunc {
 	}
 
 	l.outbuf.WriteRune(r)
-
+
 	switch r {
 	case '"':
 		return lexQuotedString
@@ -199,7 +200,7 @@ func lexQuotedString(l *Lexer) lexerFunc {
 	l.outbuf.WriteRune(r)
 
 	buf := l.outbuf.Bytes()
-	if r == '"' && buf[len(buf) - 2] != '\\' {
+	if r == '"' && buf[len(buf)-2] != '\\' {
 		l.emit(QuotedString)
 		return lexWhitespace
 	}
diff --git a/parse/lex/lex_test.go b/parse/lex/lex_test.go
index a84b4e9..712fa7f 100644
--- a/parse/lex/lex_test.go
+++ b/parse/lex/lex_test.go
@@ -7,34 +7,34 @@ import (
 )
 func TestLexer(t *T) {
 	m := map[string][]Token{
-		"": {{eof, ""}},
+		"":    {{eof, ""}},
 		" \t": {{eof, ""}},
 		"a b c": {{BareString, "a"},
-			{BareString, "b"},
-			{BareString, "c"},
-			{eof, ""}},
+			{BareString, "b"},
+			{BareString, "c"},
+			{eof, ""}},
 		"\"foo\" bar": {{QuotedString, "\"foo\""},
-			{BareString, "bar"},
-			{eof, ""}},
+			{BareString, "bar"},
+			{eof, ""}},
 		"\"foo\nbar\" baz": {{QuotedString, "\"foo\nbar\""},
-			{BareString, "baz"},
-			{eof, ""}},
+			{BareString, "baz"},
+			{eof, ""}},
 		"( foo bar ) baz": {{Open, "("},
-			{BareString, "foo"},
-			{BareString, "bar"},
-			{Close, ")"},
-			{BareString, "baz"},
-			{eof, ""}},
-		"((foo-bar))": {{Open, "("},
-			{Open, "("},
-			{BareString, "foo-bar"},
-			{Close, ")"},
-			{Close, ")"},
-			{eof, ""}},
-		"(\"foo\nbar\")": {{Open, "("},
-			{QuotedString, "\"foo\nbar\""},
-			{Close, ")"},
-			{eof, ""}},
+			{BareString, "foo"},
+			{BareString, "bar"},
+			{Close, ")"},
+			{BareString, "baz"},
+			{eof, ""}},
+		"((foo-bar))": {{Open, "("},
+			{Open, "("},
+			{BareString, "foo-bar"},
+			{Close, ")"},
+			{Close, ")"},
+			{eof, ""}},
+		"(\"foo\nbar\")": {{Open, "("},
+			{QuotedString, "\"foo\nbar\""},
+			{Close, ")"},
+			{eof, ""}},
 	}
 
 	for input, output := range m {
diff --git a/parse/parse.go b/parse/parse.go
index ad00eab..f8d3ced 100644
--- a/parse/parse.go
+++ b/parse/parse.go
@@ -5,8 +5,8 @@ package parse
 
 import (
 	"bytes"
-	"io"
 	"fmt"
+	"io"
 	"strconv"
 	"unsafe"
 
@@ -45,7 +45,7 @@ func parseBareString(tok *lex.Token) types.Elem {
 	}
 
 	if tok.Val[0] != ':' {
-		return types.GoType{":"+tok.Val}
+		return types.GoType{":" + tok.Val}
 	}
 
 	return types.GoType{tok.Val}
@@ -100,10 +100,10 @@ func (p *Parser) parseToken(tok *lex.Token) (types.Elem, error) {
 		if tok.Val == "(" {
 			return seq.NewList(series...), nil
 		} else if tok.Val == "{" {
-			if len(series) % 2 != 0 {
+			if len(series)%2 != 0 {
 				return nil, fmt.Errorf("hash must have even number of elements")
 			}
-			kvs := make([]*seq.KV, 0, len(series) / 2)
+			kvs := make([]*seq.KV, 0, len(series)/2)
 			for i := 0; i < len(series); i += 2 {
 				kv := seq.KV{series[i], series[i+1]}
 				kvs = append(kvs, &kv)
@@ -112,13 +112,12 @@ func (p *Parser) parseToken(tok *lex.Token) (types.Elem, error) {
 		}
 
 		panic("should never get here")
-
+
 	default:
 		return nil, fmt.Errorf("Unexpected %q", tok.Val)
 	}
 }
 
-
 func (p *Parser) readUntil(closer string) ([]types.Elem, error) {
 	series := make([]types.Elem, 0, 4)
 	for {
diff --git a/parse/parse_test.go b/parse/parse_test.go
index 0a4f851..316920b 100644
--- a/parse/parse_test.go
+++ b/parse/parse_test.go
@@ -11,13 +11,13 @@ import (
 
 func TestParse(t *T) {
 	m := map[string]types.Elem{
-		"1": types.GoType{int(1)},
+		"1":  types.GoType{int(1)},
 		"-1": types.GoType{int(-1)},
 		"+1": types.GoType{int(1)},
 
-		"1.5": types.GoType{float32(1.5)},
-		"-1.5": types.GoType{float32(-1.5)},
-		"+1.5": types.GoType{float32(1.5)},
+		"1.5":   types.GoType{float32(1.5)},
+		"-1.5":  types.GoType{float32(-1.5)},
+		"+1.5":  types.GoType{float32(1.5)},
 		"1.5e1": types.GoType{float32(15)},
 
 		"foo": types.GoType{":foo"},
diff --git a/seq/hashmap.go b/seq/hashmap.go
index 157573f..eaa087a 100644
--- a/seq/hashmap.go
+++ b/seq/hashmap.go
@@ -89,7 +89,7 @@ func (hm *HashMap) Equal(e types.Elem) bool {
 
 	s := Seq(hm)
 	size := uint64(0)
-	for {
+	for {
 		el, s, ok = s.FirstRest()
 		if !ok {
 			return size == hm2.Size()
diff --git a/seq/hashmap_test.go b/seq/hashmap_test.go
index 1951b83..36e778b 100644
--- a/seq/hashmap_test.go
+++ b/seq/hashmap_test.go
@@ -43,7 +43,7 @@ func TestHashMapEqual(t *T) {
 	hm = NewHashMap(keyValV(1, "one"), keyValV(2, "two"))
 	assertValue(hm.Equal(hm2), false, t)
 	assertValue(hm2.Equal(hm), false, t)
-
+
 	hm2 = NewHashMap(keyValV(1, "one"))
 	assertValue(hm.Equal(hm2), false, t)
 	assertValue(hm2.Equal(hm), false, t)
diff --git a/seq/hashset.go b/seq/hashset.go
index f5458ca..b0dadc6 100644
--- a/seq/hashset.go
+++ b/seq/hashset.go
@@ -288,7 +288,7 @@ func (set *Set) Equal(e types.Elem) bool {
 	}
 
 	var el types.Elem
-	s := Seq(set)
+	s := Seq(set)
 	size := uint64(0)
 
 	for {
diff --git a/types/types.go b/types/types.go
index 185e784..062724b 100644
--- a/types/types.go
+++ b/types/types.go
@@ -8,7 +8,7 @@ import (
 // Elem is a generic type which can be used as a wrapper type for all ginger
 // types, both base types and data structures
 type Elem interface {
-
+
 	// Returns whether one element is equal to another. Since all ginger values
 	// are immutable, this must be a deep-equals check.
 	Equal(Elem) bool