From bdd5711773dab05270de9e730493613eac06ca39 Mon Sep 17 00:00:00 2001
From: Brian Picciano
Date: Fri, 5 Aug 2016 11:48:42 -0600
Subject: [PATCH] comment out a bunch of types I don't feel like supporting
 right now, and all of the parsing code

---
 expr/expr.go       |   9 +-
 expr/parse.go      | 598 ++++++++++++++++++++++-----------------
 expr/parse_test.go | 298 +++++++++++-----------
 3 files changed, 448 insertions(+), 457 deletions(-)

diff --git a/expr/expr.go b/expr/expr.go
index 9ab9feb..42ee4dc 100644
--- a/expr/expr.go
+++ b/expr/expr.go
@@ -106,7 +106,7 @@ func (v Void) LLVMVal(ctx *Ctx, lctx LLVMCtx) llvm.Value {
 }
 
 ////////////////////////////////////////////////////////////////////////////////
-
+/*
 // Bool represents a true or false value
 type Bool bool
 
@@ -132,7 +132,7 @@ func (b Bool) equal(e equaler) bool {
 	}
 	return bb == b
 }
-
+*/
 ////////////////////////////////////////////////////////////////////////////////
 
 // Int represents an integer value
@@ -164,7 +164,7 @@ func (i Int) equal(e equaler) bool {
 }
 
 ////////////////////////////////////////////////////////////////////////////////
-
+/*
 // String represents a string value
 type String string
 
@@ -190,7 +190,7 @@ func (s String) equal(e equaler) bool {
 	}
 	return ss == s
 }
-
+*/
 ////////////////////////////////////////////////////////////////////////////////
 
 // Identifier represents a binding to some other value which has been given a
@@ -303,6 +303,7 @@ func (tup Tuple) equal(e equaler) bool {
 // used as the input to the pipe, and the output of the pipe is the output of
 // the statement
 type Statement struct {
+	// TODO change to Op and Arg
 	In Expr
 	To Expr
 }
diff --git a/expr/parse.go b/expr/parse.go
index 81f99a9..7d5c197 100644
--- a/expr/parse.go
+++ b/expr/parse.go
@@ -1,303 +1,299 @@
 package expr
 
-import (
-	"fmt"
-	"io"
-	"strconv"
-
-	"github.com/mediocregopher/ginger/lexer"
-)
-
-type exprErr struct {
-	reason string
-	err    error
-	tok    lexer.Token
-	tokCtx string // e.g. "block starting at" or "open paren at"
"block starting at" or "open paren at" -} - -func (e exprErr) Error() string { - var msg string - if e.err != nil { - msg = e.err.Error() - } else { - msg = e.reason - } - if err := e.tok.Err(); err != nil { - msg += " - token error: " + err.Error() - } else if (e.tok != lexer.Token{}) { - msg += " - " - if e.tokCtx != "" { - msg += e.tokCtx + ": " - } - msg = fmt.Sprintf("%s [line:%d col:%d]", msg, e.tok.Row, e.tok.Col) - } - return msg -} - -//////////////////////////////////////////////////////////////////////////////// - -// toks[0] must be start -func sliceEnclosedToks(toks []lexer.Token, start, end lexer.Token) ([]lexer.Token, []lexer.Token, error) { - c := 1 - ret := []lexer.Token{} - first := toks[0] - for i, tok := range toks[1:] { - if tok.Err() != nil { - return nil, nil, exprErr{ - reason: fmt.Sprintf("missing closing %v", end), - tok: tok, - } - } - - if tok.Equal(start) { - c++ - } else if tok.Equal(end) { - c-- - } - if c == 0 { - return ret, toks[2+i:], nil - } - ret = append(ret, tok) - } - - return nil, nil, exprErr{ - reason: fmt.Sprintf("missing closing %v", end), - tok: first, - tokCtx: "starting at", - } -} - -// Parse reads in all expressions it can from the given io.Reader and returns -// them -func Parse(r io.Reader) ([]Expr, error) { - toks := readAllToks(r) - var ret []Expr - var expr Expr - var err error - for len(toks) > 0 { - if toks[0].TokenType == lexer.EOF { - return ret, nil - } - expr, toks, err = parse(toks) - if err != nil { - return nil, err - } - ret = append(ret, expr) - } - return ret, nil -} - -// ParseAsBlock reads the given io.Reader as if it was implicitly surrounded by -// curly braces, making it into a Block. This means all expressions from the -// io.Reader *must* be statements. The returned Expr's Actual will always be a -// Block. 
-func ParseAsBlock(r io.Reader) (Expr, error) {
-	return parseBlock(readAllToks(r))
-}
-
-func readAllToks(r io.Reader) []lexer.Token {
-	l := lexer.New(r)
-	var toks []lexer.Token
-	for l.HasNext() {
-		toks = append(toks, l.Next())
-	}
-	return toks
-}
-
-// For all parse methods it is assumed that toks is not empty
-
-var (
-	openParen  = lexer.Token{TokenType: lexer.Wrapper, Val: "("}
-	closeParen = lexer.Token{TokenType: lexer.Wrapper, Val: ")"}
-	openCurly  = lexer.Token{TokenType: lexer.Wrapper, Val: "{"}
-	closeCurly = lexer.Token{TokenType: lexer.Wrapper, Val: "}"}
-	comma      = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
-	arrow      = lexer.Token{TokenType: lexer.Punctuation, Val: ">"}
-)
-
-func parse(toks []lexer.Token) (Expr, []lexer.Token, error) {
-	expr, toks, err := parseSingle(toks)
-	if err != nil {
-		return Expr{}, nil, err
-	}
-
-	if len(toks) > 0 && toks[0].TokenType == lexer.Punctuation {
-		return parseConnectingPunct(toks, expr)
-	}
-
-	return expr, toks, nil
-}
-
-func parseSingle(toks []lexer.Token) (Expr, []lexer.Token, error) {
-	var expr Expr
-	var err error
-
-	if toks[0].Err() != nil {
-		return Expr{}, nil, exprErr{
-			reason: "could not parse token",
-			tok:    toks[0],
-		}
-	}
-
-	if toks[0].Equal(openParen) {
-		starter := toks[0]
-		var ptoks []lexer.Token
-		ptoks, toks, err = sliceEnclosedToks(toks, openParen, closeParen)
-		if err != nil {
-			return Expr{}, nil, err
-		}
-
-		if expr, ptoks, err = parse(ptoks); err != nil {
-			return Expr{}, nil, err
-		} else if len(ptoks) > 0 {
-			return Expr{}, nil, exprErr{
-				reason: "multiple expressions inside parenthesis",
-				tok:    starter,
-				tokCtx: "starting at",
-			}
-		}
-		return expr, toks, nil
-
-	} else if toks[0].Equal(openCurly) {
-		var btoks []lexer.Token
-		btoks, toks, err = sliceEnclosedToks(toks, openCurly, closeCurly)
-		if err != nil {
-			return Expr{}, nil, err
-		}
-
-		if expr, err = parseBlock(btoks); err != nil {
-			return Expr{}, nil, err
-		}
-		return expr, toks, nil
-	}
-
-	if expr, err = parseNonPunct(toks[0]); err != nil {
-		return Expr{}, nil, err
-	}
-	return expr, toks[1:], nil
-}
-
-func parseNonPunct(tok lexer.Token) (Expr, error) {
-	if tok.TokenType == lexer.Identifier {
-		return parseIdentifier(tok)
-	} else if tok.TokenType == lexer.String {
-		return parseString(tok)
-	}
-
-	return Expr{}, exprErr{
-		reason: "unexpected non-punctuation token",
-		tok:    tok,
-	}
-}
-
-func parseIdentifier(t lexer.Token) (Expr, error) {
-	e := Expr{Token: t}
-	if t.Val[0] == '-' || (t.Val[0] >= '0' && t.Val[0] <= '9') {
-		n, err := strconv.ParseInt(t.Val, 10, 64)
-		if err != nil {
-			return Expr{}, exprErr{
-				err: err,
-				tok: t,
-			}
-		}
-		e.Actual = Int(n)
-
-	} else if t.Val == "%true" {
-		e.Actual = Bool(true)
-
-	} else if t.Val == "%false" {
-		e.Actual = Bool(false)
-
-	} else if t.Val[0] == '%' {
-		e.Actual = Macro(t.Val[1:])
-
-	} else {
-		e.Actual = Identifier(t.Val)
-	}
-
-	return e, nil
-}
-
-func parseString(t lexer.Token) (Expr, error) {
-	str, err := strconv.Unquote(t.Val)
-	if err != nil {
-		return Expr{}, exprErr{
-			err: err,
-			tok: t,
-		}
-	}
-	return Expr{Token: t, Actual: String(str)}, nil
-}
-
-func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
-	if toks[0].Equal(comma) {
-		return parseTuple(toks, root)
-
-	} else if toks[0].Equal(arrow) {
-		expr, toks, err := parse(toks[1:])
-		if err != nil {
-			return Expr{}, nil, err
-		}
-		return Expr{Token: root.Token, Actual: Statement{In: root, To: expr}}, toks, nil
-	}
-
-	return root, toks, nil
-}
-
-func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
-	rootTup, ok := root.Actual.(Tuple)
-	if !ok {
-		rootTup = Tuple{root}
-	}
-
-	// rootTup is modified throughout, be we need to make it into an Expr for
-	// every return, which is annoying. so make a function to do it on the fly
-	mkRoot := func() Expr {
-		return Expr{Token: rootTup[0].Token, Actual: rootTup}
-	}
-
-	if len(toks) < 2 {
-		return mkRoot(), toks, nil
-	} else if !toks[0].Equal(comma) {
-		if toks[0].TokenType == lexer.Punctuation {
-			return parseConnectingPunct(toks, mkRoot())
-		}
-		return mkRoot(), toks, nil
-	}
-
-	var expr Expr
-	var err error
-	if expr, toks, err = parseSingle(toks[1:]); err != nil {
-		return Expr{}, nil, err
-	}
-
-	rootTup = append(rootTup, expr)
-	return parseTuple(toks, mkRoot())
-}
-
-// parseBlock assumes that the given token list is the entire block, already
-// pulled from outer curly braces by sliceEnclosedToks, or determined to be the
-// entire block in some other way.
-func parseBlock(toks []lexer.Token) (Expr, error) {
-	b := Block{}
-	first := toks[0]
-	var expr Expr
-	var err error
-	for {
-		if len(toks) == 0 {
-			return Expr{Token: first, Actual: b}, nil
-		}
-
-		if expr, toks, err = parse(toks); err != nil {
-			return Expr{}, err
-		}
-		if _, ok := expr.Actual.(Statement); !ok {
-			return Expr{}, exprErr{
-				reason: "blocks may only contain full statements",
-				tok:    expr.Token,
-				tokCtx: "non-statement here",
-			}
-		}
-		b = append(b, expr)
-	}
-}
+//type exprErr struct {
+//	reason string
+//	err    error
+//	tok    lexer.Token
+//	tokCtx string // e.g. "block starting at" or "open paren at"
+//}
+//
+//func (e exprErr) Error() string {
+//	var msg string
+//	if e.err != nil {
+//		msg = e.err.Error()
+//	} else {
+//		msg = e.reason
+//	}
+//	if err := e.tok.Err(); err != nil {
+//		msg += " - token error: " + err.Error()
+//	} else if (e.tok != lexer.Token{}) {
+//		msg += " - "
+//		if e.tokCtx != "" {
+//			msg += e.tokCtx + ": "
+//		}
+//		msg = fmt.Sprintf("%s [line:%d col:%d]", msg, e.tok.Row, e.tok.Col)
+//	}
+//	return msg
+//}
+//
+//////////////////////////////////////////////////////////////////////////////////
+//
+//// toks[0] must be start
+//func sliceEnclosedToks(toks []lexer.Token, start, end lexer.Token) ([]lexer.Token, []lexer.Token, error) {
+//	c := 1
+//	ret := []lexer.Token{}
+//	first := toks[0]
+//	for i, tok := range toks[1:] {
+//		if tok.Err() != nil {
+//			return nil, nil, exprErr{
+//				reason: fmt.Sprintf("missing closing %v", end),
+//				tok:    tok,
+//			}
+//		}
+//
+//		if tok.Equal(start) {
+//			c++
+//		} else if tok.Equal(end) {
+//			c--
+//		}
+//		if c == 0 {
+//			return ret, toks[2+i:], nil
+//		}
+//		ret = append(ret, tok)
+//	}
+//
+//	return nil, nil, exprErr{
+//		reason: fmt.Sprintf("missing closing %v", end),
+//		tok:    first,
+//		tokCtx: "starting at",
+//	}
+//}
+//
+//// Parse reads in all expressions it can from the given io.Reader and returns
+//// them
+//func Parse(r io.Reader) ([]Expr, error) {
+//	toks := readAllToks(r)
+//	var ret []Expr
+//	var expr Expr
+//	var err error
+//	for len(toks) > 0 {
+//		if toks[0].TokenType == lexer.EOF {
+//			return ret, nil
+//		}
+//		expr, toks, err = parse(toks)
+//		if err != nil {
+//			return nil, err
+//		}
+//		ret = append(ret, expr)
+//	}
+//	return ret, nil
+//}
+//
+//// ParseAsBlock reads the given io.Reader as if it was implicitly surrounded by
+//// curly braces, making it into a Block. This means all expressions from the
+//// io.Reader *must* be statements. The returned Expr's Actual will always be a
+//// Block.
+//func ParseAsBlock(r io.Reader) (Expr, error) {
+//	return parseBlock(readAllToks(r))
+//}
+//
+//func readAllToks(r io.Reader) []lexer.Token {
+//	l := lexer.New(r)
+//	var toks []lexer.Token
+//	for l.HasNext() {
+//		toks = append(toks, l.Next())
+//	}
+//	return toks
+//}
+//
+//// For all parse methods it is assumed that toks is not empty
+//
+//var (
+//	openParen  = lexer.Token{TokenType: lexer.Wrapper, Val: "("}
+//	closeParen = lexer.Token{TokenType: lexer.Wrapper, Val: ")"}
+//	openCurly  = lexer.Token{TokenType: lexer.Wrapper, Val: "{"}
+//	closeCurly = lexer.Token{TokenType: lexer.Wrapper, Val: "}"}
+//	comma      = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
+//	arrow      = lexer.Token{TokenType: lexer.Punctuation, Val: ">"}
+//)
+//
+//func parse(toks []lexer.Token) (Expr, []lexer.Token, error) {
+//	expr, toks, err := parseSingle(toks)
+//	if err != nil {
+//		return Expr{}, nil, err
+//	}
+//
+//	if len(toks) > 0 && toks[0].TokenType == lexer.Punctuation {
+//		return parseConnectingPunct(toks, expr)
+//	}
+//
+//	return expr, toks, nil
+//}
+//
+//func parseSingle(toks []lexer.Token) (Expr, []lexer.Token, error) {
+//	var expr Expr
+//	var err error
+//
+//	if toks[0].Err() != nil {
+//		return Expr{}, nil, exprErr{
+//			reason: "could not parse token",
+//			tok:    toks[0],
+//		}
+//	}
+//
+//	if toks[0].Equal(openParen) {
+//		starter := toks[0]
+//		var ptoks []lexer.Token
+//		ptoks, toks, err = sliceEnclosedToks(toks, openParen, closeParen)
+//		if err != nil {
+//			return Expr{}, nil, err
+//		}
+//
+//		if expr, ptoks, err = parse(ptoks); err != nil {
+//			return Expr{}, nil, err
+//		} else if len(ptoks) > 0 {
+//			return Expr{}, nil, exprErr{
+//				reason: "multiple expressions inside parentheses",
+//				tok:    starter,
+//				tokCtx: "starting at",
+//			}
+//		}
+//		return expr, toks, nil
+//
+//	} else if toks[0].Equal(openCurly) {
+//		var btoks []lexer.Token
+//		btoks, toks, err = sliceEnclosedToks(toks, openCurly, closeCurly)
+//		if err != nil {
+//			return Expr{}, nil, err
+//		}
+//
+//		if expr, err = parseBlock(btoks); err != nil {
+//			return Expr{}, nil, err
+//		}
+//		return expr, toks, nil
+//	}
+//
+//	if expr, err = parseNonPunct(toks[0]); err != nil {
+//		return Expr{}, nil, err
+//	}
+//	return expr, toks[1:], nil
+//}
+//
+//func parseNonPunct(tok lexer.Token) (Expr, error) {
+//	if tok.TokenType == lexer.Identifier {
+//		return parseIdentifier(tok)
+//	} else if tok.TokenType == lexer.String {
+//		//return parseString(tok)
+//	}
+//
+//	return Expr{}, exprErr{
+//		reason: "unexpected non-punctuation token",
+//		tok:    tok,
+//	}
+//}
+//
+//func parseIdentifier(t lexer.Token) (Expr, error) {
+//	e := Expr{Token: t}
+//	if t.Val[0] == '-' || (t.Val[0] >= '0' && t.Val[0] <= '9') {
+//		n, err := strconv.ParseInt(t.Val, 10, 64)
+//		if err != nil {
+//			return Expr{}, exprErr{
+//				err: err,
+//				tok: t,
+//			}
+//		}
+//		e.Actual = Int(n)
+//
+//	/*
+//	} else if t.Val == "%true" {
+//		e.Actual = Bool(true)
+//
+//	} else if t.Val == "%false" {
+//		e.Actual = Bool(false)
+//	*/
+//
+//	} else if t.Val[0] == '%' {
+//		e.Actual = Macro(t.Val[1:])
+//
+//	} else {
+//		e.Actual = Identifier(t.Val)
+//	}
+//
+//	return e, nil
+//}
+//
+///*
+//func parseString(t lexer.Token) (Expr, error) {
+//	str, err := strconv.Unquote(t.Val)
+//	if err != nil {
+//		return Expr{}, exprErr{
+//			err: err,
+//			tok: t,
+//		}
+//	}
+//	return Expr{Token: t, Actual: String(str)}, nil
+//}
+//*/
+//
+//func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
+//	if toks[0].Equal(comma) {
+//		return parseTuple(toks, root)
+//
+//	} else if toks[0].Equal(arrow) {
+//		expr, toks, err := parse(toks[1:])
+//		if err != nil {
+//			return Expr{}, nil, err
+//		}
+//		return Expr{Token: root.Token, Actual: Statement{In: root, To: expr}}, toks, nil
+//	}
+//
+//	return root, toks, nil
+//}
+//
+//func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
+//	rootTup, ok := root.Actual.(Tuple)
+//	if !ok {
+//		rootTup = Tuple{root}
+//	}
+//
+//	// rootTup is modified throughout, but we need to make it into an Expr for
+//	// every return, which is annoying. so make a function to do it on the fly
+//	mkRoot := func() Expr {
+//		return Expr{Token: rootTup[0].Token, Actual: rootTup}
+//	}
+//
+//	if len(toks) < 2 {
+//		return mkRoot(), toks, nil
+//	} else if !toks[0].Equal(comma) {
+//		if toks[0].TokenType == lexer.Punctuation {
+//			return parseConnectingPunct(toks, mkRoot())
+//		}
+//		return mkRoot(), toks, nil
+//	}
+//
+//	var expr Expr
+//	var err error
+//	if expr, toks, err = parseSingle(toks[1:]); err != nil {
+//		return Expr{}, nil, err
+//	}
+//
+//	rootTup = append(rootTup, expr)
+//	return parseTuple(toks, mkRoot())
+//}
+//
+//// parseBlock assumes that the given token list is the entire block, already
+//// pulled from outer curly braces by sliceEnclosedToks, or determined to be the
+//// entire block in some other way.
+//func parseBlock(toks []lexer.Token) (Expr, error) {
+//	b := Block{}
+//	first := toks[0]
+//	var expr Expr
+//	var err error
+//	for {
+//		if len(toks) == 0 {
+//			return Expr{Token: first, Actual: b}, nil
+//		}
+//
+//		if expr, toks, err = parse(toks); err != nil {
+//			return Expr{}, err
+//		}
+//		if _, ok := expr.Actual.(Statement); !ok {
+//			return Expr{}, exprErr{
+//				reason: "blocks may only contain full statements",
+//				tok:    expr.Token,
+//				tokCtx: "non-statement here",
+//			}
+//		}
+//		b = append(b, expr)
+//	}
+//}
diff --git a/expr/parse_test.go b/expr/parse_test.go
index db4601e..03921ce 100644
--- a/expr/parse_test.go
+++ b/expr/parse_test.go
@@ -1,155 +1,149 @@
 package expr
 
-import (
-	. "testing"
+//import . "testing"
"testing" - "github.com/mediocregopher/ginger/lexer" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestSliceEnclosedToks(t *T) { - doAssert := func(in, expOut, expRem []lexer.Token) { - out, rem, err := sliceEnclosedToks(in, openParen, closeParen) - require.Nil(t, err) - assert.Equal(t, expOut, out) - assert.Equal(t, expRem, rem) - } - foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"} - bar := lexer.Token{TokenType: lexer.Identifier, Val: "bar"} - - toks := []lexer.Token{openParen, closeParen} - doAssert(toks, []lexer.Token{}, []lexer.Token{}) - - toks = []lexer.Token{openParen, foo, closeParen, bar} - doAssert(toks, []lexer.Token{foo}, []lexer.Token{bar}) - - toks = []lexer.Token{openParen, foo, foo, closeParen, bar, bar} - doAssert(toks, []lexer.Token{foo, foo}, []lexer.Token{bar, bar}) - - toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, closeParen} - doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen}, []lexer.Token{}) - - toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, bar, closeParen, foo} - doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen, bar}, []lexer.Token{foo}) -} - -func assertParse(t *T, in []lexer.Token, expExpr Expr, expOut []lexer.Token) { - expr, out, err := parse(in) - require.Nil(t, err) - assert.True(t, expExpr.equal(expr), "expr:%+v expExpr:%+v", expr, expExpr) - assert.Equal(t, expOut, out, "out:%v expOut:%v", out, expOut) -} - -func TestParseSingle(t *T) { - foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"} - fooM := lexer.Token{TokenType: lexer.Identifier, Val: "%foo"} - fooExpr := Expr{Actual: Identifier("foo")} - fooMExpr := Expr{Actual: Macro("foo")} - - toks := []lexer.Token{foo} - assertParse(t, toks, fooExpr, []lexer.Token{}) - - toks = []lexer.Token{foo, foo} - assertParse(t, toks, fooExpr, []lexer.Token{foo}) - - toks = []lexer.Token{openParen, foo, closeParen, foo} - assertParse(t, toks, fooExpr, []lexer.Token{foo}) - - toks = []lexer.Token{openParen, openParen, foo, closeParen, closeParen, foo} - assertParse(t, toks, fooExpr, []lexer.Token{foo}) - - toks = []lexer.Token{fooM, foo} - assertParse(t, toks, fooMExpr, []lexer.Token{foo}) -} - -func TestParseTuple(t *T) { - tup := func(ee ...Expr) Expr { - return Expr{Actual: Tuple(ee)} - } - - foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"} - fooExpr := Expr{Actual: Identifier("foo")} - - toks := []lexer.Token{foo, comma, foo} - assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{}) - - toks = []lexer.Token{foo, comma, foo, foo} - assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{foo}) - - toks = []lexer.Token{foo, comma, foo, comma, foo} - assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr), []lexer.Token{}) - - toks = []lexer.Token{foo, comma, foo, comma, foo, comma, foo} - assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr, fooExpr), []lexer.Token{}) - - toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo} - assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{}) - - toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo, foo} - assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{foo}) -} - -func TestParseStatement(t *T) { - stmt := func(in, to Expr) Expr { - return Expr{Actual: Statement{In: in, To: to}} - } - - foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"} - fooExpr := Expr{Actual: Identifier("foo")} - - toks := []lexer.Token{foo, 
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{openParen, foo, arrow, foo, closeParen}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, arrow, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, arrow, foo, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
-
-	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
-
-	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, closeParen, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
-
-	fooTupExpr := Expr{Actual: Tuple{fooExpr, fooExpr}}
-	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, foo}
-	assertParse(t, toks, stmt(fooExpr, fooTupExpr), []lexer.Token{foo})
-
-	toks = []lexer.Token{foo, comma, foo, arrow, foo}
-	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{openParen, foo, comma, foo, closeParen, arrow, foo}
-	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
-}
-
-func TestParseBlock(t *T) {
-	stmt := func(in, to Expr) Expr {
-		return Expr{Actual: Statement{In: in, To: to}}
-	}
-	block := func(stmts ...Expr) Expr {
-		return Expr{Actual: Block(stmts)}
-	}
-
-	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
-	fooExpr := Expr{Actual: Identifier("foo")}
-
-	toks := []lexer.Token{openCurly, foo, arrow, foo, closeCurly}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{})
-
-	toks = []lexer.Token{openCurly, foo, arrow, foo, closeCurly, foo}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{foo})
-
-	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
-
-	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
-}
+//func TestSliceEnclosedToks(t *T) {
+//	doAssert := func(in, expOut, expRem []lexer.Token) {
+//		out, rem, err := sliceEnclosedToks(in, openParen, closeParen)
+//		require.Nil(t, err)
+//		assert.Equal(t, expOut, out)
+//		assert.Equal(t, expRem, rem)
+//	}
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	bar := lexer.Token{TokenType: lexer.Identifier, Val: "bar"}
+//
+//	toks := []lexer.Token{openParen, closeParen}
+//	doAssert(toks, []lexer.Token{}, []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, closeParen, bar}
+//	doAssert(toks, []lexer.Token{foo}, []lexer.Token{bar})
+//
+//	toks = []lexer.Token{openParen, foo, foo, closeParen, bar, bar}
+//	doAssert(toks, []lexer.Token{foo, foo}, []lexer.Token{bar, bar})
+//
+//	toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, closeParen}
+//	doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen}, []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, bar, closeParen, foo}
+//	doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen, bar}, []lexer.Token{foo})
+//}
+//
+//func assertParse(t *T, in []lexer.Token, expExpr Expr, expOut []lexer.Token) {
+//	expr, out, err := parse(in)
+//	require.Nil(t, err)
+//	assert.True(t, expExpr.equal(expr), "expr:%+v expExpr:%+v", expr, expExpr)
+//	assert.Equal(t, expOut, out, "out:%v expOut:%v", out, expOut)
+//}
+//
+//func TestParseSingle(t *T) {
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooM := lexer.Token{TokenType: lexer.Identifier, Val: "%foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//	fooMExpr := Expr{Actual: Macro("foo")}
+//
+//	toks := []lexer.Token{foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openParen, foo, closeParen, foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openParen, openParen, foo, closeParen, closeParen, foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{foo})
+//
+//	toks = []lexer.Token{fooM, foo}
+//	assertParse(t, toks, fooMExpr, []lexer.Token{foo})
+//}
+//
+//func TestParseTuple(t *T) {
+//	tup := func(ee ...Expr) Expr {
+//		return Expr{Actual: Tuple(ee)}
+//	}
+//
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//
+//	toks := []lexer.Token{foo, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, foo, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{foo, comma, foo, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, foo, comma, foo, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo, foo}
+//	assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})
+//}
+//
+//func TestParseStatement(t *T) {
+//	stmt := func(in, to Expr) Expr {
+//		return Expr{Actual: Statement{In: in, To: to}}
+//	}
+//
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//
+//	toks := []lexer.Token{foo, arrow, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, arrow, foo, closeParen}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, arrow, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, arrow, foo, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, closeParen, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	fooTupExpr := Expr{Actual: Tuple{fooExpr, fooExpr}}
+//	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooTupExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{foo, comma, foo, arrow, foo}
+//	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, comma, foo, closeParen, arrow, foo}
+//	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
+//}
+//
+//func TestParseBlock(t *T) {
+//	stmt := func(in, to Expr) Expr {
+//		return Expr{Actual: Statement{In: in, To: to}}
+//	}
+//	block := func(stmts ...Expr) Expr {
+//		return Expr{Actual: Block(stmts)}
+//	}
+//
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//
+//	toks := []lexer.Token{openCurly, foo, arrow, foo, closeCurly}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{})
+//
+//	toks = []lexer.Token{openCurly, foo, arrow, foo, closeCurly, foo}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+//}
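
A note for readers of this patch: the core of the now-commented parser is
sliceEnclosedToks, which splits a token stream at the delimiter matching
toks[0] by keeping a simple nesting-depth counter. Below is a minimal
standalone sketch of that depth-counting idea using plain strings in place
of lexer.Token; sliceEnclosed and the token values are illustrative only,
not part of the ginger API.

package main

import (
	"errors"
	"fmt"
)

// sliceEnclosed mirrors the depth-counting logic of sliceEnclosedToks:
// toks[0] must be the opening delimiter. It returns the tokens up to (but
// not including) the matching close, plus whatever follows the close.
func sliceEnclosed(toks []string, start, end string) (inner, rest []string, err error) {
	c := 1 // depth: toks[0] already opened one level
	for i, tok := range toks[1:] {
		if tok == start {
			c++
		} else if tok == end {
			c--
		}
		if c == 0 {
			// toks[1+i] is the matching close; exclude it from inner
			return toks[1 : 1+i], toks[2+i:], nil
		}
	}
	return nil, nil, errors.New("missing closing " + end)
}

func main() {
	inner, rest, err := sliceEnclosed([]string{"(", "foo", "(", "bar", ")", ")", "baz"}, "(", ")")
	fmt.Println(inner, rest, err) // [foo ( bar )] [baz] <nil>
}

This mirrors the nested cases exercised by the commented-out
TestSliceEnclosedToks above: inner delimiters only adjust the depth, and the
slice stops at the close that balances toks[0].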