comment out a bunch of types I don't feel like supporting right now, and all of the parsing code

Brian Picciano 2016-08-05 11:48:42 -06:00
parent 45ce802b35
commit bdd5711773
3 changed files with 448 additions and 457 deletions

View File

@@ -106,7 +106,7 @@ func (v Void) LLVMVal(ctx *Ctx, lctx LLVMCtx) llvm.Value {
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
+/*
 // Bool represents a true or false value
 type Bool bool
@@ -132,7 +132,7 @@ func (b Bool) equal(e equaler) bool {
 	}
 	return bb == b
 }
+*/
 
 ////////////////////////////////////////////////////////////////////////////////
 
 // Int represents an integer value
@@ -164,7 +164,7 @@ func (i Int) equal(e equaler) bool {
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
+/*
 // String represents a string value
 type String string
@@ -190,7 +190,7 @@ func (s String) equal(e equaler) bool {
 	}
 	return ss == s
 }
+*/
 
 ////////////////////////////////////////////////////////////////////////////////
 
 // Identifier represents a binding to some other value which has been given a
@@ -303,6 +303,7 @@ func (tup Tuple) equal(e equaler) bool {
 // used as the input to the pipe, and the output of the pipe is the output of
 // the statement
 type Statement struct {
+	// TODO change to Op and Arg
 	In Expr
 	To Expr
 }
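The doc comment above describes the pipe semantics that Statement encodes: in a statement like `foo > bar`, the expression on the left is fed into the one on the right. As a minimal sketch (using only the Expr, Identifier, and Statement types shown in this diff, with Token fields omitted for brevity), the node for `foo > bar` would look like:

    // AST for the statement `foo > bar`: In is the pipe's input,
    // To is the expression receiving it.
    fooBar := Expr{Actual: Statement{
        In: Expr{Actual: Identifier("foo")},
        To: Expr{Actual: Identifier("bar")},
    }}

This mirrors what parseConnectingPunct builds when it sees the arrow token.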

View File

@@ -1,303 +1,299 @@
 package expr
 
-import (
-	"fmt"
-	"io"
-	"strconv"
-
-	"github.com/mediocregopher/ginger/lexer"
-)
-
-type exprErr struct {
-	reason string
-	err    error
-	tok    lexer.Token
-	tokCtx string // e.g. "block starting at" or "open paren at"
-}
-
-func (e exprErr) Error() string {
-	var msg string
-	if e.err != nil {
-		msg = e.err.Error()
-	} else {
-		msg = e.reason
-	}
-	if err := e.tok.Err(); err != nil {
-		msg += " - token error: " + err.Error()
-	} else if (e.tok != lexer.Token{}) {
-		msg += " - "
-		if e.tokCtx != "" {
-			msg += e.tokCtx + ": "
-		}
-		msg = fmt.Sprintf("%s [line:%d col:%d]", msg, e.tok.Row, e.tok.Col)
-	}
-	return msg
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-// toks[0] must be start
-func sliceEnclosedToks(toks []lexer.Token, start, end lexer.Token) ([]lexer.Token, []lexer.Token, error) {
-	c := 1
-	ret := []lexer.Token{}
-	first := toks[0]
-	for i, tok := range toks[1:] {
-		if tok.Err() != nil {
-			return nil, nil, exprErr{
-				reason: fmt.Sprintf("missing closing %v", end),
-				tok:    tok,
-			}
-		}
-
-		if tok.Equal(start) {
-			c++
-		} else if tok.Equal(end) {
-			c--
-		}
-		if c == 0 {
-			return ret, toks[2+i:], nil
-		}
-		ret = append(ret, tok)
-	}
-
-	return nil, nil, exprErr{
-		reason: fmt.Sprintf("missing closing %v", end),
-		tok:    first,
-		tokCtx: "starting at",
-	}
-}
-
-// Parse reads in all expressions it can from the given io.Reader and returns
-// them
-func Parse(r io.Reader) ([]Expr, error) {
-	toks := readAllToks(r)
-	var ret []Expr
-	var expr Expr
-	var err error
-	for len(toks) > 0 {
-		if toks[0].TokenType == lexer.EOF {
-			return ret, nil
-		}
-		expr, toks, err = parse(toks)
-		if err != nil {
-			return nil, err
-		}
-		ret = append(ret, expr)
-	}
-	return ret, nil
-}
-
-// ParseAsBlock reads the given io.Reader as if it was implicitly surrounded by
-// curly braces, making it into a Block. This means all expressions from the
-// io.Reader *must* be statements. The returned Expr's Actual will always be a
-// Block.
-func ParseAsBlock(r io.Reader) (Expr, error) {
-	return parseBlock(readAllToks(r))
-}
-
-func readAllToks(r io.Reader) []lexer.Token {
-	l := lexer.New(r)
-	var toks []lexer.Token
-	for l.HasNext() {
-		toks = append(toks, l.Next())
-	}
-	return toks
-}
-
-// For all parse methods it is assumed that toks is not empty
-
-var (
-	openParen  = lexer.Token{TokenType: lexer.Wrapper, Val: "("}
-	closeParen = lexer.Token{TokenType: lexer.Wrapper, Val: ")"}
-	openCurly  = lexer.Token{TokenType: lexer.Wrapper, Val: "{"}
-	closeCurly = lexer.Token{TokenType: lexer.Wrapper, Val: "}"}
-	comma      = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
-	arrow      = lexer.Token{TokenType: lexer.Punctuation, Val: ">"}
-)
-
-func parse(toks []lexer.Token) (Expr, []lexer.Token, error) {
-	expr, toks, err := parseSingle(toks)
-	if err != nil {
-		return Expr{}, nil, err
-	}
-
-	if len(toks) > 0 && toks[0].TokenType == lexer.Punctuation {
-		return parseConnectingPunct(toks, expr)
-	}
-
-	return expr, toks, nil
-}
-
-func parseSingle(toks []lexer.Token) (Expr, []lexer.Token, error) {
-	var expr Expr
-	var err error
-
-	if toks[0].Err() != nil {
-		return Expr{}, nil, exprErr{
-			reason: "could not parse token",
-			tok:    toks[0],
-		}
-	}
-
-	if toks[0].Equal(openParen) {
-		starter := toks[0]
-		var ptoks []lexer.Token
-		ptoks, toks, err = sliceEnclosedToks(toks, openParen, closeParen)
-		if err != nil {
-			return Expr{}, nil, err
-		}
-
-		if expr, ptoks, err = parse(ptoks); err != nil {
-			return Expr{}, nil, err
-		} else if len(ptoks) > 0 {
-			return Expr{}, nil, exprErr{
-				reason: "multiple expressions inside parenthesis",
-				tok:    starter,
-				tokCtx: "starting at",
-			}
-		}
-		return expr, toks, nil
-
-	} else if toks[0].Equal(openCurly) {
-		var btoks []lexer.Token
-		btoks, toks, err = sliceEnclosedToks(toks, openCurly, closeCurly)
-		if err != nil {
-			return Expr{}, nil, err
-		}
-
-		if expr, err = parseBlock(btoks); err != nil {
-			return Expr{}, nil, err
-		}
-		return expr, toks, nil
-	}
-
-	if expr, err = parseNonPunct(toks[0]); err != nil {
-		return Expr{}, nil, err
-	}
-	return expr, toks[1:], nil
-}
-
-func parseNonPunct(tok lexer.Token) (Expr, error) {
-	if tok.TokenType == lexer.Identifier {
-		return parseIdentifier(tok)
-	} else if tok.TokenType == lexer.String {
-		return parseString(tok)
-	}
-
-	return Expr{}, exprErr{
-		reason: "unexpected non-punctuation token",
-		tok:    tok,
-	}
-}
-
-func parseIdentifier(t lexer.Token) (Expr, error) {
-	e := Expr{Token: t}
-	if t.Val[0] == '-' || (t.Val[0] >= '0' && t.Val[0] <= '9') {
-		n, err := strconv.ParseInt(t.Val, 10, 64)
-		if err != nil {
-			return Expr{}, exprErr{
-				err: err,
-				tok: t,
-			}
-		}
-		e.Actual = Int(n)
-
-	} else if t.Val == "%true" {
-		e.Actual = Bool(true)
-
-	} else if t.Val == "%false" {
-		e.Actual = Bool(false)
-
-	} else if t.Val[0] == '%' {
-		e.Actual = Macro(t.Val[1:])
-
-	} else {
-		e.Actual = Identifier(t.Val)
-	}
-
-	return e, nil
-}
-
-func parseString(t lexer.Token) (Expr, error) {
-	str, err := strconv.Unquote(t.Val)
-	if err != nil {
-		return Expr{}, exprErr{
-			err: err,
-			tok: t,
-		}
-	}
-	return Expr{Token: t, Actual: String(str)}, nil
-}
-
-func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
-	if toks[0].Equal(comma) {
-		return parseTuple(toks, root)
-
-	} else if toks[0].Equal(arrow) {
-		expr, toks, err := parse(toks[1:])
-		if err != nil {
-			return Expr{}, nil, err
-		}
-		return Expr{Token: root.Token, Actual: Statement{In: root, To: expr}}, toks, nil
-	}
-
-	return root, toks, nil
-}
-
-func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
-	rootTup, ok := root.Actual.(Tuple)
-	if !ok {
-		rootTup = Tuple{root}
-	}
-
-	// rootTup is modified throughout, be we need to make it into an Expr for
-	// every return, which is annoying. so make a function to do it on the fly
-	mkRoot := func() Expr {
-		return Expr{Token: rootTup[0].Token, Actual: rootTup}
-	}
-
-	if len(toks) < 2 {
-		return mkRoot(), toks, nil
-	} else if !toks[0].Equal(comma) {
-		if toks[0].TokenType == lexer.Punctuation {
-			return parseConnectingPunct(toks, mkRoot())
-		}
-		return mkRoot(), toks, nil
-	}
-
-	var expr Expr
-	var err error
-	if expr, toks, err = parseSingle(toks[1:]); err != nil {
-		return Expr{}, nil, err
-	}
-
-	rootTup = append(rootTup, expr)
-	return parseTuple(toks, mkRoot())
-}
-
-// parseBlock assumes that the given token list is the entire block, already
-// pulled from outer curly braces by sliceEnclosedToks, or determined to be the
-// entire block in some other way.
-func parseBlock(toks []lexer.Token) (Expr, error) {
-	b := Block{}
-	first := toks[0]
-	var expr Expr
-	var err error
-	for {
-		if len(toks) == 0 {
-			return Expr{Token: first, Actual: b}, nil
-		}
-
-		if expr, toks, err = parse(toks); err != nil {
-			return Expr{}, err
-		}
-		if _, ok := expr.Actual.(Statement); !ok {
-			return Expr{}, exprErr{
-				reason: "blocks may only contain full statements",
-				tok:    expr.Token,
-				tokCtx: "non-statement here",
-			}
-		}
-		b = append(b, expr)
-	}
-}
+//type exprErr struct {
+//	reason string
+//	err    error
+//	tok    lexer.Token
+//	tokCtx string // e.g. "block starting at" or "open paren at"
+//}
+//
+//func (e exprErr) Error() string {
+//	var msg string
+//	if e.err != nil {
+//		msg = e.err.Error()
+//	} else {
+//		msg = e.reason
+//	}
+//	if err := e.tok.Err(); err != nil {
+//		msg += " - token error: " + err.Error()
+//	} else if (e.tok != lexer.Token{}) {
+//		msg += " - "
+//		if e.tokCtx != "" {
+//			msg += e.tokCtx + ": "
+//		}
+//		msg = fmt.Sprintf("%s [line:%d col:%d]", msg, e.tok.Row, e.tok.Col)
+//	}
+//	return msg
+//}
+//
+//////////////////////////////////////////////////////////////////////////////////
+//
+//// toks[0] must be start
+//func sliceEnclosedToks(toks []lexer.Token, start, end lexer.Token) ([]lexer.Token, []lexer.Token, error) {
+//	c := 1
+//	ret := []lexer.Token{}
+//	first := toks[0]
+//	for i, tok := range toks[1:] {
+//		if tok.Err() != nil {
+//			return nil, nil, exprErr{
+//				reason: fmt.Sprintf("missing closing %v", end),
+//				tok:    tok,
+//			}
+//		}
+//
+//		if tok.Equal(start) {
+//			c++
+//		} else if tok.Equal(end) {
+//			c--
+//		}
+//		if c == 0 {
+//			return ret, toks[2+i:], nil
+//		}
+//		ret = append(ret, tok)
+//	}
+//
+//	return nil, nil, exprErr{
+//		reason: fmt.Sprintf("missing closing %v", end),
+//		tok:    first,
+//		tokCtx: "starting at",
+//	}
+//}
+//
+//// Parse reads in all expressions it can from the given io.Reader and returns
+//// them
+//func Parse(r io.Reader) ([]Expr, error) {
+//	toks := readAllToks(r)
+//	var ret []Expr
+//	var expr Expr
+//	var err error
+//	for len(toks) > 0 {
+//		if toks[0].TokenType == lexer.EOF {
+//			return ret, nil
+//		}
+//		expr, toks, err = parse(toks)
+//		if err != nil {
+//			return nil, err
+//		}
+//		ret = append(ret, expr)
+//	}
+//	return ret, nil
+//}
+//
+//// ParseAsBlock reads the given io.Reader as if it was implicitly surrounded by
+//// curly braces, making it into a Block. This means all expressions from the
+//// io.Reader *must* be statements. The returned Expr's Actual will always be a
+//// Block.
+//func ParseAsBlock(r io.Reader) (Expr, error) {
+//	return parseBlock(readAllToks(r))
+//}
+//
+//func readAllToks(r io.Reader) []lexer.Token {
+//	l := lexer.New(r)
+//	var toks []lexer.Token
+//	for l.HasNext() {
+//		toks = append(toks, l.Next())
+//	}
+//	return toks
+//}
+//
+//// For all parse methods it is assumed that toks is not empty
+//
+//var (
+//	openParen  = lexer.Token{TokenType: lexer.Wrapper, Val: "("}
+//	closeParen = lexer.Token{TokenType: lexer.Wrapper, Val: ")"}
+//	openCurly  = lexer.Token{TokenType: lexer.Wrapper, Val: "{"}
+//	closeCurly = lexer.Token{TokenType: lexer.Wrapper, Val: "}"}
+//	comma      = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
+//	arrow      = lexer.Token{TokenType: lexer.Punctuation, Val: ">"}
+//)
+//
+//func parse(toks []lexer.Token) (Expr, []lexer.Token, error) {
+//	expr, toks, err := parseSingle(toks)
+//	if err != nil {
+//		return Expr{}, nil, err
+//	}
+//
+//	if len(toks) > 0 && toks[0].TokenType == lexer.Punctuation {
+//		return parseConnectingPunct(toks, expr)
+//	}
+//
+//	return expr, toks, nil
+//}
+//
+//func parseSingle(toks []lexer.Token) (Expr, []lexer.Token, error) {
+//	var expr Expr
+//	var err error
+//
+//	if toks[0].Err() != nil {
+//		return Expr{}, nil, exprErr{
+//			reason: "could not parse token",
+//			tok:    toks[0],
+//		}
+//	}
+//
+//	if toks[0].Equal(openParen) {
+//		starter := toks[0]
+//		var ptoks []lexer.Token
+//		ptoks, toks, err = sliceEnclosedToks(toks, openParen, closeParen)
+//		if err != nil {
+//			return Expr{}, nil, err
+//		}
+//
+//		if expr, ptoks, err = parse(ptoks); err != nil {
+//			return Expr{}, nil, err
+//		} else if len(ptoks) > 0 {
+//			return Expr{}, nil, exprErr{
+//				reason: "multiple expressions inside parenthesis",
+//				tok:    starter,
+//				tokCtx: "starting at",
+//			}
+//		}
+//		return expr, toks, nil
+//
+//	} else if toks[0].Equal(openCurly) {
+//		var btoks []lexer.Token
+//		btoks, toks, err = sliceEnclosedToks(toks, openCurly, closeCurly)
+//		if err != nil {
+//			return Expr{}, nil, err
+//		}
+//
+//		if expr, err = parseBlock(btoks); err != nil {
+//			return Expr{}, nil, err
+//		}
+//		return expr, toks, nil
+//	}
+//
+//	if expr, err = parseNonPunct(toks[0]); err != nil {
+//		return Expr{}, nil, err
+//	}
+//	return expr, toks[1:], nil
+//}
+//
+//func parseNonPunct(tok lexer.Token) (Expr, error) {
+//	if tok.TokenType == lexer.Identifier {
+//		return parseIdentifier(tok)
+//	} else if tok.TokenType == lexer.String {
+//		//return parseString(tok)
+//	}
+//
+//	return Expr{}, exprErr{
+//		reason: "unexpected non-punctuation token",
+//		tok:    tok,
+//	}
+//}
+//
+//func parseIdentifier(t lexer.Token) (Expr, error) {
+//	e := Expr{Token: t}
+//	if t.Val[0] == '-' || (t.Val[0] >= '0' && t.Val[0] <= '9') {
+//		n, err := strconv.ParseInt(t.Val, 10, 64)
+//		if err != nil {
+//			return Expr{}, exprErr{
+//				err: err,
+//				tok: t,
+//			}
+//		}
+//		e.Actual = Int(n)
+//
+//	/*
+//	} else if t.Val == "%true" {
+//		e.Actual = Bool(true)
+//
+//	} else if t.Val == "%false" {
+//		e.Actual = Bool(false)
+//	*/
+//
+//	} else if t.Val[0] == '%' {
+//		e.Actual = Macro(t.Val[1:])
+//
+//	} else {
+//		e.Actual = Identifier(t.Val)
+//	}
+//
+//	return e, nil
+//}
+//
+///*
+//func parseString(t lexer.Token) (Expr, error) {
+//	str, err := strconv.Unquote(t.Val)
+//	if err != nil {
+//		return Expr{}, exprErr{
+//			err: err,
+//			tok: t,
+//		}
+//	}
+//	return Expr{Token: t, Actual: String(str)}, nil
+//}
+//*/
+//
+//func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
+//	if toks[0].Equal(comma) {
+//		return parseTuple(toks, root)
+//
+//	} else if toks[0].Equal(arrow) {
+//		expr, toks, err := parse(toks[1:])
+//		if err != nil {
+//			return Expr{}, nil, err
+//		}
+//		return Expr{Token: root.Token, Actual: Statement{In: root, To: expr}}, toks, nil
+//	}
+//
+//	return root, toks, nil
+//}
+//
+//func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
+//	rootTup, ok := root.Actual.(Tuple)
+//	if !ok {
+//		rootTup = Tuple{root}
+//	}
+//
+//	// rootTup is modified throughout, be we need to make it into an Expr for
+//	// every return, which is annoying. so make a function to do it on the fly
+//	mkRoot := func() Expr {
+//		return Expr{Token: rootTup[0].Token, Actual: rootTup}
+//	}
+//
+//	if len(toks) < 2 {
+//		return mkRoot(), toks, nil
+//	} else if !toks[0].Equal(comma) {
+//		if toks[0].TokenType == lexer.Punctuation {
+//			return parseConnectingPunct(toks, mkRoot())
+//		}
+//		return mkRoot(), toks, nil
+//	}
+//
+//	var expr Expr
+//	var err error
+//	if expr, toks, err = parseSingle(toks[1:]); err != nil {
+//		return Expr{}, nil, err
+//	}
+//
+//	rootTup = append(rootTup, expr)
+//	return parseTuple(toks, mkRoot())
+//}
+//
+//// parseBlock assumes that the given token list is the entire block, already
+//// pulled from outer curly braces by sliceEnclosedToks, or determined to be the
+//// entire block in some other way.
+//func parseBlock(toks []lexer.Token) (Expr, error) {
+//	b := Block{}
+//	first := toks[0]
+//	var expr Expr
+//	var err error
+//	for {
+//		if len(toks) == 0 {
+//			return Expr{Token: first, Actual: b}, nil
+//		}
+//
+//		if expr, toks, err = parse(toks); err != nil {
+//			return Expr{}, err
+//		}
+//		if _, ok := expr.Actual.(Statement); !ok {
+//			return Expr{}, exprErr{
+//				reason: "blocks may only contain full statements",
+//				tok:    expr.Token,
+//				tokCtx: "non-statement here",
+//			}
+//		}
+//		b = append(b, expr)
+//	}
+//}
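For context, the code commented out above was driven through two entry points, Parse and ParseAsBlock: lex the whole io.Reader into tokens, then recursively carve up the token slice with sliceEnclosedToks. A minimal usage sketch of that API as it stood before this commit (assuming the expr and lexer packages behave as shown in the deleted lines):

    package main

    import (
        "fmt"
        "strings"

        "github.com/mediocregopher/ginger/expr"
    )

    func main() {
        // "foo > bar" is a single statement: foo piped into bar. ParseAsBlock
        // treats the whole reader as if wrapped in curly braces, so every
        // top-level expression must be a statement.
        e, err := expr.ParseAsBlock(strings.NewReader("foo > bar"))
        if err != nil {
            fmt.Println("parse error:", err)
            return
        }
        fmt.Printf("%+v\n", e.Actual) // a Block containing one Statement
    }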

View File

@@ -1,155 +1,149 @@
 package expr
 
-import (
-	. "testing"
-
-	"github.com/mediocregopher/ginger/lexer"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-)
-
-func TestSliceEnclosedToks(t *T) {
-	doAssert := func(in, expOut, expRem []lexer.Token) {
-		out, rem, err := sliceEnclosedToks(in, openParen, closeParen)
-		require.Nil(t, err)
-		assert.Equal(t, expOut, out)
-		assert.Equal(t, expRem, rem)
-	}
-	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
-	bar := lexer.Token{TokenType: lexer.Identifier, Val: "bar"}
-
-	toks := []lexer.Token{openParen, closeParen}
-	doAssert(toks, []lexer.Token{}, []lexer.Token{})
-
-	toks = []lexer.Token{openParen, foo, closeParen, bar}
-	doAssert(toks, []lexer.Token{foo}, []lexer.Token{bar})
-
-	toks = []lexer.Token{openParen, foo, foo, closeParen, bar, bar}
-	doAssert(toks, []lexer.Token{foo, foo}, []lexer.Token{bar, bar})
-
-	toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, closeParen}
-	doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen}, []lexer.Token{})
-
-	toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, bar, closeParen, foo}
-	doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen, bar}, []lexer.Token{foo})
-}
-
-func assertParse(t *T, in []lexer.Token, expExpr Expr, expOut []lexer.Token) {
-	expr, out, err := parse(in)
-	require.Nil(t, err)
-	assert.True(t, expExpr.equal(expr), "expr:%+v expExpr:%+v", expr, expExpr)
-	assert.Equal(t, expOut, out, "out:%v expOut:%v", out, expOut)
-}
-
-func TestParseSingle(t *T) {
-	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
-	fooM := lexer.Token{TokenType: lexer.Identifier, Val: "%foo"}
-	fooExpr := Expr{Actual: Identifier("foo")}
-	fooMExpr := Expr{Actual: Macro("foo")}
-
-	toks := []lexer.Token{foo}
-	assertParse(t, toks, fooExpr, []lexer.Token{})
-
-	toks = []lexer.Token{foo, foo}
-	assertParse(t, toks, fooExpr, []lexer.Token{foo})
-
-	toks = []lexer.Token{openParen, foo, closeParen, foo}
-	assertParse(t, toks, fooExpr, []lexer.Token{foo})
-
-	toks = []lexer.Token{openParen, openParen, foo, closeParen, closeParen, foo}
-	assertParse(t, toks, fooExpr, []lexer.Token{foo})
-
-	toks = []lexer.Token{fooM, foo}
-	assertParse(t, toks, fooMExpr, []lexer.Token{foo})
-}
-
-func TestParseTuple(t *T) {
-	tup := func(ee ...Expr) Expr {
-		return Expr{Actual: Tuple(ee)}
-	}
-
-	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
-	fooExpr := Expr{Actual: Identifier("foo")}
-
-	toks := []lexer.Token{foo, comma, foo}
-	assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, comma, foo, foo}
-	assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{foo})
-
-	toks = []lexer.Token{foo, comma, foo, comma, foo}
-	assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, comma, foo, comma, foo, comma, foo}
-	assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo}
-	assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo, foo}
-	assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})
-}
-
-func TestParseStatement(t *T) {
-	stmt := func(in, to Expr) Expr {
-		return Expr{Actual: Statement{In: in, To: to}}
-	}
-
-	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
-	fooExpr := Expr{Actual: Identifier("foo")}
-
-	toks := []lexer.Token{foo, arrow, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{openParen, foo, arrow, foo, closeParen}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, arrow, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{foo, arrow, foo, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
-
-	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
-
-	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, closeParen, foo}
-	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
-
-	fooTupExpr := Expr{Actual: Tuple{fooExpr, fooExpr}}
-	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, foo}
-	assertParse(t, toks, stmt(fooExpr, fooTupExpr), []lexer.Token{foo})
-
-	toks = []lexer.Token{foo, comma, foo, arrow, foo}
-	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
-
-	toks = []lexer.Token{openParen, foo, comma, foo, closeParen, arrow, foo}
-	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
-}
-
-func TestParseBlock(t *T) {
-	stmt := func(in, to Expr) Expr {
-		return Expr{Actual: Statement{In: in, To: to}}
-	}
-	block := func(stmts ...Expr) Expr {
-		return Expr{Actual: Block(stmts)}
-	}
-
-	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
-	fooExpr := Expr{Actual: Identifier("foo")}
-
-	toks := []lexer.Token{openCurly, foo, arrow, foo, closeCurly}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{})
-
-	toks = []lexer.Token{openCurly, foo, arrow, foo, closeCurly, foo}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{foo})
-
-	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
-
-	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
-	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
-}
+//import . "testing"
+
+//func TestSliceEnclosedToks(t *T) {
+//	doAssert := func(in, expOut, expRem []lexer.Token) {
+//		out, rem, err := sliceEnclosedToks(in, openParen, closeParen)
+//		require.Nil(t, err)
+//		assert.Equal(t, expOut, out)
+//		assert.Equal(t, expRem, rem)
+//	}
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	bar := lexer.Token{TokenType: lexer.Identifier, Val: "bar"}
+//
+//	toks := []lexer.Token{openParen, closeParen}
+//	doAssert(toks, []lexer.Token{}, []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, closeParen, bar}
+//	doAssert(toks, []lexer.Token{foo}, []lexer.Token{bar})
+//
+//	toks = []lexer.Token{openParen, foo, foo, closeParen, bar, bar}
+//	doAssert(toks, []lexer.Token{foo, foo}, []lexer.Token{bar, bar})
+//
+//	toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, closeParen}
+//	doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen}, []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, openParen, bar, closeParen, bar, closeParen, foo}
+//	doAssert(toks, []lexer.Token{foo, openParen, bar, closeParen, bar}, []lexer.Token{foo})
+//}
+//
+//func assertParse(t *T, in []lexer.Token, expExpr Expr, expOut []lexer.Token) {
+//	expr, out, err := parse(in)
+//	require.Nil(t, err)
+//	assert.True(t, expExpr.equal(expr), "expr:%+v expExpr:%+v", expr, expExpr)
+//	assert.Equal(t, expOut, out, "out:%v expOut:%v", out, expOut)
+//}
+//
+//func TestParseSingle(t *T) {
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooM := lexer.Token{TokenType: lexer.Identifier, Val: "%foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//	fooMExpr := Expr{Actual: Macro("foo")}
+//
+//	toks := []lexer.Token{foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openParen, foo, closeParen, foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openParen, openParen, foo, closeParen, closeParen, foo}
+//	assertParse(t, toks, fooExpr, []lexer.Token{foo})
+//
+//	toks = []lexer.Token{fooM, foo}
+//	assertParse(t, toks, fooMExpr, []lexer.Token{foo})
+//}
+//
+//func TestParseTuple(t *T) {
+//	tup := func(ee ...Expr) Expr {
+//		return Expr{Actual: Tuple(ee)}
+//	}
+//
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//
+//	toks := []lexer.Token{foo, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, foo, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{foo, comma, foo, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, foo, comma, foo, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, fooExpr, fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo}
+//	assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo, foo}
+//	assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})
+//}
+//
+//func TestParseStatement(t *T) {
+//	stmt := func(in, to Expr) Expr {
+//		return Expr{Actual: Statement{In: in, To: to}}
+//	}
+//
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//
+//	toks := []lexer.Token{foo, arrow, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, arrow, foo, closeParen}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, arrow, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{foo, arrow, foo, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, closeParen, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})
+//
+//	fooTupExpr := Expr{Actual: Tuple{fooExpr, fooExpr}}
+//	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, foo}
+//	assertParse(t, toks, stmt(fooExpr, fooTupExpr), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{foo, comma, foo, arrow, foo}
+//	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
+//
+//	toks = []lexer.Token{openParen, foo, comma, foo, closeParen, arrow, foo}
+//	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
+//}
+//
+//func TestParseBlock(t *T) {
+//	stmt := func(in, to Expr) Expr {
+//		return Expr{Actual: Statement{In: in, To: to}}
+//	}
+//	block := func(stmts ...Expr) Expr {
+//		return Expr{Actual: Block(stmts)}
+//	}
+//
+//	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+//	fooExpr := Expr{Actual: Identifier("foo")}
+//
+//	toks := []lexer.Token{openCurly, foo, arrow, foo, closeCurly}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{})
+//
+//	toks = []lexer.Token{openCurly, foo, arrow, foo, closeCurly, foo}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+//
+//	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
+//	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+//}
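The TestSliceEnclosedToks cases above pin down the slicing contract: starting at an opener, collect tokens until the matching closer while tracking nesting, and hand back the remainder. The same depth-counter technique, as a self-contained sketch over runes (names here are illustrative, not part of the repo):

    package main

    import "fmt"

    // sliceEnclosed assumes s[0] is the opener; it returns the runes up to the
    // matching closer and whatever follows it, tracking nesting with a counter,
    // just as sliceEnclosedToks does with its c variable.
    func sliceEnclosed(s []rune, open, closer rune) (inner, rem []rune, ok bool) {
        depth := 1
        for i, r := range s[1:] {
            if r == open {
                depth++
            } else if r == closer {
                depth--
            }
            if depth == 0 {
                return s[1 : 1+i], s[2+i:], true
            }
        }
        return nil, nil, false // no matching closer found
    }

    func main() {
        inner, rem, ok := sliceEnclosed([]rune("(foo(bar))baz"), '(', ')')
        fmt.Println(string(inner), string(rem), ok) // foo(bar) baz true
    }

The `s[2+i:]` remainder mirrors the `toks[2+i:]` slice in the commented-out sliceEnclosedToks, skipping both the opener and the matching closer.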