implement pipe operator

This commit is contained in:
Brian Picciano 2016-07-22 14:53:04 -06:00
parent 76f963694f
commit bdcd5f3b95
2 changed files with 86 additions and 3 deletions

View File

@ -135,6 +135,37 @@ func (tup Tuple) Equal(e Expr) bool {
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
// Pipe represents two or more expressions chained together with the
// pipe ("|") operator, e.g. `a|b|c`.
type Pipe struct {
	exprs []Expr
}

// Token implements the Expr interface. A Pipe's token is the token of
// the first expression in the chain.
func (p Pipe) Token() lexer.Token {
	return p.exprs[0].Token()
}

// String implements the Expr interface, rendering the chain as
// "(a|b|c)".
func (p Pipe) String() string {
	parts := make([]string, 0, len(p.exprs))
	for _, ex := range p.exprs {
		parts = append(parts, ex.String())
	}
	return "(" + strings.Join(parts, "|") + ")"
}

// Equal implements the Expr interface. Two Pipes are equal when they
// hold the same number of expressions and each corresponding pair of
// expressions is equal.
func (p Pipe) Equal(e Expr) bool {
	other, ok := e.(Pipe)
	if !ok {
		return false
	}
	if len(other.exprs) != len(p.exprs) {
		return false
	}
	for i, ex := range p.exprs {
		if !ex.Equal(other.exprs[i]) {
			return false
		}
	}
	return true
}
////////////////////////////////////////////////////////////////////////////////
// toks[0] must be start // toks[0] must be start
func sliceEnclosedToks(toks []lexer.Token, start, end lexer.Token) ([]lexer.Token, []lexer.Token, error) { func sliceEnclosedToks(toks []lexer.Token, start, end lexer.Token) ([]lexer.Token, []lexer.Token, error) {
c := 1 c := 1
@ -173,6 +204,7 @@ var (
openParen = lexer.Token{TokenType: lexer.Punctuation, Val: "("} openParen = lexer.Token{TokenType: lexer.Punctuation, Val: "("}
closeParen = lexer.Token{TokenType: lexer.Punctuation, Val: ")"} closeParen = lexer.Token{TokenType: lexer.Punctuation, Val: ")"}
comma = lexer.Token{TokenType: lexer.Punctuation, Val: ","} comma = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
pipe = lexer.Token{TokenType: lexer.Punctuation, Val: "|"}
) )
func parse(toks []lexer.Token) (Expr, []lexer.Token, error) { func parse(toks []lexer.Token) (Expr, []lexer.Token, error) {
@ -251,6 +283,8 @@ func parseString(t lexer.Token) (Expr, error) {
func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) { func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
if toks[0].Equal(comma) { if toks[0].Equal(comma) {
return parseTuple(toks, root) return parseTuple(toks, root)
} else if toks[0].Equal(pipe) {
return parsePipe(toks, root)
} }
return nil, nil, fmt.Errorf("invalid connecting punctuation: %v", toks[0]) return nil, nil, fmt.Errorf("invalid connecting punctuation: %v", toks[0])
@ -277,3 +311,25 @@ func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
rootTup.exprs = append(rootTup.exprs, expr) rootTup.exprs = append(rootTup.exprs, expr)
return parseTuple(toks, rootTup) return parseTuple(toks, rootTup)
} }
// parsePipe parses a pipe-chain (e.g. `a|b|c`), where root is the
// expression that preceded the first pipe token and toks[0] is expected
// to be that pipe token. It recursively consumes `|`-then-expression
// pairs until the stream no longer continues the chain, returning the
// accumulated Pipe expression along with the leftover tokens.
func parsePipe(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
	// On the first call root is the expression left of the pipe; on
	// recursive calls it is the Pipe accumulated so far.
	// NOTE: renamed from rootTup (copy-paste leftover from parseTuple);
	// this value is a Pipe, not a Tuple.
	rootPipe, ok := root.(Pipe)
	if !ok {
		rootPipe = Pipe{exprs: []Expr{root}}
	}

	// The chain only continues if there is a pipe token followed by at
	// least one more token for the next expression. The length check
	// also guards the toks[0] access against an empty slice.
	if len(toks) < 2 || !toks[0].Equal(pipe) {
		return rootPipe, toks, nil
	}

	var expr Expr
	var err error
	if expr, toks, err = parseSingle(toks[1:]); err != nil {
		return nil, nil, err
	}

	rootPipe.exprs = append(rootPipe.exprs, expr)
	return parsePipe(toks, rootPipe)
}

View File

@ -37,9 +37,8 @@ func TestSliceEnclosedToks(t *T) {
func assertParse(t *T, in []lexer.Token, expExpr Expr, expOut []lexer.Token) { func assertParse(t *T, in []lexer.Token, expExpr Expr, expOut []lexer.Token) {
expr, out, err := parse(in) expr, out, err := parse(in)
require.Nil(t, err) require.Nil(t, err)
t.Logf("expr:%v out:%v", expr, out) assert.True(t, expExpr.Equal(expr), "expr:%v expExpr:%v", expr, expExpr)
assert.True(t, expExpr.Equal(expr)) assert.Equal(t, expOut, out, "out:%v expOut:%v", out, expOut)
assert.Equal(t, expOut, out)
} }
func TestParseSingle(t *T) { func TestParseSingle(t *T) {
@ -85,3 +84,31 @@ func TestParseTuple(t *T) {
toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo, foo} toks = []lexer.Token{foo, comma, openParen, foo, comma, foo, closeParen, comma, foo, foo}
assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{foo}) assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})
} }
// This is basically the same as tuple
// TestParsePipe exercises parsing of pipe-chains; structurally this
// mirrors TestParseTuple, since pipes parse the same way tuples do.
func TestParsePipe(t *T) {
	mkPipe := func(ee ...Expr) Expr {
		return Pipe{exprs: ee}
	}

	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
	fooExpr := Identifier{tok: tok(foo), ident: "foo"}

	cases := []struct {
		in      []lexer.Token
		expExpr Expr
		expOut  []lexer.Token
	}{
		// simple two-element chain
		{
			in:      []lexer.Token{foo, pipe, foo},
			expExpr: mkPipe(fooExpr, fooExpr),
			expOut:  []lexer.Token{},
		},
		// chain followed by a trailing token
		{
			in:      []lexer.Token{foo, pipe, foo, foo},
			expExpr: mkPipe(fooExpr, fooExpr),
			expOut:  []lexer.Token{foo},
		},
		// three- and four-element chains
		{
			in:      []lexer.Token{foo, pipe, foo, pipe, foo},
			expExpr: mkPipe(fooExpr, fooExpr, fooExpr),
			expOut:  []lexer.Token{},
		},
		{
			in:      []lexer.Token{foo, pipe, foo, pipe, foo, pipe, foo},
			expExpr: mkPipe(fooExpr, fooExpr, fooExpr, fooExpr),
			expOut:  []lexer.Token{},
		},
		// parenthesized sub-chain nested inside a chain
		{
			in:      []lexer.Token{foo, pipe, openParen, foo, pipe, foo, closeParen, pipe, foo},
			expExpr: mkPipe(fooExpr, mkPipe(fooExpr, fooExpr), fooExpr),
			expOut:  []lexer.Token{},
		},
		// nested sub-chain with a trailing token
		{
			in:      []lexer.Token{foo, pipe, openParen, foo, pipe, foo, closeParen, pipe, foo, foo},
			expExpr: mkPipe(fooExpr, mkPipe(fooExpr, fooExpr), fooExpr),
			expOut:  []lexer.Token{foo},
		},
	}

	for _, c := range cases {
		assertParse(t, c.in, c.expExpr, c.expOut)
	}
}