get rid of pipe, sorry pipe

Brian Picciano 2016-07-28 14:09:33 -06:00
parent da0d0dfc18
commit 9c9c8afb7d
4 changed files with 24 additions and 130 deletions

View File

@@ -17,6 +17,11 @@ import (
// TODO having Equal as part of the Actual interface is going to be annoying.
// The built in macros which return their own expressions don't really care
// about it, and it's really only needed for tests I think.
//
// Alternatively, don't have token be embedded in the expression. I'm not sure
// if that's actually possible.
// TODO need to figure out how to test LLVMVal stuff
// Actual represents the actual expression in question, and has certain
// properties. It is wrapped by Expr which also holds onto contextual
@@ -185,41 +190,6 @@ func (tup Tuple) LLVMVal(builder llvm.Builder) llvm.Value {
////////////////////////////////////////////////////////////////////////////////
// Pipe represents a set of expressions which operate on values and return new
// values. The inputs of one expression in the pipe is the output of the
// previous expression
//
// TODO remove this, sorry Pipe
type Pipe []Expr

func (p Pipe) String() string {
	strs := make([]string, len(p))
	for i := range p {
		strs[i] = fmt.Sprint(p[i].Actual)
	}
	return "(" + strings.Join(strs, "|") + ")"
}

// Equal implements the Actual method
func (p Pipe) Equal(e Actual) bool {
	pp, ok := e.(Pipe)
	if !ok || len(pp) != len(p) {
		return false
	}
	for i := range p {
		if !p[i].Actual.Equal(pp[i].Actual) {
			return false
		}
	}
	return true
}

func (p Pipe) LLVMVal(builder llvm.Builder) llvm.Value {
	return llvm.Value{}
}

////////////////////////////////////////////////////////////////////////////////
// Statement represents an actual action which will be taken. The input value is
// used as the input to the pipe, and the output of the pipe is the output of
// the statement
@@ -396,7 +366,6 @@ var (
	openCurly  = lexer.Token{TokenType: lexer.Wrapper, Val: "{"}
	closeCurly = lexer.Token{TokenType: lexer.Wrapper, Val: "}"}
	comma      = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
	pipe       = lexer.Token{TokenType: lexer.Punctuation, Val: "|"}
	arrow      = lexer.Token{TokenType: lexer.Punctuation, Val: ">"}
)
@@ -518,20 +487,12 @@ func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, e
	if toks[0].Equal(comma) {
		return parseTuple(toks, root)

	} else if toks[0].Equal(pipe) {
		return parsePipe(toks, root)

	} else if toks[0].Equal(arrow) {
		expr, toks, err := parse(toks[1:])
		if err != nil {
			return Expr{}, nil, err
		}
		pipe, ok := expr.Actual.(Pipe)
		if !ok {
			pipe = Pipe{expr}
		}
		pipeExpr := Expr{Actual: pipe, Token: expr.Token}
		return Expr{Token: root.Token, Actual: Statement{In: root, To: pipeExpr}}, toks, nil
		return Expr{Token: root.Token, Actual: Statement{In: root, To: expr}}, toks, nil
	}

	return root, toks, nil
@@ -568,37 +529,6 @@ func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
	return parseTuple(toks, mkRoot())
}

func parsePipe(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
	rootPipe, ok := root.Actual.(Pipe)
	if !ok {
		rootPipe = Pipe{root}
	}

	// rootPipe is modified throughout, be we need to make it into an Expr for
	// every return, which is annoying. so make a function to do it on the fly
	mkRoot := func() Expr {
		return Expr{Token: rootPipe[0].Token, Actual: rootPipe}
	}

	if len(toks) < 2 {
		return mkRoot(), toks, nil

	} else if !toks[0].Equal(pipe) {
		if toks[0].TokenType == lexer.Punctuation {
			return parseConnectingPunct(toks, mkRoot())
		}
		return mkRoot(), toks, nil
	}

	var expr Expr
	var err error
	if expr, toks, err = parseSingle(toks[1:]); err != nil {
		return Expr{}, nil, err
	}

	rootPipe = append(rootPipe, expr)
	return parsePipe(toks, mkRoot())
}

// parseBlock assumes that the given token list is the entire block, already
// pulled from outer curly braces by sliceEnclosedToks, or determined to be the
// entire block in some other way.
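With Pipe gone, the right-hand side of an arrow statement is just a single expression. As a rough, standalone illustration of the resulting shape (using stripped-down stand-ins for the package's Expr, Identifier, and Statement types, not their real definitions), `foo > bar` now comes out of the parser as a Statement whose To field holds one expression:

package main

import "fmt"

// Illustrative stand-ins only; the real Expr, Identifier, and Statement in
// this repo carry more detail (e.g. the lexer Token) than shown here.
type (
	Identifier string
	Expr       struct{ Actual interface{} }
	Statement  struct{ In, To Expr }
)

func main() {
	// Roughly what parsing `foo > bar` yields after this commit: no Pipe
	// wrapper around the right-hand expression any more.
	foo := Expr{Actual: Identifier("foo")}
	bar := Expr{Actual: Identifier("bar")}
	stmt := Expr{Actual: Statement{In: foo, To: bar}}
	fmt.Printf("%+v\n", stmt) // {Actual:{In:{Actual:foo} To:{Actual:bar}}}
}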

View File

@@ -90,41 +90,9 @@ func TestParseTuple(t *T) {
	assertParse(t, toks, tup(fooExpr, tup(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})
}

// This is basically the same as tuple
func TestParsePipe(t *T) {
	mkPipe := func(ee ...Expr) Expr {
		return Expr{Actual: Pipe(ee)}
	}

	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
	fooExpr := Expr{Actual: Identifier("foo")}

	toks := []lexer.Token{foo, pipe, foo}
	assertParse(t, toks, mkPipe(fooExpr, fooExpr), []lexer.Token{})

	toks = []lexer.Token{foo, pipe, foo, foo}
	assertParse(t, toks, mkPipe(fooExpr, fooExpr), []lexer.Token{foo})

	toks = []lexer.Token{foo, pipe, foo, pipe, foo}
	assertParse(t, toks, mkPipe(fooExpr, fooExpr, fooExpr), []lexer.Token{})

	toks = []lexer.Token{foo, pipe, foo, pipe, foo, pipe, foo}
	assertParse(t, toks, mkPipe(fooExpr, fooExpr, fooExpr, fooExpr), []lexer.Token{})

	toks = []lexer.Token{foo, pipe, openParen, foo, pipe, foo, closeParen, pipe, foo}
	assertParse(t, toks, mkPipe(fooExpr, mkPipe(fooExpr, fooExpr), fooExpr), []lexer.Token{})

	toks = []lexer.Token{foo, pipe, openParen, foo, pipe, foo, closeParen, pipe, foo, foo}
	assertParse(t, toks, mkPipe(fooExpr, mkPipe(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})

	fooTupExpr := Expr{Actual: Tuple{fooExpr, fooExpr}}
	toks = []lexer.Token{foo, comma, foo, pipe, foo}
	assertParse(t, toks, mkPipe(fooTupExpr, fooExpr), []lexer.Token{})
}

func TestParseStatement(t *T) {
	stmt := func(in Expr, ee ...Expr) Expr {
		return Expr{Actual: Statement{in: in, pipe: Pipe(ee)}}
	stmt := func(in, to Expr) Expr {
		return Expr{Actual: Statement{In: in, To: to}}
	}

	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
@@ -139,24 +107,21 @@ func TestParseStatement(t *T) {
	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen}
	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})

	toks = []lexer.Token{foo, arrow, foo, pipe, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{})
	toks = []lexer.Token{foo, arrow, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})

	toks = []lexer.Token{foo, arrow, foo, pipe, foo, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})
	toks = []lexer.Token{foo, arrow, foo, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})

	toks = []lexer.Token{foo, arrow, openParen, foo, pipe, foo, closeParen, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})
	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})

	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, pipe, foo, closeParen, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})
	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, closeParen, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{foo})

	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, pipe, foo, closeParen, foo}
	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})

	fooTupExpr := Expr{Actual: Tuple{fooExpr, fooExpr}}
	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, pipe, foo, foo}
	assertParse(t, toks, stmt(fooExpr, fooTupExpr, fooExpr), []lexer.Token{foo})
	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, foo}
	assertParse(t, toks, stmt(fooExpr, fooTupExpr), []lexer.Token{foo})

	toks = []lexer.Token{foo, comma, foo, arrow, foo}
	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
@@ -166,8 +131,8 @@ func TestParseStatement(t *T) {
}

func TestParseBlock(t *T) {
	stmt := func(in Expr, ee ...Expr) Statement {
		return Statement{in: in, pipe: Pipe(ee)}
	stmt := func(in, to Expr) Statement {
		return Statement{In: in, To: to}
	}
	block := func(stmts ...Statement) Expr {
		return Expr{Actual: Block(stmts)}

View File

@@ -198,7 +198,7 @@ func (l *Lexer) Next() Token {

// the actual fsm
var whitespaceSet = " \n\r\t\v\f"
var punctuationSet = ",<>|"
var punctuationSet = ",>"
var wrapperSet = "{}()"
var identifierSepSet = whitespaceSet + punctuationSet + wrapperSet

View File

@@ -32,7 +32,7 @@ var lexTestSrc = `
*/
*/
(punctuation,is{cool}<> )
(punctuation,is{cool}> )
-tab
"this is a string", "and so is this one"
@@ -67,9 +67,8 @@ func TestLex(t *T) {
	assertNext(Wrapper, "{", 24, 17)
	assertNext(Identifier, "cool", 24, 18)
	assertNext(Wrapper, "}", 24, 22)
	assertNext(Punctuation, "<", 24, 23)
	assertNext(Punctuation, ">", 24, 24)
	assertNext(Wrapper, ")", 24, 26)
	assertNext(Punctuation, ">", 24, 23)
	assertNext(Wrapper, ")", 24, 25)
	assertNext(Identifier, "-tab", 25, 2)
	assertNext(String, `"this is a string"`, 27, 2)
	assertNext(Punctuation, ",", 27, 20)
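For reference, a small self-contained sketch of what the lexer change means for punctuation. This assumes a simple membership check against punctuationSet; the lexer's actual fsm is not shown in this diff:

package main

import (
	"fmt"
	"strings"
)

// punctuationSet as of this commit; "<" and "|" are no longer punctuation.
var punctuationSet = ",>"

func main() {
	for _, r := range []rune{',', '>', '|', '<'} {
		// A rune counts as punctuation only if it is in the set.
		fmt.Printf("%q punctuation: %v\n", r, strings.ContainsRune(punctuationSet, r))
	}
}

Since identifierSepSet is derived from punctuationSet, "|" and "<" presumably no longer separate identifiers either.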