implement Statement and Block

This commit is contained in:
parent bdcd5f3b95
commit 6fe8a533a2

types.go | 106
@@ -166,6 +166,57 @@ func (p Pipe) Equal(e Expr) bool {
 
 ////////////////////////////////////////////////////////////////////////////////
 
+type Statement struct {
+	in   Expr
+	pipe Pipe
+}
+
+func (s Statement) Token() lexer.Token {
+	return s.in.Token()
+}
+
+func (s Statement) String() string {
+	return fmt.Sprintf("(%s > %s)", s.in.String(), s.pipe.String())
+}
+
+func (s Statement) Equal(e Expr) bool {
+	ss, ok := e.(Statement)
+	return ok && s.in.Equal(ss.in) && s.pipe.Equal(ss.pipe)
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+type Block struct {
+	stmts []Statement
+}
+
+func (b Block) Token() lexer.Token {
+	return b.stmts[0].Token()
+}
+
+func (b Block) String() string {
+	strs := make([]string, len(b.stmts))
+	for i := range b.stmts {
+		strs[i] = b.stmts[i].String()
+	}
+	return fmt.Sprintf("{ %s }", strings.Join(strs, " "))
+}
+
+func (b Block) Equal(e Expr) bool {
+	bb, ok := e.(Block)
+	if !ok {
+		return false
+	}
+	for i := range b.stmts {
+		if !b.stmts[i].Equal(bb.stmts[i]) {
+			return false
+		}
+	}
+	return true
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
 // toks[0] must be start
 func sliceEnclosedToks(toks []lexer.Token, start, end lexer.Token) ([]lexer.Token, []lexer.Token, error) {
 	c := 1
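A rough sketch of how the two new types compose, assuming Expr is satisfied by exactly the Token/String/Equal methods implemented above: a Statement pairs an input expression with the Pipe it feeds, and a Block is the ordered list of Statements found between curly braces. stubExpr is a hypothetical stand-in for a real expression type such as Identifier and is not part of this commit.

// Sketch only: stubExpr is a hypothetical Expr implementation used to show
// the shape of Statement and Block; it is not part of this commit.
type stubExpr struct{ name string }

func (s stubExpr) Token() lexer.Token { return lexer.Token{TokenType: lexer.Identifier, Val: s.name} }
func (s stubExpr) String() string     { return s.name }
func (s stubExpr) Equal(e Expr) bool  { ss, ok := e.(stubExpr); return ok && ss == s }

func exampleStatementAndBlock() Block {
	// `foo > bar`: foo is the input expression, bar is the single stage of
	// the Pipe it feeds into.
	st := Statement{
		in:   stubExpr{name: "foo"},
		pipe: Pipe{exprs: []Expr{stubExpr{name: "bar"}}},
	}
	// A Block is simply the Statements enclosed by `{` and `}`.
	return Block{stmts: []Statement{st}}
}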
@@ -203,8 +254,11 @@ func readAllToks(r io.Reader) []lexer.Token {
 var (
 	openParen  = lexer.Token{TokenType: lexer.Punctuation, Val: "("}
 	closeParen = lexer.Token{TokenType: lexer.Punctuation, Val: ")"}
+	openCurly  = lexer.Token{TokenType: lexer.Punctuation, Val: "{"}
+	closeCurly = lexer.Token{TokenType: lexer.Punctuation, Val: "}"}
 	comma      = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
 	pipe       = lexer.Token{TokenType: lexer.Punctuation, Val: "|"}
+	arrow      = lexer.Token{TokenType: lexer.Punctuation, Val: ">"}
 )
 
 func parse(toks []lexer.Token) (Expr, []lexer.Token, error) {
@@ -242,6 +296,18 @@ func parseSingle(toks []lexer.Token) (Expr, []lexer.Token, error) {
 			return nil, nil, fmt.Errorf("multiple expressions inside parenthesis; %v", starter)
 		}
 		return expr, toks, nil
+
+	} else if toks[0].Equal(openCurly) {
+		var btoks []lexer.Token
+		btoks, toks, err = sliceEnclosedToks(toks, openCurly, closeCurly)
+		if err != nil {
+			return nil, nil, err
+		}
+
+		if expr, err = parseBlock(btoks); err != nil {
+			return nil, nil, err
+		}
+		return expr, toks, nil
 	}
 
 	if expr, err = parseNonPunct(toks[0]); err != nil {
@@ -283,11 +349,23 @@ func parseString(t lexer.Token) (Expr, error) {
 func parseConnectingPunct(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
 	if toks[0].Equal(comma) {
 		return parseTuple(toks, root)
 
 	} else if toks[0].Equal(pipe) {
 		return parsePipe(toks, root)
+
+	} else if toks[0].Equal(arrow) {
+		expr, toks, err := parse(toks[1:])
+		if err != nil {
+			return nil, nil, err
+		}
+
+		pipe, ok := expr.(Pipe)
+		if !ok {
+			pipe = Pipe{exprs: []Expr{expr}}
+		}
+		return Statement{in: root, pipe: pipe}, toks, nil
 	}
-	return nil, nil, fmt.Errorf("invalid connecting punctuation: %v", toks[0])
+	return root, toks, nil
 }
 
 func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
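The new arrow branch hands everything after `>` back to parse and, if the result is not already a Pipe, wraps it in a single-element Pipe, so both `foo > foo` and `foo > foo | foo` come back as a Statement; a token that is no connecting punctuation now simply returns root unchanged instead of erroring. A minimal sketch of what the branch produces, mirroring TestParseStatement further down and reusing its tok helper (the example function name is invented for illustration):

// Sketch mirroring TestParseStatement below; tok() is the helper the test
// file uses to build an Identifier's token field.
func exampleArrowParse() bool {
	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
	fooExpr := Identifier{tok: tok(foo), ident: "foo"}

	// Tokens for `foo > foo | foo`.
	toks := []lexer.Token{foo, arrow, foo, pipe, foo}
	expr, rest, err := parse(toks)

	want := Statement{in: fooExpr, pipe: Pipe{exprs: []Expr{fooExpr, fooExpr}}}
	return err == nil && len(rest) == 0 && expr.Equal(want) // expected: true
}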
@@ -333,3 +411,29 @@ func parsePipe(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
 	rootTup.exprs = append(rootTup.exprs, expr)
 	return parsePipe(toks, rootTup)
 }
+
+// parseBlock assumes that the given token list is the entire block, already
+// pulled from outer curly braces by sliceEnclosedToks, or determined to be the
+// entire block in some other way.
+func parseBlock(toks []lexer.Token) (Expr, error) {
+	b := Block{}
+
+	// TODO figure out what we want to do about empty blocks
+
+	var expr Expr
+	var err error
+	for {
+		if len(toks) == 0 {
+			return b, nil
+		}
+
+		if expr, toks, err = parse(toks); err != nil {
+			return nil, err
+		}
+		stmt, ok := expr.(Statement)
+		if !ok {
+			return nil, fmt.Errorf("blocks may only contain full statements: %v", expr)
+		}
+		b.stmts = append(b.stmts, stmt)
+	}
+}
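parseSingle now peels a leading `{ ... }` off with sliceEnclosedToks and hands the enclosed tokens to parseBlock, which loops over parse and accepts only full Statements. A minimal sketch of the round trip, mirroring TestParseBlock further down and reusing its tok helper (the example function name is invented for illustration):

// Sketch mirroring TestParseBlock below; tok() is the helper the test file
// uses to build an Identifier's token field.
func exampleBlockParse() bool {
	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
	fooExpr := Identifier{tok: tok(foo), ident: "foo"}

	// Tokens for `{ foo > foo }`.
	toks := []lexer.Token{openCurly, foo, arrow, foo, closeCurly}
	expr, rest, err := parse(toks)

	want := Block{stmts: []Statement{{in: fooExpr, pipe: Pipe{exprs: []Expr{fooExpr}}}}}
	return err == nil && len(rest) == 0 && expr.Equal(want) // expected: true
}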
@@ -112,3 +112,64 @@ func TestParsePipe(t *T) {
 	toks = []lexer.Token{foo, pipe, openParen, foo, pipe, foo, closeParen, pipe, foo, foo}
 	assertParse(t, toks, mkPipe(fooExpr, mkPipe(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})
 }
+
+func TestParseStatement(t *T) {
+	stmt := func(in Expr, ee ...Expr) Expr {
+		return Statement{in: in, pipe: Pipe{exprs: ee}}
+	}
+
+	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+	fooExpr := Identifier{tok: tok(foo), ident: "foo"}
+
+	toks := []lexer.Token{foo, arrow, foo}
+	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+
+	toks = []lexer.Token{openParen, foo, arrow, foo, closeParen}
+	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+
+	toks = []lexer.Token{foo, arrow, openParen, foo, closeParen}
+	assertParse(t, toks, stmt(fooExpr, fooExpr), []lexer.Token{})
+
+	toks = []lexer.Token{foo, arrow, foo, pipe, foo}
+	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{})
+
+	toks = []lexer.Token{foo, arrow, foo, pipe, foo, foo}
+	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})
+
+	toks = []lexer.Token{foo, arrow, openParen, foo, pipe, foo, closeParen, foo}
+	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})
+
+	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, pipe, foo, closeParen, foo}
+	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})
+
+	toks = []lexer.Token{openParen, foo, closeParen, arrow, openParen, foo, pipe, foo, closeParen, foo}
+	assertParse(t, toks, stmt(fooExpr, fooExpr, fooExpr), []lexer.Token{foo})
+
+	fooTupExpr := Tuple{exprs: []Expr{fooExpr, fooExpr}}
+	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, pipe, foo, foo}
+	assertParse(t, toks, stmt(fooExpr, fooTupExpr, fooExpr), []lexer.Token{foo})
+}
+
+func TestParseBlock(t *T) {
+	stmt := func(in Expr, ee ...Expr) Statement {
+		return Statement{in: in, pipe: Pipe{exprs: ee}}
+	}
+	block := func(stmts ...Statement) Block {
+		return Block{stmts: stmts}
+	}
+
+	foo := lexer.Token{TokenType: lexer.Identifier, Val: "foo"}
+	fooExpr := Identifier{tok: tok(foo), ident: "foo"}
+
+	toks := []lexer.Token{openCurly, foo, arrow, foo, closeCurly}
+	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{})
+
+	toks = []lexer.Token{openCurly, foo, arrow, foo, closeCurly, foo}
+	assertParse(t, toks, block(stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+
+	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
+	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+
+	toks = []lexer.Token{openCurly, foo, arrow, foo, openParen, foo, arrow, foo, closeParen, closeCurly, foo}
+	assertParse(t, toks, block(stmt(fooExpr, fooExpr), stmt(fooExpr, fooExpr)), []lexer.Token{foo})
+}