fix problem with arrow not connecting if the left side was a tuple
commit 2e76bebab3
parent 09ab9399e5

 expr/expr.go | 14 ++++++++++----
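Before this commit `(`, `)`, `{`, and `}` were lexed as Punctuation, and parseTuple returned as soon as the token following a tuple was not a comma, so a trailing connecting token such as `>` or `|` was never consumed. The brackets now get their own Wrapper token type, which lets parseTuple and parsePipe treat any remaining Punctuation token as connecting and hand it off to parseConnectingPunct. The new tests below cover both spellings of a tuple on an arrow's left side: `foo, foo > foo` and `(foo, foo) > foo`.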
diff --git a/expr/expr.go b/expr/expr.go
--- a/expr/expr.go
+++ b/expr/expr.go
@@ -310,10 +310,10 @@ func readAllToks(r io.Reader) []lexer.Token {
 // For all parse methods it is assumed that toks is not empty
 
 var (
-	openParen  = lexer.Token{TokenType: lexer.Punctuation, Val: "("}
-	closeParen = lexer.Token{TokenType: lexer.Punctuation, Val: ")"}
-	openCurly  = lexer.Token{TokenType: lexer.Punctuation, Val: "{"}
-	closeCurly = lexer.Token{TokenType: lexer.Punctuation, Val: "}"}
+	openParen  = lexer.Token{TokenType: lexer.Wrapper, Val: "("}
+	closeParen = lexer.Token{TokenType: lexer.Wrapper, Val: ")"}
+	openCurly  = lexer.Token{TokenType: lexer.Wrapper, Val: "{"}
+	closeCurly = lexer.Token{TokenType: lexer.Wrapper, Val: "}"}
 	comma      = lexer.Token{TokenType: lexer.Punctuation, Val: ","}
 	pipe       = lexer.Token{TokenType: lexer.Punctuation, Val: "|"}
 	arrow      = lexer.Token{TokenType: lexer.Punctuation, Val: ">"}
@@ -457,6 +457,9 @@ func parseTuple(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
 	if len(toks) < 2 {
 		return rootTup, toks, nil
 	} else if !toks[0].Equal(comma) {
+		if toks[0].TokenType == lexer.Punctuation {
+			return parseConnectingPunct(toks, rootTup)
+		}
 		return rootTup, toks, nil
 	}
 
@@ -479,6 +482,9 @@ func parsePipe(toks []lexer.Token, root Expr) (Expr, []lexer.Token, error) {
 	if len(toks) < 2 {
 		return rootTup, toks, nil
 	} else if !toks[0].Equal(pipe) {
+		if toks[0].TokenType == lexer.Punctuation {
+			return parseConnectingPunct(toks, rootTup)
+		}
 		return rootTup, toks, nil
 	}
 
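The same three-line fallthrough appears in both parseTuple and parsePipe above. Below is a minimal runnable sketch of the pattern using stand-in types rather than the real expr package (Token, parseAfterTuple, and the string-based output are illustrative assumptions, not the repository's API):

package main

import "fmt"

// Token is a stripped-down stand-in for lexer.Token.
type Token struct {
	Type string // "punctuation", "wrapper", "identifier", ...
	Val  string
}

// parseAfterTuple stands in for the tail of parseTuple: tup has already been
// assembled and toks is what remains. Before this commit the non-comma branch
// returned the tuple immediately; now a remaining connecting-punctuation
// token falls through to the equivalent of parseConnectingPunct, making the
// tuple its left operand.
func parseAfterTuple(tup string, toks []Token) string {
	if len(toks) < 2 {
		return tup
	}
	if toks[0].Val != "," {
		if toks[0].Type == "punctuation" {
			// stand-in for parseConnectingPunct(toks, rootTup)
			return fmt.Sprintf("(%s %s %s)", toks[0].Val, tup, toks[1].Val)
		}
		return tup
	}
	// comma case: keep extending the tuple (elided here)
	return tup
}

func main() {
	// Remaining tokens after "foo, foo" has been parsed into a tuple: "> foo".
	rest := []Token{{"punctuation", ">"}, {"identifier", "foo"}}
	fmt.Println(parseAfterTuple("(foo, foo)", rest))
	// prints: (> (foo, foo) foo)
}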
diff --git a/expr/expr_test.go b/expr/expr_test.go
--- a/expr/expr_test.go
+++ b/expr/expr_test.go
@@ -111,6 +111,10 @@ func TestParsePipe(t *T) {
 
 	toks = []lexer.Token{foo, pipe, openParen, foo, pipe, foo, closeParen, pipe, foo, foo}
 	assertParse(t, toks, mkPipe(fooExpr, mkPipe(fooExpr, fooExpr), fooExpr), []lexer.Token{foo})
+
+	fooTupExpr := Tuple{exprs: []Expr{fooExpr, fooExpr}}
+	toks = []lexer.Token{foo, comma, foo, pipe, foo}
+	assertParse(t, toks, mkPipe(fooTupExpr, fooExpr), []lexer.Token{})
 }
 
 func TestParseStatement(t *T) {
@@ -148,6 +152,12 @@ func TestParseStatement(t *T) {
 	fooTupExpr := Tuple{exprs: []Expr{fooExpr, fooExpr}}
 	toks = []lexer.Token{foo, arrow, openParen, foo, comma, foo, closeParen, pipe, foo, foo}
 	assertParse(t, toks, stmt(fooExpr, fooTupExpr, fooExpr), []lexer.Token{foo})
+
+	toks = []lexer.Token{foo, comma, foo, arrow, foo}
+	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
+
+	toks = []lexer.Token{openParen, foo, comma, foo, closeParen, arrow, foo}
+	assertParse(t, toks, stmt(fooTupExpr, fooExpr), []lexer.Token{})
 }
 
 func TestParseBlock(t *T) {
diff --git a/lexer/lexer.go b/lexer/lexer.go
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -6,7 +6,6 @@ import (
 	"errors"
 	"fmt"
 	"io"
-	"log"
 	"strings"
 )
 
@@ -15,11 +14,16 @@ type TokenType string
 
 // Different token types
 const (
 	Identifier TokenType = "identifier"
+
+	// Punctuation are tokens which connect two other tokens
 	Punctuation TokenType = "punctuation"
-	String      TokenType = "string"
-	Err         TokenType = "err"
-	EOF         TokenType = "eof"
+
+	// Wrapper wraps one or more tokens
+	Wrapper TokenType = "wrapper"
+	String  TokenType = "string"
+	Err     TokenType = "err"
+	EOF     TokenType = "eof"
 )
 
 // Token is a single token which has been read in. All Tokens have a non-empty
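Splitting Wrapper out of Punctuation is what lets the parser-side check `toks[0].TokenType == lexer.Punctuation` mean "connecting token": after this change Punctuation covers only `,`, `<`, `>`, and `|`, never a bracket that merely wraps a sub-expression.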
@@ -194,8 +198,9 @@ func (l *Lexer) Next() Token {
 // the actual fsm
 
 var whitespaceSet = " \n\r\t\v\f"
-var punctuationSet = ",{}()<>|"
-var identifierSepSet = whitespaceSet + punctuationSet
+var punctuationSet = ",<>|"
+var wrapperSet = "{}()"
+var identifierSepSet = whitespaceSet + punctuationSet + wrapperSet
 
 func lex(l *Lexer) lexerFn {
 	r, err := l.readRune()
@@ -224,6 +229,10 @@ func lexSingleRune(l *Lexer, r rune) lexerFn {
 		l.bufferRune(r)
 		l.emit(Punctuation)
 		return lex
+	case strings.ContainsRune(wrapperSet, r):
+		l.bufferRune(r)
+		l.emit(Wrapper)
+		return lex
 	case r == '"' || r == '\'' || r == '`':
 		canEscape := r != '`'
 		return lexStrStart(l, r, makeLexStr(r, canEscape))
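For reference, the classification those two cases perform, as a self-contained sketch; the two set constants mirror the diff, while the surrounding harness is illustrative only:

package main

import (
	"fmt"
	"strings"
)

// Rune sets as defined in this commit.
const (
	punctuationSet = ",<>|"
	wrapperSet     = "{}()"
)

// classify mirrors the two single-rune cases in lexSingleRune: runes in
// punctuationSet are emitted as Punctuation tokens, runes in wrapperSet as
// Wrapper tokens.
func classify(r rune) string {
	switch {
	case strings.ContainsRune(punctuationSet, r):
		return "Punctuation"
	case strings.ContainsRune(wrapperSet, r):
		return "Wrapper"
	default:
		return "other"
	}
}

func main() {
	for _, r := range "(a,b)>c" {
		fmt.Printf("%q -> %s\n", r, classify(r))
	}
}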
@ -266,7 +275,6 @@ func lexLineComment(l *Lexer) lexerFn {
|
|||||||
// assumes the starting / has been read already
|
// assumes the starting / has been read already
|
||||||
func lexBlockComment(l *Lexer) lexerFn {
|
func lexBlockComment(l *Lexer) lexerFn {
|
||||||
depth := 1
|
depth := 1
|
||||||
log.Printf("in block comment")
|
|
||||||
|
|
||||||
var recurse lexerFn
|
var recurse lexerFn
|
||||||
recurse = func(l *Lexer) lexerFn {
|
recurse = func(l *Lexer) lexerFn {
|
||||||
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -60,16 +60,16 @@ func TestLex(t *T) {
 	assertNext(Identifier, "100", 6, 2)
 	assertNext(Identifier, "1.5", 7, 2)
 	assertNext(Identifier, "1.5e9", 8, 2)
-	assertNext(Punctuation, "(", 24, 2)
+	assertNext(Wrapper, "(", 24, 2)
 	assertNext(Identifier, "punctuation", 24, 3)
 	assertNext(Punctuation, ",", 24, 14)
 	assertNext(Identifier, "is", 24, 15)
-	assertNext(Punctuation, "{", 24, 17)
+	assertNext(Wrapper, "{", 24, 17)
 	assertNext(Identifier, "cool", 24, 18)
-	assertNext(Punctuation, "}", 24, 22)
+	assertNext(Wrapper, "}", 24, 22)
 	assertNext(Punctuation, "<", 24, 23)
 	assertNext(Punctuation, ">", 24, 24)
-	assertNext(Punctuation, ")", 24, 26)
+	assertNext(Wrapper, ")", 24, 26)
 	assertNext(Identifier, "-tab", 25, 2)
 	assertNext(String, `"this is a string"`, 27, 2)
 	assertNext(Punctuation, ",", 27, 20)