Improve semantics of tokens and values obtained from them.

Now gg.Values can carry the token used to parse them, which will be
useful later when generating errors.
This commit is contained in:
Brian Picciano 2021-12-28 09:49:02 -07:00
parent 33e59a3836
commit c5aa582226
6 changed files with 132 additions and 94 deletions

View File

@ -19,7 +19,7 @@ const (
)
func decoderErr(tok LexerToken, err error) error {
return fmt.Errorf("%d:%d: %w", tok.Row, tok.Col, err)
return fmt.Errorf("%s: %w", tok.errPrefix(), err)
}
func decoderErrf(tok LexerToken, str string, args ...interface{}) error {
@ -53,7 +53,7 @@ func (d *decoder) parseSingleValue(
tok, rest := toks[0], toks[1:]
if len(rest) == 0 {
return Value{}, nil, false, decoderErrf(tok, "cannot be final token, possibly missing %q", punctTerm)
return ZeroValue, nil, false, decoderErrf(tok, "cannot be final token, possibly missing %q", punctTerm)
}
termed := isTerm(rest[0])
@ -65,20 +65,20 @@ func (d *decoder) parseSingleValue(
switch tok.Kind {
case LexerTokenKindName:
return Value{Name: &tok.Value}, rest, termed, nil
return Value{Name: &tok.Value, LexerToken: &tok}, rest, termed, nil
case LexerTokenKindNumber:
i, err := strconv.ParseInt(tok.Value, 10, 64)
if err != nil {
return Value{}, nil, false, decoderErrf(tok, "parsing %q as integer: %w", tok.Value, err)
return ZeroValue, nil, false, decoderErrf(tok, "parsing %q as integer: %w", tok.Value, err)
}
return Value{Number: &i}, rest, termed, nil
return Value{Number: &i, LexerToken: &tok}, rest, termed, nil
case LexerTokenKindPunctuation:
return Value{}, nil, false, decoderErrf(tok, "expected value, found punctuation %q", tok.Value)
return ZeroValue, nil, false, decoderErrf(tok, "expected value, found punctuation %q", tok.Value)
default:
panic(fmt.Sprintf("unexpected token kind %q", tok.Kind))
@ -116,7 +116,7 @@ func (d *decoder) parseOpenEdge(
}
if termed {
return ValueOut(val, Value{}), toks, nil
return ValueOut(val, ZeroValue), toks, nil
}
opTok, toks := toks[0], toks[1:]
@ -181,7 +181,7 @@ func (d *decoder) parseTuple(
toks = toks[1:]
}
return TupleOut(edges, Value{}), toks, nil
return TupleOut(edges, ZeroValue), toks, nil
}
// returned boolean value indicates if the token following the graph is a term.
@ -211,18 +211,18 @@ func (d *decoder) parseGraphValue(
break
}
return Value{}, nil, false, decoderErrf(openTok, "no matching %q", punctCloseGraph)
return ZeroValue, nil, false, decoderErrf(openTok, "no matching %q", punctCloseGraph)
} else if closingTok := toks[0]; isPunct(closingTok, punctCloseGraph) {
if !expectWrappers {
return Value{}, nil, false, decoderErrf(closingTok, "unexpected %q", punctCloseGraph)
return ZeroValue, nil, false, decoderErrf(closingTok, "unexpected %q", punctCloseGraph)
}
toks = toks[1:]
if len(toks) == 0 {
return Value{}, nil, false, decoderErrf(closingTok, "cannot be final token, possibly missing %q", punctTerm)
return ZeroValue, nil, false, decoderErrf(closingTok, "cannot be final token, possibly missing %q", punctTerm)
}
break
@ -231,7 +231,7 @@ func (d *decoder) parseGraphValue(
var err error
if g, toks, err = d.parseValIn(g, toks); err != nil {
return Value{}, nil, false, err
return ZeroValue, nil, false, err
}
}
@ -241,6 +241,8 @@ func (d *decoder) parseGraphValue(
return val, toks, true, nil
}
val.LexerToken = &openTok
termed := isTerm(toks[0])
if termed {
@ -276,7 +278,7 @@ func (d *decoder) parseValIn(into *Graph, toks []LexerToken) (*Graph, []LexerTok
return nil, nil, err
}
dstVal := Value{Name: &dst.Value}
dstVal := Value{Name: &dst.Value, LexerToken: &dst}
return into.AddValueIn(oe, dstVal), toks, nil
}

View File

@ -27,7 +27,7 @@ func TestDecoder(t *testing.T) {
},
{
in: "out = 1;",
exp: ZeroGraph.AddValueIn(ValueOut(i(1), Value{}), n("out")),
exp: ZeroGraph.AddValueIn(ValueOut(i(1), ZeroValue), n("out")),
},
{
in: "out = incr < 1;",
@ -49,7 +49,7 @@ func TestDecoder(t *testing.T) {
TupleOut(
[]OpenEdge{TupleOut(
[]OpenEdge{
ValueOut(i(1), Value{}),
ValueOut(i(1), ZeroValue),
ValueOut(i(2), n("c")),
TupleOut(
[]OpenEdge{ValueOut(i(3), n("e"))},
@ -69,11 +69,11 @@ func TestDecoder(t *testing.T) {
TupleOut(
[]OpenEdge{TupleOut(
[]OpenEdge{
ValueOut(i(1), Value{}),
ValueOut(i(1), ZeroValue),
TupleOut(
[]OpenEdge{
ValueOut(i(2), n("d")),
ValueOut(i(3), Value{}),
ValueOut(i(3), ZeroValue),
},
n("c"),
),
@ -90,7 +90,7 @@ func TestDecoder(t *testing.T) {
exp: ZeroGraph.AddValueIn(
ValueOut(
Value{Graph: ZeroGraph.
AddValueIn(ValueOut(i(1), Value{}), n("a")).
AddValueIn(ValueOut(i(1), ZeroValue), n("a")).
AddValueIn(
TupleOut(
[]OpenEdge{
@ -101,7 +101,7 @@ func TestDecoder(t *testing.T) {
n("b"),
),
},
Value{},
ZeroValue,
),
n("out"),
),
@ -114,7 +114,7 @@ func TestDecoder(t *testing.T) {
ValueOut(
i(2),
Value{Graph: ZeroGraph.
AddValueIn(ValueOut(i(1), Value{}), n("b")),
AddValueIn(ValueOut(i(1), ZeroValue), n("b")),
},
),
},
@ -126,8 +126,8 @@ func TestDecoder(t *testing.T) {
{
in: "a = 1; b = 2;",
exp: ZeroGraph.
AddValueIn(ValueOut(i(1), Value{}), n("a")).
AddValueIn(ValueOut(i(2), Value{}), n("b")),
AddValueIn(ValueOut(i(1), ZeroValue), n("a")).
AddValueIn(ValueOut(i(2), ZeroValue), n("b")),
},
}

View File

@ -6,22 +6,37 @@ import (
"strings"
)
// Value represents a value being stored in a Graph. No more than one field may
// be non-nil. No fields being set indicates lack of value.
// ZeroValue is a Value with none of its sub-value fields set.
var ZeroValue Value

// Value represents a single value being stored in a Graph.
type Value struct {

	// Only one of these fields may be set.
	Name   *string
	Number *int64
	Graph  *Graph

	// TODO coming soon!
	// String *string

	// Optional field carrying the LexerToken from which this Value was
	// constructed, if any. It is ignored by IsZero.
	LexerToken *LexerToken
}
// IsZero reports whether the Value is the zero value, i.e. none of its
// sub-value fields (Name, Number, Graph) are set. The LexerToken field plays
// no part in this check.
func (v Value) IsZero() bool {
	return v.Name == nil && v.Number == nil && v.Graph == nil
}
// Equal returns true if the passed in Value is equivalent.
func (v Value) Equal(v2 Value) bool {
switch {
case v == Value{} && v2 == Value{}:
case v.IsZero() && v2.IsZero():
return true
case v.Name != nil && v2.Name != nil && *v.Name == *v2.Name:
@ -42,8 +57,8 @@ func (v Value) String() string {
switch {
case v == Value{}:
return "<noval>"
case v.IsZero():
return "<zero>"
case v.Name != nil:
return *v.Name
@ -105,24 +120,24 @@ func ValueOut(val, edgeVal Value) OpenEdge {
// represents an edge (with edgeVal attached to it) coming from the
// TupleVertex comprised of the given ordered-set of input edges.
//
// If len(ins) == 1 and edgeVal == Value{}, then that single OpenEdge is
// If len(ins) == 1 && edgeVal.IsZero(), then that single OpenEdge is
// returned as-is.
func TupleOut(ins []OpenEdge, edgeVal Value) OpenEdge {
if len(ins) == 1 {
if edgeVal == (Value{}) {
if edgeVal.IsZero() {
return ins[0]
}
if ins[0].val == (Value{}) {
if ins[0].val.IsZero() {
return ins[0].WithEdgeVal(edgeVal)
}
}
return OpenEdge{
fromV: mkVertex(TupleVertex, Value{}, ins...),
fromV: mkVertex(TupleVertex, ZeroValue, ins...),
val: edgeVal,
}
}

View File

@ -61,7 +61,7 @@ func TestEqual(t *testing.T) {
// equivalent to just that edge.
a: ZeroGraph.AddValueIn(TupleOut([]OpenEdge{
ValueOut(i(1), n("ident")),
}, Value{}), n("out")),
}, ZeroValue), n("out")),
b: ZeroGraph.AddValueIn(ValueOut(i(1), n("ident")), n("out")),
exp: true,
},
@ -70,7 +70,7 @@ func TestEqual(t *testing.T) {
// edgeVal should be equivalent to just that edge with the tuple's
// edge value.
a: ZeroGraph.AddValueIn(TupleOut([]OpenEdge{
ValueOut(i(1), Value{}),
ValueOut(i(1), ZeroValue),
}, n("ident")), n("out")),
b: ZeroGraph.AddValueIn(ValueOut(i(1), n("ident")), n("out")),
exp: true,

View File

@ -8,15 +8,26 @@ import (
"unicode"
)
// LexerLocation describes the position within an input stream, as a
// zero-indexed row and column, from which a particular token was parsed.
type LexerLocation struct {
	Row, Col int
}

// String returns the location rendered as "row:col".
func (l LexerLocation) String() string {
	row, col := l.Row, l.Col
	return fmt.Sprintf("%d:%d", row, col)
}
// LexerError is returned by Lexer when an unexpected error occurs parsing a
// stream of LexerTokens.
type LexerError struct {
Err error
Row, Col int
Location LexerLocation
}
func (e *LexerError) Error() string {
return fmt.Sprintf("%d:%d: %s", e.Row, e.Col, e.Err.Error())
return fmt.Sprintf("%s: %s", e.Location.String(), e.Err.Error())
}
func (e *LexerError) Unwrap() error {
@ -39,7 +50,11 @@ type LexerToken struct {
Kind LexerTokenKind
Value string // never empty string
Row, Col int
Location LexerLocation
}
// errPrefix formats the token's location and quoted value into a prefix
// suitable for inclusion in error messages, e.g. `1:2: at "foo"`.
func (t LexerToken) errPrefix() string {
	return fmt.Sprintf("%s: at %q", t.Location.String(), t.Value)
}
// Lexer is used to parse a string stream into a sequence of tokens which can
@ -90,8 +105,10 @@ func (l *lexer) fmtErr(err error) *LexerError {
return &LexerError{
Err: err,
Location: LexerLocation{
Row: row,
Col: col,
},
}
}
@ -167,7 +184,9 @@ func (l *lexer) readWhile(
return LexerToken{
Kind: kind,
Value: l.stringBuilder.String(),
Location: LexerLocation{
Row: row, Col: col,
},
}, lexErr
}
@ -236,8 +255,10 @@ func (l *lexer) next() (LexerToken, *LexerError) {
return LexerToken{
Kind: LexerTokenKindPunctuation,
Value: string(r),
Location: LexerLocation{
Row: l.lastRow,
Col: l.lastCol,
},
}, nil
case unicode.IsSpace(r):

View File

@ -25,7 +25,7 @@ func TestLexer(t *testing.T) {
{
Kind: LexerTokenKindName,
Value: "foo",
Row: 0, Col: 0,
Location: LexerLocation{Row: 0, Col: 0},
},
},
},
@ -35,27 +35,27 @@ func TestLexer(t *testing.T) {
{
Kind: LexerTokenKindName,
Value: "foo",
Row: 0, Col: 0,
Location: LexerLocation{Row: 0, Col: 0},
},
{
Kind: LexerTokenKindName,
Value: "bar",
Row: 0, Col: 4,
Location: LexerLocation{Row: 0, Col: 4},
},
{
Kind: LexerTokenKindName,
Value: "f-o",
Row: 1, Col: 0,
Location: LexerLocation{Row: 1, Col: 0},
},
{
Kind: LexerTokenKindName,
Value: "f0O",
Row: 1, Col: 4,
Location: LexerLocation{Row: 1, Col: 4},
},
{
Kind: LexerTokenKindName,
Value: "Foo",
Row: 1, Col: 8,
Location: LexerLocation{Row: 1, Col: 8},
},
},
},
@ -65,17 +65,17 @@ func TestLexer(t *testing.T) {
{
Kind: LexerTokenKindNumber,
Value: "1",
Row: 0, Col: 0,
Location: LexerLocation{Row: 0, Col: 0},
},
{
Kind: LexerTokenKindNumber,
Value: "100",
Row: 0, Col: 2,
Location: LexerLocation{Row: 0, Col: 2},
},
{
Kind: LexerTokenKindNumber,
Value: "-100",
Row: 0, Col: 6,
Location: LexerLocation{Row: 0, Col: 6},
},
},
},
@ -85,37 +85,37 @@ func TestLexer(t *testing.T) {
{
Kind: LexerTokenKindNumber,
Value: "1",
Row: 0, Col: 0,
Location: LexerLocation{Row: 0, Col: 0},
},
{
Kind: LexerTokenKindPunctuation,
Value: "<",
Row: 0, Col: 1,
Location: LexerLocation{Row: 0, Col: 1},
},
{
Kind: LexerTokenKindNumber,
Value: "2",
Row: 0, Col: 2,
Location: LexerLocation{Row: 0, Col: 2},
},
{
Kind: LexerTokenKindPunctuation,
Value: "!",
Row: 0, Col: 3,
Location: LexerLocation{Row: 0, Col: 3},
},
{
Kind: LexerTokenKindNumber,
Value: "-3",
Row: 0, Col: 4,
Location: LexerLocation{Row: 0, Col: 4},
},
{
Kind: LexerTokenKindPunctuation,
Value: "(",
Row: 0, Col: 7,
Location: LexerLocation{Row: 0, Col: 7},
},
{
Kind: LexerTokenKindPunctuation,
Value: ")",
Row: 0, Col: 8,
Location: LexerLocation{Row: 0, Col: 8},
},
},
},
@ -142,8 +142,8 @@ func TestLexer(t *testing.T) {
inParts := strings.Split(test.in, "\n")
assert.ErrorIs(t, lexErr, expErr)
assert.Equal(t, lexErr.Row, len(inParts)-1)
assert.Equal(t, lexErr.Col, len(inParts[len(inParts)-1]))
assert.Equal(t, lexErr.Location.Row, len(inParts)-1)
assert.Equal(t, lexErr.Location.Col, len(inParts[len(inParts)-1]))
})
}