diff --git a/gg/decoder.go b/gg/decoder.go
index 4e21ceb..46a6189 100644
--- a/gg/decoder.go
+++ b/gg/decoder.go
@@ -19,7 +19,7 @@ const (
 )
 
 func decoderErr(tok LexerToken, err error) error {
-	return fmt.Errorf("%d:%d: %w", tok.Row, tok.Col, err)
+	return fmt.Errorf("%s: %w", tok.errPrefix(), err)
 }
 
 func decoderErrf(tok LexerToken, str string, args ...interface{}) error {
@@ -53,7 +53,7 @@ func (d *decoder) parseSingleValue(
 	tok, rest := toks[0], toks[1:]
 
 	if len(rest) == 0 {
-		return Value{}, nil, false, decoderErrf(tok, "cannot be final token, possibly missing %q", punctTerm)
+		return ZeroValue, nil, false, decoderErrf(tok, "cannot be final token, possibly missing %q", punctTerm)
 	}
 
 	termed := isTerm(rest[0])
@@ -65,20 +65,20 @@ func (d *decoder) parseSingleValue(
 	switch tok.Kind {
 
 	case LexerTokenKindName:
-		return Value{Name: &tok.Value}, rest, termed, nil
+		return Value{Name: &tok.Value, LexerToken: &tok}, rest, termed, nil
 
 	case LexerTokenKindNumber:
 
 		i, err := strconv.ParseInt(tok.Value, 10, 64)
 
 		if err != nil {
-			return Value{}, nil, false, decoderErrf(tok, "parsing %q as integer: %w", tok.Value, err)
+			return ZeroValue, nil, false, decoderErrf(tok, "parsing %q as integer: %w", tok.Value, err)
 		}
 
-		return Value{Number: &i}, rest, termed, nil
+		return Value{Number: &i, LexerToken: &tok}, rest, termed, nil
 
 	case LexerTokenKindPunctuation:
-		return Value{}, nil, false, decoderErrf(tok, "expected value, found punctuation %q", tok.Value)
+		return ZeroValue, nil, false, decoderErrf(tok, "expected value, found punctuation %q", tok.Value)
 
 	default:
 		panic(fmt.Sprintf("unexpected token kind %q", tok.Kind))
@@ -116,7 +116,7 @@ func (d *decoder) parseOpenEdge(
 	}
 
 	if termed {
-		return ValueOut(val, Value{}), toks, nil
+		return ValueOut(val, ZeroValue), toks, nil
 	}
 
 	opTok, toks := toks[0], toks[1:]
@@ -181,7 +181,7 @@ func (d *decoder) parseTuple(
 		toks = toks[1:]
 	}
 
-	return TupleOut(edges, Value{}), toks, nil
+	return TupleOut(edges, ZeroValue), toks, nil
 }
 
 // returned boolean value indicates if the token following the graph is a term.
@@ -211,18 +211,18 @@ func (d *decoder) parseGraphValue(
 				break
 			}
 
-			return Value{}, nil, false, decoderErrf(openTok, "no matching %q", punctCloseGraph)
+			return ZeroValue, nil, false, decoderErrf(openTok, "no matching %q", punctCloseGraph)
 
 		} else if closingTok := toks[0]; isPunct(closingTok, punctCloseGraph) {
 
 			if !expectWrappers {
-				return Value{}, nil, false, decoderErrf(closingTok, "unexpected %q", punctCloseGraph)
+				return ZeroValue, nil, false, decoderErrf(closingTok, "unexpected %q", punctCloseGraph)
 			}
 
 			toks = toks[1:]
 
 			if len(toks) == 0 {
-				return Value{}, nil, false, decoderErrf(closingTok, "cannot be final token, possibly missing %q", punctTerm)
+				return ZeroValue, nil, false, decoderErrf(closingTok, "cannot be final token, possibly missing %q", punctTerm)
 			}
 
 			break
@@ -231,7 +231,7 @@ func (d *decoder) parseGraphValue(
 		var err error
 
 		if g, toks, err = d.parseValIn(g, toks); err != nil {
-			return Value{}, nil, false, err
+			return ZeroValue, nil, false, err
 		}
 	}
 
@@ -241,6 +241,8 @@ func (d *decoder) parseGraphValue(
 		return val, toks, true, nil
 	}
 
+	val.LexerToken = &openTok
+
 	termed := isTerm(toks[0])
 
 	if termed {
@@ -276,7 +278,7 @@ func (d *decoder) parseValIn(into *Graph, toks []LexerToken) (*Graph, []LexerTok
 		return nil, nil, err
 	}
 
-	dstVal := Value{Name: &dst.Value}
+	dstVal := Value{Name: &dst.Value, LexerToken: &dst}
 
 	return into.AddValueIn(oe, dstVal), toks, nil
 }
diff --git a/gg/decoder_test.go b/gg/decoder_test.go
index 9876436..e3212cb 100644
--- a/gg/decoder_test.go
+++ b/gg/decoder_test.go
@@ -27,7 +27,7 @@ func TestDecoder(t *testing.T) {
 		},
 		{
 			in:  "out = 1;",
-			exp: ZeroGraph.AddValueIn(ValueOut(i(1), Value{}), n("out")),
+			exp: ZeroGraph.AddValueIn(ValueOut(i(1), ZeroValue), n("out")),
 		},
 		{
 			in: "out = incr < 1;",
@@ -49,7 +49,7 @@ func TestDecoder(t *testing.T) {
 				TupleOut(
 					[]OpenEdge{TupleOut(
 						[]OpenEdge{
-							ValueOut(i(1), Value{}),
+							ValueOut(i(1), ZeroValue),
 							ValueOut(i(2), n("c")),
 							TupleOut(
 								[]OpenEdge{ValueOut(i(3), n("e"))},
@@ -69,11 +69,11 @@ func TestDecoder(t *testing.T) {
 				TupleOut(
 					[]OpenEdge{TupleOut(
 						[]OpenEdge{
-							ValueOut(i(1), Value{}),
+							ValueOut(i(1), ZeroValue),
 							TupleOut(
 								[]OpenEdge{
 									ValueOut(i(2), n("d")),
-									ValueOut(i(3), Value{}),
+									ValueOut(i(3), ZeroValue),
 								},
 								n("c"),
 							),
@@ -90,7 +90,7 @@ func TestDecoder(t *testing.T) {
 			exp: ZeroGraph.AddValueIn(
 				ValueOut(
 					Value{Graph: ZeroGraph.
-						AddValueIn(ValueOut(i(1), Value{}), n("a")).
+						AddValueIn(ValueOut(i(1), ZeroValue), n("a")).
 						AddValueIn(
 							TupleOut(
 								[]OpenEdge{
@@ -101,7 +101,7 @@ func TestDecoder(t *testing.T) {
 							n("b"),
 						),
 					},
-					Value{},
+					ZeroValue,
 				),
 				n("out"),
 			),
@@ -114,7 +114,7 @@ func TestDecoder(t *testing.T) {
 					ValueOut(
 						i(2),
 						Value{Graph: ZeroGraph.
-							AddValueIn(ValueOut(i(1), Value{}), n("b")),
+							AddValueIn(ValueOut(i(1), ZeroValue), n("b")),
 						},
 					),
 				},
@@ -126,8 +126,8 @@ func TestDecoder(t *testing.T) {
 		{
 			in: "a = 1; b = 2;",
 			exp: ZeroGraph.
-				AddValueIn(ValueOut(i(1), Value{}), n("a")).
-				AddValueIn(ValueOut(i(2), Value{}), n("b")),
+				AddValueIn(ValueOut(i(1), ZeroValue), n("a")).
+				AddValueIn(ValueOut(i(2), ZeroValue), n("b")),
 		},
 	}
 
diff --git a/gg/gg.go b/gg/gg.go
index 78620e7..3e55399 100644
--- a/gg/gg.go
+++ b/gg/gg.go
@@ -6,22 +6,37 @@ import (
 	"strings"
 )
 
-// Value represents a value being stored in a Graph. No more than one field may
-// be non-nil. No fields being set indicates lack of value.
+// ZeroValue is a Value with no fields set.
+var ZeroValue Value
+
+// Value represents a value being stored in a Graph.
 type Value struct {
+
+	// Only one of these fields may be set
 	Name   *string
 	Number *int64
 	Graph  *Graph
 
 	// TODO coming soon!
 	// String *string
+
+	// Optional fields indicating the token which was used to construct this
+	// Value, if any.
+	LexerToken *LexerToken
+}
+
+// IsZero returns true if the Value is the zero value (none of the sub-value
+// fields are set). LexerToken is ignored for this check.
+func (v Value) IsZero() bool {
+	v.LexerToken = nil
+	return v == Value{}
 }
 
 // Equal returns true if the passed in Value is equivalent.
 func (v Value) Equal(v2 Value) bool {
 	switch {
 
-	case v == Value{} && v2 == Value{}:
+	case v.IsZero() && v2.IsZero():
 		return true
 
 	case v.Name != nil && v2.Name != nil && *v.Name == *v2.Name:
@@ -42,8 +57,8 @@ func (v Value) String() string {
 
 	switch {
 
-	case v == Value{}:
-		return ""
+	case v.IsZero():
+		return ""
 
 	case v.Name != nil:
 		return *v.Name
@@ -105,24 +120,24 @@ func ValueOut(val, edgeVal Value) OpenEdge {
 // represents an edge (with edgeVal attached to it) coming from the
 // TupleVertex comprised of the given ordered-set of input edges.
 //
-// If len(ins) == 1 and edgeVal == Value{}, then that single OpenEdge is
+// If len(ins) == 1 && edgeVal.IsZero(), then that single OpenEdge is
 // returned as-is.
 func TupleOut(ins []OpenEdge, edgeVal Value) OpenEdge {
 
 	if len(ins) == 1 {
 
-		if edgeVal == (Value{}) {
+		if edgeVal.IsZero() {
 			return ins[0]
 		}
 
-		if ins[0].val == (Value{}) {
+		if ins[0].val.IsZero() {
 			return ins[0].WithEdgeVal(edgeVal)
 		}
 
 	}
 
 	return OpenEdge{
-		fromV: mkVertex(TupleVertex, Value{}, ins...),
+		fromV: mkVertex(TupleVertex, ZeroValue, ins...),
 		val:   edgeVal,
 	}
 }
diff --git a/gg/gg_test.go b/gg/gg_test.go
index 32d2d67..069c0e4 100644
--- a/gg/gg_test.go
+++ b/gg/gg_test.go
@@ -61,7 +61,7 @@ func TestEqual(t *testing.T) {
 			// equivalent to just that edge.
 			a: ZeroGraph.AddValueIn(TupleOut([]OpenEdge{
 				ValueOut(i(1), n("ident")),
-			}, Value{}), n("out")),
+			}, ZeroValue), n("out")),
 			b:   ZeroGraph.AddValueIn(ValueOut(i(1), n("ident")), n("out")),
 			exp: true,
 		},
@@ -70,7 +70,7 @@ func TestEqual(t *testing.T) {
 			// edgeVal should be equivalent to just that edge with the tuple's
 			// edge value.
 			a: ZeroGraph.AddValueIn(TupleOut([]OpenEdge{
-				ValueOut(i(1), Value{}),
+				ValueOut(i(1), ZeroValue),
 			}, n("ident")), n("out")),
 			b:   ZeroGraph.AddValueIn(ValueOut(i(1), n("ident")), n("out")),
 			exp: true,
diff --git a/gg/lexer.go b/gg/lexer.go
index 12e10ed..8acbdbc 100644
--- a/gg/lexer.go
+++ b/gg/lexer.go
@@ -8,15 +8,26 @@ import (
 	"unicode"
 )
 
-// LexerError is returned by Lexer when an unexpected error occurs parsing a
-// stream of LexerTokens.
-type LexerError struct {
-	Err error
+// LexerLocation describes the location in a file where a particular token was
+// parsed from.
+type LexerLocation struct {
 	Row, Col int
 }
 
+func (l LexerLocation) String() string {
+	return fmt.Sprintf("%d:%d", l.Row, l.Col)
+}
+
+// LexerError is returned by Lexer when an unexpected error occurs parsing a
+// stream of LexerTokens.
+type LexerError struct {
+	Err error
+
+	Location LexerLocation
+}
+
 func (e *LexerError) Error() string {
-	return fmt.Sprintf("%d:%d: %s", e.Row, e.Col, e.Err.Error())
+	return fmt.Sprintf("%s: %s", e.Location.String(), e.Err.Error())
 }
 
 func (e *LexerError) Unwrap() error {
@@ -39,7 +50,11 @@ type LexerToken struct {
 	Kind  LexerTokenKind
 	Value string // never empty string
 
-	Row, Col int
+	Location LexerLocation
+}
+
+func (t LexerToken) errPrefix() string {
+	return fmt.Sprintf("%s: at %q", t.Location.String(), t.Value)
 }
 
 // Lexer is used to parse a string stream into a sequence of tokens which can
@@ -90,8 +105,10 @@ func (l *lexer) fmtErr(err error) *LexerError {
 
 	return &LexerError{
 		Err: err,
-		Row: row,
-		Col: col,
+		Location: LexerLocation{
+			Row: row,
+			Col: col,
+		},
 	}
 }
 
@@ -167,7 +184,9 @@ func (l *lexer) readWhile(
 	return LexerToken{
 		Kind:  kind,
 		Value: l.stringBuilder.String(),
-		Row: row, Col: col,
+		Location: LexerLocation{
+			Row: row, Col: col,
+		},
 	}, lexErr
 }
 
@@ -236,8 +255,10 @@ func (l *lexer) next() (LexerToken, *LexerError) {
 		return LexerToken{
 			Kind:  LexerTokenKindPunctuation,
 			Value: string(r),
-			Row:   l.lastRow,
-			Col:   l.lastCol,
+			Location: LexerLocation{
+				Row: l.lastRow,
+				Col: l.lastCol,
+			},
 		}, nil
 
 	case unicode.IsSpace(r):
diff --git a/gg/lexer_test.go b/gg/lexer_test.go
index 91e743e..038dba0 100644
--- a/gg/lexer_test.go
+++ b/gg/lexer_test.go
@@ -23,9 +23,9 @@ func TestLexer(t *testing.T) {
 			in: "foo",
 			exp: []LexerToken{
 				{
-					Kind:  LexerTokenKindName,
-					Value: "foo",
-					Row: 0, Col: 0,
+					Kind:     LexerTokenKindName,
+					Value:    "foo",
+					Location: LexerLocation{Row: 0, Col: 0},
 				},
 			},
 		},
@@ -33,29 +33,29 @@ func TestLexer(t *testing.T) {
 			in: "foo bar\nf-o f0O Foo",
 			exp: []LexerToken{
 				{
-					Kind:  LexerTokenKindName,
-					Value: "foo",
-					Row: 0, Col: 0,
+					Kind:     LexerTokenKindName,
+					Value:    "foo",
+					Location: LexerLocation{Row: 0, Col: 0},
 				},
 				{
-					Kind:  LexerTokenKindName,
-					Value: "bar",
-					Row: 0, Col: 4,
+					Kind:     LexerTokenKindName,
+					Value:    "bar",
+					Location: LexerLocation{Row: 0, Col: 4},
 				},
 				{
-					Kind:  LexerTokenKindName,
-					Value: "f-o",
-					Row: 1, Col: 0,
+					Kind:     LexerTokenKindName,
+					Value:    "f-o",
+					Location: LexerLocation{Row: 1, Col: 0},
 				},
 				{
-					Kind:  LexerTokenKindName,
-					Value: "f0O",
-					Row: 1, Col: 4,
+					Kind:     LexerTokenKindName,
+					Value:    "f0O",
+					Location: LexerLocation{Row: 1, Col: 4},
 				},
 				{
-					Kind:  LexerTokenKindName,
-					Value: "Foo",
-					Row: 1, Col: 8,
+					Kind:     LexerTokenKindName,
+					Value:    "Foo",
+					Location: LexerLocation{Row: 1, Col: 8},
 				},
 			},
 		},
@@ -63,19 +63,19 @@ func TestLexer(t *testing.T) {
 			in: "1 100 -100",
 			exp: []LexerToken{
 				{
-					Kind:  LexerTokenKindNumber,
-					Value: "1",
-					Row: 0, Col: 0,
+					Kind:     LexerTokenKindNumber,
+					Value:    "1",
+					Location: LexerLocation{Row: 0, Col: 0},
 				},
 				{
-					Kind:  LexerTokenKindNumber,
-					Value: "100",
-					Row: 0, Col: 2,
+					Kind:     LexerTokenKindNumber,
+					Value:    "100",
+					Location: LexerLocation{Row: 0, Col: 2},
 				},
 				{
-					Kind:  LexerTokenKindNumber,
-					Value: "-100",
-					Row: 0, Col: 6,
+					Kind:     LexerTokenKindNumber,
+					Value:    "-100",
+					Location: LexerLocation{Row: 0, Col: 6},
 				},
 			},
 		},
@@ -83,39 +83,39 @@ func TestLexer(t *testing.T) {
 			in: "1<2!-3 ()",
 			exp: []LexerToken{
 				{
-					Kind:  LexerTokenKindNumber,
-					Value: "1",
-					Row: 0, Col: 0,
+					Kind:     LexerTokenKindNumber,
+					Value:    "1",
+					Location: LexerLocation{Row: 0, Col: 0},
 				},
 				{
-					Kind:  LexerTokenKindPunctuation,
-					Value: "<",
-					Row: 0, Col: 1,
+					Kind:     LexerTokenKindPunctuation,
+					Value:    "<",
+					Location: LexerLocation{Row: 0, Col: 1},
 				},
 				{
-					Kind:  LexerTokenKindNumber,
-					Value: "2",
-					Row: 0, Col: 2,
+					Kind:     LexerTokenKindNumber,
+					Value:    "2",
+					Location: LexerLocation{Row: 0, Col: 2},
 				},
 				{
-					Kind:  LexerTokenKindPunctuation,
-					Value: "!",
-					Row: 0, Col: 3,
+					Kind:     LexerTokenKindPunctuation,
+					Value:    "!",
+					Location: LexerLocation{Row: 0, Col: 3},
 				},
 				{
-					Kind:  LexerTokenKindNumber,
-					Value: "-3",
-					Row: 0, Col: 4,
+					Kind:     LexerTokenKindNumber,
+					Value:    "-3",
+					Location: LexerLocation{Row: 0, Col: 4},
 				},
 				{
-					Kind:  LexerTokenKindPunctuation,
-					Value: "(",
-					Row: 0, Col: 7,
+					Kind:     LexerTokenKindPunctuation,
+					Value:    "(",
+					Location: LexerLocation{Row: 0, Col: 7},
 				},
 				{
-					Kind:  LexerTokenKindPunctuation,
-					Value: ")",
-					Row: 0, Col: 8,
+					Kind:     LexerTokenKindPunctuation,
+					Value:    ")",
+					Location: LexerLocation{Row: 0, Col: 8},
 				},
 			},
 		},
@@ -142,8 +142,8 @@ func TestLexer(t *testing.T) {
 			inParts := strings.Split(test.in, "\n")
 
 			assert.ErrorIs(t, lexErr, expErr)
-			assert.Equal(t, lexErr.Row, len(inParts)-1)
-			assert.Equal(t, lexErr.Col, len(inParts[len(inParts)-1]))
+			assert.Equal(t, lexErr.Location.Row, len(inParts)-1)
+			assert.Equal(t, lexErr.Location.Col, len(inParts[len(inParts)-1]))
 		})
 	}
 
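The standalone Go sketch below is supplementary and not part of the patch. It mirrors the LexerLocation, errPrefix, ZeroValue, and IsZero pieces introduced above, to show how token locations end up prefixing decoder errors and why zero-value checks have to ignore the new LexerToken annotation. Type and method names are copied from the diff; the simplified string Kind field, the sample values, and main() are illustrative assumptions only.

package main

import (
	"errors"
	"fmt"
)

// LexerLocation describes where in the input a token was parsed from.
type LexerLocation struct {
	Row, Col int
}

func (l LexerLocation) String() string {
	return fmt.Sprintf("%d:%d", l.Row, l.Col)
}

// LexerToken is simplified here: Kind is a plain string rather than a
// LexerTokenKind.
type LexerToken struct {
	Kind  string
	Value string

	Location LexerLocation
}

// errPrefix builds the "row:col: at <value>" prefix used by decoder errors.
func (t LexerToken) errPrefix() string {
	return fmt.Sprintf("%s: at %q", t.Location.String(), t.Value)
}

// decoderErr wraps an error with the offending token's location prefix.
func decoderErr(tok LexerToken, err error) error {
	return fmt.Errorf("%s: %w", tok.errPrefix(), err)
}

// Value carries an optional LexerToken annotation which must not affect
// zero-value checks, hence an IsZero method instead of comparing directly
// against Value{}.
type Value struct {
	Name   *string
	Number *int64

	LexerToken *LexerToken
}

// ZeroValue is a Value with no fields set.
var ZeroValue Value

// IsZero reports whether the Value is zero, ignoring the LexerToken
// annotation.
func (v Value) IsZero() bool {
	v.LexerToken = nil // v is a copy; the caller's Value is untouched
	return v == Value{}
}

func main() {
	tok := LexerToken{
		Kind:     "punctuation",
		Value:    "<",
		Location: LexerLocation{Row: 2, Col: 7},
	}

	fmt.Println(decoderErr(tok, errors.New("expected value")))

	name := "out"
	v := Value{Name: &name, LexerToken: &tok}
	fmt.Println(v.IsZero(), ZeroValue.IsZero())
}

Run on its own it prints 2:7: at "<": expected value followed by false true, matching the location prefix that decoderErr and LexerError.Error now share via LexerLocation.String.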