jstream: normalize type names to make a bit more sense everywhere

Brian Picciano 2018-05-27 03:38:03 +00:00
parent dbc9d32193
commit d20ad7830c
5 changed files with 111 additions and 109 deletions

View File

@@ -25,24 +25,24 @@ func (dr *delimReader) Read(b []byte) (int, error) {
 	return n, err
 }
 
-type byteBlobReader struct {
+type bytesReader struct {
 	dr *delimReader
 	dec io.Reader
 }
 
-func newByteBlobReader(r io.Reader) *byteBlobReader {
+func newBytesReader(r io.Reader) *bytesReader {
 	dr := &delimReader{r: r}
-	return &byteBlobReader{
+	return &bytesReader{
 		dr: dr,
 		dec: base64.NewDecoder(base64.StdEncoding, dr),
 	}
 }
 
-func (bbr *byteBlobReader) Read(into []byte) (int, error) {
-	n, err := bbr.dec.Read(into)
-	if bbr.dr.delim == bbEnd {
+func (br *bytesReader) Read(into []byte) (int, error) {
+	n, err := br.dec.Read(into)
+	if br.dr.delim == bbEnd {
 		return n, io.EOF
-	} else if bbr.dr.delim == bbCancel {
+	} else if br.dr.delim == bbCancel {
 		return n, ErrCanceled
 	}
 	return n, err
@@ -50,6 +50,6 @@ func (bbr *byteBlobReader) Read(into []byte) (int, error) {
 
 // returns the bytes which were read off the underlying io.Reader but which
 // haven't been consumed yet.
-func (bbr *byteBlobReader) buffered() io.Reader {
-	return bytes.NewBuffer(bbr.dr.rest)
+func (br *bytesReader) buffered() io.Reader {
+	return bytes.NewBuffer(br.dr.rest)
 }
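
As a quick illustration of the renamed type (a package-internal sketch, not code from this commit): bytesReader base64-decodes its input until it hits the `$` delimiter (bbEnd), at which point it reports io.EOF, or returns ErrCanceled if it hits `!` (bbCancel) instead. The helper name and input below are made up for the example.

package jstream

import (
	"bytes"
	"io/ioutil"
)

// readBytesBody is a hypothetical, package-internal sketch (not from this
// commit). "aGVsbG8=" is standard base64 for "hello", and the trailing '$'
// (bbEnd) terminates the element, so bytesReader decodes "hello" and then
// reports io.EOF; a '!' (bbCancel) there would instead produce ErrCanceled.
func readBytesBody() ([]byte, error) {
	br := newBytesReader(bytes.NewBufferString("aGVsbG8=$"))
	return ioutil.ReadAll(br)
}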

View File

@@ -11,14 +11,14 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
-type bbrTest struct {
+type brTest struct {
 	wsSuffix []byte // whitespace
 	body []byte
 	shouldCancel bool
 	intoSize int
 }
 
-func randBBRTest(minBodySize, maxBodySize int) bbrTest {
+func randBRTest(minBodySize, maxBodySize int) brTest {
 	var whitespace = []byte{' ', '\n', '\t', '\r'}
 	genWhitespace := func(n int) []byte {
 		ws := make([]byte, n)
@@ -29,18 +29,18 @@ func randBBRTest(minBodySize, maxBodySize int) bbrTest {
 	}
 
 	body := mtest.RandBytes(minBodySize + mtest.Rand.Intn(maxBodySize-minBodySize))
-	return bbrTest{
+	return brTest{
 		wsSuffix: genWhitespace(mtest.Rand.Intn(10)),
 		body: body,
 		intoSize: 1 + mtest.Rand.Intn(len(body)+1),
 	}
 }
 
-func (bt bbrTest) msgAndArgs() []interface{} {
+func (bt brTest) msgAndArgs() []interface{} {
 	return []interface{}{"bt:%#v len(body):%d", bt, len(bt.body)}
 }
 
-func (bt bbrTest) mkBytes() []byte {
+func (bt brTest) mkBytes() []byte {
 	buf := new(bytes.Buffer)
 	enc := base64.NewEncoder(base64.StdEncoding, buf)
@@ -58,13 +58,13 @@ func (bt bbrTest) mkBytes() []byte {
 	return buf.Bytes()
 }
 
-func (bt bbrTest) do(t *T) bool {
+func (bt brTest) do(t *T) bool {
 	buf := bytes.NewBuffer(bt.mkBytes())
-	bbr := newByteBlobReader(buf)
+	br := newBytesReader(buf)
 	into := make([]byte, bt.intoSize)
 	outBuf := new(bytes.Buffer)
-	_, err := io.CopyBuffer(outBuf, bbr, into)
+	_, err := io.CopyBuffer(outBuf, br, into)
 
 	if bt.shouldCancel {
 		return assert.Equal(t, ErrCanceled, err, bt.msgAndArgs()...)
 	}
@@ -74,7 +74,7 @@ func (bt bbrTest) do(t *T) bool {
 	if !assert.Equal(t, bt.body, outBuf.Bytes(), bt.msgAndArgs()...) {
 		return false
 	}
-	fullRest := append(bbr.dr.rest, buf.Bytes()...)
+	fullRest := append(br.dr.rest, buf.Bytes()...)
 	if len(bt.wsSuffix) == 0 {
 		return assert.Empty(t, fullRest, bt.msgAndArgs()...)
 	}
@@ -83,15 +83,15 @@ func (bt bbrTest) do(t *T) bool {
 
 func TestByteBlobReader(t *T) {
 	// some sanity tests
-	bbrTest{
+	brTest{
 		body: []byte{2, 3, 4, 5},
 		intoSize: 4,
 	}.do(t)
-	bbrTest{
+	brTest{
 		body: []byte{2, 3, 4, 5},
 		intoSize: 3,
 	}.do(t)
-	bbrTest{
+	brTest{
 		body: []byte{2, 3, 4, 5},
 		shouldCancel: true,
 		intoSize: 3,
@@ -99,7 +99,7 @@ func TestByteBlobReader(t *T) {
 
 	// fuzz this bitch
 	for i := 0; i < 50000; i++ {
-		bt := randBBRTest(0, 1000)
+		bt := randBRTest(0, 1000)
 		if !bt.do(t) {
 			return
 		}
@@ -112,7 +112,7 @@ func TestByteBlobReader(t *T) {
 
 func BenchmarkByteBlobReader(b *B) {
 	type bench struct {
-		bt bbrTest
+		bt brTest
 		body []byte
 		buf *bytes.Reader
 		cpBuf []byte
@@ -122,7 +122,7 @@ func BenchmarkByteBlobReader(b *B) {
 		n := 100
 		benches := make([]bench, n)
 		for i := range benches {
-			bt := randBBRTest(minBodySize, maxBodySize)
+			bt := randBRTest(minBodySize, maxBodySize)
 			body := bt.mkBytes()
 			benches[i] = bench{
 				bt: bt,
@@ -146,15 +146,15 @@ func BenchmarkByteBlobReader(b *B) {
 		}
 	}
 
-	testBBR := func(b *B, benches []bench) {
+	testBR := func(b *B, benches []bench) {
 		j := 0
 		for i := 0; i < b.N; i++ {
 			if j >= len(benches) {
 				j = 0
 			}
 			benches[j].buf.Reset(benches[j].body)
-			bbr := newByteBlobReader(benches[j].buf)
-			io.CopyBuffer(ioutil.Discard, bbr, benches[j].cpBuf)
+			br := newBytesReader(benches[j].buf)
+			io.CopyBuffer(ioutil.Discard, br, benches[j].cpBuf)
 			j++
 		}
 	}
@@ -177,8 +177,8 @@ func BenchmarkByteBlobReader(b *B) {
 		b.Run("raw", func(b *B) {
 			testRaw(b, set)
 		})
-		b.Run("bbr", func(b *B) {
-			testBBR(b, set)
+		b.Run("br", func(b *B) {
+			testBR(b, set)
 		})
 		b.StopTimer()
 	})

View File

@@ -34,32 +34,30 @@
 //
 // There are three jstream element types:
 //
-// * JSON Value: Any JSON value
-// * Byte Blob: A stream of bytes of unknown, and possibly infinite, size
+// * Value: Any JSON value
+// * Bytes: A stream of bytes of unknown, and possibly infinite, size
 // * Stream: A heterogenous sequence of jstream elements of unknown, and
 //   possibly infinite, size
 //
-// JSON Value elements are defined as being JSON objects with a `val` field. The
+// Value elements are defined as being JSON objects with a `val` field. The
 // value of that field is the JSON Value.
 //
 //	{ "val":{"foo":"bar"} }
 //
-// Byte Blob elements are defined as being a JSON object with a `bytesStart`
-// field with a value of `true`. Immediately following the JSON object are the
-// bytes which are the Byte Blob, encoded using standard base64. Immediately
-// following the encoded bytes is the character `$`, to indicate the bytes have
-// been completely written. Alternatively the character `!` may be written
-// immediately after the bytes to indicate writing was canceled prematurely by
-// the writer.
+// Bytes elements are defined as being a JSON object with a `bytesStart` field
+// with a value of `true`. Immediately following the JSON object are the bytes
+// encoded using standard base64. Immediately following the encoded bytes is the
+// character `$`, to indicate the bytes have been completely written.
+// Alternatively the character `!` may be written immediately after the bytes to
+// indicate writing was canceled prematurely by the writer.
 //
 //	{ "bytesStart":true }wXnxQHgUO8g=$
 //	{ "bytesStart":true }WGYcTI8=!
 //
 // The JSON object may also contain a `sizeHint` field, which gives the
-// estimated number of bytes in the Byte Blob (excluding the trailing
-// delimiter). The hint is neither required to exist or be accurate if it does.
-// The trailing delimeter (`$` or `!`) is required to be sent even if the hint
-// is sent.
+// estimated number of bytes (excluding the trailing delimiter). The hint is
+// neither required to exist or be accurate if it does. The trailing delimeter
+// (`$` or `!`) is required to be sent even if the hint is sent.
 //
 // Stream elements are defined as being a JSON object with a `streamStart` field
 // with a value of `true`. Immediately following the JSON object will be zero
@@ -110,6 +108,10 @@ package jstream
 // TODO figure out how to expose the json.Encoder/Decoders so that users can set
 // custom options on them (like UseNumber and whatnot)
+
+// TODO attempt to refactor this into using channels? or write a layer on top
+// which does so?
+// TODO TypeNil?
 
 import (
 	"encoding/base64"
 	"encoding/json"
@@ -119,7 +121,7 @@ import (
 	"io/ioutil"
 )
 
-// byte blob constants
+// bytes constants
 const (
 	bbEnd = '$'
 	bbCancel = '!'
@@ -131,9 +133,9 @@ type Type string
 
 // The jstream element types
 const (
-	TypeJSONValue Type = "jsonValue"
-	TypeByteBlob Type = "byteBlob"
+	TypeValue Type = "value"
+	TypeBytes Type = "bytes"
 	TypeStream Type = "stream"
 )
 
 // ErrWrongType is an error returned by the Decode* methods on Decoder when the
@@ -148,7 +150,7 @@ func (err ErrWrongType) Error() string {
 }
 
 var (
-	// ErrCanceled is returned when reading either a Byte Blob or a Stream,
+	// ErrCanceled is returned when reading either a Bytes or a Stream element,
 	// indicating that the writer has prematurely canceled the element.
 	ErrCanceled = errors.New("canceled by writer")
 
@@ -172,8 +174,8 @@ type element struct {
 // Element is a single jstream element which is read off a StreamReader.
 //
 // If a method is called which expects a particular Element type (e.g.
-// DecodeValue, which expects a JSONValue Element) but the Element is not of
-// that type then an ErrWrongType will be returned.
+// DecodeValue, which expects a Value Element) but the Element is not of that
+// type then an ErrWrongType will be returned.
 //
 // If there was an error reading the Element off the StreamReader that error is
 // kept in the Element and returned from any method call.
@@ -207,20 +209,20 @@ func (el Element) assertType(is Type) error {
 	return nil
 }
 
-// DecodeValue attempts to unmarshal a JSON Value Element's value into the given
+// DecodeValue attempts to unmarshal a Value Element's value into the given
 // receiver.
 func (el Element) DecodeValue(i interface{}) error {
-	if err := el.assertType(TypeJSONValue); err != nil {
+	if err := el.assertType(TypeValue); err != nil {
 		return err
 	}
 	return json.Unmarshal(el.value, i)
 }
 
 // DecodeBytes returns an io.Reader which will contain the contents of a
-// ByteBlob element. The io.Reader _must_ be read till io.EOF or ErrCanceled
-// before the StreamReader may be used again.
+// Bytes element. The io.Reader _must_ be read till io.EOF or ErrCanceled is
+// returned before the StreamReader may be used again.
 func (el Element) DecodeBytes() (io.Reader, error) {
-	if err := el.assertType(TypeByteBlob); err != nil {
+	if err := el.assertType(TypeBytes); err != nil {
 		return nil, err
 	}
 	return el.br, nil
@@ -241,12 +243,12 @@ func (el Element) DecodeStream() (*StreamReader, error) {
 // it came from and discards it, making the StreamReader ready to have Next
 // called on it again.
 //
-// If the Element is a Byte Blob and is ended with io.EOF, or if the Element is
-// a Stream and is ended with ErrStreamEnded then this returns nil. If either is
+// If the Element is a Bytes and is ended with io.EOF, or if the Element is a
+// Stream and is ended with ErrStreamEnded then this returns nil. If either is
 // canceled this also returns nil. All other errors are returned.
 func (el Element) Discard() error {
 	switch el.Type {
-	case TypeByteBlob:
+	case TypeBytes:
 		r, _ := el.DecodeBytes()
 		_, err := io.Copy(ioutil.Discard, r)
 		if err == ErrCanceled {
@@ -265,7 +267,7 @@ func (el Element) Discard() error {
 				return err
 			}
 		}
-	default: // TypeJSONValue
+	default: // TypeValue
 		return nil
 	}
 }
@@ -277,7 +279,7 @@ type StreamReader struct {
 
 	// only one of these can be set at a time
 	dec *json.Decoder
-	bbr *byteBlobReader
+	br *bytesReader
 
 	// once set this StreamReader will always return this error on Next
 	err error
@@ -293,25 +295,25 @@ func (sr *StreamReader) clone() *StreamReader {
 	return &sr2
 }
 
-// pulls buffered bytes out of either the json.Decoder or byteBlobReader, if
+// pulls buffered bytes out of either the json.Decoder or bytesReader, if
 // possible, and returns an io.MultiReader of those and orig. Will also set the
-// json.Decoder/byteBlobReader to nil if that's where the bytes came from.
+// json.Decoder/bytesReader to nil if that's where the bytes came from.
 func (sr *StreamReader) multiReader() io.Reader {
 	if sr.dec != nil {
 		buf := sr.dec.Buffered()
 		sr.dec = nil
 		return io.MultiReader(buf, sr.orig)
-	} else if sr.bbr != nil {
-		buf := sr.bbr.buffered()
-		sr.bbr = nil
+	} else if sr.br != nil {
+		buf := sr.br.buffered()
+		sr.br = nil
 		return io.MultiReader(buf, sr.orig)
 	}
 	return sr.orig
 }
 
 // Next reads, decodes, and returns the next Element off the StreamReader. If
-// the Element is a ByteBlob or embedded Stream then it _must_ be fully consumed
-// before Next is called on this StreamReader again.
+// the Element is a Bytes or embedded Stream Element then it _must_ be fully
+// consumed before Next is called on this StreamReader again.
 //
 // The returned Element's Err field will be ErrStreamEnd if the Stream was
 // ended, or ErrCanceled if it was canceled, and this StreamReader should not be
@@ -350,22 +352,22 @@ func (sr *StreamReader) Next() Element {
 		}
 	} else if el.BytesStart {
 		return Element{
-			Type: TypeByteBlob,
+			Type: TypeBytes,
 			SizeHint: el.SizeHint,
 			br: sr.readBytes(),
 		}
 	} else if len(el.Value) > 0 {
 		return Element{
-			Type: TypeJSONValue,
+			Type: TypeValue,
 			value: el.Value,
 		}
 	}
 	return Element{Err: errors.New("malformed Element, can't determine type")}
 }
 
-func (sr *StreamReader) readBytes() *byteBlobReader {
-	sr.bbr = newByteBlobReader(sr.multiReader())
-	return sr.bbr
+func (sr *StreamReader) readBytes() *bytesReader {
+	sr.br = newBytesReader(sr.multiReader())
+	return sr.br
 }
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -384,7 +386,7 @@ func NewStreamWriter(w io.Writer) *StreamWriter {
 }
 
 // EncodeValue marshals the given value and writes it to the Stream as a
-// JSONValue element.
+// Value element.
 func (sw *StreamWriter) EncodeValue(i interface{}) error {
 	b, err := json.Marshal(i)
 	if err != nil {
@@ -394,11 +396,11 @@ func (sw *StreamWriter) EncodeValue(i interface{}) error {
 }
 
 // EncodeBytes copies the given io.Reader, until io.EOF, onto the Stream as a
-// ByteBlob element. This method will block until copying is completed or an
-// error is encountered.
+// Bytes element. This method will block until copying is completed or an error
+// is encountered.
 //
-// If the io.Reader returns any error which isn't io.EOF then the ByteBlob is
-// canceled and that error is returned from this method. Otherwise nil is
+// If the io.Reader returns any error which isn't io.EOF then the Bytes element
+// is canceled and that error is returned from this method. Otherwise nil is
 // returned.
 //
 // sizeHint may be given if it's known or can be guessed how many bytes the
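
To make the rename concrete, here is a rough round-trip sketch using the renamed identifiers (TypeValue, TypeBytes, EncodeValue, EncodeBytes, DecodeValue, DecodeBytes). It is illustrative only: the NewStreamReader constructor is assumed to mirror NewStreamWriter (it isn't shown in this diff), and the values being written are made up.

package jstream

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"strings"
)

// exampleRoundTrip is a hypothetical sketch (not part of this commit): it
// writes one Value element and one Bytes element into a buffer, then reads
// them back, switching on the renamed Type constants. NewStreamReader is
// assumed to exist and mirror NewStreamWriter.
func exampleRoundTrip() error {
	buf := new(bytes.Buffer)

	w := NewStreamWriter(buf)
	if err := w.EncodeValue(map[string]string{"foo": "bar"}); err != nil {
		return err
	}
	if err := w.EncodeBytes(5, strings.NewReader("hello")); err != nil {
		return err
	}

	r := NewStreamReader(buf)
	for i := 0; i < 2; i++ {
		el := r.Next()
		if el.Err != nil {
			return el.Err
		}
		switch el.Type {
		case TypeValue:
			var v map[string]string
			if err := el.DecodeValue(&v); err != nil {
				return err
			}
			fmt.Println("value:", v)
		case TypeBytes:
			br, err := el.DecodeBytes()
			if err != nil {
				return err
			}
			// a Bytes element must be fully consumed before calling Next again
			b, err := ioutil.ReadAll(br)
			if err != nil {
				return err
			}
			fmt.Println("bytes:", string(b))
		}
	}
	return nil
}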

View File

@@ -51,9 +51,9 @@ func TestEncoderDecoder(t *T) {
 		case pick == 0:
 			typ = TypeStream
 		case pick < 4:
-			typ = TypeJSONValue
+			typ = TypeValue
 		default:
-			typ = TypeByteBlob
+			typ = TypeBytes
 		}
 	}
 
@@ -66,7 +66,7 @@ func TestEncoderDecoder(t *T) {
 	}
 
 	switch typ {
-	case TypeJSONValue:
+	case TypeValue:
 		tc.val = map[string]interface{}{
 			mtest.RandHex(8): mtest.RandHex(8),
 			mtest.RandHex(8): mtest.RandHex(8),
@@ -75,7 +75,7 @@ func TestEncoderDecoder(t *T) {
 			mtest.RandHex(8): mtest.RandHex(8),
 		}
 		return tc
-	case TypeByteBlob:
+	case TypeBytes:
 		tc.bytes = mtest.RandBytes(mtest.Rand.Intn(256))
 		return tc
 	case TypeStream:
@@ -97,7 +97,7 @@ func TestEncoderDecoder(t *T) {
 		success = success && assert.Equal(t, tc.typ, el.Type, l...)
 
 		switch el.Type {
-		case TypeJSONValue:
+		case TypeValue:
 			if tc.discard {
 				success = success && assert.NoError(t, el.Discard())
 				break
@@ -106,7 +106,7 @@ func TestEncoderDecoder(t *T) {
 			var val interface{}
 			success = success && assert.NoError(t, el.DecodeValue(&val), l...)
 			success = success && assert.Equal(t, tc.val, val, l...)
-		case TypeByteBlob:
+		case TypeBytes:
 			br, err := el.DecodeBytes()
 			success = success && assert.NoError(t, err, l...)
 			success = success && assert.Equal(t, uint(len(tc.bytes)), el.SizeHint, l...)
@@ -170,9 +170,9 @@ func TestEncoderDecoder(t *T) {
 	assertWrite = func(w *StreamWriter, tc testCase) bool {
 		l, success := tcLog(tc), true
 		switch tc.typ {
-		case TypeJSONValue:
+		case TypeValue:
 			success = success && assert.NoError(t, w.EncodeValue(tc.val), l...)
-		case TypeByteBlob:
+		case TypeBytes:
 			if tc.cancel {
 				r := newCancelBuffer(tc.bytes)
 				err := w.EncodeBytes(uint(len(tc.bytes)), r)
@@ -248,32 +248,32 @@ func TestEncoderDecoder(t *T) {
 
 	// some basic test cases
 	do() // empty stream
-	do(randTestCase(TypeJSONValue, false))
-	do(randTestCase(TypeByteBlob, false))
+	do(randTestCase(TypeValue, false))
+	do(randTestCase(TypeBytes, false))
 	do(
-		randTestCase(TypeJSONValue, false),
-		randTestCase(TypeJSONValue, false),
-		randTestCase(TypeJSONValue, false),
+		randTestCase(TypeValue, false),
+		randTestCase(TypeValue, false),
+		randTestCase(TypeValue, false),
 	)
 	do(
-		randTestCase(TypeJSONValue, false),
-		randTestCase(TypeByteBlob, false),
-		randTestCase(TypeJSONValue, false),
+		randTestCase(TypeValue, false),
+		randTestCase(TypeBytes, false),
+		randTestCase(TypeValue, false),
 	)
 	do(
-		randTestCase(TypeByteBlob, false),
-		randTestCase(TypeByteBlob, false),
-		randTestCase(TypeByteBlob, false),
+		randTestCase(TypeBytes, false),
+		randTestCase(TypeBytes, false),
+		randTestCase(TypeBytes, false),
 	)
 	do(
-		randTestCase(TypeJSONValue, false),
+		randTestCase(TypeValue, false),
 		randTestCase(TypeStream, false),
-		randTestCase(TypeJSONValue, false),
+		randTestCase(TypeValue, false),
 	)
 
 	// some special cases, empty elements which are canceled
 	do(testCase{typ: TypeStream, cancel: true})
-	do(testCase{typ: TypeByteBlob, cancel: true})
+	do(testCase{typ: TypeBytes, cancel: true})
 
 	for i := 0; i < 1000; i++ {
 		tc := randTestCase(TypeStream, false)
@@ -297,7 +297,7 @@ func TestDoubleDiscardBytes(t *T) {
 	assert.NoError(t, err)
 	assert.Equal(t, b1, b1got)
 
-	// discarding an already consumed byte blob shouldn't do anything
+	// discarding an already consumed bytes shouldn't do anything
 	assert.NoError(t, el1.Discard())
 
 	r2, err := r.Next().DecodeBytes()

View File

@@ -50,13 +50,13 @@ completely consumed and the pipe may be used for a new request.
 
 The two elements of the request stream are specified as follows:
 
-* The first element, the head, is a JSON value with an object containing a
+* The first element, the head, is a value element with an object containing a
   `name` field, which identifies the call being made, and optionally a `debug`
   field.
 
-* The second element is the argument to the call. This may be a JSON value, a
-  byte blob, or an embedded stream containing even more elements, depending on
-  the call. It's up to the client and server to coordinate beforehand what to
+* The second element is the argument to the call. This may be a value, a
+  bytes, or an embedded stream element containing even more elements, depending
+  on the call. It's up to the client and server to coordinate beforehand what to
   expect here.
 
 ## Call response
@@ -67,12 +67,12 @@ completely consumed and the pipe may be used for a new request.
 
 The two elements of the response stream are specified as follows:
 
-* The first element is the response from the call. This may be a JSON value, a
-  byte blob, or an embedded stream containing even more elements, depending on
-  the call. It's up to the client and server to coordinate beforehand what to
-  expect here.
+* The first element is the response from the call. This may be a value, a bytes,
+  or an embedded stream element containing even more elements, depending on the
+  call. It's up to the client and server to coordinate beforehand what to expect
+  here.
 
-* The second element, the tail, is a JSON value with an object optionally
+* The second element, the tail, is a value element with an object optionally
   containing an `err` field, and optionally containing a `debug` field. The
   value of `err` may be any JSON value which is meaningful to the client and
   server. This element is required even if there's no `err` or `debug` data.
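
For illustration only, the two request elements described above could be produced with this package's StreamWriter roughly as follows. The call name "echo" and the argument bytes are invented, and the enclosing request stream framing (not shown in this diff) is omitted.

package jstream

import (
	"bytes"
	"io"
)

// writeRequest is a hypothetical sketch of emitting the two request elements
// with this package's StreamWriter. The call name "echo" and the argument are
// invented, and the enclosing request stream framing is omitted.
func writeRequest(conn io.Writer, arg []byte) error {
	w := NewStreamWriter(conn)

	// First element, the head: a value element naming the call,
	// e.g. { "val":{"name":"echo"} } on the wire.
	if err := w.EncodeValue(map[string]interface{}{"name": "echo"}); err != nil {
		return err
	}

	// Second element, the argument: here a bytes element,
	// e.g. { "bytesStart":true }<base64 of arg>$ on the wire.
	return w.EncodeBytes(uint(len(arg)), bytes.NewReader(arg))
}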