correctly determine the length of STREAM frames for IETF QUIC

The length of the data influences the overall length of the frame (if the frame
contains a data length field), because the variable-length integer encoding of the
length field itself consumes either 1 or 2 bytes, depending on its value (the varint
size classes are sketched below).
Author: Marten Seemann
Date: 2018-02-05 09:01:15 +08:00
Parent: 38c420a35b
Commit: 5974c6c113
5 changed files with 134 additions and 4 deletions
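For context, here is a minimal sketch of the size classes used by the IETF QUIC variable-length integer encoding, which utils.VarIntLen in the diff below computes. The helper is an illustrative stand-in written for this note, not the one from the codebase; since the data length is bounded by the packet size, only the 1-byte and 2-byte classes are relevant for the length field, which is why the commit message only mentions 1 or 2 bytes.

    // varIntLen is an illustrative stand-in for utils.VarIntLen: it returns the number
    // of bytes the QUIC variable-length integer encoding needs for a given value.
    func varIntLen(i uint64) int {
        switch {
        case i < 1<<6: // 0 ... 63
            return 1
        case i < 1<<14: // 64 ... 16383
            return 2
        case i < 1<<30: // 16384 ... 2^30-1
            return 4
        default: // larger values (the encoding tops out at 2^62-1)
            return 8
        }
    }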

@@ -130,3 +130,29 @@ func (f *StreamFrame) MinLength(version protocol.VersionNumber) protocol.ByteCount
    }
    return length
}

// MaxDataLen returns the maximum data length
// If 0 is returned, writing will fail (a STREAM_FRAME must contain at least 1 byte of data).
func (f *StreamFrame) MaxDataLen(maxSize protocol.ByteCount, version protocol.VersionNumber) protocol.ByteCount {
    if !version.UsesIETFFrameFormat() {
        return f.maxDataLenLegacy(maxSize, version)
    }
    headerLen := 1 + utils.VarIntLen(uint64(f.StreamID))
    if f.Offset != 0 {
        headerLen += utils.VarIntLen(uint64(f.Offset))
    }
    if f.DataLenPresent {
        // pretend that the data size will be 1 byte
        // if it turns out that varint encoding the length consumes 2 bytes, we need to adjust the data length afterwards
        headerLen++
    }
    if headerLen > maxSize {
        return 0
    }
    maxDataLen := maxSize - headerLen
    if f.DataLenPresent && utils.VarIntLen(uint64(maxDataLen)) != 1 {
        maxDataLen--
    }
    return maxDataLen
}
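As a worked example of the adjustment (the concrete numbers are chosen purely for illustration and don't appear in the diff): for a frame with a 1-byte stream ID varint, a zero offset and DataLenPresent set, the pretended header length is 1 (type byte) + 1 (stream ID) + 1 (length field) = 3 bytes. With maxSize = 100, maxDataLen starts out as 97; since varint-encoding 97 takes 2 bytes rather than 1, it is reduced to 96, and the resulting frame is 1 + 1 + 2 + 96 = 100 bytes, exactly filling the budget.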

@@ -191,6 +191,14 @@ func (f *StreamFrame) minLengthLegacy(_ protocol.VersionNumber) protocol.ByteCount
    return length
}

func (f *StreamFrame) maxDataLenLegacy(maxFrameSize protocol.ByteCount, version protocol.VersionNumber) protocol.ByteCount {
    headerLen := f.minLengthLegacy(version)
    if headerLen > maxFrameSize {
        return 0
    }
    return maxFrameSize - headerLen
}

// DataLen gives the length of data in bytes
func (f *StreamFrame) DataLen() protocol.ByteCount {
    return protocol.ByteCount(len(f.Data))

@@ -394,7 +394,6 @@ var _ = Describe("STREAM frame (for gQUIC)", func() {
                StreamID: 0xdecafbad,
                Data:     []byte("foobar"),
            }
-           frame.MinLength(0)
            err := frame.Write(b, versionBigEndian)
            Expect(err).ToNot(HaveOccurred())
            Expect(b.Bytes()[0] & 0x3).To(Equal(uint8(0x3)))
@@ -480,4 +479,35 @@ var _ = Describe("STREAM frame (for gQUIC)", func() {
            Expect(frame.DataLen()).To(Equal(protocol.ByteCount(6)))
        })
    })

    Context("max data length", func() {
        It("always returns a data length such that the resulting frame has the right size", func() {
            const maxSize = 3000
            data := make([]byte, maxSize)
            f := &StreamFrame{
                StreamID:       0x1337,
                Offset:         0xdeadbeef,
                DataLenPresent: true,
            }
            b := &bytes.Buffer{}
            for i := 1; i < 3000; i++ {
                b.Reset()
                f.Data = nil
                maxDataLen := f.MaxDataLen(protocol.ByteCount(i), versionBigEndian)
                if maxDataLen == 0 { // 0 means that no valid STREAM_FRAME can be written
                    // check that writing a minimal-size STREAM_FRAME (i.e. with 1 byte of data) is actually larger than the desired size
                    f.Data = []byte{0}
                    err := f.Write(b, versionBigEndian)
                    Expect(err).ToNot(HaveOccurred())
                    Expect(b.Len()).To(BeNumerically(">", i))
                    continue
                }
                f.Data = data[:int(maxDataLen)]
                err := f.Write(b, versionBigEndian)
                Expect(err).ToNot(HaveOccurred())
                Expect(b.Len()).To(Equal(i))
            }
        })
    })
})
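Note that this gQUIC test has no counterpart to the pathological-case handling in the IETF QUIC test below: in the legacy gQUIC frame format the data length field, when present, has a fixed size, so maxDataLenLegacy computes the budget exactly and every frame is expected to hit the target size. (That fixed-size length field is a property of the legacy wire format and is not visible in this diff.)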

@@ -208,4 +208,70 @@ var _ = Describe("STREAM frame (for IETF QUIC)", func() {
            Expect(f.MinLength(versionIETFFrames)).To(Equal(1 + utils.VarIntLen(0x1337) + utils.VarIntLen(0x1234567) + utils.VarIntLen(6)))
        })
    })

    Context("max data length", func() {
        const maxSize = 3000

        It("always returns a data length such that the resulting frame has the right size, if data length is not present", func() {
            data := make([]byte, maxSize)
            f := &StreamFrame{
                StreamID: 0x1337,
                Offset:   0xdeadbeef,
            }
            b := &bytes.Buffer{}
            for i := 1; i < 3000; i++ {
                b.Reset()
                f.Data = nil
                maxDataLen := f.MaxDataLen(protocol.ByteCount(i), versionIETFFrames)
                if maxDataLen == 0 { // 0 means that no valid STREAM_FRAME can be written
                    // check that writing a minimal-size STREAM_FRAME (i.e. with 1 byte of data) is actually larger than the desired size
                    f.Data = []byte{0}
                    err := f.Write(b, versionIETFFrames)
                    Expect(err).ToNot(HaveOccurred())
                    Expect(b.Len()).To(BeNumerically(">", i))
                    continue
                }
                f.Data = data[:int(maxDataLen)]
                err := f.Write(b, versionIETFFrames)
                Expect(err).ToNot(HaveOccurred())
                Expect(b.Len()).To(Equal(i))
            }
        })

        It("always returns a data length such that the resulting frame has the right size, if data length is present", func() {
            data := make([]byte, maxSize)
            f := &StreamFrame{
                StreamID:       0x1337,
                Offset:         0xdeadbeef,
                DataLenPresent: true,
            }
            b := &bytes.Buffer{}
            var frameOneByteTooSmallCounter int
            for i := 1; i < 3000; i++ {
                b.Reset()
                f.Data = nil
                maxDataLen := f.MaxDataLen(protocol.ByteCount(i), versionIETFFrames)
                if maxDataLen == 0 { // 0 means that no valid STREAM_FRAME can be written
                    // check that writing a minimal-size STREAM_FRAME (i.e. with 1 byte of data) is actually larger than the desired size
                    f.Data = []byte{0}
                    err := f.Write(b, versionIETFFrames)
                    Expect(err).ToNot(HaveOccurred())
                    Expect(b.Len()).To(BeNumerically(">", i))
                    continue
                }
                f.Data = data[:int(maxDataLen)]
                err := f.Write(b, versionIETFFrames)
                Expect(err).ToNot(HaveOccurred())
                // There's *one* pathological case, where a data length of x can be encoded into 1 byte,
                // but a data length of x+1 needs 2 bytes.
                // In that case, it's impossible to create a STREAM_FRAME of the desired size.
                if b.Len() == i-1 {
                    frameOneByteTooSmallCounter++
                    continue
                }
                Expect(b.Len()).To(Equal(i))
            }
            Expect(frameOneByteTooSmallCounter).To(Equal(1))
        })
    })
})
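The single pathological case can be derived from the test parameters, assuming the standard QUIC varint size classes (1 byte below 64, 2 bytes below 16384): StreamID 0x1337 needs a 2-byte varint and offset 0xdeadbeef an 8-byte varint (it exceeds 2^30), so the pretended header length is 1 + 2 + 8 + 1 = 12 bytes. For maxSize = 76 the initial maxDataLen is 64, which needs a 2-byte length varint and is therefore reduced to 63; but 63 fits into a 1-byte varint again, so the written frame ends up at 1 + 2 + 8 + 1 + 63 = 75 bytes, one short of the budget. This is the only such boundary below 3000, hence the counter is expected to equal exactly 1.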

@@ -145,11 +145,11 @@ func (s *sendStream) popStreamFrame(maxBytes protocol.ByteCount) (*wire.StreamFrame, bool)
        Offset:         s.writeOffset,
        DataLenPresent: true,
    }
-   frameLen := frame.MinLength(s.version)
-   if frameLen >= maxBytes { // a STREAM frame must have at least one byte of data
+   maxDataLen := frame.MaxDataLen(maxBytes, s.version)
+   if maxDataLen == 0 { // a STREAM frame must have at least one byte of data
        return nil, s.dataForWriting != nil
    }
-   frame.Data, frame.FinBit = s.getDataForWriting(maxBytes - frameLen)
+   frame.Data, frame.FinBit = s.getDataForWriting(maxDataLen)
    if len(frame.Data) == 0 && !frame.FinBit {
        // this can happen if:
        // - popStreamFrame is called but there's no data for writing