Ensure all bytes consumed on decode (#2019)
@@ -31,8 +31,10 @@ func EncodeValue(v Value, vw ValueWriter) chunks.Chunk {
 func DecodeFromBytes(data []byte, vr ValueReader, tc *TypeCache) Value {
 	tc.Lock()
 	defer tc.Unlock()
-	dec := newValueDecoder(&binaryNomsReader{data, 0}, vr, tc)
+	br := &binaryNomsReader{data, 0}
+	dec := newValueDecoder(br, vr, tc)
 	v := dec.readValue()
+	d.Chk.True(br.pos() == uint32(len(data)))
 	return v
 }
 
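For readers skimming the hunk above: the decoder previously discarded its reader as soon as readValue returned, so a chunk with trailing garbage decoded silently. Binding the reader to br lets DecodeFromBytes assert that the final read position equals the input length. Below is a minimal standalone sketch of the same guard, using a hypothetical byteReader type rather than the repo's binaryNomsReader.

package main

import "fmt"

// byteReader is a hypothetical stand-in for a positional decoder input;
// binaryNomsReader in the diff above plays the same role.
type byteReader struct {
	data []byte
	off  uint32
}

// readUint8 consumes one byte and advances the position.
func (r *byteReader) readUint8() uint8 {
	b := r.data[r.off]
	r.off++
	return b
}

// pos reports how many bytes have been consumed so far.
func (r *byteReader) pos() uint32 { return r.off }

// decodeAll decodes a single value (here just one byte) and then checks
// that every input byte was consumed, mirroring the guard added to
// DecodeFromBytes.
func decodeAll(data []byte) (uint8, error) {
	r := &byteReader{data: data}
	v := r.readUint8()
	if r.pos() != uint32(len(data)) {
		return 0, fmt.Errorf("decode left %d unread byte(s)", uint32(len(data))-r.pos())
	}
	return v, nil
}

func main() {
	fmt.Println(decodeAll([]byte{42}))    // 42 <nil>
	fmt.Println(decodeAll([]byte{42, 5})) // 0 decode left 1 unread byte(s)
}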
@@ -349,6 +349,18 @@ func TestWriteStruct(t *testing.T) {
 	)
 }
 
+func TestWriteStructTooMuchData(t *testing.T) {
+	s := NewStruct("S", structData{"x": Number(42), "b": Bool(true)})
+	c := EncodeValue(s, nil)
+	data := c.Data()
+	buff := make([]byte, len(data)+1)
+	copy(buff, data)
+	buff[len(data)] = 5 // Add a bogus extra byte
+	assert.Panics(t, func() {
+		DecodeFromBytes(buff, nil, staticTypeCache)
+	})
+}
+
 func TestWriteStructWithList(t *testing.T) {
 	// struct S {l: List<String>}({l: ["a", "b"]})
 	assertEncoding(t,
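The test above relies on the Go-side check panicking rather than returning an error (hence assert.Panics). A caller that feeds untrusted chunk bytes into DecodeFromBytes and wants to degrade gracefully would need to recover that panic. A hedged sketch of that caller-side pattern, with decodeFn standing in for the actual decode call:

package main

import "fmt"

// safeDecode runs decodeFn, which is assumed to panic on malformed input
// (as TestWriteStructTooMuchData expects), and converts the panic into an
// ordinary error for the caller.
func safeDecode(decodeFn func()) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("decode failed: %v", r)
		}
	}()
	decodeFn()
	return nil
}

func main() {
	err := safeDecode(func() { panic("not all bytes consumed") })
	fmt.Println(err) // decode failed: not all bytes consumed
}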
@@ -35,9 +35,12 @@ setEncodeValue(encodeValue);
 
 export function decodeValue(chunk: Chunk, vr: ValueReader): Value {
   const data = chunk.data;
-  const dec = new ValueDecoder(new BinaryNomsReader(data), vr, staticTypeCache);
+  const br = new BinaryNomsReader(data);
+  const dec = new ValueDecoder(br, vr, staticTypeCache);
   const v = dec.readValue();
-
+  if (br.pos() !== data.byteLength) {
+    throw new Error('Invalid chunk data: not all bytes consumed');
+  }
   if (v instanceof ValueBase) {
     setHash(v, chunk.hash);
   }
@@ -10,6 +10,7 @@ import {assert} from 'chai';
 
 import Blob from './blob.js';
+import * as Bytes from './bytes.js';
 import Chunk from './chunk.js';
 import Hash from './hash.js';
 import List, {newListLeafSequence} from './list.js';
 import Map from './map.js';
@@ -387,6 +388,17 @@ suite('Encoding', () => {
       newStruct('S', {x: 42, b: true}));
   });
 
+  test('struct too much data', async () => {
+    const s = newStruct('S', {x: 42, b: true});
+    const c = encodeValue(s, null);
+    const data = c.data;
+    const buff = Bytes.alloc(data.byteLength + 1);
+    Bytes.copy(data, buff);
+    buff[data.byteLength] = 5; // Add a bogus extra byte
+    const c2 = new Chunk(buff);
+    assert.throws(() => decodeValue(c2, null));
+  });
+
   test('struct with list', () => {
     // struct S {l: List<String>}({l: ['a', 'b']})
     assertEncoding([