Switch to network-byte-order in DataStore wire format (#1386)
* Switch to network-byte-order in DataStore wire format. Fixes #1370.
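Network byte order is big-endian: the most significant byte goes first on the wire, the conventional choice for portable protocols (it is what `htonl`/`ntohl` produce). The hunks below flip every `binary.LittleEndian` in the wire format to `binary.BigEndian`. A minimal standalone sketch, not the DataStore code itself, of what actually changes on the wire:

```go
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

func main() {
	// Illustrative only: encode the same chunk size both ways to show
	// what this commit changes in the serialized bytes.
	chunkSize := uint32(0x0000CAFE)

	le := new(bytes.Buffer)
	binary.Write(le, binary.LittleEndian, chunkSize)

	be := new(bytes.Buffer)
	binary.Write(be, binary.BigEndian, chunkSize) // network byte order

	fmt.Printf("little-endian: % x\n", le.Bytes()) // fe ca 00 00
	fmt.Printf("big-endian:    % x\n", be.Bytes()) // 00 00 ca fe
}
```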
@@ -43,7 +43,7 @@ func NewSerializer(writer io.Writer) ChunkSink {
 
 	// Because of chunking at higher levels, no chunk should never be more than 4GB
 	chunkSize := uint32(len(chunk.Data()))
-	err = binary.Write(s.writer, binary.LittleEndian, chunkSize)
+	err = binary.Write(s.writer, binary.BigEndian, chunkSize)
 	d.Chk.NoError(err)
 
 	n, err = io.Copy(s.writer, bytes.NewReader(chunk.Data()))

@@ -129,7 +129,7 @@ func deserializeChunk(reader io.Reader) Chunk {
 	r := ref.New(digest)
 
 	chunkSize := uint32(0)
-	err = binary.Read(reader, binary.LittleEndian, &chunkSize)
+	err = binary.Read(reader, binary.BigEndian, &chunkSize)
 	d.Chk.NoError(err)
 
 	w := NewChunkWriter()

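Taken together, these two hunks frame each chunk as a digest followed by a big-endian uint32 length and the raw bytes. A self-contained sketch of that framing, assuming (from the surrounding code) a 20-byte sha1 digest; `writeChunk`/`readChunk` are hypothetical helpers, not the real `ChunkSink`/`ChunkWriter` API:

```go
package chunks // hypothetical package for this sketch

import (
	"crypto/sha1"
	"encoding/binary"
	"io"
)

// writeChunk frames a chunk as [20-byte sha1 digest][big-endian uint32 length][data].
func writeChunk(w io.Writer, data []byte) error {
	digest := sha1.Sum(data)
	if _, err := w.Write(digest[:]); err != nil {
		return err
	}
	// Higher-level chunking keeps chunks far below 4GB, so uint32 is safe.
	if err := binary.Write(w, binary.BigEndian, uint32(len(data))); err != nil {
		return err
	}
	_, err := w.Write(data)
	return err
}

// readChunk reverses writeChunk, reading the size in network byte order.
func readChunk(r io.Reader) (digest [sha1.Size]byte, data []byte, err error) {
	if _, err = io.ReadFull(r, digest[:]); err != nil {
		return
	}
	var chunkSize uint32
	if err = binary.Read(r, binary.BigEndian, &chunkSize); err != nil {
		return
	}
	data = make([]byte, chunkSize)
	_, err = io.ReadFull(r, data)
	return
}
```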
@@ -12,7 +12,7 @@ import (
 )
 
 func serializeHints(w io.Writer, hints types.Hints) {
-	err := binary.Write(w, binary.LittleEndian, uint32(len(hints))) // 4 billion hints is probably absurd. Maybe this should be smaller?
+	err := binary.Write(w, binary.BigEndian, uint32(len(hints))) // 4 billion hints is probably absurd. Maybe this should be smaller?
 	d.Chk.NoError(err)
 	for r := range hints {
 		serializeHash(w, r)

@@ -20,7 +20,7 @@ func serializeHints(w io.Writer, hints types.Hints) {
 }
 
 func serializeHashes(w io.Writer, hashes ref.RefSlice) {
-	err := binary.Write(w, binary.LittleEndian, uint32(len(hashes))) // 4 billion hashes is probably absurd. Maybe this should be smaller?
+	err := binary.Write(w, binary.BigEndian, uint32(len(hashes))) // 4 billion hashes is probably absurd. Maybe this should be smaller?
 	d.Chk.NoError(err)
 	for _, r := range hashes {
 		serializeHash(w, r)

@@ -36,7 +36,7 @@ func serializeHash(w io.Writer, hash ref.Ref) {
 
 func deserializeHints(reader io.Reader) types.Hints {
 	numRefs := uint32(0)
-	err := binary.Read(reader, binary.LittleEndian, &numRefs)
+	err := binary.Read(reader, binary.BigEndian, &numRefs)
 	d.Chk.NoError(err)
 
 	hints := make(types.Hints, numRefs)

@@ -48,7 +48,7 @@ func deserializeHints(reader io.Reader) types.Hints {
 
 func deserializeHashes(reader io.Reader) ref.RefSlice {
 	numRefs := uint32(0)
-	err := binary.Read(reader, binary.LittleEndian, &numRefs)
+	err := binary.Read(reader, binary.BigEndian, &numRefs)
 	d.Chk.NoError(err)
 
 	hashes := make(ref.RefSlice, numRefs)

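All four hunks in this file change the same pattern: a collection is serialized as a uint32 element count, now in network byte order, followed by the fixed-size 20-byte hashes themselves. A standalone sketch of that count-prefixed layout (hypothetical; the real functions operate on `types.Hints` and `ref.RefSlice`, not raw byte arrays):

```go
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
)

const sha1Size = 20

// serializeHashes writes a big-endian uint32 count, then one 20-byte digest each.
func serializeHashes(w io.Writer, hashes [][sha1Size]byte) error {
	if err := binary.Write(w, binary.BigEndian, uint32(len(hashes))); err != nil {
		return err
	}
	for _, h := range hashes {
		if _, err := w.Write(h[:]); err != nil {
			return err
		}
	}
	return nil
}

// deserializeHashes reads the count in network byte order, then that many digests.
func deserializeHashes(r io.Reader) ([][sha1Size]byte, error) {
	var n uint32
	if err := binary.Read(r, binary.BigEndian, &n); err != nil {
		return nil, err
	}
	hashes := make([][sha1Size]byte, n)
	for i := range hashes {
		if _, err := io.ReadFull(r, hashes[i][:]); err != nil {
			return nil, err
		}
	}
	return hashes, nil
}

func main() {
	buf := new(bytes.Buffer)
	_ = serializeHashes(buf, [][sha1Size]byte{{1}, {2}})
	fmt.Printf("% x\n", buf.Bytes()[:4]) // 00 00 00 02 — count in network byte order
}
```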
@@ -5,7 +5,7 @@ import Ref from './ref.js';
 import {invariant} from './assert.js';
 
 const headerSize = 4; // uint32
-const littleEndian = true;
+const bigEndian = false; // Passing false to DataView methods makes them use big-endian byte order.
 const sha1Size = 20;
 const chunkLengthSize = 4; // uint32
 const chunkHeaderSize = sha1Size + chunkLengthSize;

@@ -44,7 +44,7 @@ export function serialize(hints: Set<Ref>, chunks: Array<Chunk>): ArrayBuffer {
 function serializeHints(hints: Set<Ref>, buffer: ArrayBuffer): number {
   let offset = 0;
   const view = new DataView(buffer, offset, headerSize);
-  view.setUint32(offset, hints.size | 0, littleEndian); // Coerce number to uint32
+  view.setUint32(offset, hints.size | 0, bigEndian); // Coerce number to uint32
   offset += headerSize;
 
   hints.forEach(ref => {

@@ -78,7 +78,7 @@ function deserializeHints(buffer: ArrayBuffer): {hints: Array<Ref>, offset: numb
 
   let offset = 0;
   const view = new DataView(buffer, 0, headerSize);
-  const numHints = view.getUint32(0, littleEndian);
+  const numHints = view.getUint32(0, bigEndian);
   offset += headerSize;
 
   const totalLength = headerSize + (numHints * sha1Size);

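On the JavaScript side there is no named big-endian mode: `DataView`'s `getUint32`/`setUint32` take a `littleEndian` boolean, and passing `false` (or omitting it) selects big-endian. That is why the constant flips from `littleEndian = true` to `bigEndian = false`. A quick Go cross-check that both sides now agree on the byte layout (illustrative values only):

```go
package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// The JS side writes view.setUint32(0, 0x12345678, bigEndian), i.e. with
	// the littleEndian flag false, producing 12 34 56 78 on the wire.
	wire := []byte{0x12, 0x34, 0x56, 0x78}

	// The Go side reads the same bytes with binary.BigEndian and agrees.
	fmt.Println(binary.BigEndian.Uint32(wire) == 0x12345678) // true
}
```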