(js2) ChunkSerializer

Rafael Weinstein
2015-10-30 07:41:40 -07:00
parent 79aac9ea2e
commit da11f0e9d7
2 changed files with 122 additions and 0 deletions


@@ -0,0 +1,77 @@
/* @flow */
'use strict';
import Chunk from './chunk.js';
import Ref from './ref.js';
const sha1Size = 20;
const chunkLengthSize = 4; // uint32
const chunkHeaderSize = sha1Size + chunkLengthSize;
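// Serialized layout, per chunk (as implemented below): a 20-byte sha1 digest
// identifying the chunk, a uint32 byte length (written via Uint32Array, so in
// platform byte order), then the chunk's data bytes; records are laid end to end.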
export function serialize(chunks: Array<Chunk>): ArrayBuffer {
  let totalSize = 0;
  for (let i = 0; i < chunks.length; i++) {
    totalSize += chunkHeaderSize + chunks[i].data.length;
  }

  let buffer = new ArrayBuffer(totalSize);
  let offset = 0;
  for (let i = 0; i < chunks.length; i++) {
    let chunk = chunks[i];
    let refArray = new Uint8Array(buffer, offset, sha1Size);
    refArray.set(chunk.ref.digest);
    offset += sha1Size;

    // Uint32Arrays cannot be created at non-4-byte offsets into a buffer, so
    // read & write of chunkLength must be done with a tmp Uint8Array.
    let chunkLength = chunk.data.length;
    let sizeArray = new Uint32Array(1);
    sizeArray[0] = chunkLength;
    let sizeWriteArray = new Uint8Array(buffer, offset, chunkLengthSize);
    sizeWriteArray.set(new Uint8Array(sizeArray.buffer));
    offset += chunkLengthSize;

    let dataArray = new Uint8Array(buffer, offset, chunkLength);
    dataArray.set(chunk.data);
    offset += chunkLength;
  }
  return buffer;
}
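// deserialize walks the buffer record by record, copies each data payload out of
// the shared ArrayBuffer, and verifies that the stored sha1 matches the ref
// computed from the chunk's data, throwing on truncated or corrupt input.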
export function deserialize(buffer: ArrayBuffer): Array<Chunk> {
  let chunks: Array<Chunk> = [];
  let totalLength = buffer.byteLength;
  for (let offset = 0; offset < totalLength;) {
    if (buffer.byteLength - offset < chunkHeaderSize) {
      throw new Error('Invalid chunk buffer');
    }

    let refArray = new Uint8Array(buffer, offset, sha1Size);
    let ref = new Ref(new Uint8Array(refArray));
    offset += sha1Size;

    let sizeReadArray = new Uint8Array(buffer, offset, chunkLengthSize);
    let sizeArray = new Uint32Array(new Uint8Array(sizeReadArray).buffer);
    let chunkLength = sizeArray[0];
    offset += chunkLengthSize;

    if (offset + chunkLength > totalLength) {
      throw new Error('Invalid chunk buffer');
    }

    let dataArray = new Uint8Array(buffer, offset, chunkLength);
    let chunk = new Chunk(new Uint8Array(dataArray)); // Makes a slice (copy) of the byte sequence from buffer.
    if (!chunk.ref.equals(ref)) {
      throw new Error('Serialized ref !== computed ref');
    }

    offset += chunkLength;
    chunks.push(chunk);
  }
  return chunks;
}


@@ -0,0 +1,45 @@
/* @flow */
'use strict';
import {suite, test} from 'mocha';
import {assert} from 'chai';
import Chunk from './chunk.js';
import {deserialize, serialize} from './chunk_serializer.js';
suite('ChunkSerializer', () => {
  function assertChunks(expect: Array<Chunk>, actual: Array<Chunk>) {
    assert.strictEqual(expect.length, actual.length);
    for (let i = 0; i < expect.length; i++) {
      assert.isTrue(expect[i].ref.equals(actual[i].ref));
    }
  }

  test('simple', () => {
    let chunks = [Chunk.fromString('abc'), Chunk.fromString('def'), Chunk.fromString('ghi'),
        Chunk.fromString('wacka wack wack')];
    let buffer = serialize(chunks);
    let newChunks = deserialize(buffer);
    assertChunks(chunks, newChunks);
  });

  test('leading & trailing empty', () => {
    let chunks = [Chunk.fromString(''), Chunk.fromString('A'), Chunk.fromString('')];
    let buffer = serialize(chunks);
    let newChunks = deserialize(buffer);
    assertChunks(chunks, newChunks);
  });

  test('no chunks', () => {
    let chunks = [];
    let buffer = serialize(chunks);
    let newChunks = deserialize(buffer);
    assertChunks(chunks, newChunks);
  });
});