JS: Move readValue to DataStore

Towards #1095
This commit is contained in:
Erik Arvidsson
2016-03-21 13:09:02 -07:00
parent a6e5de48dc
commit c27780e4fe
22 changed files with 433 additions and 359 deletions

View File

@@ -6,9 +6,9 @@ import Random from './pseudo-random.js';
import MemoryStore from './memory-store.js';
import test from './async-test.js';
import {blobType} from './type.js';
import {readValue} from './read-value.js';
import {writeValue} from './encode.js';
import {newBlob, BlobWriter} from './blob.js';
import {DataStore} from './data-store.js';
suite('Blob', () => {
function intSequence(start: number, end: number): Uint8Array {
@@ -51,10 +51,11 @@ suite('Blob', () => {
test('roundtrip', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const b1 = await newBlob(randomArray(15));
const r1 = await writeValue(b1, blobType, ms);
const b2 = await readValue(r1, ms);
const r1 = await writeValue(b1, blobType, ds);
const b2 = await ds.readValue(r1);
assert.isTrue(b1.equals(b2));
});

View File

@@ -13,8 +13,9 @@ import {Kind} from './noms-kind.js';
import {newMap} from './map.js';
import {newSet} from './set.js';
import {Package, registerPackage} from './package.js';
import {readValue} from './read-value.js';
// import {readValue} from './read-value.js';
import {writeValue} from './encode.js';
import {decodeNomsValue} from './decode.js';
type DatasTypes = {
commitTypeDef: Type,
@@ -103,14 +104,14 @@ export class DataStore {
return getEmptyCommitMap();
}
return readValue(rootRef, this._cs);
return this.readValue(rootRef);
});
}
head(datasetID: string): Promise<?Struct> {
return this._datasets.then(
datasets => datasets.get(datasetID).then(commitRef => commitRef ?
readValue(commitRef, this._cs) : null));
this.readValue(commitRef) : null));
}
datasets(): Promise<NomsMap<string, Ref>> {
@@ -128,6 +129,15 @@ export class DataStore {
return true;
}
// Fetches the chunk identified by `ref` from this store's underlying
// ChunkStore and decodes it into a Noms value. Resolves to null when no
// chunk exists for `ref` (missing data is not an error here).
async readValue(ref: Ref): Promise<any> {
const chunk = await this._cs.get(ref);
if (chunk.isEmpty()) {
// Empty chunk == ref not present in the store; surface as null rather than throwing.
return null;
}
// `this` is passed as the decode context, so values decoded from this chunk
// are bound to this DataStore (decodeNomsValue's second parameter is a DataStore).
return decodeNomsValue(chunk, this);
}
async commit(datasetId: string, commit: Struct): Promise<DataStore> {
const currentRootRefP = this.getRoot();
let currentDatasets = await this._datasetsFromRootRef(currentRootRefP);
@@ -156,10 +166,10 @@ export class DataStore {
}
}
async function getAncestors(commits: NomsSet<Ref>, store: ChunkStore): Promise<NomsSet<Ref>> {
async function getAncestors(commits: NomsSet<Ref>, store: DataStore): Promise<NomsSet<Ref>> {
let ancestors = await newSet([], getDatasTypes().commitSetType);
await commits.map(async (commitRef) => {
const commit = await readValue(commitRef, store);
const commit = await store.readValue(commitRef);
await commit.get('parents').map(async (ref) => ancestors = await ancestors.insert(ref));
});
return ancestors;

View File

@@ -6,7 +6,7 @@ import Chunk from './chunk.js';
import MemoryStore from './memory-store.js';
import Ref from './ref.js';
import {assert} from 'chai';
import {DataStore, getDatasTypes, newCommit} from './datastore.js';
import {DataStore, getDatasTypes, newCommit} from './data-store.js';
import {invariant, notNull} from './assert.js';
import {newMap} from './map.js';
import {writeValue} from './encode.js';
@@ -148,9 +148,9 @@ suite('DataStore', () => {
const commit = await newCommit('foo', []);
const commitRef = writeValue(commit, commit.type, ms);
const commitRef = writeValue(commit, commit.type, ds);
const datasets = await newMap(['foo', commitRef], types.commitMapType);
const rootRef = writeValue(datasets, datasets.type, ms);
const rootRef = writeValue(datasets, datasets.type, ds);
assert.isTrue(await ms.updateRoot(rootRef, new Ref()));
ds = new DataStore(ms); // refresh the datasets

View File

@@ -19,11 +19,11 @@ import {Kind} from './noms-kind.js';
import {ListLeafSequence, NomsList} from './list.js';
import {MapLeafSequence, NomsMap} from './map.js';
import {NomsSet, SetLeafSequence} from './set.js';
import {readValue} from './read-value.js';
import {registerPackage, Package} from './package.js';
import {suite} from 'mocha';
import type {Value} from './value.js';
import {writeValue} from './encode.js';
import {DataStore} from './data-store.js';
suite('Decode', () => {
function stringToUint8Array(s): Uint8Array {
@@ -36,8 +36,9 @@ suite('Decode', () => {
test('read', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [1, 'hi', true];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
assert.strictEqual(1, r.read());
assert.isFalse(r.atEnd());
@@ -51,9 +52,9 @@ suite('Decode', () => {
test('read type as tag', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
function doTest(expected: Type, a: Array<any>) {
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const tr = r.readTypeAsTag();
assert.isTrue(expected.equals(tr));
}
@@ -71,9 +72,10 @@ suite('Decode', () => {
test('read primitives', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
async function doTest(expected: any, a: Array<any>): Promise<void> {
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assert.deepEqual(expected, v);
}
@@ -101,21 +103,23 @@ suite('Decode', () => {
test('read list of int 32', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.List, Kind.Int32, false, ['0', '1', '2', '3']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsList<int32> = await r.readTopLevelValue();
invariant(v instanceof NomsList);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const l = new NomsList(tr, new ListLeafSequence(ms, tr, [0, 1, 2, 3]));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, [0, 1, 2, 3]));
assert.isTrue(l.equals(v));
});
// TODO: Can't round-trip collections of value types. =-(
test('read list of value', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.List, Kind.Value, false, [Kind.Int32, '1', Kind.String, 'hi', Kind.Bool, true]];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsList<Value> = await r.readTopLevelValue();
invariant(v instanceof NomsList);
@@ -128,34 +132,36 @@ suite('Decode', () => {
test('read value list of int8', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.Value, Kind.List, Kind.Int8, false, ['0', '1', '2']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
invariant(v instanceof NomsList);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int8));
const l = new NomsList(tr, new ListLeafSequence(ms, tr, [0, 1, 2]));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, [0, 1, 2]));
assert.isTrue(l.equals(v));
});
test('read compound list', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const ltr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const r1 = writeValue(new NomsList(ltr, new ListLeafSequence(ms, ltr, [0, 1])), ltr, ms);
const r2 = writeValue(new NomsList(ltr, new ListLeafSequence(ms, ltr, [2, 3])), ltr, ms);
const r3 = writeValue(new NomsList(ltr, new ListLeafSequence(ms, ltr, [4, 5])), ltr, ms);
const r1 = writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0, 1])), ltr, ds);
const r2 = writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [2, 3])), ltr, ds);
const r3 = writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [4, 5])), ltr, ds);
const tuples = [
new MetaTuple(r1, 2),
new MetaTuple(r2, 4),
new MetaTuple(r3, 6),
];
const l:NomsList<int32> = new NomsList(ltr, new IndexedMetaSequence(ms, ltr, tuples));
const l:NomsList<int32> = new NomsList(ltr, new IndexedMetaSequence(ds, ltr, tuples));
invariant(l instanceof NomsList);
const a = [Kind.List, Kind.Int32, true,
[r1.toString(), '2', r2.toString(), '4', r3.toString(), '6']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
invariant(v instanceof NomsList);
assert.isTrue(v.ref.equals(l.ref));
@@ -163,68 +169,73 @@ suite('Decode', () => {
test('read map of int64 to float64', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.Map, Kind.Int64, Kind.Float64, false, ['0', '1', '2', '3']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsMap<int64, float64> = await r.readTopLevelValue();
invariant(v instanceof NomsMap);
const t = makeCompoundType(Kind.Map, makePrimitiveType(Kind.Int64),
makePrimitiveType(Kind.Float64));
const m = new NomsMap(t, new MapLeafSequence(ms, t, [{key: 0, value: 1}, {key: 2, value: 3}]));
const m = new NomsMap(t, new MapLeafSequence(ds, t, [{key: 0, value: 1}, {key: 2, value: 3}]));
assert.isTrue(v.equals(m));
});
test('read map of ref to uint64', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.Map, Kind.Ref, Kind.Value, Kind.Uint64, false,
['sha1-0000000000000000000000000000000000000001', '2',
'sha1-0000000000000000000000000000000000000002', '4']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsMap<Ref, uint64> = await r.readTopLevelValue();
invariant(v instanceof NomsMap);
const t = makeCompoundType(Kind.Map, makeCompoundType(Kind.Ref, makePrimitiveType(Kind.Value)),
makePrimitiveType(Kind.Uint64));
const m = new NomsMap(t,
new MapLeafSequence(ms, t, [{key: new Ref('sha1-0000000000000000000000000000000000000001'),
new MapLeafSequence(ds, t, [{key: new Ref('sha1-0000000000000000000000000000000000000001'),
value: 2}, {key: new Ref('sha1-0000000000000000000000000000000000000002'), value: 4}]));
assert.isTrue(v.equals(m));
});
test('read value map of uint64 to uint32', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.Value, Kind.Map, Kind.Uint64, Kind.Uint32, false, ['0', '1', '2', '3']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsMap<uint64, uint32> = await r.readTopLevelValue();
invariant(v instanceof NomsMap);
const t = makeCompoundType(Kind.Map, makePrimitiveType(Kind.Uint64),
makePrimitiveType(Kind.Uint32));
const m = new NomsMap(t, new MapLeafSequence(ms, t, [{key: 0, value: 1}, {key: 2, value: 3}]));
const m = new NomsMap(t, new MapLeafSequence(ds, t, [{key: 0, value: 1}, {key: 2, value: 3}]));
assert.isTrue(v.equals(m));
});
test('read set of uint8', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.Set, Kind.Uint8, false, ['0', '1', '2', '3']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsSet<uint8> = await r.readTopLevelValue();
invariant(v instanceof NomsSet);
const t = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Uint8));
const s = new NomsSet(t, new SetLeafSequence(ms, t, [0, 1, 2, 3]));
const s = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3]));
assert.isTrue(v.equals(s));
});
test('read value set of uint16', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [Kind.Value, Kind.Set, Kind.Uint16, false, ['0', '1', '2', '3']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsSet<uint16> = await r.readTopLevelValue();
invariant(v instanceof NomsSet);
const t = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Uint16));
const s = new NomsSet(t, new SetLeafSequence(ms, t, [0, 1, 2, 3]));
const s = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3]));
assert.isTrue(v.equals(s));
});
@@ -240,6 +251,7 @@ suite('Decode', () => {
test('test read struct', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeStructType('A1', [
new Field('x', makePrimitiveType(Kind.Int16), false),
new Field('s', makePrimitiveType(Kind.String), false),
@@ -250,7 +262,7 @@ suite('Decode', () => {
registerPackage(pkg);
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', 'hi', true];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assertStruct(v, tr.desc, {
@@ -262,6 +274,7 @@ suite('Decode', () => {
test('test read struct union', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeStructType('A2', [
new Field('x', makePrimitiveType(Kind.Float32), false),
], [
@@ -273,7 +286,7 @@ suite('Decode', () => {
registerPackage(pkg);
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', '1', 'hi'];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assertStruct(v, tr.desc, {
@@ -284,6 +297,7 @@ suite('Decode', () => {
test('test read struct optional', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeStructType('A3', [
new Field('x', makePrimitiveType(Kind.Float32), false),
new Field('s', makePrimitiveType(Kind.String), true),
@@ -294,7 +308,7 @@ suite('Decode', () => {
registerPackage(pkg);
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', false, true, false];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assertStruct(v, tr.desc, {
@@ -305,7 +319,7 @@ suite('Decode', () => {
test('test read struct with list', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const ltr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const tr = makeStructType('A4', [
new Field('b', makePrimitiveType(Kind.Bool), false),
@@ -317,18 +331,19 @@ suite('Decode', () => {
registerPackage(pkg);
const a = [Kind.Unresolved, pkg.ref.toString(), '0', true, false, ['0', '1', '2'], 'hi'];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assertStruct(v, tr.desc, {
b: true,
l: new NomsList(ltr, new ListLeafSequence(ms, ltr, [0, 1, 2])),
l: new NomsList(ltr, new ListLeafSequence(ds, ltr, [0, 1, 2])),
s: 'hi',
});
});
test('test read struct with value', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeStructType('A5', [
new Field('b', makePrimitiveType(Kind.Bool), false),
new Field('v', makePrimitiveType(Kind.Value), false),
@@ -339,7 +354,7 @@ suite('Decode', () => {
registerPackage(pkg);
const a = [Kind.Unresolved, pkg.ref.toString(), '0', true, Kind.Uint8, '42', 'hi'];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assertStruct(v, tr.desc, {
@@ -351,6 +366,7 @@ suite('Decode', () => {
test('test read value struct', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeStructType('A1', [
new Field('x', makePrimitiveType(Kind.Int16), false),
new Field('s', makePrimitiveType(Kind.String), false),
@@ -361,7 +377,7 @@ suite('Decode', () => {
registerPackage(pkg);
const a = [Kind.Value, Kind.Unresolved, pkg.ref.toString(), '0', '42', 'hi', true];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assertStruct(v, tr.desc, {
@@ -373,13 +389,13 @@ suite('Decode', () => {
test('test read enum', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeEnumType('E', ['a', 'b', 'c']);
const pkg = new Package([tr], []);
registerPackage(pkg);
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '1'];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assert.deepEqual(1, v);
@@ -387,13 +403,13 @@ suite('Decode', () => {
test('test read value enum', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeEnumType('E', ['a', 'b', 'c']);
const pkg = new Package([tr], []);
registerPackage(pkg);
const a = [Kind.Value, Kind.Unresolved, pkg.ref.toString(), '0', '1'];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assert.deepEqual(1, v);
@@ -401,6 +417,7 @@ suite('Decode', () => {
test('test read struct with', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeStructType('A1', [
new Field('x', makePrimitiveType(Kind.Int16), false),
new Field('e', makeType(new Ref(), 1), false),
@@ -411,7 +428,7 @@ suite('Decode', () => {
registerPackage(pkg);
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', '1', true];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v = await r.readTopLevelValue();
assertStruct(v, tr.desc, {
@@ -423,6 +440,7 @@ suite('Decode', () => {
test('test read map of string to struct', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeStructType('s', [
new Field('b', makePrimitiveType(Kind.Bool), false),
new Field('i', makePrimitiveType(Kind.Int32), false),
@@ -434,7 +452,7 @@ suite('Decode', () => {
const a = [Kind.Value, Kind.Map, Kind.String, Kind.Unresolved, pkg.ref.toString(), '0', false,
['bar', false, '2', 'baz', false, '1', 'foo', true, '3']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v:NomsMap<string, Struct> = await r.readTopLevelValue();
invariant(v instanceof NomsMap);
@@ -446,34 +464,36 @@ suite('Decode', () => {
test('decodeNomsValue', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const chunk = Chunk.fromString(
`t [${Kind.Value}, ${Kind.Set}, ${Kind.Uint16}, false, ["0", "1", "2", "3"]]`);
const v:NomsSet<uint16> = await decodeNomsValue(chunk, new MemoryStore());
const v:NomsSet<uint16> = await decodeNomsValue(chunk, new DataStore(new MemoryStore()));
const t = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Uint16));
const s:NomsSet<uint16> = new NomsSet(t, new SetLeafSequence(ms, t, [0, 1, 2, 3]));
const s:NomsSet<uint16> = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3]));
assert.isTrue(v.equals(s));
});
test('decodeNomsValue: counter with one commit', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const root = Ref.parse('sha1-c3680a063b73ac42c3075110108a48a91007abf7');
ms.put(Chunk.fromString('t [15,11,16,21,"sha1-7546d804d845125bc42669c7a4c3f3fb909eca29","0",' +
ds.put(Chunk.fromString('t [15,11,16,21,"sha1-7546d804d845125bc42669c7a4c3f3fb909eca29","0",' +
'false,["counter","sha1-a6fffab4e12b49d57f194f0d3add9f6623a13e19"]]')); // root
ms.put(Chunk.fromString('t [22,[19,"Commit",["value",13,false,"parents",17,[16,[21,' +
ds.put(Chunk.fromString('t [22,[19,"Commit",["value",13,false,"parents",17,[16,[21,' +
'"sha1-0000000000000000000000000000000000000000","0"]],false],[]],[]]')); // datas package
ms.put(Chunk.fromString('t [21,"sha1-4da2f91cdbba5a7c91b383091da45e55e16d2152","0",4,"1",' +
ds.put(Chunk.fromString('t [21,"sha1-4da2f91cdbba5a7c91b383091da45e55e16d2152","0",4,"1",' +
'false,[]]')); // commit
const rootMap = await readValue(root, ms);
const rootMap = await ds.readValue(root);
const counterRef = await rootMap.get('counter');
const commit = await readValue(counterRef, ms);
const commit = await ds.readValue(counterRef);
assert.strictEqual(1, await commit.get('value'));
});
test('out of line blob', async () => {
const chunk = Chunk.fromString('b hi');
const blob = await decodeNomsValue(chunk, new MemoryStore());
const blob = await decodeNomsValue(chunk, new DataStore(new MemoryStore()));
const r = await blob.getReader().read();
assert.isFalse(r.done);
assert.equal(2, r.value.byteLength);
@@ -489,7 +509,7 @@ suite('Decode', () => {
}
const chunk2 = new Chunk(data);
const blob2 = await decodeNomsValue(chunk2, new MemoryStore());
const blob2 = await decodeNomsValue(chunk2, new DataStore(new MemoryStore()));
const r2 = await blob2.getReader().read();
assert.isFalse(r2.done);
assert.equal(bytes.length, r2.value.length);
@@ -498,12 +518,13 @@ suite('Decode', () => {
test('inline blob', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const a = [
Kind.List, Kind.Blob, false,
[false, encodeBase64(stringToUint8Array('hello')),
false, encodeBase64(stringToUint8Array('world'))],
];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v: NomsList<NomsBlob> = await r.readTopLevelValue();
invariant(v instanceof NomsList);
@@ -517,12 +538,13 @@ suite('Decode', () => {
test('compound blob', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const r1 = writeValue(await newBlob(stringToUint8Array('hi')), blobType, ms);
const r2 = writeValue(await newBlob(stringToUint8Array('world')), blobType, ms);
const r1 = writeValue(await newBlob(stringToUint8Array('hi')), blobType, ds);
const r2 = writeValue(await newBlob(stringToUint8Array('world')), blobType, ds);
const a = [Kind.Blob, true, [r1.ref.toString(), '2', r2.ref.toString(), '5']];
const r = new JsonArrayReader(a, ms);
const r = new JsonArrayReader(a, ds);
const v: NomsBlob = await r.readTopLevelValue();
invariant(v instanceof NomsBlob);

View File

@@ -4,7 +4,7 @@ import {NomsBlob, BlobLeafSequence} from './blob.js';
import Chunk from './chunk.js';
import Ref from './ref.js';
import Struct from './struct.js';
import type {ChunkStore} from './chunk-store.js';
import type {DataStore} from './data-store.js';
import type {NomsKind} from './noms-kind.js';
import {decode as decodeBase64} from './base64.js';
import {
@@ -24,7 +24,6 @@ import {isPrimitiveKind, Kind} from './noms-kind.js';
import {ListLeafSequence, NomsList} from './list.js';
import {lookupPackage, Package, readPackage} from './package.js';
import {NomsMap, MapLeafSequence} from './map.js';
import {setDecodeNomsValue} from './read-value.js';
import {NomsSet, SetLeafSequence} from './set.js';
import {IndexedMetaSequence} from './meta-sequence.js';
@@ -42,12 +41,12 @@ class UnresolvedPackage {
export class JsonArrayReader {
_a: Array<any>;
_i: number;
_cs: ChunkStore;
_ds: DataStore;
constructor(a: Array<any>, cs: ChunkStore) {
constructor(a: Array<any>, ds: DataStore) {
this._a = a;
this._i = 0;
this._cs = cs;
this._ds = ds;
}
read(): any {
@@ -142,7 +141,7 @@ export class JsonArrayReader {
readBlobLeafSequence(): BlobLeafSequence {
const bytes = decodeBase64(this.readString());
return new BlobLeafSequence(this._cs, bytes);
return new BlobLeafSequence(this._ds, bytes);
}
readSequence(t: Type, pkg: ?Package): Array<any> {
@@ -158,12 +157,12 @@ export class JsonArrayReader {
readListLeafSequence(t: Type, pkg: ?Package): ListLeafSequence {
const seq = this.readSequence(t, pkg);
return new ListLeafSequence(this._cs, t, seq);
return new ListLeafSequence(this._ds, t, seq);
}
readSetLeafSequence(t: Type, pkg: ?Package): SetLeafSequence {
const seq = this.readSequence(t, pkg);
return new SetLeafSequence(this._cs, t, seq);
return new SetLeafSequence(this._ds, t, seq);
}
readMapLeafSequence(t: Type, pkg: ?Package): MapLeafSequence {
@@ -176,7 +175,7 @@ export class JsonArrayReader {
entries.push({key: k, value: v});
}
return new MapLeafSequence(this._cs, t, entries);
return new MapLeafSequence(this._ds, t, entries);
}
readEnum(): number {
@@ -192,17 +191,17 @@ export class JsonArrayReader {
data.push(new MetaTuple(ref, v));
}
return newMetaSequenceFromData(this._cs, t, data);
return newMetaSequenceFromData(this._ds, t, data);
}
readPackage(t: Type, pkg: ?Package): Package {
const r2 = new JsonArrayReader(this.readArray(), this._cs);
const r2 = new JsonArrayReader(this.readArray(), this._ds);
const types = [];
while (!r2.atEnd()) {
types.push(r2.readTypeAsValue(pkg));
}
const r3 = new JsonArrayReader(this.readArray(), this._cs);
const r3 = new JsonArrayReader(this.readArray(), this._ds);
const deps = [];
while (!r3.atEnd()) {
deps.push(r3.readRef());
@@ -222,7 +221,7 @@ export class JsonArrayReader {
resolve(v);
} catch (ex) {
if (ex instanceof UnresolvedPackage) {
readPackage(ex.pkgRef, this._cs).then(() => {
readPackage(ex.pkgRef, this._ds).then(() => {
this._i = i;
doRead();
});
@@ -243,7 +242,7 @@ export class JsonArrayReader {
const isMeta = this.readBool();
let sequence;
if (isMeta) {
const r2 = new JsonArrayReader(this.readArray(), this._cs);
const r2 = new JsonArrayReader(this.readArray(), this._ds);
sequence = r2.readMetaSequence(t, pkg);
invariant(sequence instanceof IndexedMetaSequence);
} else {
@@ -274,7 +273,7 @@ export class JsonArrayReader {
}
case Kind.List: {
const isMeta = this.readBool();
const r2 = new JsonArrayReader(this.readArray(), this._cs);
const r2 = new JsonArrayReader(this.readArray(), this._ds);
const sequence = isMeta ?
r2.readMetaSequence(t, pkg) :
r2.readListLeafSequence(t, pkg);
@@ -282,7 +281,7 @@ export class JsonArrayReader {
}
case Kind.Map: {
const isMeta = this.readBool();
const r2 = new JsonArrayReader(this.readArray(), this._cs);
const r2 = new JsonArrayReader(this.readArray(), this._ds);
const sequence = isMeta ?
r2.readMetaSequence(t, pkg) :
r2.readMapLeafSequence(t, pkg);
@@ -296,7 +295,7 @@ export class JsonArrayReader {
return this.readRef();
case Kind.Set: {
const isMeta = this.readBool();
const r2 = new JsonArrayReader(this.readArray(), this._cs);
const r2 = new JsonArrayReader(this.readArray(), this._ds);
const sequence = isMeta ?
r2.readMetaSequence(t, pkg) :
r2.readSetLeafSequence(t, pkg);
@@ -341,7 +340,7 @@ export class JsonArrayReader {
switch (k) {
case Kind.Enum:
const name = this.readString();
const r2 = new JsonArrayReader(this.readArray(), this._cs);
const r2 = new JsonArrayReader(this.readArray(), this._ds);
const ids = [];
while (!r2.atEnd()) {
ids.push(r2.readString());
@@ -351,7 +350,7 @@ export class JsonArrayReader {
case Kind.Map:
case Kind.Ref:
case Kind.Set: {
const r2 = new JsonArrayReader(this.readArray(), this._cs);
const r2 = new JsonArrayReader(this.readArray(), this._ds);
const elemTypes: Array<Type> = [];
while (!r2.atEnd()) {
elemTypes.push(r2.readTypeAsValue());
@@ -363,7 +362,7 @@ export class JsonArrayReader {
const name = this.readString();
const readFields = () => {
const fields: Array<Field> = [];
const fieldReader = new JsonArrayReader(this.readArray(), this._cs);
const fieldReader = new JsonArrayReader(this.readArray(), this._ds);
while (!fieldReader.atEnd()) {
const fieldName = fieldReader.readString();
const fieldType = fieldReader.readTypeAsValue(pkg);
@@ -428,21 +427,19 @@ export class JsonArrayReader {
}
}
export function decodeNomsValue(chunk: Chunk, cs: ChunkStore): Promise<any> {
export function decodeNomsValue(chunk: Chunk, ds: DataStore): Promise<any> {
const tag = new Chunk(new Uint8Array(chunk.data.buffer, 0, 2)).toString();
switch (tag) {
case typedTag: {
const payload = JSON.parse(new Chunk(new Uint8Array(chunk.data.buffer, 2)).toString());
const reader = new JsonArrayReader(payload, cs);
const reader = new JsonArrayReader(payload, ds);
return reader.readTopLevelValue();
}
case blobTag:
return Promise.resolve(
new NomsBlob(new BlobLeafSequence(cs, new Uint8Array(chunk.data.buffer, 2))));
new NomsBlob(new BlobLeafSequence(ds, new Uint8Array(chunk.data.buffer, 2))));
default:
throw new Error('Not implemented');
}
}
setDecodeNomsValue(decodeNomsValue); // TODO: Avoid cyclic badness with commonjs.

View File

@@ -19,12 +19,14 @@ import {NomsSet, SetLeafSequence} from './set.js';
import {Package, registerPackage} from './package.js';
import {writeValue} from './encode.js';
import {newBlob} from './blob.js';
import {DataStore} from './data-store.js';
suite('Encode', () => {
test('write primitives', () => {
function f(k: NomsKind, v: any, ex: any) {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
w.writeTopLevel(makePrimitiveType(k), v);
assert.deepEqual([k, ex], w.array);
}
@@ -53,7 +55,8 @@ suite('Encode', () => {
test('write simple blob', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const blob = await newBlob(new Uint8Array([0x00, 0x01]));
w.writeTopLevel(makePrimitiveType(Kind.Blob), blob);
assert.deepEqual([Kind.Blob, false, 'AAE='], w.array);
@@ -61,20 +64,22 @@ suite('Encode', () => {
test('write list', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const l = new NomsList(tr, new ListLeafSequence(ms, tr, [0, 1, 2, 3]));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, [0, 1, 2, 3]));
w.writeTopLevel(tr, l);
assert.deepEqual([Kind.List, Kind.Int32, false, ['0', '1', '2', '3']], w.array);
});
test('write list of value', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Value));
const l = new NomsList(tr, new ListLeafSequence(ms, tr, ['0', '1', '2', '3']));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, ['0', '1', '2', '3']));
w.writeTopLevel(tr, l);
assert.deepEqual([Kind.List, Kind.Value, false, [
Kind.String, '0',
@@ -86,13 +91,14 @@ suite('Encode', () => {
test('write list of list', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const it = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int16));
const tr = makeCompoundType(Kind.List, it);
const v = new NomsList(tr, new ListLeafSequence(ms, tr, [
new NomsList(tr, new ListLeafSequence(ms, it, [0])),
new NomsList(tr, new ListLeafSequence(ms, it, [1, 2, 3])),
const v = new NomsList(tr, new ListLeafSequence(ds, tr, [
new NomsList(tr, new ListLeafSequence(ds, it, [0])),
new NomsList(tr, new ListLeafSequence(ds, it, [1, 2, 3])),
]));
w.writeTopLevel(tr, v);
assert.deepEqual([Kind.List, Kind.List, Kind.Int16, false, [false, ['0'], false,
@@ -101,23 +107,25 @@ suite('Encode', () => {
test('write set', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Uint32));
const v = new NomsSet(tr, new SetLeafSequence(ms, tr, [0, 1, 2, 3]));
const v = new NomsSet(tr, new SetLeafSequence(ds, tr, [0, 1, 2, 3]));
w.writeTopLevel(tr, v);
assert.deepEqual([Kind.Set, Kind.Uint32, false, ['0', '1', '2', '3']], w.array);
});
test('write set of set', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const st = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Int32));
const tr = makeCompoundType(Kind.Set, st);
const v = new NomsSet(tr, new SetLeafSequence(ms, tr, [
new NomsSet(tr, new SetLeafSequence(ms, st, [0])),
new NomsSet(tr, new SetLeafSequence(ms, st, [1, 2, 3])),
const v = new NomsSet(tr, new SetLeafSequence(ds, tr, [
new NomsSet(tr, new SetLeafSequence(ds, st, [0])),
new NomsSet(tr, new SetLeafSequence(ds, st, [1, 2, 3])),
]));
w.writeTopLevel(tr, v);
@@ -127,11 +135,12 @@ suite('Encode', () => {
test('write map', async() => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Bool));
const v = new NomsMap(tr, new MapLeafSequence(ms, tr, [{key: 'a', value: false},
const v = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: false},
{key:'b', value:true}]));
w.writeTopLevel(tr, v);
assert.deepEqual([Kind.Map, Kind.String, Kind.Bool, false, ['a', false, 'b', true]], w.array);
@@ -139,24 +148,25 @@ suite('Encode', () => {
test('write map of map', async() => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const kt = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Int64));
const vt = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Bool));
const tr = makeCompoundType(Kind.Map, kt, vt);
const s = new NomsSet(vt, new SetLeafSequence(ms, vt, [true]));
const m1 = new NomsMap(kt, new MapLeafSequence(ms, kt, [{key: 'a', value: 0}]));
const v = new NomsMap(kt, new MapLeafSequence(ms, tr, [{key: m1, value: s}]));
const s = new NomsSet(vt, new SetLeafSequence(ds, vt, [true]));
const m1 = new NomsMap(kt, new MapLeafSequence(ds, kt, [{key: 'a', value: 0}]));
const v = new NomsMap(kt, new MapLeafSequence(ds, tr, [{key: m1, value: s}]));
w.writeTopLevel(tr, v);
assert.deepEqual([Kind.Map, Kind.Map, Kind.String, Kind.Int64, Kind.Set, Kind.Bool, false,
[false, ['a', '0'], false, [true]]], w.array);
});
test('write empty struct', async() => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ms = new MemoryStore();const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const typeDef = makeStructType('S', [], []);
const pkg = new Package([typeDef], []);
@@ -172,7 +182,8 @@ suite('Encode', () => {
test('write struct', async() => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const typeDef = makeStructType('S', [
new Field('x', makePrimitiveType(Kind.Int8), false),
@@ -191,7 +202,8 @@ suite('Encode', () => {
test('write struct optional field', async() => {
const ms = new MemoryStore();
let w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
let w = new JsonArrayWriter(ds);
const typeDef = makeStructType('S', [
new Field('x', makePrimitiveType(Kind.Int8), true),
@@ -207,14 +219,15 @@ suite('Encode', () => {
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', true, '42', true], w.array);
v = new Struct(type, typeDef, {b: true});
w = new JsonArrayWriter(ms);
w = new JsonArrayWriter(ds);
w.writeTopLevel(type, v);
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, true], w.array);
});
test('write struct with union', async() => {
const ms = new MemoryStore();
let w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
let w = new JsonArrayWriter(ds);
const typeDef = makeStructType('S', [
new Field('x', makePrimitiveType(Kind.Int8), false),
@@ -232,14 +245,15 @@ suite('Encode', () => {
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', '1', 'hi'], w.array);
v = new Struct(type, typeDef, {x: 42, b: true});
w = new JsonArrayWriter(ms);
w = new JsonArrayWriter(ds);
w.writeTopLevel(type, v);
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', '0', true], w.array);
});
test('write struct with list', async() => {
const ms = new MemoryStore();
let w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
let w = new JsonArrayWriter(ds);
const ltr = makeCompoundType(Kind.List, makePrimitiveType(Kind.String));
const typeDef = makeStructType('S', [
@@ -251,19 +265,20 @@ suite('Encode', () => {
const type = makeType(pkgRef, 0);
let v = new Struct(type, typeDef, {l: new NomsList(ltr,
new ListLeafSequence(ms, ltr, ['a', 'b']))});
new ListLeafSequence(ds, ltr, ['a', 'b']))});
w.writeTopLevel(type, v);
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, ['a', 'b']], w.array);
v = new Struct(type, typeDef, {l: new NomsList(ltr, new ListLeafSequence(ms, ltr, []))});
w = new JsonArrayWriter(ms);
v = new Struct(type, typeDef, {l: new NomsList(ltr, new ListLeafSequence(ds, ltr, []))});
w = new JsonArrayWriter(ds);
w.writeTopLevel(type, v);
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, []], w.array);
});
test('write struct with struct', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const s2TypeDef = makeStructType('S2', [
new Field('x', makePrimitiveType(Kind.Int32), false),
@@ -285,7 +300,8 @@ suite('Encode', () => {
test('write enum', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const pkg = new Package([makeEnumType('E', ['a', 'b', 'c'])], []);
registerPackage(pkg);
@@ -298,14 +314,15 @@ suite('Encode', () => {
test('write list of enum', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const pkg = new Package([makeEnumType('E', ['a', 'b', 'c'])], []);
registerPackage(pkg);
const pkgRef = pkg.ref;
const typ = makeType(pkgRef, 0);
const listType = makeCompoundType(Kind.List, typ);
const l = new NomsList(listType, new ListLeafSequence(ms, listType, [0, 1, 2]));
const l = new NomsList(listType, new ListLeafSequence(ds, listType, [0, 1, 2]));
w.writeTopLevel(listType, l);
assert.deepEqual([Kind.List, Kind.Unresolved, pkgRef.toString(), '0', false, ['0', '1', '2']],
@@ -314,18 +331,19 @@ suite('Encode', () => {
test('write compound list', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const ltr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const r1 = writeValue(new NomsList(ltr, new ListLeafSequence(ms, ltr, [0, 1])), ltr, ms);
const r2 = writeValue(new NomsList(ltr, new ListLeafSequence(ms, ltr, [2, 3])), ltr, ms);
const r3 = writeValue(new NomsList(ltr, new ListLeafSequence(ms, ltr, [4, 5])), ltr, ms);
const r1 = writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0, 1])), ltr, ds);
const r2 = writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [2, 3])), ltr, ds);
const r3 = writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [4, 5])), ltr, ds);
const tuples = [
new MetaTuple(r1, 2),
new MetaTuple(r2, 4),
new MetaTuple(r3, 6),
];
const l = new NomsList(ltr, new IndexedMetaSequence(ms, ltr, tuples));
const l = new NomsList(ltr, new IndexedMetaSequence(ds, ltr, tuples));
w.writeTopLevel(ltr, l);
assert.deepEqual([Kind.List, Kind.Int32, true, [r1.toString(), '2', r2.toString(), '4',
@@ -334,9 +352,10 @@ suite('Encode', () => {
test('write type value', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const test = (expected: Array<any>, v: Type) => {
const w = new JsonArrayWriter(ms);
const w = new JsonArrayWriter(ds);
w.writeTopLevel(v.type, v);
assert.deepEqual(expected, w.array);
};
@@ -382,9 +401,10 @@ suite('Encode', () => {
}
const ms = new MemoryStore();
const ds = new DataStore(ms);
const blob = await newBlob(stringToUint8Array('hi'));
const chunk = encodeNomsValue(blob, makePrimitiveType(Kind.Blob), ms);
const chunk = encodeNomsValue(blob, makePrimitiveType(Kind.Blob), ds);
assert.equal(4, chunk.data.length);
assert.deepEqual(stringToUint8Array('b hi'), chunk.data);
@@ -398,14 +418,15 @@ suite('Encode', () => {
view.setUint8(2 + i, i);
}
const blob2 = await newBlob(bytes);
const chunk2 = encodeNomsValue(blob2, makePrimitiveType(Kind.Blob), ms);
const chunk2 = encodeNomsValue(blob2, makePrimitiveType(Kind.Blob), ds);
assert.equal(buffer2.byteLength, chunk2.data.buffer.byteLength);
assert.deepEqual(buffer2, chunk2.data.buffer);
});
test('write ref', async () => {
const ms = new MemoryStore();
const w = new JsonArrayWriter(ms);
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const ref = Ref.parse('sha1-0123456789abcdef0123456789abcdef01234567');
const t = makeCompoundType(Kind.Ref, makePrimitiveType(Kind.Blob));
w.writeTopLevel(t, ref);

View File

@@ -3,7 +3,7 @@
import Chunk from './chunk.js';
import Ref from './ref.js';
import Struct from './struct.js';
import type {ChunkStore} from './chunk-store.js';
import type {DataStore} from './data-store.js';
import type {NomsKind} from './noms-kind.js';
import {encode as encodeBase64} from './base64.js';
import {boolType, EnumDesc, makePrimitiveType, stringType, StructDesc, Type} from './type.js';
@@ -22,11 +22,11 @@ const typedTag = 't ';
class JsonArrayWriter {
array: Array<any>;
_cs: ?ChunkStore;
_ds: ?DataStore;
constructor(cs: ?ChunkStore) {
constructor(ds: ?DataStore) {
this.array = [];
this._cs = cs;
this._ds = ds;
}
write(v: any) {
@@ -78,8 +78,8 @@ class JsonArrayWriter {
this.writeInt(t.ordinal);
const pkg = lookupPackage(pkgRef);
if (pkg && this._cs) {
writeValue(pkg, pkg.type, this._cs);
if (pkg && this._ds) {
writeValue(pkg, pkg.type, this._ds);
}
break;
}
@@ -98,14 +98,14 @@ class JsonArrayWriter {
}
this.write(true);
const w2 = new JsonArrayWriter(this._cs);
const w2 = new JsonArrayWriter(this._ds);
const indexType = indexTypeForMetaSequence(t);
for (let i = 0; i < v.items.length; i++) {
const tuple = v.items[i];
invariant(tuple instanceof MetaTuple);
if (tuple.sequence && this._cs) {
if (tuple.sequence && this._ds) {
const child = tuple.sequence;
writeValue(child, child.type, this._cs);
writeValue(child, child.type, this._ds);
}
w2.writeRef(tuple.ref);
w2.writeValue(tuple.value, indexType, pkg);
@@ -154,7 +154,7 @@ class JsonArrayWriter {
}
invariant(sequence instanceof ListLeafSequence);
const w2 = new JsonArrayWriter(this._cs);
const w2 = new JsonArrayWriter(this._ds);
const elemType = t.elemTypes[0];
sequence.items.forEach(sv => w2.writeValue(sv, elemType, pkg));
this.write(w2.array);
@@ -169,7 +169,7 @@ class JsonArrayWriter {
}
invariant(sequence instanceof MapLeafSequence);
const w2 = new JsonArrayWriter(this._cs);
const w2 = new JsonArrayWriter(this._ds);
const keyType = t.elemTypes[0];
const valueType = t.elemTypes[1];
sequence.items.forEach(entry => {
@@ -182,10 +182,10 @@ class JsonArrayWriter {
case Kind.Package: {
invariant(v instanceof Package);
const ptr = makePrimitiveType(Kind.Type);
const w2 = new JsonArrayWriter(this._cs);
const w2 = new JsonArrayWriter(this._ds);
v.types.forEach(type => w2.writeValue(type, ptr, pkg));
this.write(w2.array);
const w3 = new JsonArrayWriter(this._cs);
const w3 = new JsonArrayWriter(this._ds);
v.dependencies.forEach(ref => w3.writeRef(ref));
this.write(w3.array);
break;
@@ -206,7 +206,7 @@ class JsonArrayWriter {
}
invariant(sequence instanceof SetLeafSequence);
const w2 = new JsonArrayWriter(this._cs);
const w2 = new JsonArrayWriter(this._ds);
const elemType = t.elemTypes[0];
const elems = [];
sequence.items.forEach(v => {
@@ -248,7 +248,7 @@ class JsonArrayWriter {
const desc = t.desc;
invariant(desc instanceof EnumDesc);
this.write(t.name);
const w2 = new JsonArrayWriter(this._cs);
const w2 = new JsonArrayWriter(this._ds);
for (let i = 0; i < desc.ids.length; i++) {
w2.write(desc.ids[i]);
}
@@ -258,7 +258,7 @@ class JsonArrayWriter {
case Kind.Map:
case Kind.Ref:
case Kind.Set: {
const w2 = new JsonArrayWriter(this._cs);
const w2 = new JsonArrayWriter(this._ds);
t.elemTypes.forEach(elem => w2.writeTypeAsValue(elem));
this.write(w2.array);
break;
@@ -267,14 +267,14 @@ class JsonArrayWriter {
const desc = t.desc;
invariant(desc instanceof StructDesc);
this.write(t.name);
const fieldWriter = new JsonArrayWriter(this._cs);
const fieldWriter = new JsonArrayWriter(this._ds);
desc.fields.forEach(field => {
fieldWriter.write(field.name);
fieldWriter.writeTypeAsValue(field.t);
fieldWriter.write(field.optional);
});
this.write(fieldWriter.array);
const choiceWriter = new JsonArrayWriter(this._cs);
const choiceWriter = new JsonArrayWriter(this._ds);
desc.union.forEach(choice => {
choiceWriter.write(choice.name);
choiceWriter.writeTypeAsValue(choice.t);
@@ -294,8 +294,8 @@ class JsonArrayWriter {
}
const pkg = lookupPackage(pkgRef);
if (pkg && this._cs) {
writeValue(pkg, pkg.type, this._cs);
if (pkg && this._ds) {
writeValue(pkg, pkg.type, this._ds);
}
break;
@@ -376,14 +376,14 @@ function getTypeOfValue(v: any): Type {
}
}
function encodeEmbeddedNomsValue(v: any, t: Type, cs: ?ChunkStore): Chunk {
function encodeEmbeddedNomsValue(v: any, t: Type, ds: ?DataStore): Chunk {
if (v instanceof Package) {
// if (v.dependencies.length > 0) {
// throw new Error('Not implemented');
// }
}
const w = new JsonArrayWriter(cs);
const w = new JsonArrayWriter(ds);
w.writeTopLevel(t, v);
return Chunk.fromString(typedTag + JSON.stringify(w.array));
}
@@ -401,7 +401,7 @@ function encodeTopLevelBlob(sequence: BlobLeafSequence): Chunk {
return new Chunk(data);
}
function encodeNomsValue(v: any, t: Type, cs: ?ChunkStore): Chunk {
export function encodeNomsValue(v: any, t: Type, ds: ?DataStore): Chunk {
if (t.kind === Kind.Blob) {
invariant(v instanceof NomsBlob || v instanceof Sequence);
const sequence: BlobLeafSequence = v instanceof NomsBlob ? v.sequence : v;
@@ -409,16 +409,16 @@ function encodeNomsValue(v: any, t: Type, cs: ?ChunkStore): Chunk {
return encodeTopLevelBlob(sequence);
}
}
return encodeEmbeddedNomsValue(v, t, cs);
return encodeEmbeddedNomsValue(v, t, ds);
}
function writeValue(v: any, t: Type, cs: ChunkStore): Ref {
const chunk = encodeNomsValue(v, t, cs);
export function writeValue(v: any, t: Type, ds: DataStore): Ref {
const chunk = encodeNomsValue(v, t, ds);
invariant(!chunk.isEmpty());
cs.put(chunk);
ds.put(chunk);
return chunk.ref;
}
export {encodeNomsValue, JsonArrayWriter, writeValue};
export {JsonArrayWriter};
setEncodeNomsValue(encodeNomsValue);

View File

@@ -2,11 +2,11 @@
import Chunk from './chunk.js';
import Ref from './ref.js';
import type {ChunkStore} from './chunk-store.js';
import {invariant, notNull} from './assert.js';
import {Type} from './type.js';
import type {DataStore} from './data-store.js';
type encodeFn = (v: any, t: Type, cs: ?ChunkStore) => Chunk;
type encodeFn = (v: any, t: Type, ds: ?DataStore) => Chunk;
let encodeNomsValue: ?encodeFn = null;
export function getRefOfValueOrPrimitive(v: any, t: ?Type): Ref {

View File

@@ -12,8 +12,8 @@ import {invariant} from './assert.js';
import {Kind} from './noms-kind.js';
import {ListLeafSequence, newList, NomsList} from './list.js';
import {makeCompoundType, makePrimitiveType} from './type.js';
import {readValue} from './read-value.js';
import {writeValue} from './encode.js';
import {DataStore} from './data-store.js';
const testListSize = 5000;
const listOfNRef = 'sha1-11e947e8aacfda8e9052bb57e661da442b26c625';
@@ -125,12 +125,13 @@ suite('BuildList', () => {
test('write, read, modify, read', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const nums = firstNNumbers(testListSize);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int64));
const s = await newList(nums, tr);
const r = writeValue(s, tr, ms);
const s2 = await readValue(r, ms);
const r = writeValue(s, tr, ds);
const s2 = await ds.readValue(r);
const outNums = await s2.toJS();
assert.deepEqual(nums, outNums);
@@ -145,16 +146,18 @@ suite('BuildList', () => {
suite('ListLeafSequence', () => {
test('isEmpty', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.String));
const newList = items => new NomsList(tr, new ListLeafSequence(ms, tr, items));
const newList = items => new NomsList(tr, new ListLeafSequence(ds, tr, items));
assert.isTrue(newList([]).isEmpty());
assert.isFalse(newList(['z', 'x', 'a', 'b']).isEmpty());
});
test('get', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.String));
const l = new NomsList(tr, new ListLeafSequence(ms, tr, ['z', 'x', 'a', 'b']));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, ['z', 'x', 'a', 'b']));
assert.strictEqual('z', await l.get(0));
assert.strictEqual('x', await l.get(1));
assert.strictEqual('a', await l.get(2));
@@ -163,8 +166,9 @@ suite('ListLeafSequence', () => {
test('forEach', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const l = new NomsList(tr, new ListLeafSequence(ms, tr, [4, 2, 10, 16]));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, [4, 2, 10, 16]));
const values = [];
await l.forEach((v, i) => { values.push(v, i); });
@@ -173,10 +177,11 @@ suite('ListLeafSequence', () => {
test('iterator', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const test = async items => {
const l = new NomsList(tr, new ListLeafSequence(ms, tr, items));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, items));
assert.deepEqual(items, await flatten(l.iterator()));
assert.deepEqual(items, await flattenParallel(l.iterator(), items.length));
};
@@ -188,10 +193,11 @@ suite('ListLeafSequence', () => {
test('iteratorAt', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Int32));
const test = async items => {
const l = new NomsList(tr, new ListLeafSequence(ms, tr, items));
const l = new NomsList(tr, new ListLeafSequence(ds, tr, items));
for (let i = 0; i <= items.length; i++) {
const slice = items.slice(i);
assert.deepEqual(slice, await flatten(l.iteratorAt(i)));
@@ -206,12 +212,13 @@ suite('ListLeafSequence', () => {
test('chunks', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.Value));
const st = makePrimitiveType(Kind.String);
const r1 = writeValue('x', st, ms);
const r2 = writeValue('a', st, ms);
const r3 = writeValue('b', st, ms);
const l = new NomsList(tr, new ListLeafSequence(ms, tr, ['z', r1, r2, r3]));
const r1 = writeValue('x', st, ds);
const r2 = writeValue('a', st, ds);
const r3 = writeValue('b', st, ds);
const l = new NomsList(tr, new ListLeafSequence(ds, tr, ['z', r1, r2, r3]));
assert.strictEqual(3, l.chunks.length);
assert.isTrue(r1.equals(l.chunks[0]));
assert.isTrue(r2.equals(l.chunks[1]));
@@ -222,24 +229,25 @@ suite('ListLeafSequence', () => {
suite('CompoundList', () => {
function build(): NomsList {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, makePrimitiveType(Kind.String));
const l1 = new NomsList(tr, new ListLeafSequence(ms, tr, ['a', 'b']));
const r1 = writeValue(l1, tr, ms);
const l2 = new NomsList(tr, new ListLeafSequence(ms, tr, ['e', 'f']));
const r2 = writeValue(l2, tr, ms);
const l3 = new NomsList(tr, new ListLeafSequence(ms, tr, ['h', 'i']));
const r3 = writeValue(l3, tr, ms);
const l4 = new NomsList(tr, new ListLeafSequence(ms, tr, ['m', 'n']));
const r4 = writeValue(l4, tr, ms);
const l1 = new NomsList(tr, new ListLeafSequence(ds, tr, ['a', 'b']));
const r1 = writeValue(l1, tr, ds);
const l2 = new NomsList(tr, new ListLeafSequence(ds, tr, ['e', 'f']));
const r2 = writeValue(l2, tr, ds);
const l3 = new NomsList(tr, new ListLeafSequence(ds, tr, ['h', 'i']));
const r3 = writeValue(l3, tr, ds);
const l4 = new NomsList(tr, new ListLeafSequence(ds, tr, ['m', 'n']));
const r4 = writeValue(l4, tr, ds);
const m1 = new NomsList(tr, new IndexedMetaSequence(ms, tr, [new MetaTuple(r1, 2),
const m1 = new NomsList(tr, new IndexedMetaSequence(ds, tr, [new MetaTuple(r1, 2),
new MetaTuple(r2, 2)]));
const rm1 = writeValue(m1, tr, ms);
const m2 = new NomsList(tr, new IndexedMetaSequence(ms, tr, [new MetaTuple(r3, 2),
const rm1 = writeValue(m1, tr, ds);
const m2 = new NomsList(tr, new IndexedMetaSequence(ds, tr, [new MetaTuple(r3, 2),
new MetaTuple(r4, 2)]));
const rm2 = writeValue(m2, tr, ms);
const rm2 = writeValue(m2, tr, ds);
const l = new NomsList(tr, new IndexedMetaSequence(ms, tr, [new MetaTuple(rm1, 4),
const l = new NomsList(tr, new IndexedMetaSequence(ds, tr, [new MetaTuple(rm1, 4),
new MetaTuple(rm2, 4)]));
return l;
}

View File

@@ -2,7 +2,7 @@
import BuzHashBoundaryChecker from './buzhash-boundary-checker.js';
import type {BoundaryChecker, makeChunkFn} from './sequence-chunker.js';
import type {ChunkStore} from './chunk-store.js';
import type {DataStore} from './data-store.js';
import type {Splice} from './edit-distance.js';
import type {valueOrPrimitive} from './value.js'; // eslint-disable-line no-unused-vars
import {AsyncIterator} from './async-iterator.js';
@@ -19,9 +19,9 @@ import {Type} from './type.js';
const listWindowSize = 64;
const listPattern = ((1 << 6) | 0) - 1;
function newListLeafChunkFn<T: valueOrPrimitive>(t: Type, cs: ?ChunkStore = null): makeChunkFn {
function newListLeafChunkFn<T: valueOrPrimitive>(t: Type, ds: ?DataStore = null): makeChunkFn {
return (items: Array<T>) => {
const listLeaf = new ListLeafSequence(cs, t, items);
const listLeaf = new ListLeafSequence(ds, t, items);
const mt = new MetaTuple(listLeaf, items.length);
return [mt, listLeaf];
};
@@ -51,10 +51,10 @@ export class NomsList<T: valueOrPrimitive> extends Collection<IndexedSequence> {
async splice(idx: number, insert: Array<T>, remove: number): Promise<NomsList<T>> {
const cursor = await this.sequence.newCursorAt(idx);
const cs = this.sequence.cs;
const ds = this.sequence.ds;
const type = this.type;
const seq = await chunkSequence(cursor, insert, remove, newListLeafChunkFn(type, cs),
newIndexedMetaSequenceChunkFn(type, cs),
const seq = await chunkSequence(cursor, insert, remove, newListLeafChunkFn(type, ds),
newIndexedMetaSequenceChunkFn(type, ds),
newListLeafBoundaryChecker(type),
newIndexedMetaSequenceBoundaryChecker);
invariant(seq instanceof IndexedSequence);

View File

@@ -5,16 +5,14 @@ import {suite} from 'mocha';
import MemoryStore from './memory-store.js';
import test from './async-test.js';
import type {ChunkStore} from './chunk-store.js';
import {invariant} from './assert.js';
import {Kind} from './noms-kind.js';
import {flatten, flattenParallel} from './test-util.js';
import {makeCompoundType, makePrimitiveType} from './type.js';
import {MapLeafSequence, newMap, NomsMap} from './map.js';
import {MetaTuple, OrderedMetaSequence} from './meta-sequence.js';
import {readValue} from './read-value.js';
import {writeValue} from './encode.js';
import {DataStore} from './data-store';
const testMapSize = 5000;
const mapOfNRef = 'sha1-1b9664e55091370996f3af428ffee78f1ad36426';
@@ -93,6 +91,7 @@ suite('BuildMap', () => {
test('write, read, modify, read', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const kvs = [];
for (let i = 0; i < testMapSize; i++) {
@@ -103,8 +102,8 @@ suite('BuildMap', () => {
makePrimitiveType(Kind.Int64));
const m = await newMap(kvs, tr);
const r = writeValue(m, tr, ms);
const m2 = await readValue(r, ms);
const r = writeValue(m, tr, ds);
const m2 = await ds.readValue(r);
const outKvs = [];
await m2.forEach((v, k) => outKvs.push(k, v));
assert.deepEqual(kvs, outKvs);
@@ -121,19 +120,21 @@ suite('BuildMap', () => {
suite('MapLeaf', () => {
test('isEmpty', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Bool));
const newMap = entries => new NomsMap(tr, new MapLeafSequence(ms, tr, entries));
const newMap = entries => new NomsMap(tr, new MapLeafSequence(ds, tr, entries));
assert.isTrue(newMap([]).isEmpty());
assert.isFalse(newMap([{key: 'a', value: false}, {key:'k', value:true}]).isEmpty());
});
test('has', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Bool));
const m = new NomsMap(tr,
new MapLeafSequence(ms, tr, [{key: 'a', value: false}, {key:'k', value:true}]));
new MapLeafSequence(ds, tr, [{key: 'a', value: false}, {key:'k', value:true}]));
assert.isTrue(await m.has('a'));
assert.isFalse(await m.has('b'));
assert.isTrue(await m.has('k'));
@@ -142,10 +143,11 @@ suite('MapLeaf', () => {
test('first/last/get', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Int32));
const m = new NomsMap(tr,
new MapLeafSequence(ms, tr, [{key: 'a', value: 4}, {key:'k', value:8}]));
new MapLeafSequence(ds, tr, [{key: 'a', value: 4}, {key:'k', value:8}]));
assert.deepEqual(['a', 4], await m.first());
assert.deepEqual(['k', 8], await m.last());
@@ -158,10 +160,11 @@ suite('MapLeaf', () => {
test('forEach', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Int32));
const m = new NomsMap(tr,
new MapLeafSequence(ms, tr, [{key: 'a', value: 4}, {key:'k', value:8}]));
new MapLeafSequence(ds, tr, [{key: 'a', value: 4}, {key:'k', value:8}]));
const kv = [];
await m.forEach((v, k) => { kv.push(k, v); });
@@ -170,11 +173,12 @@ suite('MapLeaf', () => {
test('iterator', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Int32));
const test = async entries => {
const m = new NomsMap(tr, new MapLeafSequence(ms, tr, entries));
const m = new NomsMap(tr, new MapLeafSequence(ds, tr, entries));
assert.deepEqual(entries, await flatten(m.iterator()));
assert.deepEqual(entries, await flattenParallel(m.iterator(), entries.length));
};
@@ -186,9 +190,10 @@ suite('MapLeaf', () => {
test('iteratorAt', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Int32));
const build = entries => new NomsMap(tr, new MapLeafSequence(ms, tr, entries));
const build = entries => new NomsMap(tr, new MapLeafSequence(ds, tr, entries));
assert.deepEqual([], await flatten(build([]).iteratorAt('a')));
@@ -211,15 +216,16 @@ suite('MapLeaf', () => {
test('chunks', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map,
makePrimitiveType(Kind.Value), makePrimitiveType(Kind.Value));
const st = makePrimitiveType(Kind.String);
const r1 = writeValue('x', st, ms);
const r2 = writeValue('a', st, ms);
const r3 = writeValue('b', st, ms);
const r4 = writeValue('c', st, ms);
const r1 = writeValue('x', st, ds);
const r2 = writeValue('a', st, ds);
const r3 = writeValue('b', st, ds);
const r4 = writeValue('c', st, ds);
const m = new NomsMap(tr,
new MapLeafSequence(ms, tr, [{key: r1, value: r2}, {key: r3, value: r4}]));
new MapLeafSequence(ds, tr, [{key: r1, value: r2}, {key: r3, value: r4}]));
assert.strictEqual(4, m.chunks.length);
assert.isTrue(r1.equals(m.chunks[0]));
assert.isTrue(r2.equals(m.chunks[1]));
@@ -229,43 +235,45 @@ suite('MapLeaf', () => {
});
suite('CompoundMap', () => {
function build(cs: ChunkStore): Array<NomsMap> {
function build(ds: DataStore): Array<NomsMap> {
const tr = makeCompoundType(Kind.Map, makePrimitiveType(Kind.String),
makePrimitiveType(Kind.Bool));
const l1 = new NomsMap(tr, new MapLeafSequence(cs, tr, [{key: 'a', value: false},
const l1 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: false},
{key:'b', value:false}]));
const r1 = writeValue(l1, tr, cs);
const l2 = new NomsMap(tr, new MapLeafSequence(cs, tr, [{key: 'e', value: true},
const r1 = writeValue(l1, tr, ds);
const l2 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'e', value: true},
{key:'f', value:true}]));
const r2 = writeValue(l2, tr, cs);
const l3 = new NomsMap(tr, new MapLeafSequence(cs, tr, [{key: 'h', value: false},
const r2 = writeValue(l2, tr, ds);
const l3 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'h', value: false},
{key:'i', value:true}]));
const r3 = writeValue(l3, tr, cs);
const l4 = new NomsMap(tr, new MapLeafSequence(cs, tr, [{key: 'm', value: true},
const r3 = writeValue(l3, tr, ds);
const l4 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'm', value: true},
{key:'n', value:false}]));
const r4 = writeValue(l4, tr, cs);
const r4 = writeValue(l4, tr, ds);
const m1 = new NomsMap(tr, new OrderedMetaSequence(cs, tr, [new MetaTuple(r1, 'b'),
const m1 = new NomsMap(tr, new OrderedMetaSequence(ds, tr, [new MetaTuple(r1, 'b'),
new MetaTuple(r2, 'f')]));
const rm1 = writeValue(m1, tr, cs);
const m2 = new NomsMap(tr, new OrderedMetaSequence(cs, tr, [new MetaTuple(r3, 'i'),
const rm1 = writeValue(m1, tr, ds);
const m2 = new NomsMap(tr, new OrderedMetaSequence(ds, tr, [new MetaTuple(r3, 'i'),
new MetaTuple(r4, 'n')]));
const rm2 = writeValue(m2, tr, cs);
const rm2 = writeValue(m2, tr, ds);
const c = new NomsMap(tr, new OrderedMetaSequence(cs, tr, [new MetaTuple(rm1, 'f'),
const c = new NomsMap(tr, new OrderedMetaSequence(ds, tr, [new MetaTuple(rm1, 'f'),
new MetaTuple(rm2, 'n')]));
return [c, m1, m2];
}
test('isEmpty', () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
assert.isFalse(c.isEmpty());
});
test('get', async () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
assert.strictEqual(false, await c.get('a'));
assert.strictEqual(false, await c.get('b'));
@@ -285,7 +293,8 @@ suite('CompoundMap', () => {
test('first/last/has', async () => {
const ms = new MemoryStore();
const [c, m1, m2] = build(ms);
const ds = new DataStore(ms);
const [c, m1, m2] = build(ds);
assert.deepEqual(['a', false], await c.first());
assert.deepEqual(['n', false], await c.last());
@@ -312,7 +321,8 @@ suite('CompoundMap', () => {
test('forEach', async () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
const kv = [];
await c.forEach((v, k) => { kv.push(k, v); });
@@ -322,7 +332,8 @@ suite('CompoundMap', () => {
test('iterator', async () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
const expected = [{key: 'a', value: false}, {key: 'b', value: false}, {key: 'e', value: true},
{key: 'f', value: true}, {key: 'h', value: false}, {key: 'i', value: true},
{key: 'm', value: true}, {key: 'n', value: false}];
@@ -332,7 +343,8 @@ suite('CompoundMap', () => {
test('iteratorAt', async () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
const entries = [{key: 'a', value: false}, {key: 'b', value: false}, {key: 'e', value: true},
{key: 'f', value: true}, {key: 'h', value: false}, {key: 'i', value: true},
{key: 'm', value: true}, {key: 'n', value: false}];
@@ -356,7 +368,8 @@ suite('CompoundMap', () => {
test('iterator return', async () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
const iter = c.iterator();
const values = [];
for (let res = await iter.next(); !res.done; res = await iter.next()) {
@@ -372,7 +385,8 @@ suite('CompoundMap', () => {
test('iterator return parallel', async () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
const iter = c.iterator();
const values = await Promise.all([iter.next(), iter.next(), iter.return(), iter.next()]);
assert.deepEqual([{done: false, value: {key: 'a', value: false}},
@@ -383,7 +397,8 @@ suite('CompoundMap', () => {
test('chunks', () => {
const ms = new MemoryStore();
const [c] = build(ms);
const ds = new DataStore(ms);
const [c] = build(ds);
assert.strictEqual(2, c.chunks.length);
});
});

View File

@@ -2,7 +2,6 @@
import BuzHashBoundaryChecker from './buzhash-boundary-checker.js';
import type {BoundaryChecker, makeChunkFn} from './sequence-chunker.js';
import type {ChunkStore} from './chunk-store.js';
import type {valueOrPrimitive} from './value.js'; // eslint-disable-line no-unused-vars
import {AsyncIterator} from './async-iterator.js';
import {chunkSequence} from './sequence-chunker.js';
@@ -18,6 +17,7 @@ import {MetaTuple, newOrderedMetaSequenceBoundaryChecker,
import {OrderedSequence, OrderedSequenceCursor,
OrderedSequenceIterator} from './ordered-sequence.js';
import {Type} from './type.js';
import type {DataStore} from './data-store.js';
export type MapEntry<K: valueOrPrimitive, V: valueOrPrimitive> = {
key: K,
@@ -27,9 +27,9 @@ export type MapEntry<K: valueOrPrimitive, V: valueOrPrimitive> = {
const mapWindowSize = 1;
const mapPattern = ((1 << 6) | 0) - 1;
function newMapLeafChunkFn(t: Type, cs: ?ChunkStore = null): makeChunkFn {
function newMapLeafChunkFn(t: Type, ds: ?DataStore = null): makeChunkFn {
return (items: Array<MapEntry>) => {
const mapLeaf = new MapLeafSequence(cs, t, items);
const mapLeaf = new MapLeafSequence(ds, t, items);
let indexValue: ?(MapEntry | Ref) = null;
if (items.length > 0) {
@@ -145,9 +145,9 @@ export class NomsMap<K: valueOrPrimitive, V: valueOrPrimitive> extends Collectio
async _splice(cursor: OrderedSequenceCursor, insert: Array<MapEntry>, remove: number):
Promise<NomsMap<K, V>> {
const type = this.type;
const cs = this.sequence.cs;
const seq = await chunkSequence(cursor, insert, remove, newMapLeafChunkFn(type, cs),
newOrderedMetaSequenceChunkFn(type, cs),
const ds = this.sequence.ds;
const seq = await chunkSequence(cursor, insert, remove, newMapLeafChunkFn(type, ds),
newOrderedMetaSequenceChunkFn(type, ds),
newMapLeafBoundaryChecker(type),
newOrderedMetaSequenceBoundaryChecker);
invariant(seq instanceof OrderedSequence);

View File

@@ -3,7 +3,7 @@
import BuzHashBoundaryChecker from './buzhash-boundary-checker.js';
import {default as Ref, sha1Size} from './ref.js';
import type {BoundaryChecker, makeChunkFn} from './sequence-chunker.js';
import type {ChunkStore} from './chunk-store.js';
import type {DataStore} from './data-store.js';
import type {valueOrPrimitive} from './value.js'; // eslint-disable-line no-unused-vars
import {Collection} from './collection.js';
import {CompoundDesc, makeCompoundType, makePrimitiveType, Type} from './type.js';
@@ -11,7 +11,6 @@ import {IndexedSequence} from './indexed-sequence.js';
import {invariant} from './assert.js';
import {Kind} from './noms-kind.js';
import {OrderedSequence} from './ordered-sequence.js';
import {readValue} from './read-value.js';
import {Sequence} from './sequence.js';
export type MetaSequence = Sequence<MetaTuple>;
@@ -33,13 +32,13 @@ export class MetaTuple<K> {
return this._sequenceOrRef instanceof Sequence ? this._sequenceOrRef : null;
}
getSequence(cs: ?ChunkStore): Promise<Sequence> {
getSequence(ds: ?DataStore): Promise<Sequence> {
if (this._sequenceOrRef instanceof Sequence) {
return Promise.resolve(this._sequenceOrRef);
} else {
const ref = this._sequenceOrRef;
invariant(cs && ref instanceof Ref);
return readValue(ref, cs).then((c: Collection) => c.sequence);
invariant(ds && ref instanceof Ref);
return ds.readValue(ref).then((c: Collection) => c.sequence);
}
}
}
@@ -47,8 +46,8 @@ export class MetaTuple<K> {
export class IndexedMetaSequence extends IndexedSequence<MetaTuple<number>> {
offsets: Array<number>;
constructor(cs: ?ChunkStore, type: Type, items: Array<MetaTuple<number>>) {
super(cs, type, items);
constructor(ds: ?DataStore, type: Type, items: Array<MetaTuple<number>>) {
super(ds, type, items);
this.offsets = [];
let cum = 0;
for (let i = 0; i < items.length; i++) {
@@ -94,7 +93,7 @@ export class IndexedMetaSequence extends IndexedSequence<MetaTuple<number>> {
}
const mt = this.items[idx];
return mt.getSequence(this.cs);
return mt.getSequence(this.ds);
}
// Returns the sequences pointed to by all items[i], s.t. start <= i < end, and returns the
@@ -103,14 +102,14 @@ export class IndexedMetaSequence extends IndexedSequence<MetaTuple<number>> {
Promise<IndexedSequence> {
const childrenP = [];
for (let i = start; i < start + length; i++) {
childrenP.push(this.items[i].getSequence(this.cs));
childrenP.push(this.items[i].getSequence(this.ds));
}
return Promise.all(childrenP).then(children => {
const items = [];
children.forEach(child => items.push(...child.items));
return children[0].isMeta ? new IndexedMetaSequence(this.cs, this.type, items)
: new IndexedSequence(this.cs, this.type, items);
return children[0].isMeta ? new IndexedMetaSequence(this.ds, this.type, items)
: new IndexedSequence(this.ds, this.type, items);
});
}
@@ -130,7 +129,7 @@ export class OrderedMetaSequence<K: valueOrPrimitive> extends OrderedSequence<K,
}
const mt = this.items[idx];
return mt.getSequence(this.cs);
return mt.getSequence(this.ds);
}
getKey(idx: number): K {
@@ -138,15 +137,15 @@ export class OrderedMetaSequence<K: valueOrPrimitive> extends OrderedSequence<K,
}
}
export function newMetaSequenceFromData(cs: ChunkStore, type: Type, tuples: Array<MetaTuple>):
export function newMetaSequenceFromData(ds: DataStore, type: Type, tuples: Array<MetaTuple>):
MetaSequence {
switch (type.kind) {
case Kind.Map:
case Kind.Set:
return new OrderedMetaSequence(cs, type, tuples);
return new OrderedMetaSequence(ds, type, tuples);
case Kind.Blob:
case Kind.List:
return new IndexedMetaSequence(cs, type, tuples);
return new IndexedMetaSequence(ds, type, tuples);
default:
throw new Error('Not reached');
}
@@ -175,9 +174,9 @@ export function indexTypeForMetaSequence(t: Type): Type {
throw new Error('Not reached');
}
export function newOrderedMetaSequenceChunkFn(t: Type, cs: ?ChunkStore = null): makeChunkFn {
export function newOrderedMetaSequenceChunkFn(t: Type, ds: ?DataStore = null): makeChunkFn {
return (tuples: Array<MetaTuple>) => {
const meta = new OrderedMetaSequence(cs, t, tuples);
const meta = new OrderedMetaSequence(ds, t, tuples);
const lastValue = tuples[tuples.length - 1].value;
return [new MetaTuple(meta, lastValue), meta];
};
@@ -193,10 +192,10 @@ export function newOrderedMetaSequenceBoundaryChecker(): BoundaryChecker<MetaTup
);
}
export function newIndexedMetaSequenceChunkFn(t: Type, cs: ?ChunkStore = null): makeChunkFn {
export function newIndexedMetaSequenceChunkFn(t: Type, ds: ?DataStore = null): makeChunkFn {
return (tuples: Array<MetaTuple>) => {
const sum = tuples.reduce((l, mt) => l + mt.value, 0);
const meta = new IndexedMetaSequence(cs, t, tuples);
const meta = new IndexedMetaSequence(ds, t, tuples);
return [new MetaTuple(meta, sum), meta];
};
}

View File

@@ -1,7 +1,7 @@
// @flow
export {AsyncIterator} from './async-iterator.js';
export {DataStore, newCommit} from './datastore.js';
export {DataStore, newCommit} from './data-store.js';
export {NomsBlob, BlobReader} from './blob.js';
export {decodeNomsValue} from './decode.js';
export {default as Chunk} from './chunk.js';
@@ -17,7 +17,6 @@ export {newList, ListLeafSequence, NomsList} from './list.js';
export {newMap, NomsMap, MapLeafSequence} from './map.js';
export {newSet, NomsSet, SetLeafSequence} from './set.js';
export {OrderedMetaSequence, IndexedMetaSequence} from './meta-sequence.js';
export {readValue} from './read-value.js';
export {SPLICE_AT, SPLICE_REMOVED, SPLICE_ADDED, SPLICE_FROM} from './edit-distance.js';
export {
boolType,

View File

@@ -1,13 +1,12 @@
// @flow
import Ref from './ref.js';
import type {ChunkStore} from './chunk-store.js';
import {invariant} from './assert.js';
import {packageType, Type} from './type.js';
import {readValue} from './read-value.js';
import {ValueBase} from './value.js';
import type {DataStore} from './data-store.js';
class Package extends ValueBase {
export class Package extends ValueBase {
types: Array<Type>;
dependencies: Array<Ref>;
@@ -31,30 +30,28 @@ class Package extends ValueBase {
const packageRegistry: { [key: string]: Package } = Object.create(null);
function lookupPackage(r: Ref): ?Package {
export function lookupPackage(r: Ref): ?Package {
return packageRegistry[r.toString()];
}
// TODO: Compute ref rather than setting
function registerPackage(p: Package) {
export function registerPackage(p: Package) {
packageRegistry[p.ref.toString()] = p;
}
const pendingPackages: { [key: string]: Promise<Package> } = Object.create(null);
function readPackage(r: Ref, cs: ChunkStore): Promise<Package> {
export function readPackage(r: Ref, ds: DataStore): Promise<Package> {
const refStr = r.toString();
const p = pendingPackages[refStr];
if (p) {
return p;
}
return pendingPackages[refStr] = readValue(r, cs).then(p => {
return pendingPackages[refStr] = ds.readValue(r).then(p => {
invariant(p instanceof Package);
registerPackage(p);
delete pendingPackages[refStr];
return p;
});
}
export {lookupPackage, Package, readPackage, registerPackage};

View File

@@ -1,22 +0,0 @@
// @flow
import Ref from './ref.js';
import Chunk from './chunk.js';
import type {ChunkStore} from './chunk-store.js';
import {notNull} from './assert.js';
type decodeFn = (chunk: Chunk, cs: ChunkStore) => Promise<any>
let decodeNomsValue: ?decodeFn = null;
export async function readValue(r: Ref, cs: ChunkStore): Promise<any> {
const chunk = await cs.get(r);
if (chunk.isEmpty()) {
return null;
}
return notNull(decodeNomsValue)(chunk, cs);
}
export function setDecodeNomsValue(decode: decodeFn) {
decodeNomsValue = decode;
}

View File

@@ -8,16 +8,16 @@ import {notNull} from './assert.js';
import {makeCompoundType, makePrimitiveType} from './type.js';
import {Kind} from './noms-kind.js';
import MemoryStore from './memory-store.js';
import type {ChunkStore} from './chunk-store.js';
import {DataStore} from './data-store.js';
class TestSequence extends Sequence<any> {
constructor(cs: ?ChunkStore, items: Array<any>) {
super(cs, makeCompoundType(Kind.List, makePrimitiveType(Kind.Value)), items);
constructor(ds: ?DataStore, items: Array<any>) {
super(ds, makeCompoundType(Kind.List, makePrimitiveType(Kind.Value)), items);
}
getChildSequence(idx: number): // eslint-disable-line no-unused-vars
Promise<?Sequence> {
return Promise.resolve(new TestSequence(this.cs, this.items[idx]));
return Promise.resolve(new TestSequence(this.ds, this.items[idx]));
}
}
@@ -31,9 +31,10 @@ class TestSequenceCursor extends SequenceCursor<any, TestSequence> {
suite('SequenceCursor', () => {
function testCursor(data: any): TestSequenceCursor {
const ms = new MemoryStore();
const s1 = new TestSequence(ms, data);
const ds = new DataStore(ms);
const s1 = new TestSequence(ds, data);
const c1 = new TestSequenceCursor(null, s1, 0);
const s2 = new TestSequence(ms, data[0]);
const s2 = new TestSequence(ds, data[0]);
const c2 = new TestSequenceCursor(c1, s2, 0);
return c2;
}

View File

@@ -1,6 +1,6 @@
// @flow
import type {ChunkStore} from './chunk-store.js';
import type {DataStore} from './data-store.js';
import {invariant, notNull} from './assert.js';
import {AsyncIterator} from './async-iterator.js';
import type {AsyncIteratorResult} from './async-iterator.js';
@@ -8,13 +8,13 @@ import {Type} from './type.js';
import {ValueBase} from './value.js';
export class Sequence<T> extends ValueBase {
cs: ?ChunkStore;
ds: ?DataStore;
items: Array<T>;
constructor(cs: ?ChunkStore, type: Type, items: Array<T>) {
constructor(ds: ?DataStore, type: Type, items: Array<T>) {
super(type);
this.cs = cs;
this.ds = ds;
this.items = items;
}

View File

@@ -5,7 +5,6 @@ import {suite} from 'mocha';
import MemoryStore from './memory-store.js';
import test from './async-test.js';
import type {ChunkStore} from './chunk-store.js';
import {invariant, notNull} from './assert.js';
import {Kind} from './noms-kind.js';
import {flatten, flattenParallel} from './test-util.js';
@@ -13,8 +12,8 @@ import {makeCompoundType, makePrimitiveType} from './type.js';
import {MetaTuple, OrderedMetaSequence} from './meta-sequence.js';
import {newSet, NomsSet, SetLeafSequence} from './set.js';
import {OrderedSequence} from './ordered-sequence.js';
import {readValue} from './read-value.js';
import {writeValue} from './encode.js';
import {DataStore} from './data-store.js';
const testSetSize = 5000;
const setOfNRef = 'sha1-54ff8f84b5f39fe2171572922d067257a57c539c';
@@ -69,12 +68,13 @@ suite('BuildSet', () => {
test('write, read, modify, read', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const nums = firstNNumbers(testSetSize);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Int64));
const s = await newSet(nums, tr);
const r = writeValue(s, tr, ms);
const s2 = await readValue(r, ms);
const r = writeValue(s, tr, ds);
const s2 = await ds.readValue(r);
const outNums = [];
await s2.forEach(k => outNums.push(k));
assert.deepEqual(nums, outNums);
@@ -91,16 +91,18 @@ suite('BuildSet', () => {
suite('SetLeaf', () => {
test('isEmpty', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.String));
const newSet = items => new NomsSet(tr, new SetLeafSequence(ms, tr, items));
const newSet = items => new NomsSet(tr, new SetLeafSequence(ds, tr, items));
assert.isTrue(newSet([]).isEmpty());
assert.isFalse(newSet(['a', 'k']).isEmpty());
});
test('first/last/has', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.String));
const s = new NomsSet(tr, new SetLeafSequence(ms, tr, ['a', 'k']));
const s = new NomsSet(tr, new SetLeafSequence(ds, tr, ['a', 'k']));
assert.strictEqual('a', await s.first());
assert.strictEqual('k', await s.last());
@@ -113,8 +115,9 @@ suite('SetLeaf', () => {
test('forEach', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.String));
const m = new NomsSet(tr, new SetLeafSequence(ms, tr, ['a', 'b']));
const m = new NomsSet(tr, new SetLeafSequence(ds, tr, ['a', 'b']));
const values = [];
await m.forEach((k) => { values.push(k); });
@@ -123,10 +126,11 @@ suite('SetLeaf', () => {
test('iterator', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.String));
const test = async items => {
const m = new NomsSet(tr, new SetLeafSequence(ms, tr, items));
const m = new NomsSet(tr, new SetLeafSequence(ds, tr, items));
assert.deepEqual(items, await flatten(m.iterator()));
assert.deepEqual(items, await flattenParallel(m.iterator(), items.length));
};
@@ -138,8 +142,9 @@ suite('SetLeaf', () => {
test('iteratorAt', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.String));
const build = items => new NomsSet(tr, new SetLeafSequence(ms, tr, items));
const build = items => new NomsSet(tr, new SetLeafSequence(ds, tr, items));
assert.deepEqual([], await flatten(build([]).iteratorAt('a')));
@@ -156,12 +161,13 @@ suite('SetLeaf', () => {
test('chunks', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Value));
const st = makePrimitiveType(Kind.String);
const r1 = writeValue('x', st, ms);
const r2 = writeValue('a', st, ms);
const r3 = writeValue('b', st, ms);
const l = new NomsSet(tr, new SetLeafSequence(ms, tr, ['z', r1, r2, r3]));
const r1 = writeValue('x', st, ds);
const r2 = writeValue('a', st, ds);
const r3 = writeValue('b', st, ds);
const l = new NomsSet(tr, new SetLeafSequence(ds, tr, ['z', r1, r2, r3]));
assert.strictEqual(3, l.chunks.length);
assert.isTrue(r1.equals(l.chunks[0]));
assert.isTrue(r2.equals(l.chunks[1]));
@@ -170,14 +176,14 @@ suite('SetLeaf', () => {
});
suite('CompoundSet', () => {
function build(cs: ChunkStore, values: Array<string>): NomsSet {
function build(ds: DataStore, values: Array<string>): NomsSet {
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.String));
assert.isTrue(values.length > 1 && Math.log2(values.length) % 1 === 0);
let tuples = [];
for (let i = 0; i < values.length; i += 2) {
const l = new NomsSet(tr, new SetLeafSequence(cs, tr, [values[i], values[i + 1]]));
const r = writeValue(l, tr, cs);
const l = new NomsSet(tr, new SetLeafSequence(ds, tr, [values[i], values[i + 1]]));
const r = writeValue(l, tr, ds);
tuples.push(new MetaTuple(r, values[i + 1]));
}
@@ -185,8 +191,8 @@ suite('CompoundSet', () => {
while (tuples.length > 1) {
const next = [];
for (let i = 0; i < tuples.length; i += 2) {
last = new NomsSet(tr, new OrderedMetaSequence(cs, tr, [tuples[i], tuples[i + 1]]));
const r = writeValue(last, tr, cs);
last = new NomsSet(tr, new OrderedMetaSequence(ds, tr, [tuples[i], tuples[i + 1]]));
const r = writeValue(last, tr, ds);
next.push(new MetaTuple(r, tuples[i + 1].value));
}
@@ -198,13 +204,15 @@ suite('CompoundSet', () => {
test('isEmpty', () => {
const ms = new MemoryStore();
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const ds = new DataStore(ms);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
assert.isFalse(c.isEmpty());
});
test('first/last/has', async () => {
const ms = new MemoryStore();
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const ds = new DataStore(ms);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
assert.strictEqual('a', await c.first());
assert.strictEqual('n', await c.last());
assert.isTrue(await c.has('a'));
@@ -225,7 +233,8 @@ suite('CompoundSet', () => {
test('forEach', async () => {
const ms = new MemoryStore();
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const ds = new DataStore(ms);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const values = [];
await c.forEach((k) => { values.push(k); });
assert.deepEqual(['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n'], values);
@@ -233,16 +242,18 @@ suite('CompoundSet', () => {
test('iterator', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const values = ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n'];
const c = build(ms, values);
const c = build(ds, values);
assert.deepEqual(values, await flatten(c.iterator()));
assert.deepEqual(values, await flattenParallel(c.iterator(), values.length));
});
test('iteratorAt', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const values = ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n'];
const c = build(ms, values);
const c = build(ds, values);
const offsets = {
_: 0, a: 0,
b: 1,
@@ -263,8 +274,9 @@ suite('CompoundSet', () => {
test('iterator return', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const values = ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n'];
const c = build(ms, values);
const c = build(ds, values);
const iter = c.iterator();
const values2 = [];
for (let res = await iter.next(); !res.done; res = await iter.next()) {
@@ -278,7 +290,8 @@ suite('CompoundSet', () => {
test('iterator return parallel', async () => {
const ms = new MemoryStore();
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const ds = new DataStore(ms);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const iter = c.iterator();
const values = await Promise.all([iter.next(), iter.next(), iter.return(), iter.next()]);
assert.deepEqual(
@@ -288,20 +301,23 @@ suite('CompoundSet', () => {
test('chunks', () => {
const ms = new MemoryStore();
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const ds = new DataStore(ms);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
assert.strictEqual(2, c.chunks.length);
});
test('map', async () => {
const ms = new MemoryStore();
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const ds = new DataStore(ms);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const values = await c.map((k) => k + '*');
assert.deepEqual(['a*', 'b*', 'e*', 'f*', 'h*', 'i*', 'm*', 'n*'], values);
});
test('map async', async () => {
const ms = new MemoryStore();
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const ds = new DataStore(ms);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const values = await c.map((k) => Promise.resolve(k + '*'));
assert.deepEqual(['a*', 'b*', 'e*', 'f*', 'h*', 'i*', 'm*', 'n*'], values);
});
@@ -319,8 +335,9 @@ suite('CompoundSet', () => {
test('advanceTo', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const c = build(ms, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
const c = build(ds, ['a', 'b', 'e', 'f', 'h', 'i', 'm', 'n']);
invariant(c.sequence instanceof OrderedSequence);
let cursor = await c.sequence.newCursorAt(null);
@@ -361,11 +378,12 @@ suite('CompoundSet', () => {
async function testIntersect(expect: Array<string>, seqs: Array<Array<string>>) {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const first = build(ms, seqs[0]);
const first = build(ds, seqs[0]);
const sets:Array<NomsSet> = [];
for (let i = 1; i < seqs.length; i++) {
sets.push(build(ms, seqs[i]));
sets.push(build(ds, seqs[i]));
}
const result = await first.intersect(...sets);
@@ -390,10 +408,11 @@ suite('CompoundSet', () => {
test('iterator at 0', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, makePrimitiveType(Kind.Int8));
const test = async (expected, items) => {
const set = new NomsSet(tr, new SetLeafSequence(ms, tr, items));
const set = new NomsSet(tr, new SetLeafSequence(ds, tr, items));
const iter = set.iteratorAt(0);
assert.deepEqual(expected, await flatten(iter));
};

View File

@@ -2,7 +2,6 @@
import BuzHashBoundaryChecker from './buzhash-boundary-checker.js';
import type {BoundaryChecker, makeChunkFn} from './sequence-chunker.js';
import type {ChunkStore} from './chunk-store.js';
import type {valueOrPrimitive, Value} from './value.js'; // eslint-disable-line no-unused-vars
import {AsyncIterator} from './async-iterator.js';
import {chunkSequence} from './sequence-chunker.js';
@@ -17,13 +16,14 @@ import {MetaTuple, newOrderedMetaSequenceBoundaryChecker,
import {OrderedSequence, OrderedSequenceCursor,
OrderedSequenceIterator} from './ordered-sequence.js';
import {Type} from './type.js';
import type {DataStore} from './data-store.js';
const setWindowSize = 1;
const setPattern = ((1 << 6) | 0) - 1;
function newSetLeafChunkFn<T:valueOrPrimitive>(t: Type, cs: ?ChunkStore = null): makeChunkFn {
function newSetLeafChunkFn<T:valueOrPrimitive>(t: Type, ds: ?DataStore = null): makeChunkFn {
return (items: Array<T>) => {
const setLeaf = new SetLeafSequence(cs, t, items);
const setLeaf = new SetLeafSequence(ds, t, items);
let indexValue: ?(T | Ref) = null;
if (items.length > 0) {
@@ -101,9 +101,9 @@ export class NomsSet<T:valueOrPrimitive> extends Collection<OrderedSequence> {
async _splice(cursor: OrderedSequenceCursor, insert: Array<T>, remove: number):
Promise<NomsSet<T>> {
const type = this.type;
const cs = this.sequence.cs;
const seq = await chunkSequence(cursor, insert, remove, newSetLeafChunkFn(type, cs),
newOrderedMetaSequenceChunkFn(type, cs),
const ds = this.sequence.ds;
const seq = await chunkSequence(cursor, insert, remove, newSetLeafChunkFn(type, ds),
newOrderedMetaSequenceChunkFn(type, ds),
newSetLeafBoundaryChecker(type),
newOrderedMetaSequenceBoundaryChecker);
invariant(seq instanceof OrderedSequence);

View File

@@ -9,6 +9,7 @@ import {notNull} from './assert.js';
import {Package, registerPackage} from './package.js';
import {suite, test} from 'mocha';
import {writeValue} from './encode.js';
import {DataStore} from './data-store.js';
suite('Struct', () => {
test('equals', () => {
@@ -31,6 +32,7 @@ suite('Struct', () => {
test('chunks', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const typeDef = makeStructType('S1', [
new Field('r', makeCompoundType(Kind.Ref, makePrimitiveType(Kind.Bool)), false),
], []);
@@ -42,7 +44,7 @@ suite('Struct', () => {
const b = true;
const bt = makePrimitiveType(Kind.Bool);
const r = writeValue(b, bt, ms);
const r = writeValue(b, bt, ds);
const s1 = new Struct(type, typeDef, {r: r});
assert.strictEqual(2, s1.chunks.length);
assert.isTrue(pkgRef.equals(s1.chunks[0]));
@@ -51,6 +53,7 @@ suite('Struct', () => {
test('chunks optional', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const typeDef = makeStructType('S1', [
new Field('r', makeCompoundType(Kind.Ref, makePrimitiveType(Kind.Bool)), true),
], []);
@@ -67,7 +70,7 @@ suite('Struct', () => {
const b = true;
const bt = makePrimitiveType(Kind.Bool);
const r = writeValue(b, bt, ms);
const r = writeValue(b, bt, ds);
const s2 = new Struct(type, typeDef, {r: r});
assert.strictEqual(2, s2.chunks.length);
assert.isTrue(pkgRef.equals(s2.chunks[0]));
@@ -76,6 +79,7 @@ suite('Struct', () => {
test('chunks union', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const typeDef = makeStructType('S1', [], [
new Field('r', makeCompoundType(Kind.Ref, makePrimitiveType(Kind.Bool)), false),
new Field('s', makePrimitiveType(Kind.String), false),
@@ -92,7 +96,7 @@ suite('Struct', () => {
const b = true;
const bt = makePrimitiveType(Kind.Bool);
const r = writeValue(b, bt, ms);
const r = writeValue(b, bt, ds);
const s2 = new Struct(type, typeDef, {r: r});
assert.strictEqual(2, s2.chunks.length);
assert.isTrue(pkgRef.equals(s2.chunks[0]));

View File

@@ -6,13 +6,14 @@ import {assert} from 'chai';
import {Field, makeCompoundType, makePrimitiveType, makeStructType, makeType} from './type.js';
import {Kind} from './noms-kind.js';
import {Package, registerPackage} from './package.js';
import {readValue} from './read-value.js';
import {suite, test} from 'mocha';
import {writeValue} from './encode.js';
import {DataStore} from './data-store.js';
suite('Type', () => {
test('types', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const boolType = makePrimitiveType(Kind.Bool);
const uint8Type = makePrimitiveType(Kind.Uint8);
@@ -31,17 +32,17 @@ suite('Type', () => {
const pkgRef = Ref.parse('sha1-0123456789abcdef0123456789abcdef01234567');
const trType = makeType(pkgRef, 42);
const otherRef = writeValue(otherType, otherType.type, ms);
const mapRef = writeValue(mapType, mapType.type, ms);
const setRef = writeValue(setType, setType.type, ms);
const mahRef = writeValue(mahType, mahType.type, ms);
const trRef = writeValue(trType, trType.type, ms);
const otherRef = writeValue(otherType, otherType.type, ds);
const mapRef = writeValue(mapType, mapType.type, ds);
const setRef = writeValue(setType, setType.type, ds);
const mahRef = writeValue(mahType, mahType.type, ds);
const trRef = writeValue(trType, trType.type, ds);
assert.isTrue(otherType.equals(await readValue(otherRef, ms)));
assert.isTrue(mapType.equals(await readValue(mapRef, ms)));
assert.isTrue(setType.equals(await readValue(setRef, ms)));
assert.isTrue(mahType.equals(await readValue(mahRef, ms)));
assert.isTrue(trType.equals(await readValue(trRef, ms)));
assert.isTrue(otherType.equals(await ds.readValue(otherRef)));
assert.isTrue(mapType.equals(await ds.readValue(mapRef)));
assert.isTrue(setType.equals(await ds.readValue(setRef)));
assert.isTrue(mahType.equals(await ds.readValue(mahRef)));
assert.isTrue(trType.equals(await ds.readValue(trRef)));
});
test('typeRef describe', async () => {
@@ -79,18 +80,19 @@ suite('Type', () => {
test('type with pkgRef', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const pkg = new Package([makePrimitiveType(Kind.Float64)], []);
registerPackage(pkg);
const pkgRef = pkg.ref;
const unresolvedType = makeType(pkgRef, 42);
const unresolvedRef = writeValue(unresolvedType, unresolvedType.type, ms);
const unresolvedRef = writeValue(unresolvedType, unresolvedType.type, ds);
const v = await readValue(unresolvedRef, ms);
const v = await ds.readValue(unresolvedRef);
assert.isNotNull(v);
assert.isTrue(pkgRef.equals(v.chunks[0]));
const p = await readValue(pkgRef, ms);
const p = await ds.readValue(pkgRef);
assert.isNotNull(p);
});
@@ -100,9 +102,10 @@ suite('Type', () => {
test('empty package ref', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const v = makeType(new Ref(), -1);
const r = writeValue(v, v.type, ms);
const v2 = await readValue(r, ms);
const r = writeValue(v, v.type, ds);
const v2 = await ds.readValue(r);
assert.isTrue(v.equals(v2));
});
});