Inline struct type declaration into chunk (#1324)
Struct type definition is now inlined into the chunk. To break cycles we use back references.

- Removes unresolved type refs
- Removes packages

Fixes #1164
Fixes #1165
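How the cycle is expressed now, in Go: a recursive struct type is constructed directly and the self-referential field is patched in afterwards, so no Package registration or unresolved type ref is needed. A minimal sketch of that pattern, mirroring the datas/commit.go hunk further down (all identifiers are taken from that diff; only the main wrapper is added here):

```go
package main

import (
	"fmt"

	"github.com/attic-labs/noms/types"
)

func main() {
	// struct Commit {
	//   value: Value
	//   parents: Set<Ref<Commit>>
	// }
	//
	// The parents field starts as a nil placeholder; once the struct type
	// exists it is patched to refer back to commitType, which is the back
	// reference that breaks the cycle when the type is serialized.
	fields := []types.Field{
		types.Field{Name: "value", T: types.ValueType},
		types.Field{Name: "parents", T: nil}, // placeholder, patched below
	}
	commitType := types.MakeStructType("Commit", fields, []types.Field{})
	commitType.Desc.(types.StructDesc).Fields[1].T =
		types.MakeSetType(types.MakeRefType(commitType))

	fmt.Println(commitType.Kind() == types.StructKind) // true
}
```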
@@ -20,11 +20,6 @@ before_script:
- ./build.py
- npm test
- popd
- pushd nomdl/codegen/test
- npm prune
- npm install
- npm test
- popd
script:
- export GODEBUG=invalidptr=0
- export GO15VENDOREXPERIMENT=1
@@ -40,7 +35,6 @@ cache:
directories:
- js/node_modules
- clients/splore/node_modules
- nomdl/codegen/test/node_modules
deploy:
provider: script
script: tools/publish-js-sdk.py
@@ -11,7 +11,6 @@ import (
"github.com/attic-labs/noms/d"
"github.com/attic-labs/noms/datas"
"github.com/attic-labs/noms/dataset"
"github.com/attic-labs/noms/ref"
"github.com/attic-labs/noms/types"
"github.com/stretchr/testify/suite"
)
@@ -49,11 +48,8 @@ func (s *testSuite) TestCSVExporter() {
})
}

typeDef := types.MakeStructType(structName, f, []types.Field{})
pkg := types.NewPackage([]*types.Type{typeDef}, []ref.Ref{})
pkgRef := types.RegisterPackage(&pkg)
typeRef := types.MakeType(pkgRef, 0)
structFields := typeDef.Desc.(types.StructDesc).Fields
typ := types.MakeStructType(structName, f, []types.Field{})
structFields := typ.Desc.(types.StructDesc).Fields

// Build data rows
structs := make([]types.Value, len(payload))
@@ -62,10 +58,10 @@ func (s *testSuite) TestCSVExporter() {
for j, v := range row {
fields[structFields[j].Name] = types.NewString(v)
}
structs[i] = types.NewStruct(typeRef, typeDef, fields)
structs[i] = types.NewStruct(typ, fields)
}

listType := types.MakeListType(typeRef)
listType := types.MakeListType(typ)
ds.Commit(types.NewTypedList(listType, structs...))
ds.Store().Close()

@@ -90,7 +90,7 @@ func main() {
kinds = csv.StringsToKinds(strings.Split(*columnTypes, ","))
}

value, _, _ := csv.Read(r, *name, headers, kinds, ds.Store())
value, _ := csv.Read(r, *name, headers, kinds, ds.Store())
_, err = ds.Commit(value)
d.Exp.NoError(err)
}

@@ -5,7 +5,6 @@ import (
"io"

"github.com/attic-labs/noms/d"
"github.com/attic-labs/noms/ref"
"github.com/attic-labs/noms/types"
)

@@ -66,7 +65,7 @@ func ReportValidFieldTypes(r *csv.Reader, headers []string) []KindSlice {
}

// MakeStructTypeFromHeaders creates a struct type from the headers using |kinds| as the type of each field. If |kinds| is empty, default to strings.
func MakeStructTypeFromHeaders(headers []string, structName string, kinds KindSlice) (typeRef, typeDef *types.Type) {
func MakeStructTypeFromHeaders(headers []string, structName string, kinds KindSlice) *types.Type {
useStringType := len(kinds) == 0
d.Chk.True(useStringType || len(headers) == len(kinds))
fields := make([]types.Field, len(headers))
@@ -82,24 +81,19 @@ func MakeStructTypeFromHeaders(headers []string, structName string, kinds KindSl
Optional: false,
}
}
typeDef = types.MakeStructType(structName, fields, []types.Field{})
pkg := types.NewPackage([]*types.Type{typeDef}, []ref.Ref{})
pkgRef := types.RegisterPackage(&pkg)
typeRef = types.MakeType(pkgRef, 0)

return
return types.MakeStructType(structName, fields, []types.Field{})
}

// Read takes a CSV reader and reads it into a typed List of structs. Each row gets read into a struct named structName, described by headers. If the original data contained headers it is expected that the input reader has already read those and are pointing at the first data row.
// If kinds is non-empty, it will be used to type the fields in the generated structs; otherwise, they will be left as string-fields.
// In addition to the list, Read returns the typeRef for the structs in the list, and last the typeDef of the structs.
func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, typeRef, typeDef *types.Type) {
typeRef, typeDef = MakeStructTypeFromHeaders(headers, structName, kinds)
func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, t *types.Type) {
t = MakeStructTypeFromHeaders(headers, structName, kinds)
valueChan := make(chan types.Value, 128) // TODO: Make this a function param?
listType := types.MakeListType(typeRef)
listType := types.MakeListType(t)
listChan := types.NewStreamingTypedList(listType, vrw, valueChan)

structFields := typeDef.Desc.(types.StructDesc).Fields
structFields := t.Desc.(types.StructDesc).Fields

for {
row, err := r.Read()
@@ -117,8 +111,8 @@ func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, v
fields[f.Name] = StringToType(v, f.T.Kind())
}
}
valueChan <- types.NewStruct(typeRef, typeDef, fields)
valueChan <- types.NewStruct(t, fields)
}

return <-listChan, typeRef, typeDef
return <-listChan, t
}
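For csv callers the old (typeRef, typeDef) pair collapses into a single *types.Type, as the new Read and MakeStructTypeFromHeaders signatures above show. A hedged usage sketch of the new call shape; the noms csv import path is an assumption (the diff shows only the package contents), the reader and store setup is left to the caller, and everything else comes from the hunks above and the tests below:

```go
package example

import (
	"encoding/csv"
	"fmt"

	// Import path assumed; the diff shows only the csv package's contents.
	nomscsv "github.com/attic-labs/noms/clients/csv"
	"github.com/attic-labs/noms/types"
)

// readRows sketches the new call shape: Read now returns the list plus one
// *types.Type describing the row struct, instead of a (typeRef, typeDef) pair.
func readRows(r *csv.Reader, vrw types.ValueReadWriter) types.List {
	headers := []string{"A", "B", "C"}
	kinds := nomscsv.KindSlice{types.StringKind, types.NumberKind, types.BoolKind}

	l, typ := nomscsv.Read(r, "test", headers, kinds, vrw)

	// Field metadata is read straight off the inlined struct type.
	desc := typ.Desc.(types.StructDesc)
	fmt.Println(typ.Kind() == types.StructKind, len(desc.Fields), l.Len())
	return l
}
```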
@@ -22,13 +22,13 @@ b,2,false

headers := []string{"A", "B", "C"}
kinds := KindSlice{types.StringKind, types.NumberKind, types.BoolKind}
l, typeRef, typeDef := Read(r, "test", headers, kinds, ds)
l, typ := Read(r, "test", headers, kinds, ds)

assert.Equal(uint64(2), l.Len())

assert.True(typeRef.IsUnresolved())
assert.Equal(types.StructKind, typ.Kind())

desc, ok := typeDef.Desc.(types.StructDesc)
desc, ok := typ.Desc.(types.StructDesc)
assert.True(ok)
assert.Len(desc.Fields, 3)
assert.Equal("A", desc.Fields[0].Name)
@@ -53,13 +53,13 @@ func testTrailingHelper(t *testing.T, dataString string) {

headers := []string{"A", "B"}
kinds := KindSlice{types.StringKind, types.StringKind}
l, typeRef, typeDef := Read(r, "test", headers, kinds, ds)
l, typ := Read(r, "test", headers, kinds, ds)

assert.Equal(uint64(3), l.Len())

assert.True(typeRef.IsUnresolved())
assert.Equal(types.StructKind, typ.Kind())

desc, ok := typeDef.Desc.(types.StructDesc)
desc, ok := typ.Desc.(types.StructDesc)
assert.True(ok)
assert.Len(desc.Fields, 2)
assert.Equal("A", desc.Fields[0].Name)

@@ -14,15 +14,7 @@ func ValueToListAndElemDesc(v types.Value, vr types.ValueReader) (types.List, ty
d.Exp.Equal(types.ListKind, v.Type().Kind(),
"Dataset must be List<>, found: %s", v.Type().Describe())

u := v.Type().Desc.(types.CompoundDesc).ElemTypes[0]
d.Exp.Equal(types.UnresolvedKind, u.Kind(),
"List<> must be UnresolvedKind, found: %s", u.Describe())

pkg := types.ReadPackage(u.PackageRef(), vr)
d.Exp.Equal(types.PackageKind, pkg.Type().Kind(),
"Failed to read package: %s", pkg.Type().Describe())

t := pkg.Types()[u.Ordinal()]
t := v.Type().Desc.(types.CompoundDesc).ElemTypes[0]
d.Exp.Equal(types.StructKind, t.Kind(), "Did not find Struct: %s", t.Describe())
return v.(types.List), t.Desc.(types.StructDesc)
}
@@ -1,12 +1,8 @@
package datas

import (
"github.com/attic-labs/noms/ref"
"github.com/attic-labs/noms/types"
)
import "github.com/attic-labs/noms/types"

var __typeForCommit *types.Type
var __typeDef *types.Type
var commitType *types.Type

const (
ParentsField = "parents"
@@ -16,16 +12,17 @@ const (
func init() {
structName := "Commit"

fieldTypes := []types.Field{
types.Field{Name: ValueField, T: types.MakePrimitiveType(types.ValueKind)},
types.Field{Name: ParentsField, T: types.MakeSetType(types.MakeRefType(types.MakeType(ref.Ref{}, 0)))},
}
// struct Commit {
// value: Value
// parents: Set<Ref<Commit>>
// }

typeDef := types.MakeStructType(structName, fieldTypes, []types.Field{})
pkg := types.NewPackage([]*types.Type{typeDef}, []ref.Ref{})
__typeDef = pkg.Types()[0]
pkgRef := types.RegisterPackage(&pkg)
__typeForCommit = types.MakeType(pkgRef, 0)
fieldTypes := []types.Field{
types.Field{Name: ValueField, T: types.ValueType},
types.Field{Name: ParentsField, T: nil},
}
commitType = types.MakeStructType(structName, fieldTypes, []types.Field{})
commitType.Desc.(types.StructDesc).Fields[1].T = types.MakeSetType(types.MakeRefType(commitType))
}

func NewCommit() types.Struct {
@@ -34,11 +31,11 @@ func NewCommit() types.Struct {
ParentsField: NewSetOfRefOfCommit(),
}

return types.NewStruct(__typeForCommit, __typeDef, initialFields)
return types.NewStruct(commitType, initialFields)
}

func typeForMapOfStringToRefOfCommit() *types.Type {
return types.MakeMapType(types.StringType, types.MakeRefType(__typeForCommit))
return types.MakeMapType(types.StringType, types.MakeRefType(commitType))
}

func NewMapOfStringToRefOfCommit() types.Map {
@@ -46,7 +43,7 @@ func NewMapOfStringToRefOfCommit() types.Map {
}

func typeForSetOfRefOfCommit() *types.Type {
return types.MakeSetType(types.MakeRefType(__typeForCommit))
return types.MakeSetType(types.MakeRefType(commitType))
}

func NewSetOfRefOfCommit() types.Set {
@@ -10,7 +10,7 @@ import (
)

// writesOnCommit allows tests to adjust for how many writes dataStoreCommon performs on Commit()
const writesOnCommit = 3
const writesOnCommit = 2

func TestLocalDataStore(t *testing.T) {
suite.Run(t, &LocalDataStoreSuite{})

@@ -30,7 +30,7 @@ func TestDatasetCommitTracker(t *testing.T) {
assert.False(ds2.Head().Get(datas.ValueField).Equals(ds1Commit))
assert.False(ds1.Head().Get(datas.ValueField).Equals(ds2Commit))

assert.Equal("sha1-59bf8cf4ce01e5630fe93de07464ad2a02c232ab", cs.Root().String())
assert.Equal("sha1-7c7a614a758ea33792755c5e91d745fc2503b602", cs.Root().String())
}

func newDS(id string, cs *chunks.MemoryStore) Dataset {
@@ -1,6 +1,6 @@
{
"name": "@attic/noms",
"version": "13.0.0",
"version": "14.0.0",
"description": "Noms JS SDK",
"repository": "https://github.com/attic-labs/noms",
"main": "dist/commonjs/noms.js",
@@ -94,8 +94,6 @@ export function getCompareFunction(t: Type): (v1: any, v2: any) => number {
case Kind.Set:
case Kind.Struct:
case Kind.Type:
case Kind.Unresolved:
case Kind.Package:
return compareObjects;

case Kind.Bool:
@@ -1,7 +1,7 @@
// @flow

import Chunk from './chunk.js';
import {default as Ref, emptyRef} from './ref.js';
import Ref from './ref.js';
import RefValue from './ref-value.js';
import {newStruct} from './struct.js';
import type {ChunkStore} from './chunk-store.js';
@@ -10,27 +10,24 @@ import type {NomsSet} from './set.js';
import type {valueOrPrimitive} from './value.js';
import {
Field,
makeCompoundType,
makeRefType,
makeStructType,
makeType,
makeSetType,
makeMapType,
Type,
stringType,
boolType,
valueType,
StructDesc,
} from './type.js';
import {Kind} from './noms-kind.js';
import {newMap} from './map.js';
import {newSet} from './set.js';
import {Package, registerPackage} from './package.js';
import {decodeNomsValue} from './decode.js';
import {invariant} from './assert.js';
import {encodeNomsValue} from './encode.js';
import type {Commit} from './commit.js';

type DatasTypes = {
commitTypeDef: Type,
datasPackage: Package,
commitType: Type,
commitSetType: Type,
refOfCommitType: Type,
@@ -48,23 +45,19 @@ function getEmptyCommitMap(): Promise<NomsMap<string, RefValue<Commit>>> {
let datasTypes: DatasTypes;
export function getDatasTypes(): DatasTypes {
if (!datasTypes) {
const datasPackage = new Package([
makeStructType('Commit', [
new Field('value', valueType, false),
new Field('parents', makeCompoundType(Kind.Set,
makeCompoundType(Kind.Ref, makeType(emptyRef, 0))), false),
], []),
// struct Commit {
// value: Value
// parents: Set<Ref<Commit>>
// }
const commitType = makeStructType('Commit', [
new Field('value', valueType, false),
], []);
registerPackage(datasPackage);
const [commitTypeDef] = datasPackage.types;

const commitType = makeType(datasPackage.ref, 0);
const refOfCommitType = makeCompoundType(Kind.Ref, commitType);
const commitSetType = makeCompoundType(Kind.Set, refOfCommitType);
const commitMapType = makeCompoundType(Kind.Map, stringType, refOfCommitType);
const refOfCommitType = makeRefType(commitType);
const commitSetType = makeSetType(refOfCommitType);
invariant(commitType.desc instanceof StructDesc);
commitType.desc.fields.push(new Field('parents', commitSetType, false));
const commitMapType = makeMapType(stringType, refOfCommitType);
datasTypes = {
commitTypeDef,
datasPackage,
commitType,
refOfCommitType,
commitSetType,
@@ -212,7 +205,7 @@ export function newCommit(value: valueOrPrimitive, parents: Array<Ref> = []): Pr
const types = getDatasTypes();
const parentRefs = parents.map(r => new RefValue(r, types.refOfCommitType));
return newSet(parentRefs, types.commitSetType).then(parents =>
newStruct(types.commitType, types.commitTypeDef, {value, parents}));
newStruct(types.commitType, {value, parents}));
}

class CacheEntry<T> {
@@ -13,14 +13,17 @@ import {decodeNomsValue, JsonArrayReader} from './decode.js';
|
||||
import {
|
||||
boolType,
|
||||
Field,
|
||||
makeCompoundType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
makeListType,
|
||||
makeMapType,
|
||||
makeSetType,
|
||||
makeRefType,
|
||||
numberType,
|
||||
stringType,
|
||||
Type,
|
||||
typeType,
|
||||
valueType,
|
||||
StructDesc,
|
||||
} from './type.js';
|
||||
import {encode as encodeBase64} from './base64.js';
|
||||
import {IndexedMetaSequence, MetaTuple, OrderedMetaSequence} from './meta-sequence.js';
|
||||
@@ -30,7 +33,6 @@ import {ListLeafSequence, NomsList} from './list.js';
|
||||
import {MapLeafSequence, NomsMap} from './map.js';
|
||||
import {NomsBlob, newBlob} from './blob.js';
|
||||
import {NomsSet, SetLeafSequence} from './set.js';
|
||||
import {registerPackage, Package} from './package.js';
|
||||
import {suite, test} from 'mocha';
|
||||
|
||||
suite('Decode', () => {
|
||||
@@ -63,19 +65,17 @@ suite('Decode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
function doTest(expected: Type, a: Array<any>) {
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const tr = r.readTypeAsTag();
|
||||
const tr = r.readTypeAsTag([]);
|
||||
assert.isTrue(expected.equals(tr));
|
||||
}
|
||||
|
||||
doTest(boolType, [Kind.Bool, true]);
|
||||
doTest(typeType, [Kind.Type, Kind.Bool]);
|
||||
doTest(makeCompoundType(Kind.List, boolType),
|
||||
doTest(makeListType(boolType),
|
||||
[Kind.List, Kind.Bool, true, false]);
|
||||
|
||||
const pkgRef = Ref.parse('sha1-a9993e364706816aba3e25717850c26c9cd0d89d');
|
||||
doTest(makeType(pkgRef, 42), [Kind.Unresolved, pkgRef.toString(), '42']);
|
||||
|
||||
doTest(typeType, [Kind.Type, Kind.Type, pkgRef.toString()]);
|
||||
doTest(makeStructType('S', [new Field('x', boolType, false)], []),
|
||||
[Kind.Struct, 'S', ['x', Kind.Bool, false], []]);
|
||||
});
|
||||
|
||||
test('read primitives', async () => {
|
||||
@@ -104,10 +104,10 @@ suite('Decode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const a = [Kind.List, Kind.Number, false, ['0', '1', '2', '3']];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsList<number> = await r.readTopLevelValue();
|
||||
const v: NomsList<number> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsList);
|
||||
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const l = new NomsList(tr, new ListLeafSequence(ds, tr, [0, 1, 2, 3]));
|
||||
assert.isTrue(l.equals(v));
|
||||
});
|
||||
@@ -119,10 +119,10 @@ suite('Decode', () => {
|
||||
const a = [Kind.List, Kind.Value, false,
|
||||
[Kind.Number, '1', Kind.String, 'hi', Kind.Bool, true]];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsList<Value> = await r.readTopLevelValue();
|
||||
const v: NomsList<Value> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsList);
|
||||
|
||||
const tr = makeCompoundType(Kind.List, valueType);
|
||||
const tr = makeListType(valueType);
|
||||
assert.isTrue(v.type.equals(tr));
|
||||
assert.strictEqual(1, await v.get(0));
|
||||
assert.strictEqual('hi', await v.get(1));
|
||||
@@ -137,7 +137,7 @@ suite('Decode', () => {
|
||||
const v = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsList);
|
||||
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const l = new NomsList(tr, new ListLeafSequence(ds, tr, [0, 1, 2]));
|
||||
assert.isTrue(l.equals(v));
|
||||
});
|
||||
@@ -145,7 +145,7 @@ suite('Decode', () => {
|
||||
test('read compound list', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const ltr = makeCompoundType(Kind.List, numberType);
|
||||
const ltr = makeListType(numberType);
|
||||
const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0]))).targetRef;
|
||||
const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [1, 2]))).targetRef;
|
||||
const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [3, 4, 5]))).targetRef;
|
||||
@@ -169,11 +169,10 @@ suite('Decode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const a = [Kind.Map, Kind.Number, Kind.Number, false, ['0', '1', '2', '3']];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsMap<number, number> = await r.readTopLevelValue();
|
||||
const v: NomsMap<number, number> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsMap);
|
||||
|
||||
const t = makeCompoundType(Kind.Map, numberType,
|
||||
numberType);
|
||||
const t = makeMapType(numberType, numberType);
|
||||
const m = new NomsMap(t, new MapLeafSequence(ds, t, [{key: 0, value: 1}, {key: 2, value: 3}]));
|
||||
assert.isTrue(v.equals(m));
|
||||
});
|
||||
@@ -185,11 +184,11 @@ suite('Decode', () => {
|
||||
['sha1-0000000000000000000000000000000000000001', '2',
|
||||
'sha1-0000000000000000000000000000000000000002', '4']];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsMap<RefValue<Value>, number> = await r.readTopLevelValue();
|
||||
const v: NomsMap<RefValue<Value>, number> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsMap);
|
||||
|
||||
const refOfValueType = makeCompoundType(Kind.Ref, valueType);
|
||||
const mapType = makeCompoundType(Kind.Map, refOfValueType, numberType);
|
||||
const refOfValueType = makeRefType(valueType);
|
||||
const mapType = makeMapType(refOfValueType, numberType);
|
||||
const rv1 = new RefValue(new Ref('sha1-0000000000000000000000000000000000000001'),
|
||||
refOfValueType);
|
||||
const rv2 = new RefValue(new Ref('sha1-0000000000000000000000000000000000000002'),
|
||||
@@ -204,11 +203,10 @@ suite('Decode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const a = [Kind.Value, Kind.Map, Kind.Number, Kind.Number, false, ['0', '1', '2', '3']];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsMap<number, number> = await r.readTopLevelValue();
|
||||
const v: NomsMap<number, number> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsMap);
|
||||
|
||||
const t = makeCompoundType(Kind.Map, numberType,
|
||||
numberType);
|
||||
const t = makeMapType(numberType, numberType);
|
||||
const m = new NomsMap(t, new MapLeafSequence(ds, t, [{key: 0, value: 1}, {key: 2, value: 3}]));
|
||||
assert.isTrue(v.equals(m));
|
||||
});
|
||||
@@ -218,10 +216,10 @@ suite('Decode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const a = [Kind.Set, Kind.Number, false, ['0', '1', '2', '3']];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsSet<number> = await r.readTopLevelValue();
|
||||
const v: NomsSet<number> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsSet);
|
||||
|
||||
const t = makeCompoundType(Kind.Set, numberType);
|
||||
const t = makeSetType(numberType);
|
||||
const s = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3]));
|
||||
assert.isTrue(v.equals(s));
|
||||
});
|
||||
@@ -229,7 +227,7 @@ suite('Decode', () => {
|
||||
test('read compound set', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const ltr = makeCompoundType(Kind.Set, numberType);
|
||||
const ltr = makeSetType(numberType);
|
||||
const r1 = ds.writeValue(new NomsSet(ltr, new SetLeafSequence(ds, ltr, [0]))).targetRef;
|
||||
const r2 = ds.writeValue(new NomsSet(ltr, new SetLeafSequence(ds, ltr, [1, 2]))).targetRef;
|
||||
const r3 = ds.writeValue(new NomsSet(ltr, new SetLeafSequence(ds, ltr, [3, 4, 5]))).targetRef;
|
||||
@@ -253,10 +251,10 @@ suite('Decode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const a = [Kind.Value, Kind.Set, Kind.Number, false, ['0', '1', '2', '3']];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsSet<number> = await r.readTopLevelValue();
|
||||
const v: NomsSet<number> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsSet);
|
||||
|
||||
const t = makeCompoundType(Kind.Set, numberType);
|
||||
const t = makeSetType(numberType);
|
||||
const s = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3]));
|
||||
assert.isTrue(v.equals(s));
|
||||
});
|
||||
@@ -281,10 +279,11 @@ suite('Decode', () => {
|
||||
new Field('b', boolType, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([tr], []);
|
||||
registerPackage(pkg);
|
||||
|
||||
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', 'hi', true];
|
||||
const a = [Kind.Struct, 'A1', [
|
||||
'x', Kind.Number, false,
|
||||
's', Kind.String, false,
|
||||
'b', Kind.Bool, false,
|
||||
], [], '42', 'hi', true];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v = await r.readTopLevelValue();
|
||||
|
||||
@@ -305,10 +304,8 @@ suite('Decode', () => {
|
||||
new Field('s', stringType, false),
|
||||
]);
|
||||
|
||||
const pkg = new Package([tr], []);
|
||||
registerPackage(pkg);
|
||||
|
||||
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', '1', 'hi'];
|
||||
const a = [Kind.Struct, 'A2', ['x', Kind.Number, false],
|
||||
['b', Kind.Bool, false, 's', Kind.String, false], '42', '1', 'hi'];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v = await r.readTopLevelValue();
|
||||
|
||||
@@ -327,10 +324,9 @@ suite('Decode', () => {
|
||||
new Field('b', boolType, true),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([tr], []);
|
||||
registerPackage(pkg);
|
||||
|
||||
const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', false, true, false];
|
||||
const a = [Kind.Struct, 'A3',
|
||||
['x', Kind.Number, false, 's', Kind.String, true, 'b', Kind.Bool, true], [],
|
||||
'42', false, true, false];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v = await r.readTopLevelValue();
|
||||
|
||||
@@ -343,17 +339,18 @@ suite('Decode', () => {
|
||||
test('test read struct with list', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const ltr = makeCompoundType(Kind.List, numberType);
|
||||
const ltr = makeListType(numberType);
|
||||
const tr = makeStructType('A4', [
|
||||
new Field('b', boolType, false),
|
||||
new Field('l', ltr, false),
|
||||
new Field('s', stringType, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([tr], []);
|
||||
registerPackage(pkg);
|
||||
|
||||
const a = [Kind.Unresolved, pkg.ref.toString(), '0', true, false, ['0', '1', '2'], 'hi'];
|
||||
const a = [Kind.Struct, 'A4', [
|
||||
'b', Kind.Bool, false,
|
||||
'l', Kind.List, Kind.Number, false,
|
||||
's', Kind.String, false,
|
||||
], [], true, false, ['0', '1', '2'], 'hi'];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v = await r.readTopLevelValue();
|
||||
|
||||
@@ -373,10 +370,9 @@ suite('Decode', () => {
|
||||
new Field('s', stringType, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([tr], []);
|
||||
registerPackage(pkg);
|
||||
|
||||
const a = [Kind.Unresolved, pkg.ref.toString(), '0', true, Kind.Number, '42', 'hi'];
|
||||
const a = [Kind.Struct, 'A5',
|
||||
['b', Kind.Bool, false, 'v', Kind.Value, false, 's', Kind.String, false], [],
|
||||
true, Kind.Number, '42', 'hi'];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v = await r.readTopLevelValue();
|
||||
|
||||
@@ -396,10 +392,9 @@ suite('Decode', () => {
|
||||
new Field('b', boolType, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([tr], []);
|
||||
registerPackage(pkg);
|
||||
|
||||
const a = [Kind.Value, Kind.Unresolved, pkg.ref.toString(), '0', '42', 'hi', true];
|
||||
const a = [Kind.Value, Kind.Struct, 'A1',
|
||||
['x', Kind.Number, false, 's', Kind.String, false, 'b', Kind.Bool, false], [],
|
||||
'42', 'hi', true];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v = await r.readTopLevelValue();
|
||||
|
||||
@@ -418,14 +413,12 @@ suite('Decode', () => {
|
||||
new Field('i', numberType, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([tr], []);
|
||||
registerPackage(pkg);
|
||||
|
||||
const a = [Kind.Value, Kind.Map, Kind.String, Kind.Unresolved, pkg.ref.toString(), '0', false,
|
||||
['bar', false, '2', 'baz', false, '1', 'foo', true, '3']];
|
||||
const a = [Kind.Value, Kind.Map, Kind.String,
|
||||
Kind.Struct, 's', ['b', Kind.Bool, false, 'i', Kind.Number, false], [],
|
||||
false, ['bar', false, '2', 'baz', false, '1', 'foo', true, '3']];
|
||||
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v:NomsMap<string, Struct> = await r.readTopLevelValue();
|
||||
const v: NomsMap<string, Struct> = await r.readTopLevelValue();
|
||||
invariant(v instanceof NomsMap);
|
||||
|
||||
assert.strictEqual(3, v.size);
|
||||
@@ -439,9 +432,9 @@ suite('Decode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const chunk = Chunk.fromString(
|
||||
`t [${Kind.Value}, ${Kind.Set}, ${Kind.Number}, false, ["0", "1", "2", "3"]]`);
|
||||
const v:NomsSet<number> = await decodeNomsValue(chunk, new DataStore(new MemoryStore()));
|
||||
const v: NomsSet<number> = await decodeNomsValue(chunk, new DataStore(new MemoryStore()));
|
||||
|
||||
const t = makeCompoundType(Kind.Set, numberType);
|
||||
const t = makeSetType(numberType);
|
||||
const s:NomsSet<number> = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3]));
|
||||
assert.isTrue(v.equals(s));
|
||||
});
|
||||
@@ -452,28 +445,23 @@ suite('Decode', () => {
|
||||
|
||||
const makeChunk = a => Chunk.fromString(`t ${JSON.stringify(a)}`);
|
||||
|
||||
// Package containing Commit def
|
||||
const packageArray = [Kind.Package, [Kind.Struct, 'Commit', [
|
||||
'value', Kind.Value, false,
|
||||
'parents', Kind.Set, [
|
||||
Kind.Ref, [
|
||||
Kind.Unresolved, 'sha1-0000000000000000000000000000000000000000','0',
|
||||
],
|
||||
], false,
|
||||
], []],[]];
|
||||
const pkgChunk = makeChunk(packageArray);
|
||||
const pkgRef = pkgChunk.ref;
|
||||
ms.put(pkgChunk);
|
||||
// struct Commit {
|
||||
// value: Value
|
||||
// parents: Set<Ref<Commit>>
|
||||
// }
|
||||
|
||||
// Commit value
|
||||
const commitChunk = makeChunk(
|
||||
[Kind.Unresolved, pkgRef.toString(), '0', Kind.Number, '1', false, []]);
|
||||
[Kind.Struct, 'Commit',
|
||||
['value', Kind.Value, false, 'parents', Kind.Set, Kind.Ref, Kind.BackRef, 0, false], [],
|
||||
Kind.Number, '1', false, []]);
|
||||
const commitRef = commitChunk.ref;
|
||||
ms.put(commitChunk);
|
||||
|
||||
// Root
|
||||
const rootChunk = makeChunk([Kind.Map, Kind.String, Kind.Ref, Kind.Unresolved,
|
||||
pkgRef.toString(), '0', false, ['counter', commitRef.toString()]]);
|
||||
const rootChunk = makeChunk([Kind.Map, Kind.String, Kind.Ref, Kind.Struct, 'Commit',
|
||||
['value', Kind.Value, false, 'parents', Kind.Set, Kind.Ref, Kind.BackRef, 0, false], [],
|
||||
false, ['counter', commitRef.toString()]]);
|
||||
const rootRef = rootChunk.ref;
|
||||
ms.put(rootChunk);
|
||||
|
||||
@@ -542,10 +530,41 @@ suite('Decode', () => {
|
||||
|
||||
const reader = v.getReader();
|
||||
assert.deepEqual(await reader.read(), {done: false, value: stringToUint8Array('hi')});
|
||||
// console.log(stringToUint8Array('world'));
|
||||
const x = await reader.read();
|
||||
// console.log(x);
|
||||
assert.deepEqual(x, {done: false, value: stringToUint8Array('world')});
|
||||
assert.deepEqual(await reader.read(), {done: true});
|
||||
});
|
||||
|
||||
test('recursive struct', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
// struct A {
|
||||
// b: struct B {
|
||||
// a: List<A>
|
||||
// b: List<B>
|
||||
// }
|
||||
// }
|
||||
|
||||
const ta = makeStructType('A', [], []);
|
||||
const tb = makeStructType('B', [], []);
|
||||
invariant(ta.desc instanceof StructDesc);
|
||||
ta.desc.fields.push(new Field('b', tb, false));
|
||||
|
||||
invariant(tb.desc instanceof StructDesc);
|
||||
const {fields} = tb.desc;
|
||||
fields.push(new Field('a', makeListType(ta), false), new Field('b', makeListType(tb), false));
|
||||
|
||||
const a = [Kind.Struct, 'A',
|
||||
['b', Kind.Struct, 'B', [
|
||||
'a', Kind.List, Kind.BackRef, 1, false,
|
||||
'b', Kind.List, Kind.BackRef, 0, false,
|
||||
], [], false], [],
|
||||
false, [], false, []];
|
||||
const r = new JsonArrayReader(a, ds);
|
||||
const v = await r.readTopLevelValue();
|
||||
|
||||
assert.isTrue(v.type.equals(ta));
|
||||
assert.isTrue(v.b.type.equals(tb));
|
||||
});
|
||||
});
|
||||
|
||||
js/src/decode.js (223)
@@ -14,18 +14,15 @@ import {
|
||||
getPrimitiveType,
|
||||
makeCompoundType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
makeUnresolvedType,
|
||||
StructDesc,
|
||||
Type,
|
||||
typeType,
|
||||
numberType,
|
||||
} from './type.js';
|
||||
import {indexTypeForMetaSequence, MetaTuple, newMetaSequenceFromData} from './meta-sequence.js';
|
||||
import {invariant, notNull} from './assert.js';
|
||||
import {invariant} from './assert.js';
|
||||
import {isPrimitiveKind, Kind} from './noms-kind.js';
|
||||
import {ListLeafSequence, NomsList} from './list.js';
|
||||
import {lookupPackage, Package, readPackage} from './package.js';
|
||||
import {NomsMap, MapLeafSequence} from './map.js';
|
||||
import {NomsSet, SetLeafSequence} from './set.js';
|
||||
import {IndexedMetaSequence} from './meta-sequence.js';
|
||||
@@ -33,14 +30,6 @@ import {IndexedMetaSequence} from './meta-sequence.js';
|
||||
const typedTag = 't ';
|
||||
const blobTag = 'b ';
|
||||
|
||||
class UnresolvedPackage {
|
||||
pkgRef: Ref;
|
||||
|
||||
constructor(pkgRef: Ref) {
|
||||
this.pkgRef = pkgRef;
|
||||
}
|
||||
}
|
||||
|
||||
export class JsonArrayReader {
|
||||
_a: Array<any>;
|
||||
_i: number;
|
||||
@@ -84,6 +73,12 @@ export class JsonArrayReader {
|
||||
return v;
|
||||
}
|
||||
|
||||
readUint8(): number {
|
||||
const v = this.read();
|
||||
invariant((v & 0xff) === v);
|
||||
return v;
|
||||
}
|
||||
|
||||
readFloat(): number {
|
||||
const next = this.read();
|
||||
invariant(typeof next === 'string');
|
||||
@@ -111,26 +106,27 @@ export class JsonArrayReader {
|
||||
return Ref.parse(next);
|
||||
}
|
||||
|
||||
readTypeAsTag(): Type {
|
||||
readTypeAsTag(backRefs: Type[]): Type {
|
||||
const kind = this.readKind();
|
||||
switch (kind) {
|
||||
case Kind.List:
|
||||
case Kind.Set:
|
||||
case Kind.Ref: {
|
||||
const elemType = this.readTypeAsTag();
|
||||
const elemType = this.readTypeAsTag(backRefs);
|
||||
return makeCompoundType(kind, elemType);
|
||||
}
|
||||
case Kind.Map: {
|
||||
const keyType = this.readTypeAsTag();
|
||||
const valueType = this.readTypeAsTag();
|
||||
const keyType = this.readTypeAsTag(backRefs);
|
||||
const valueType = this.readTypeAsTag(backRefs);
|
||||
return makeCompoundType(kind, keyType, valueType);
|
||||
}
|
||||
case Kind.Type:
|
||||
return typeType;
|
||||
case Kind.Unresolved: {
|
||||
const pkgRef = this.readRef();
|
||||
const ordinal = this.readOrdinal();
|
||||
return makeType(pkgRef, ordinal);
|
||||
case Kind.Struct:
|
||||
return this.readStructType(backRefs);
|
||||
case Kind.BackRef: {
|
||||
const i = this.readUint8();
|
||||
return backRefs[backRefs.length - 1 - i];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,100 +143,65 @@ export class JsonArrayReader {
|
||||
return new BlobLeafSequence(this._ds, bytes);
|
||||
}
|
||||
|
||||
readSequence(t: Type, pkg: ?Package): Array<any> {
|
||||
readSequence(t: Type): Array<any> {
|
||||
const elemType = t.elemTypes[0];
|
||||
const list = [];
|
||||
while (!this.atEnd()) {
|
||||
const v = this.readValueWithoutTag(elemType, pkg);
|
||||
const v = this.readValueWithoutTag(elemType);
|
||||
list.push(v);
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
readListLeafSequence(t: Type, pkg: ?Package): ListLeafSequence {
|
||||
const seq = this.readSequence(t, pkg);
|
||||
readListLeafSequence(t: Type): ListLeafSequence {
|
||||
const seq = this.readSequence(t);
|
||||
return new ListLeafSequence(this._ds, t, seq);
|
||||
}
|
||||
|
||||
readSetLeafSequence(t: Type, pkg: ?Package): SetLeafSequence {
|
||||
const seq = this.readSequence(t, pkg);
|
||||
readSetLeafSequence(t: Type): SetLeafSequence {
|
||||
const seq = this.readSequence(t);
|
||||
return new SetLeafSequence(this._ds, t, seq);
|
||||
}
|
||||
|
||||
readMapLeafSequence(t: Type, pkg: ?Package): MapLeafSequence {
|
||||
readMapLeafSequence(t: Type): MapLeafSequence {
|
||||
const keyType = t.elemTypes[0];
|
||||
const valueType = t.elemTypes[1];
|
||||
const entries = [];
|
||||
while (!this.atEnd()) {
|
||||
const k = this.readValueWithoutTag(keyType, pkg);
|
||||
const v = this.readValueWithoutTag(valueType, pkg);
|
||||
const k = this.readValueWithoutTag(keyType);
|
||||
const v = this.readValueWithoutTag(valueType);
|
||||
entries.push({key: k, value: v});
|
||||
}
|
||||
|
||||
return new MapLeafSequence(this._ds, t, entries);
|
||||
}
|
||||
|
||||
readMetaSequence(t: Type, pkg: ?Package): any {
|
||||
readMetaSequence(t: Type): any {
|
||||
const data: Array<MetaTuple> = [];
|
||||
const indexType = indexTypeForMetaSequence(t);
|
||||
while (!this.atEnd()) {
|
||||
const ref = this.readRef();
|
||||
const v = this.readValueWithoutTag(indexType, pkg);
|
||||
const numLeaves = this.readValueWithoutTag(numberType, pkg);
|
||||
const v = this.readValueWithoutTag(indexType);
|
||||
const numLeaves = this.readValueWithoutTag(numberType);
|
||||
data.push(new MetaTuple(ref, v, numLeaves));
|
||||
}
|
||||
|
||||
return newMetaSequenceFromData(this._ds, t, data);
|
||||
}
|
||||
|
||||
readPackage(t: Type, pkg: ?Package): Package {
|
||||
const r2 = new JsonArrayReader(this.readArray(), this._ds);
|
||||
const types = [];
|
||||
while (!r2.atEnd()) {
|
||||
types.push(r2.readTypeAsValue(pkg));
|
||||
}
|
||||
|
||||
const r3 = new JsonArrayReader(this.readArray(), this._ds);
|
||||
const deps = [];
|
||||
while (!r3.atEnd()) {
|
||||
deps.push(r3.readRef());
|
||||
}
|
||||
|
||||
return new Package(types, deps);
|
||||
}
|
||||
|
||||
readRefValue(t: Type): RefValue {
|
||||
const ref = this.readRef();
|
||||
return new RefValue(ref, t);
|
||||
}
|
||||
|
||||
readTopLevelValue(): Promise<any> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const t = this.readTypeAsTag();
|
||||
const doRead = () => {
|
||||
const i = this._i;
|
||||
|
||||
try {
|
||||
const v = this.readValueWithoutTag(t);
|
||||
resolve(v);
|
||||
} catch (ex) {
|
||||
if (ex instanceof UnresolvedPackage) {
|
||||
readPackage(ex.pkgRef, this._ds).then(() => {
|
||||
this._i = i;
|
||||
doRead();
|
||||
});
|
||||
} else {
|
||||
reject(ex);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
doRead();
|
||||
});
|
||||
const t = this.readTypeAsTag([]);
|
||||
const v = this.readValueWithoutTag(t);
|
||||
return Promise.resolve(v);
|
||||
}
|
||||
|
||||
readValueWithoutTag(t: Type, pkg: ?Package = null): any {
|
||||
readValueWithoutTag(t: Type): any {
|
||||
// TODO: Verify read values match tagged kinds.
|
||||
switch (t.kind) {
|
||||
case Kind.Blob: {
|
||||
@@ -248,7 +209,7 @@ export class JsonArrayReader {
|
||||
let sequence;
|
||||
if (isMeta) {
|
||||
const r2 = new JsonArrayReader(this.readArray(), this._ds);
|
||||
sequence = r2.readMetaSequence(t, pkg);
|
||||
sequence = r2.readMetaSequence(t);
|
||||
invariant(sequence instanceof IndexedMetaSequence);
|
||||
} else {
|
||||
sequence = this.readBlobLeafSequence();
|
||||
@@ -262,67 +223,45 @@ export class JsonArrayReader {
|
||||
case Kind.String:
|
||||
return this.readString();
|
||||
case Kind.Value: {
|
||||
const t2 = this.readTypeAsTag();
|
||||
return this.readValueWithoutTag(t2, pkg);
|
||||
const t2 = this.readTypeAsTag([]);
|
||||
return this.readValueWithoutTag(t2);
|
||||
}
|
||||
case Kind.List: {
|
||||
const isMeta = this.readBool();
|
||||
const r2 = new JsonArrayReader(this.readArray(), this._ds);
|
||||
const sequence = isMeta ?
|
||||
r2.readMetaSequence(t, pkg) :
|
||||
r2.readListLeafSequence(t, pkg);
|
||||
r2.readMetaSequence(t) :
|
||||
r2.readListLeafSequence(t);
|
||||
return new NomsList(t, sequence);
|
||||
}
|
||||
case Kind.Map: {
|
||||
const isMeta = this.readBool();
|
||||
const r2 = new JsonArrayReader(this.readArray(), this._ds);
|
||||
const sequence = isMeta ?
|
||||
r2.readMetaSequence(t, pkg) :
|
||||
r2.readMapLeafSequence(t, pkg);
|
||||
r2.readMetaSequence(t) :
|
||||
r2.readMapLeafSequence(t);
|
||||
return new NomsMap(t, sequence);
|
||||
}
|
||||
case Kind.Package:
|
||||
return this.readPackage(t, pkg);
|
||||
case Kind.Ref:
|
||||
return this.readRefValue(t);
|
||||
case Kind.Set: {
|
||||
const isMeta = this.readBool();
|
||||
const r2 = new JsonArrayReader(this.readArray(), this._ds);
|
||||
const sequence = isMeta ?
|
||||
r2.readMetaSequence(t, pkg) :
|
||||
r2.readSetLeafSequence(t, pkg);
|
||||
r2.readMetaSequence(t) :
|
||||
r2.readSetLeafSequence(t);
|
||||
return new NomsSet(t, sequence);
|
||||
}
|
||||
case Kind.Struct:
|
||||
throw new Error('Not allowed');
|
||||
return this.readStruct(t);
|
||||
case Kind.Type:
|
||||
return this.readTypeAsValue(pkg);
|
||||
case Kind.Unresolved:
|
||||
return this.readUnresolvedKindToValue(t, pkg);
|
||||
return this.readTypeAsValue([]);
|
||||
}
|
||||
|
||||
throw new Error('Unreached');
|
||||
}
|
||||
|
||||
readUnresolvedKindToValue(t: Type, pkg: ?Package = null): any {
|
||||
const pkgRef = t.packageRef;
|
||||
const ordinal = t.ordinal;
|
||||
if (!pkgRef.isEmpty()) {
|
||||
pkg = lookupPackage(pkgRef);
|
||||
if (!pkg) {
|
||||
throw new UnresolvedPackage(pkgRef);
|
||||
}
|
||||
invariant(pkg);
|
||||
}
|
||||
|
||||
pkg = notNull(pkg);
|
||||
const typeDef = pkg.types[ordinal];
|
||||
|
||||
invariant(typeDef.kind === Kind.Struct);
|
||||
return this.readStruct(typeDef, t, pkg);
|
||||
}
|
||||
|
||||
readTypeAsValue(pkg: ?Package): Type {
|
||||
readTypeAsValue(backRefs: Type[]): Type {
|
||||
const k = this.readKind();
|
||||
|
||||
switch (k) {
|
||||
@@ -333,48 +272,24 @@ export class JsonArrayReader {
|
||||
const r2 = new JsonArrayReader(this.readArray(), this._ds);
|
||||
const elemTypes: Array<Type> = [];
|
||||
while (!r2.atEnd()) {
|
||||
elemTypes.push(r2.readTypeAsValue());
|
||||
elemTypes.push(r2.readTypeAsValue(backRefs));
|
||||
}
|
||||
|
||||
return makeCompoundType(k, ...elemTypes);
|
||||
}
|
||||
case Kind.Struct: {
|
||||
const name = this.readString();
|
||||
const readFields = () => {
|
||||
const fields: Array<Field> = [];
|
||||
const fieldReader = new JsonArrayReader(this.readArray(), this._ds);
|
||||
while (!fieldReader.atEnd()) {
|
||||
const fieldName = fieldReader.readString();
|
||||
const fieldType = fieldReader.readTypeAsValue(pkg);
|
||||
const optional = fieldReader.readBool();
|
||||
fields.push(new Field(fieldName, fieldType, optional));
|
||||
}
|
||||
return fields;
|
||||
};
|
||||
case Kind.Struct:
|
||||
return this.readStructType(backRefs);
|
||||
|
||||
const fields = readFields();
|
||||
const choices = readFields();
|
||||
return makeStructType(name, fields, choices);
|
||||
}
|
||||
case Kind.Unresolved: {
|
||||
const pkgRef = this.readRef();
|
||||
const ordinal = this.readOrdinal();
|
||||
if (ordinal === -1) {
|
||||
const namespace = this.readString();
|
||||
const name = this.readString();
|
||||
return makeUnresolvedType(namespace, name);
|
||||
}
|
||||
|
||||
return makeType(pkgRef, ordinal);
|
||||
}
|
||||
case Kind.BackRef:
|
||||
throw new Error('not reachable');
|
||||
}
|
||||
|
||||
invariant(isPrimitiveKind(k));
|
||||
return getPrimitiveType(k);
|
||||
}
|
||||
|
||||
readStruct<T: Struct>(typeDef: Type, type: Type, pkg: Package): T {
|
||||
const desc = typeDef.desc;
|
||||
readStruct<T: Struct>(type: Type): T {
|
||||
const desc = type.desc;
|
||||
invariant(desc instanceof StructDesc);
|
||||
|
||||
const data: {[key: string]: any} = Object.create(null);
|
||||
@@ -384,11 +299,11 @@ export class JsonArrayReader {
|
||||
if (field.optional) {
|
||||
const b = this.readBool();
|
||||
if (b) {
|
||||
const v = this.readValueWithoutTag(field.t, pkg);
|
||||
const v = this.readValueWithoutTag(field.t);
|
||||
data[field.name] = v;
|
||||
}
|
||||
} else {
|
||||
const v = this.readValueWithoutTag(field.t, pkg);
|
||||
const v = this.readValueWithoutTag(field.t);
|
||||
data[field.name] = v;
|
||||
}
|
||||
}
|
||||
@@ -397,11 +312,39 @@ export class JsonArrayReader {
|
||||
if (desc.union.length > 0) {
|
||||
unionIndex = this.readUint();
|
||||
const unionField = desc.union[unionIndex];
|
||||
const v = this.readValueWithoutTag(unionField.t, pkg);
|
||||
const v = this.readValueWithoutTag(unionField.t);
|
||||
data[unionField.name] = v;
|
||||
}
|
||||
|
||||
return newStruct(type, typeDef, data);
|
||||
return newStruct(type, data);
|
||||
}
|
||||
|
||||
readStructType(backRefs: Type[]): Type {
|
||||
const name = this.readString();
|
||||
const fields = [];
|
||||
const choices = [];
|
||||
const structType = makeStructType(name, fields, choices);
|
||||
backRefs = backRefs.concat(structType); // needs to be a copy.
|
||||
const readFields = () => {
|
||||
const fields: Array<Field> = [];
|
||||
const fieldReader = new JsonArrayReader(this.readArray(), this._ds);
|
||||
while (!fieldReader.atEnd()) {
|
||||
const fieldName = fieldReader.readString();
|
||||
const fieldType = fieldReader.readTypeAsTag(backRefs);
|
||||
const optional = fieldReader.readBool();
|
||||
fields.push(new Field(fieldName, fieldType, optional));
|
||||
}
|
||||
return fields;
|
||||
};
|
||||
|
||||
const newFields = readFields();
|
||||
const newChoices = readFields();
|
||||
|
||||
// Update the existing structType to keep object identity.
|
||||
invariant(structType.desc instanceof StructDesc);
|
||||
structType.desc.fields = newFields;
|
||||
structType.desc.union = newChoices;
|
||||
return structType;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import {suite, test} from 'mocha';
|
||||
import {assert} from 'chai';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import {
|
||||
boolType,
|
||||
Field,
|
||||
@@ -10,16 +9,16 @@ import {
|
||||
makeMapType,
|
||||
makeSetType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
numberType,
|
||||
stringType,
|
||||
numberType,
|
||||
valueType,
|
||||
StructDesc,
|
||||
} from './type.js';
|
||||
import {defToNoms} from './defs.js';
|
||||
import {newList} from './list.js';
|
||||
import {newStruct} from './struct.js';
|
||||
import {newSet} from './set.js';
|
||||
import {newMap} from './map.js';
|
||||
import {emptyRef} from './ref.js';
|
||||
import {ValueBase} from './value.js';
|
||||
import {invariant} from './assert.js';
|
||||
|
||||
@@ -92,17 +91,12 @@ suite('defs', () => {
|
||||
});
|
||||
|
||||
test('struct', async () => {
|
||||
let typeDef;
|
||||
const pkg = new Package([
|
||||
typeDef = makeStructType('Struct', [
|
||||
new Field('b', boolType, false),
|
||||
new Field('s', stringType, false),
|
||||
], []),
|
||||
const type = makeStructType('Struct', [
|
||||
new Field('b', boolType, false),
|
||||
new Field('s', stringType, false),
|
||||
], []);
|
||||
registerPackage(pkg);
|
||||
const type = makeType(pkg.ref, 0);
|
||||
|
||||
const s1 = newStruct(type, typeDef, {
|
||||
const s1 = newStruct(type, {
|
||||
b: true,
|
||||
s: 'hi',
|
||||
});
|
||||
@@ -115,17 +109,12 @@ suite('defs', () => {
|
||||
});
|
||||
|
||||
test('struct with list', async () => {
|
||||
let typeDef;
|
||||
const listOfNumberType = makeListType(numberType);
|
||||
const pkg = new Package([
|
||||
typeDef = makeStructType('StructWithList', [
|
||||
new Field('l', listOfNumberType, false),
|
||||
], []),
|
||||
const type = makeStructType('StructWithList', [
|
||||
new Field('l', listOfNumberType, false),
|
||||
], []);
|
||||
registerPackage(pkg);
|
||||
const type = makeType(pkg.ref, 0);
|
||||
|
||||
const s1 = newStruct(type, typeDef, {
|
||||
const s1 = newStruct(type, {
|
||||
l: await newList([0, 1, 2, 3], listOfNumberType),
|
||||
});
|
||||
|
||||
@@ -138,55 +127,44 @@ suite('defs', () => {
|
||||
});
|
||||
|
||||
test('list of struct', async () => {
|
||||
let typeDef;
|
||||
const pkg = new Package([
|
||||
typeDef = makeStructType('Struct', [
|
||||
new Field('i', numberType, false),
|
||||
], []),
|
||||
const structType = makeStructType('Struct', [
|
||||
new Field('i', numberType, false),
|
||||
], []);
|
||||
registerPackage(pkg);
|
||||
const structType = makeType(pkg.ref, 0);
|
||||
const listType = makeListType(structType);
|
||||
|
||||
const l1 = await newList([
|
||||
newStruct(structType, typeDef, {i: 1}),
|
||||
newStruct(structType, typeDef, {i: 2}),
|
||||
newStruct(structType, {i: 1}),
|
||||
newStruct(structType, {i: 2}),
|
||||
], listType);
|
||||
|
||||
const l2 = await defToNoms([{i: 1}, {i: 2}], listType);
|
||||
invariant(l2 instanceof ValueBase);
|
||||
assert.isTrue(l1.equals(l2));
|
||||
|
||||
const l3 = await defToNoms([newStruct(structType, typeDef, {i: 1}), {i: 2}], listType);
|
||||
const l3 = await defToNoms([newStruct(structType, {i: 1}), {i: 2}], listType);
|
||||
invariant(l3 instanceof ValueBase);
|
||||
assert.isTrue(l1.equals(l3));
|
||||
});
|
||||
|
||||
test('recursive struct', async () => {
|
||||
const pkg = new Package([
|
||||
makeStructType('Struct', [
|
||||
new Field('children', makeListType(makeType(emptyRef, 0)), false),
|
||||
], []),
|
||||
const type = makeStructType('Struct', [
|
||||
new Field('children', valueType /* placeholder */, false),
|
||||
], []);
|
||||
registerPackage(pkg);
|
||||
const type = makeType(pkg.ref, 0);
|
||||
const typeDef = makeStructType('Struct', [
|
||||
new Field('children', makeListType(makeType(pkg.ref, 0)), false),
|
||||
], []);
|
||||
|
||||
const listType = makeListType(type);
|
||||
invariant(type.desc instanceof StructDesc);
|
||||
type.desc.fields[0].t = listType;
|
||||
|
||||
const a = await newList([], listType);
|
||||
const b = await newList([], listType);
|
||||
const x = newStruct(type, typeDef, {
|
||||
const x = newStruct(type, {
|
||||
children: a,
|
||||
});
|
||||
const y = newStruct(type, typeDef, {
|
||||
const y = newStruct(type, {
|
||||
children: b,
|
||||
});
|
||||
const c = await newList([x, y], listType);
|
||||
|
||||
const t1 = newStruct(type, typeDef, {
|
||||
const t1 = newStruct(type, {
|
||||
children: c,
|
||||
});
|
||||
|
||||
|
||||
@@ -2,23 +2,21 @@
|
||||
|
||||
import type {valueOrPrimitive} from './value.js';
|
||||
import {ValueBase} from './value.js';
|
||||
import {Type, CompoundDesc, StructDesc, makeType} from './type.js';
|
||||
import {Type, CompoundDesc, StructDesc} from './type.js';
|
||||
import type {Field} from './type.js';
|
||||
import {invariant, notNull} from './assert.js';
|
||||
import {invariant} from './assert.js';
|
||||
import {Kind} from './noms-kind.js';
|
||||
import {newList} from './list.js';
|
||||
import {newSet} from './set.js';
|
||||
import {newMap} from './map.js';
|
||||
import {newBlob} from './blob.js';
|
||||
import {lookupPackage} from './package.js';
|
||||
import type {Package} from './package.js';
|
||||
import type Struct from './struct.js';
|
||||
import {newStruct} from './struct.js';
|
||||
|
||||
type StructDefType = {[name: string]: DefType};
|
||||
type DefType = number | string | boolean | Array<DefType> | StructDefType | Uint8Array | ValueBase;
|
||||
|
||||
export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise<valueOrPrimitive> {
|
||||
export async function defToNoms(v: DefType, t: Type): Promise<valueOrPrimitive> {
|
||||
switch (typeof v) {
|
||||
case 'number':
|
||||
case 'boolean':
|
||||
@@ -41,7 +39,7 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise<val
|
||||
invariant(v instanceof Array);
|
||||
invariant(t.desc instanceof CompoundDesc);
|
||||
const vt = t.desc.elemTypes[0];
|
||||
const vs = await Promise.all(v.map(e => defToNoms(e, vt, pkg)));
|
||||
const vs = await Promise.all(v.map(e => defToNoms(e, vt)));
|
||||
return newList(vs, t);
|
||||
}
|
||||
|
||||
@@ -49,7 +47,7 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise<val
|
||||
invariant(v instanceof Array);
|
||||
invariant(t.desc instanceof CompoundDesc);
|
||||
const vt = t.desc.elemTypes[0];
|
||||
const vs = await Promise.all(v.map(e => defToNoms(e, vt, pkg)));
|
||||
const vs = await Promise.all(v.map(e => defToNoms(e, vt)));
|
||||
return newSet(vs, t);
|
||||
}
|
||||
|
||||
@@ -57,7 +55,7 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise<val
|
||||
invariant(v instanceof Array);
|
||||
invariant(t.desc instanceof CompoundDesc);
|
||||
const ets = t.desc.elemTypes;
|
||||
const vs = await Promise.all(v.map((e, i) => defToNoms(e, ets[i % 2], pkg)));
|
||||
const vs = await Promise.all(v.map((e, i) => defToNoms(e, ets[i % 2])));
|
||||
return newMap(vs, t);
|
||||
}
|
||||
|
||||
@@ -65,16 +63,9 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise<val
|
||||
invariant(v instanceof Uint8Array);
|
||||
return newBlob(v);
|
||||
|
||||
case Kind.Unresolved: {
|
||||
if (t.hasPackageRef) {
|
||||
pkg = lookupPackage(t.packageRef);
|
||||
} else {
|
||||
t = makeType(notNull(pkg).ref, t.ordinal);
|
||||
}
|
||||
const typeDef = notNull(pkg).types[t.ordinal];
|
||||
invariant(typeDef.kind === Kind.Struct);
|
||||
case Kind.Struct: {
|
||||
invariant(v instanceof Object);
|
||||
return structDefToNoms(v, t, typeDef, pkg);
|
||||
return structDefToNoms(v, t);
|
||||
}
|
||||
|
||||
default:
|
||||
@@ -82,9 +73,8 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise<val
|
||||
}
|
||||
}
|
||||
|
||||
async function structDefToNoms<T: Struct>(data: StructDefType, type: Type, typeDef: Type,
|
||||
pkg: ?Package): Promise<T> {
|
||||
const {desc} = typeDef;
|
||||
async function structDefToNoms<T: Struct>(data: StructDefType, type: Type): Promise<T> {
|
||||
const {desc} = type;
|
||||
invariant(desc instanceof StructDesc);
|
||||
const keys = [];
|
||||
const ps: Array<Promise<valueOrPrimitive>> = [];
|
||||
@@ -92,7 +82,7 @@ async function structDefToNoms<T: Struct>(data: StructDefType, type: Type, typeD
|
||||
const v = data[f.name];
|
||||
if (v !== undefined) {
|
||||
keys.push(f.name);
|
||||
ps.push(defToNoms(v, f.t, pkg));
|
||||
ps.push(defToNoms(v, f.t));
|
||||
}
|
||||
};
|
||||
desc.fields.forEach(add);
|
||||
@@ -103,5 +93,5 @@ async function structDefToNoms<T: Struct>(data: StructDefType, type: Type, typeD
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
newData[keys[i]] = vals[i];
|
||||
}
|
||||
return newStruct(type, typeDef, newData);
|
||||
return newStruct(type, newData);
|
||||
}
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
// @flow
|
||||
|
||||
import {ValueBase} from './value.js';
|
||||
|
||||
export default function describeType(v: any): string {
|
||||
const t = typeof v;
|
||||
if (t === 'object') {
|
||||
if (v === null) {
|
||||
return 'null';
|
||||
}
|
||||
if (v instanceof ValueBase) {
|
||||
return v.type.describe();
|
||||
}
|
||||
}
|
||||
return t;
|
||||
}
|
||||
js/src/encode-human-readable-test.js (122, new file)
@@ -0,0 +1,122 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';

import {TypeWriter} from './encode-human-readable.js';
import {invariant} from './assert.js';
import {
  blobType,
  boolType,
  Field,
  numberType,
  makeRefType,
  makeListType,
  makeMapType,
  makeSetType,
  makeStructType,
  stringType,
  valueType,
  Type,
  StructDesc,
} from './type.js';

suite('Encode human readable types', () => {
  function assertWriteType(expected: string, t: Type) {
    let actual = '';
    const w = {
      write(s: string) {
        actual += s;
      },
    };
    const tw = new TypeWriter(w);
    tw.writeType(t);
    assert.equal(actual, expected);
  }

  test('primitives', () => {
    assertWriteType('Bool', boolType);
    assertWriteType('Blob', blobType);
    assertWriteType('String', stringType);
    assertWriteType('Number', numberType);
  });

  test('compound', () => {
    assertWriteType('List<Number>', makeListType(numberType));
    assertWriteType('Set<Number>', makeSetType(numberType));
    assertWriteType('Ref<Number>', makeRefType(numberType));
    assertWriteType('Map<Number, String>', makeMapType(numberType, stringType));
  });

  test('struct', () => {
    const type = makeStructType('S1', [
      new Field('x', numberType, false),
      new Field('y', numberType, true),
    ], []);
    assertWriteType('struct S1 {\n  x: Number\n  y: optional Number\n}', type);
  });

  test('struct with union', () => {
    const type = makeStructType('S1', [], [
      new Field('x', numberType, false),
      new Field('y', numberType, true),
    ]);
    assertWriteType('struct S1 {\n  union {\n    x: Number\n    y: optional Number\n  }\n}', type);
  });

  test('list of struct', () => {
    const type = makeStructType('S3', [
      new Field('x', numberType, false),
    ], []);
    assertWriteType('List<struct S3 {\n  x: Number\n}>', makeListType(type));
  });

  test('recursive struct', () => {
    // struct A {
    //   b: A
    //   c: List<A>
    //   d: struct D {
    //     e: D
    //     f: A
    //   }
    // }

    const a = makeStructType('A', [
      new Field('b', valueType /* placeholder */, false),
      new Field('c', valueType /* placeholder */, false),
      new Field('d', valueType /* placeholder */, false),
    ], []);
    const d = makeStructType('D', [
      new Field('e', valueType /* placeholder */, false),
      new Field('f', a, false),
    ], []);
    const aDesc = a.desc;
    invariant(aDesc instanceof StructDesc);
    const dDesc = d.desc;
    invariant(dDesc instanceof StructDesc);
    aDesc.fields[0].t = a;
    aDesc.fields[2].t = d;
    dDesc.fields[0].t = d;
    dDesc.fields[1].t = a;
    aDesc.fields[1].t = makeListType(a);


    assertWriteType(`struct A {
  b: BackRef<0>
  c: List<BackRef<0>>
  d: struct D {
    e: BackRef<0>
    f: BackRef<1>
  }
}`, a);

    assertWriteType(`struct D {
  e: BackRef<0>
  f: struct A {
    b: BackRef<0>
    c: List<BackRef<0>>
    d: BackRef<1>
  }
}`, d);
  });
});
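
A note for readers tracing the BackRef indices asserted above: the index is the number of struct scopes to walk back from the point of use, so a field that refers to its own immediately enclosing struct prints BackRef<0>, while a field of the nested D that refers back to the outer A prints BackRef<1>. A minimal sketch of that lookup, mirroring _writeStructType in encode-human-readable.js (the helper name here is illustrative only):

import type {Type} from './type.js';

// Given the stack of structs currently being written (outermost first) and the
// struct a field points back at, compute the printed BackRef index.
function backRefIndex(backRefs: Type[], target: Type): number {
  const idx = backRefs.indexOf(target);
  // -1 means the target is not an enclosing struct; it is written inline instead.
  return idx === -1 ? -1 : backRefs.length - idx - 1;
}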
|
||||
js/src/encode-human-readable.js (new file, 186 lines)
@@ -0,0 +1,186 @@
// @flow

import {StructDesc, BackRefDesc, CompoundDesc} from './type.js';
import type {Field, Type} from './type.js';
import {Kind, kindToString} from './noms-kind.js';
import type {NomsKind} from './noms-kind.js';
import {invariant} from './assert.js';
import type {Value} from './value.js';
import {ValueBase} from './value.js';

export interface StringWriter {
  write(s: string): void;
}

class Writer {
  ind: number;
  w: StringWriter;
  lineLength: number;

  constructor(w: StringWriter) {
    this.ind = 0;
    this.w = w;
    this.lineLength = 0;
  }

  maybeWriteIndentation() {
    if (this.lineLength === 0) {
      for (let i = 0; i < this.ind; i++) {
        this.w.write('  ');
      }
      this.lineLength = 2 * this.ind;
    }
  }

  write(s: string) {
    this.maybeWriteIndentation();
    this.w.write(s);
    this.lineLength += s.length;
  }

  indent() {
    this.ind++;
  }

  outdent() {
    this.ind--;
  }

  newLine() {
    this.write('\n');
    this.lineLength = 0;
  }

  writeKind(k: NomsKind) {
    this.write(kindToString(k));
  }
}

export class TypeWriter {
  _w: Writer;

  constructor(w: StringWriter) {
    this._w = new Writer(w);
  }

  writeType(t: Type) {
    this._writeType(t, []);
  }

  _writeType(t: Type, backRefs: Type[]) {
    switch (t.kind) {
      case Kind.Blob:
      case Kind.Bool:
      case Kind.Number:
      case Kind.String:
      case Kind.Type:
      case Kind.Value:
        this._w.writeKind(t.kind);
        break;
      case Kind.List:
      case Kind.Ref:
      case Kind.Set:
        this._w.writeKind(t.kind);
        this._w.write('<');
        invariant(t.desc instanceof CompoundDesc);
        this._writeType(t.desc.elemTypes[0], backRefs);
        this._w.write('>');
        break;
      case Kind.Map: {
        this._w.writeKind(t.kind);
        this._w.write('<');
        invariant(t.desc instanceof CompoundDesc);
        const [keyType, valueType] = t.desc.elemTypes;
        this._writeType(keyType, backRefs);
        this._w.write(', ');
        this._writeType(valueType, backRefs);
        this._w.write('>');
        break;
      }
      case Kind.Struct:
        this._writeStructType(t, backRefs);
        break;
      case Kind.BackRef:
        invariant(t.desc instanceof BackRefDesc);
        this._writeBackRef(t.desc.value);
        break;
      default:
        throw new Error('unreachable');
    }
  }

  _writeBackRef(i: number) {
    this._w.write(`BackRef<${i}>`);
  }

  _writeStructType(t: Type, backRefs: Type[]) {
    const idx = backRefs.indexOf(t);
    if (idx !== -1) {
      this._writeBackRef(backRefs.length - idx - 1);
      return;
    }
    backRefs = backRefs.concat(t);

    const desc = t.desc;
    invariant(desc instanceof StructDesc);
    this._w.write('struct ');
    this._w.write(desc.name);
    this._w.write(' {');
    this._w.indent();
    let i = 0;
    const writeField = (f: Field) => {
      if (i === 0) {
        this._w.newLine();
      }
      this._w.write(f.name);
      this._w.write(': ');
      if (f.optional) {
        this._w.write('optional ');
      }
      this._writeType(f.t, backRefs);
      this._w.newLine();
      i++;
    };
    desc.fields.forEach(writeField);

    if (desc.union.length > 0) {
      if (i === 0) {
        this._w.newLine();
        i++;
      }
      this._w.write('union {');
      this._w.indent();
      i = 0;
      desc.union.forEach(writeField);
      this._w.outdent();
      this._w.write('}');
      this._w.newLine();
    }
    this._w.outdent();
    this._w.write('}');
  }
}

export function describeType(t: Type) {
  let s = '';
  const w = new TypeWriter({
    write(s2: string) {
      s += s2;
    },
  });
  w.writeType(t);
  return s;
}

export function describeTypeOfValue(v: Value) {
  const t = typeof v;
  if (t === 'object') {
    if (v === null) {
      return 'null';
    }
    if (v instanceof ValueBase) {
      return describeType(v.type);
    }
  }
  return t;
}
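
As a quick usage sketch (using only exports that appear in this diff), describeType renders a type to the same human-readable form the tests above assert:

import {makeListType, makeMapType, numberType, stringType} from './type.js';
import {describeType} from './encode-human-readable.js';

// Prints "Map<Number, String>" and "List<Number>".
console.log(describeType(makeMapType(numberType, stringType)));
console.log(describeType(makeListType(numberType)));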
|
||||
@@ -4,7 +4,7 @@ import {assert} from 'chai';
|
||||
import {suite, test} from 'mocha';
|
||||
|
||||
import MemoryStore from './memory-store.js';
|
||||
import {default as Ref, emptyRef} from './ref.js';
|
||||
import Ref from './ref.js';
|
||||
import RefValue from './ref-value.js';
|
||||
import {newStruct} from './struct.js';
|
||||
import type {NomsKind} from './noms-kind.js';
|
||||
@@ -18,20 +18,20 @@ import {
|
||||
makeMapType,
|
||||
makeSetType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
numberType,
|
||||
stringType,
|
||||
Type,
|
||||
valueType,
|
||||
StructDesc,
|
||||
} from './type.js';
|
||||
import {IndexedMetaSequence, MetaTuple, OrderedMetaSequence} from './meta-sequence.js';
|
||||
import {Kind} from './noms-kind.js';
|
||||
import {newList, ListLeafSequence, NomsList} from './list.js';
|
||||
import {newMap, MapLeafSequence, NomsMap} from './map.js';
|
||||
import {newSet, NomsSet, SetLeafSequence} from './set.js';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import {newBlob} from './blob.js';
|
||||
import DataStore from './data-store.js';
|
||||
import {invariant} from './assert.js';
|
||||
|
||||
suite('Encode', () => {
|
||||
test('write primitives', () => {
|
||||
@@ -189,16 +189,11 @@ suite('Encode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const w = new JsonArrayWriter(ds);
|
||||
|
||||
const typeDef = makeStructType('S', [], []);
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const v = newStruct(type, typeDef, {});
|
||||
const type = makeStructType('S', [], []);
|
||||
const v = newStruct(type, {});
|
||||
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0'], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S', [], []], w.array);
|
||||
});
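
The shape of this change repeats through the rest of these tests: with the struct type inlined into the chunk, the Package/registerPackage/makeType indirection disappears. A condensed before-and-after sketch, using only calls that appear in this diff:

// Before: struct types lived in a Package and were referenced by ordinal.
// const typeDef = makeStructType('S', [], []);
// const pkg = new Package([typeDef], []);
// registerPackage(pkg);
// const type = makeType(pkg.ref, 0);
// const v = newStruct(type, typeDef, {});

// After: the struct type is self-describing and used directly.
const type = makeStructType('S', [], []);
const v = newStruct(type, {});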
|
||||
|
||||
test('write struct', async() => {
|
||||
@@ -206,19 +201,16 @@ suite('Encode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const w = new JsonArrayWriter(ds);
|
||||
|
||||
const typeDef = makeStructType('S', [
|
||||
const type = makeStructType('S', [
|
||||
new Field('x', numberType, false),
|
||||
new Field('b', boolType, false),
|
||||
], []);
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const v = newStruct(type, typeDef, {x: 42, b: true});
|
||||
const v = newStruct(type, {x: 42, b: true});
|
||||
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', true], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S', ['x', Kind.Number, false, 'b', Kind.Bool, false], [],
|
||||
'42', true], w.array);
|
||||
});
|
||||
|
||||
test('write struct optional field', async() => {
|
||||
@@ -226,23 +218,21 @@ suite('Encode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
let w = new JsonArrayWriter(ds);
|
||||
|
||||
const typeDef = makeStructType('S', [
|
||||
const type = makeStructType('S', [
|
||||
new Field('x', numberType, true),
|
||||
new Field('b', boolType, false),
|
||||
], []);
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
let v = newStruct(type, typeDef, {x: 42, b: true});
|
||||
let v = newStruct(type, {x: 42, b: true});
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', true, '42', true], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S',
|
||||
['x', Kind.Number, true, 'b', Kind.Bool, false], [], true, '42', true], w.array);
|
||||
|
||||
v = newStruct(type, typeDef, {b: true});
|
||||
v = newStruct(type, {b: true});
|
||||
w = new JsonArrayWriter(ds);
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, true], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S', ['x', Kind.Number, true, 'b', Kind.Bool, false], [],
|
||||
false, true], w.array);
|
||||
});
|
||||
|
||||
test('write struct with union', async() => {
|
||||
@@ -250,25 +240,25 @@ suite('Encode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
let w = new JsonArrayWriter(ds);
|
||||
|
||||
const typeDef = makeStructType('S', [
|
||||
const type = makeStructType('S', [
|
||||
new Field('x', numberType, false),
|
||||
], [
|
||||
new Field('b', boolType, false),
|
||||
new Field('s', stringType, false),
|
||||
]);
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
let v = newStruct(type, typeDef, {x: 42, s: 'hi'});
|
||||
let v = newStruct(type, {x: 42, s: 'hi'});
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', '1', 'hi'], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S',
|
||||
['x', Kind.Number, false], ['b', Kind.Bool, false, 's', Kind.String, false], '42', '1', 'hi'],
|
||||
w.array);
|
||||
|
||||
v = newStruct(type, typeDef, {x: 42, b: true});
|
||||
v = newStruct(type, {x: 42, b: true});
|
||||
w = new JsonArrayWriter(ds);
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', '0', true], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S',
|
||||
['x', Kind.Number, false], ['b', Kind.Bool, false, 's', Kind.String, false], '42', '0', true],
|
||||
w.array);
|
||||
});
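
For readers decoding the expected arrays in this test: after the ordinary field values, a struct with a union writes the index of the active union choice followed by that choice's value. An annotated copy of the first expectation (the comments are mine, the array comes from the test above):

const expected = [Kind.Struct, 'S',
  ['x', Kind.Number, false],                        // ordinary fields: name, kind, optional
  ['b', Kind.Bool, false, 's', Kind.String, false], // union choices
  '42',                                             // value of x
  '1', 'hi'];                                       // union index 1 (the 's' choice) and its value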
|
||||
|
||||
test('write struct with list', async() => {
|
||||
@@ -277,23 +267,20 @@ suite('Encode', () => {
|
||||
let w = new JsonArrayWriter(ds);
|
||||
|
||||
const ltr = makeCompoundType(Kind.List, stringType);
|
||||
const typeDef = makeStructType('S', [
|
||||
const type = makeStructType('S', [
|
||||
new Field('l', ltr, false),
|
||||
], []);
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
let v = newStruct(type, typeDef, {l: new NomsList(ltr,
|
||||
new ListLeafSequence(ds, ltr, ['a', 'b']))});
|
||||
let v = newStruct(type, {l: new NomsList(ltr, new ListLeafSequence(ds, ltr, ['a', 'b']))});
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, ['a', 'b']], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S',
|
||||
['l', Kind.List, Kind.String, false], [], false, ['a', 'b']], w.array);
|
||||
|
||||
v = newStruct(type, typeDef, {l: new NomsList(ltr, new ListLeafSequence(ds, ltr, []))});
|
||||
v = newStruct(type, {l: new NomsList(ltr, new ListLeafSequence(ds, ltr, []))});
|
||||
w = new JsonArrayWriter(ds);
|
||||
w.writeTopLevel(type, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, []], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S', ['l', Kind.List, Kind.String, false], [], false, []],
|
||||
w.array);
|
||||
});
|
||||
|
||||
test('write struct with struct', async () => {
|
||||
@@ -301,30 +288,24 @@ suite('Encode', () => {
|
||||
const ds = new DataStore(ms);
|
||||
const w = new JsonArrayWriter(ds);
|
||||
|
||||
const s2TypeDef = makeStructType('S2', [
|
||||
const s2Type = makeStructType('S2', [
|
||||
new Field('x', numberType, false),
|
||||
], []);
|
||||
let sTypeDef = makeStructType('S', [
|
||||
new Field('s', makeType(emptyRef, 0), false),
|
||||
const sType = makeStructType('S', [
|
||||
new Field('s', s2Type, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([s2TypeDef, sTypeDef], []);
|
||||
registerPackage(pkg);
|
||||
sTypeDef = pkg.types[1];
|
||||
const pkgRef = pkg.ref;
|
||||
const s2Type = makeType(pkgRef, 0);
|
||||
const sType = makeType(pkgRef, 1);
|
||||
|
||||
const v = newStruct(sType, sTypeDef, {s: newStruct(s2Type, s2TypeDef, {x: 42})});
|
||||
const v = newStruct(sType, {s: newStruct(s2Type, {x: 42})});
|
||||
w.writeTopLevel(sType, v);
|
||||
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '1', '42'], w.array);
|
||||
assert.deepEqual([Kind.Struct, 'S',
|
||||
['s', Kind.Struct, 'S2', ['x', Kind.Number, false], [], false], [], '42'], w.array);
|
||||
});
|
||||
|
||||
test('write compound list', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const w = new JsonArrayWriter(ds);
|
||||
const ltr = makeCompoundType(Kind.List, numberType);
|
||||
const ltr = makeListType(numberType);
|
||||
const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0]))).targetRef;
|
||||
const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [1, 2]))).targetRef;
|
||||
const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [3, 4, 5]))).targetRef;
|
||||
@@ -347,7 +328,7 @@ suite('Encode', () => {
|
||||
const test = (expected: Array<any>, v: Type) => {
|
||||
const w = new JsonArrayWriter(ds);
|
||||
w.writeTopLevel(v.type, v);
|
||||
assert.deepEqual(expected, w.array);
|
||||
assert.deepEqual(w.array, expected);
|
||||
};
|
||||
|
||||
test([Kind.Type, Kind.Number], numberType);
|
||||
@@ -366,18 +347,29 @@ suite('Encode', () => {
|
||||
new Field('v', valueType, false),
|
||||
]));
|
||||
|
||||
const pkgRef = Ref.parse('sha1-0123456789abcdef0123456789abcdef01234567');
|
||||
test([Kind.Type, Kind.Unresolved, pkgRef.toString(), '123'], makeType(pkgRef, 123));
|
||||
|
||||
test([Kind.Type, Kind.Struct, 'S',
|
||||
['e', Kind.Unresolved, pkgRef.toString(), '123', false, 'x', Kind.Number, false], []],
|
||||
['e', Kind.Bool, true, 'x', Kind.Number, false], []],
|
||||
makeStructType('S', [
|
||||
new Field('e', makeType(pkgRef, 123), false),
|
||||
new Field('e', boolType, true),
|
||||
new Field('x', numberType, false),
|
||||
], []));
|
||||
|
||||
// test([Kind.Type, Kind.Unresolved, new Ref().toString(), -1, 'ns', 'n'],
|
||||
// makeUnresolvedType('ns', 'n'));
|
||||
|
||||
// struct A6 {
|
||||
// v: Number
|
||||
// cs: List<A6>
|
||||
// }
|
||||
|
||||
const st = makeStructType('A6', [
|
||||
new Field('v', numberType, false),
|
||||
new Field('cs', valueType /* placeholder */, false),
|
||||
], []);
|
||||
const lt = makeListType(st);
|
||||
invariant(st.desc instanceof StructDesc);
|
||||
st.desc.fields[1].t = lt;
|
||||
|
||||
test([Kind.Type, Kind.Struct, 'A6',
|
||||
['v', Kind.Number, false, 'cs', Kind.List, Kind.BackRef, 0, false], []], st);
|
||||
});
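
The final expectation above also shows how a type cycle is broken on the wire: inside A6, the element type of cs is written as the BackRef kind plus a distance of 0 instead of re-serializing A6. The same array, annotated (comments are mine):

const expected = [Kind.Type,                 // the tagged value is itself a Type
  Kind.Struct, 'A6',
  ['v', Kind.Number, false,
   'cs', Kind.List, Kind.BackRef, 0, false], // cs: List<BackRef<0>>, i.e. List<A6>
  []];                                       // no union choices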
|
||||
|
||||
test('top level blob', async () => {
|
||||
|
||||
js/src/encode.js (244 lines)
@@ -2,24 +2,22 @@
|
||||
|
||||
import Chunk from './chunk.js';
|
||||
import type Ref from './ref.js';
|
||||
import {emptyRef} from './ref.js';
|
||||
import RefValue from './ref-value.js';
|
||||
import {default as Struct, StructMirror} from './struct.js';
|
||||
import type DataStore from './data-store.js';
|
||||
import type {NomsKind} from './noms-kind.js';
|
||||
import {encode as encodeBase64} from './base64.js';
|
||||
import {boolType, stringType, StructDesc, Type, typeType, numberType} from './type.js';
|
||||
import {StructDesc, Type, numberType, getTypeOfValue} from './type.js';
|
||||
import {indexTypeForMetaSequence, MetaTuple} from './meta-sequence.js';
|
||||
import {invariant, notNull} from './assert.js';
|
||||
import {invariant} from './assert.js';
|
||||
import {isPrimitiveKind, Kind} from './noms-kind.js';
|
||||
import {ListLeafSequence, NomsList} from './list.js';
|
||||
import {lookupPackage, Package} from './package.js';
|
||||
import {MapLeafSequence, NomsMap} from './map.js';
|
||||
import {NomsSet, SetLeafSequence} from './set.js';
|
||||
import {Sequence} from './sequence.js';
|
||||
import {setEncodeNomsValue} from './get-ref.js';
|
||||
import {NomsBlob, BlobLeafSequence} from './blob.js';
|
||||
import describeType from './describe-type.js';
|
||||
import {describeTypeOfValue} from './encode-human-readable.js';
|
||||
|
||||
const typedTag = 't ';
|
||||
|
||||
@@ -52,6 +50,10 @@ export class JsonArrayWriter {
|
||||
this.write(n.toFixed(0));
|
||||
}
|
||||
|
||||
writeUint8(n: number) {
|
||||
this.write(n);
|
||||
}
|
||||
|
||||
writeKind(k: NomsKind) {
|
||||
this.write(k);
|
||||
}
|
||||
@@ -60,40 +62,30 @@ export class JsonArrayWriter {
|
||||
this.write(r.toString());
|
||||
}
|
||||
|
||||
writeTypeAsTag(t: Type) {
|
||||
writeTypeAsTag(t: Type, backRefs: Type[]) {
|
||||
const k = t.kind;
|
||||
this.writeKind(k);
|
||||
switch (k) {
|
||||
case Kind.Struct:
|
||||
throw new Error('Unreachable');
|
||||
case Kind.List:
|
||||
case Kind.Map:
|
||||
case Kind.Ref:
|
||||
case Kind.Set: {
|
||||
t.elemTypes.forEach(elemType => this.writeTypeAsTag(elemType));
|
||||
case Kind.Set:
|
||||
this.writeKind(k);
|
||||
t.elemTypes.forEach(elemType => this.writeTypeAsTag(elemType, backRefs));
|
||||
break;
|
||||
}
|
||||
case Kind.Unresolved: {
|
||||
const pkgRef = t.packageRef;
|
||||
invariant(!pkgRef.isEmpty());
|
||||
this.writeRef(pkgRef);
|
||||
this.writeInt(t.ordinal);
|
||||
|
||||
const pkg = lookupPackage(pkgRef);
|
||||
if (pkg && this._ds) {
|
||||
this._ds.writeValue(pkg);
|
||||
}
|
||||
case Kind.Struct:
|
||||
this.writeStructType(t, backRefs);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
this.writeKind(k);
|
||||
}
|
||||
}
|
||||
|
||||
writeTopLevel(t: Type, v: any) {
|
||||
this.writeTypeAsTag(t);
|
||||
this.writeTypeAsTag(t, []);
|
||||
this.writeValue(v, t);
|
||||
}
|
||||
|
||||
maybeWriteMetaSequence(v: Sequence, t: Type, pkg: ?Package): boolean {
|
||||
maybeWriteMetaSequence(v: Sequence, t: Type): boolean {
|
||||
if (!v.isMeta) {
|
||||
this.write(false);
|
||||
return false;
|
||||
@@ -110,21 +102,21 @@ export class JsonArrayWriter {
|
||||
this._ds.writeValue(child);
|
||||
}
|
||||
w2.writeRef(tuple.ref);
|
||||
w2.writeValue(tuple.value, indexType, pkg);
|
||||
w2.writeValue(tuple.numLeaves, numberType, pkg);
|
||||
w2.writeValue(tuple.value, indexType);
|
||||
w2.writeValue(tuple.numLeaves, numberType);
|
||||
}
|
||||
this.write(w2.array);
|
||||
return true;
|
||||
}
|
||||
|
||||
writeValue(v: any, t: Type, pkg: ?Package) {
|
||||
writeValue(v: any, t: Type) {
|
||||
switch (t.kind) {
|
||||
case Kind.Blob: {
|
||||
invariant(v instanceof NomsBlob || v instanceof Sequence,
|
||||
`Failed to write Blob. Invalid type: ${describeType(v)}`);
|
||||
`Failed to write Blob. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
const sequence: Sequence = v instanceof NomsBlob ? v.sequence : v;
|
||||
|
||||
if (this.maybeWriteMetaSequence(sequence, t, pkg)) {
|
||||
if (this.maybeWriteMetaSequence(sequence, t)) {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -133,41 +125,42 @@ export class JsonArrayWriter {
|
||||
break;
|
||||
}
|
||||
case Kind.Bool:
|
||||
invariant(typeof v === 'boolean', `Failed to write Bool. Invalid type: ${describeType(v)}`);
|
||||
invariant(typeof v === 'boolean',
|
||||
`Failed to write Bool. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
this.write(v);
|
||||
break;
|
||||
case Kind.String:
|
||||
invariant(typeof v === 'string',
|
||||
`Failed to write String. Invalid type: ${describeType(v)}`);
|
||||
`Failed to write String. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
this.write(v);
|
||||
break;
|
||||
case Kind.Number:
|
||||
invariant(typeof v === 'number',
|
||||
`Failed to write ${t.describe()}. Invalid type: ${describeType(v)}`);
|
||||
this.writeFloat(v); // TODO: Verify value fits in type
|
||||
`Failed to write Number. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
this.writeFloat(v);
|
||||
break;
|
||||
case Kind.List: {
|
||||
invariant(v instanceof NomsList || v instanceof Sequence,
|
||||
`Failed to write List. Invalid type: ${describeType(v)}`);
|
||||
`Failed to write List. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
const sequence: Sequence = v instanceof NomsList ? v.sequence : v;
|
||||
|
||||
if (this.maybeWriteMetaSequence(sequence, t, pkg)) {
|
||||
if (this.maybeWriteMetaSequence(sequence, t)) {
|
||||
break;
|
||||
}
|
||||
|
||||
invariant(sequence instanceof ListLeafSequence);
|
||||
const w2 = new JsonArrayWriter(this._ds);
|
||||
const elemType = t.elemTypes[0];
|
||||
sequence.items.forEach(sv => w2.writeValue(sv, elemType, pkg));
|
||||
sequence.items.forEach(sv => w2.writeValue(sv, elemType));
|
||||
this.write(w2.array);
|
||||
break;
|
||||
}
|
||||
case Kind.Map: {
|
||||
invariant(v instanceof NomsMap || v instanceof Sequence,
|
||||
`Failed to write Map. Invalid type: ${describeType(v)}`);
|
||||
`Failed to write Map. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
const sequence: Sequence = v instanceof NomsMap ? v.sequence : v;
|
||||
|
||||
if (this.maybeWriteMetaSequence(sequence, t, pkg)) {
|
||||
if (this.maybeWriteMetaSequence(sequence, t)) {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -176,35 +169,24 @@ export class JsonArrayWriter {
|
||||
const keyType = t.elemTypes[0];
|
||||
const valueType = t.elemTypes[1];
|
||||
sequence.items.forEach(entry => {
|
||||
w2.writeValue(entry.key, keyType, pkg);
|
||||
w2.writeValue(entry.value, valueType, pkg);
|
||||
w2.writeValue(entry.key, keyType);
|
||||
w2.writeValue(entry.value, valueType);
|
||||
});
|
||||
this.write(w2.array);
|
||||
break;
|
||||
}
|
||||
case Kind.Package: {
|
||||
invariant(v instanceof Package,
|
||||
`Failed to write Package. Invalid type: ${describeType(v)}`);
|
||||
const w2 = new JsonArrayWriter(this._ds);
|
||||
v.types.forEach(type => w2.writeValue(type, typeType, v));
|
||||
this.write(w2.array);
|
||||
const w3 = new JsonArrayWriter(this._ds);
|
||||
v.dependencies.forEach(ref => w3.writeRef(ref));
|
||||
this.write(w3.array);
|
||||
break;
|
||||
}
|
||||
case Kind.Ref: {
|
||||
invariant(v instanceof RefValue,
|
||||
`Failed to write Ref. Invalid type: ${describeType(v)}`);
|
||||
`Failed to write Ref. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
this.writeRef(v.targetRef);
|
||||
break;
|
||||
}
|
||||
case Kind.Set: {
|
||||
invariant(v instanceof NomsSet || v instanceof Sequence,
|
||||
`Failed to write Set. Invalid type: ${describeType(v)}`);
|
||||
`Failed to write Set. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
const sequence: Sequence = v instanceof NomsSet ? v.sequence : v;
|
||||
|
||||
if (this.maybeWriteMetaSequence(sequence, t, pkg)) {
|
||||
if (this.maybeWriteMetaSequence(sequence, t)) {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -215,166 +197,118 @@ export class JsonArrayWriter {
|
||||
sequence.items.forEach(v => {
|
||||
elems.push(v);
|
||||
});
|
||||
elems.forEach(elem => w2.writeValue(elem, elemType, pkg));
|
||||
elems.forEach(elem => w2.writeValue(elem, elemType));
|
||||
this.write(w2.array);
|
||||
break;
|
||||
}
|
||||
case Kind.Type: {
|
||||
invariant(v instanceof Type,
|
||||
`Failed to write Type. Invalid type: ${describeType(v)}`);
|
||||
this.writeTypeAsValue(v, pkg);
|
||||
break;
|
||||
}
|
||||
case Kind.Unresolved: {
|
||||
if (t.hasPackageRef) {
|
||||
pkg = lookupPackage(t.packageRef);
|
||||
}
|
||||
pkg = notNull(pkg);
|
||||
this.writeUnresolvedKindValue(v, t, pkg);
|
||||
`Failed to write Type. Invalid type: ${describeTypeOfValue(v)}`);
|
||||
this.writeTypeAsValue(v, []);
|
||||
break;
|
||||
}
|
||||
case Kind.Value: {
|
||||
const valueType = getTypeOfValue(v);
|
||||
this.writeTypeAsTag(valueType);
|
||||
this.writeValue(v, valueType, pkg);
|
||||
this.writeTypeAsTag(valueType, []);
|
||||
this.writeValue(v, valueType);
|
||||
break;
|
||||
}
|
||||
case Kind.Struct:
|
||||
this.writeStruct(v);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Not implemented: ${t.kind} ${v}`);
|
||||
}
|
||||
}
|
||||
|
||||
writeTypeAsValue(t: Type, pkg: ?Package) {
|
||||
writeTypeAsValue(t: Type, backRefs: Type[]) {
|
||||
const k = t.kind;
|
||||
this.writeKind(k);
|
||||
switch (k) {
|
||||
case Kind.List:
|
||||
case Kind.Map:
|
||||
case Kind.Ref:
|
||||
case Kind.Set: {
|
||||
this.writeKind(k);
|
||||
const w2 = new JsonArrayWriter(this._ds);
|
||||
t.elemTypes.forEach(elem => w2.writeTypeAsValue(elem, pkg));
|
||||
t.elemTypes.forEach(elem => w2.writeTypeAsValue(elem, backRefs));
|
||||
this.write(w2.array);
|
||||
break;
|
||||
}
|
||||
case Kind.Struct: {
|
||||
const desc = t.desc;
|
||||
invariant(desc instanceof StructDesc);
|
||||
this.write(t.name);
|
||||
const fieldWriter = new JsonArrayWriter(this._ds);
|
||||
desc.fields.forEach(field => {
|
||||
fieldWriter.write(field.name);
|
||||
fieldWriter.writeTypeAsValue(field.t, pkg);
|
||||
fieldWriter.write(field.optional);
|
||||
});
|
||||
this.write(fieldWriter.array);
|
||||
const choiceWriter = new JsonArrayWriter(this._ds);
|
||||
desc.union.forEach(choice => {
|
||||
choiceWriter.write(choice.name);
|
||||
choiceWriter.writeTypeAsValue(choice.t, pkg);
|
||||
choiceWriter.write(choice.optional);
|
||||
});
|
||||
this.write(choiceWriter.array);
|
||||
break;
|
||||
}
|
||||
case Kind.Unresolved: {
|
||||
const pkgRef = t.packageRef;
|
||||
// When we compute the ref for the package the first time it does not have a ref.
|
||||
const isCurrentPackage = pkg && pkg.ref && pkg.ref.equals(pkgRef);
|
||||
if (isCurrentPackage) {
|
||||
this.writeRef(emptyRef);
|
||||
} else {
|
||||
this.writeRef(pkgRef);
|
||||
}
|
||||
const ordinal = t.ordinal;
|
||||
this.writeInt(ordinal);
|
||||
if (ordinal === -1) {
|
||||
this.write(t.namespace);
|
||||
this.write(t.name);
|
||||
}
|
||||
|
||||
if (!isCurrentPackage) {
|
||||
const pkg = lookupPackage(pkgRef);
|
||||
if (this._ds && pkg) {
|
||||
this._ds.writeValue(pkg);
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
invariant(isPrimitiveKind(k));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
writeUnresolvedKindValue(v: any, t: Type, pkg: Package) {
|
||||
const typeDef = pkg.types[t.ordinal];
|
||||
switch (typeDef.kind) {
|
||||
case Kind.Struct: {
|
||||
invariant(v instanceof Struct,
|
||||
`Failed to write ${typeDef.describe()}. Invalid type: ${describeType(v)}`);
|
||||
this.writeStruct(v, t, typeDef, pkg);
|
||||
this.writeStructType(t, backRefs);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw new Error('Not reached');
|
||||
invariant(isPrimitiveKind(k));
|
||||
this.writeKind(k);
|
||||
}
|
||||
}
|
||||
|
||||
writeStructType(t: Type, backRefs: Type[]) {
|
||||
const i = backRefs.indexOf(t);
|
||||
if (i !== -1) {
|
||||
this.writeBackRef(backRefs.length - i - 1);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
backRefs = backRefs.concat(t); // we want a new array here.
|
||||
const desc = t.desc;
|
||||
invariant(desc instanceof StructDesc);
|
||||
this.writeKind(t.kind);
|
||||
this.write(t.name);
|
||||
const fieldWriter = new JsonArrayWriter(this._ds);
|
||||
desc.fields.forEach(field => {
|
||||
fieldWriter.write(field.name);
|
||||
fieldWriter.writeTypeAsTag(field.t, backRefs);
|
||||
fieldWriter.write(field.optional);
|
||||
});
|
||||
this.write(fieldWriter.array);
|
||||
const choiceWriter = new JsonArrayWriter(this._ds);
|
||||
desc.union.forEach(choice => {
|
||||
choiceWriter.write(choice.name);
|
||||
choiceWriter.writeTypeAsTag(choice.t, backRefs);
|
||||
choiceWriter.write(choice.optional);
|
||||
});
|
||||
this.write(choiceWriter.array);
|
||||
}
|
||||
|
||||
writeBackRef(i: number) {
|
||||
this.write(Kind.BackRef);
|
||||
this.writeUint8(i);
|
||||
}
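
So a back reference costs two entries in the output array: the BackRef kind followed by its distance, written with writeUint8. A tiny arithmetic sketch of the distance convention used by writeStructType above (string stand-ins for Type objects, for illustration only):

const backRefs = ['outer', 'middle', 'inner'];              // structs currently being written
const i = backRefs.length - backRefs.indexOf('outer') - 1;  // 2: walk two scopes back
// writeBackRef(i) then appends Kind.BackRef followed by 2 to the array.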
|
||||
|
||||
writeBlob(seq: BlobLeafSequence) {
|
||||
// HACK: The items property is declared as Array<T> in Flow.
|
||||
invariant(seq.items instanceof Uint8Array);
|
||||
this.write(encodeBase64(seq.items));
|
||||
}
|
||||
|
||||
writeStruct(s: Struct, type: Type, typeDef: Type, pkg: Package) {
|
||||
writeStruct(s: Struct) {
|
||||
const mirror = new StructMirror(s);
|
||||
mirror.forEachField(field => {
|
||||
if (field.optional) {
|
||||
if (field.present) {
|
||||
this.writeBoolean(true);
|
||||
this.writeValue(field.value, field.type, pkg);
|
||||
this.writeValue(field.value, field.type);
|
||||
} else {
|
||||
this.writeBoolean(false);
|
||||
}
|
||||
} else {
|
||||
invariant(field.present);
|
||||
this.writeValue(field.value, field.type, pkg);
|
||||
this.writeValue(field.value, field.type);
|
||||
}
|
||||
});
|
||||
|
||||
if (mirror.hasUnion) {
|
||||
const {unionField} = mirror;
|
||||
this.writeInt(mirror.unionIndex);
|
||||
this.writeValue(unionField.value, unionField.type, pkg);
|
||||
this.writeValue(unionField.value, unionField.type);
|
||||
}
|
||||
}
|
||||
}
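
A minimal end-to-end sketch of the writer as the tests exercise it (imports as in encode-test.js; JsonArrayWriter is the exported class above, and the exact output array is inferred from the expectations in that test file):

import MemoryStore from './memory-store.js';
import DataStore from './data-store.js';
import {makeListType, stringType} from './type.js';
import {ListLeafSequence, NomsList} from './list.js';

const ds = new DataStore(new MemoryStore());
const w = new JsonArrayWriter(ds);
const tr = makeListType(stringType);
const v = new NomsList(tr, new ListLeafSequence(ds, tr, ['a', 'b']));
w.writeTopLevel(tr, v);
// w.array now holds the type tag followed by the encoded value,
// e.g. [Kind.List, Kind.String, false, ['a', 'b']].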
|
||||
|
||||
function getTypeOfValue(v: any): Type {
|
||||
switch (typeof v) {
|
||||
case 'object':
|
||||
return v.type;
|
||||
case 'string':
|
||||
return stringType;
|
||||
case 'boolean':
|
||||
return boolType;
|
||||
case 'number':
|
||||
throw new Error('Encoding untagged numbers is not supported');
|
||||
default:
|
||||
throw new Error('Unknown type');
|
||||
}
|
||||
}
|
||||
|
||||
function encodeEmbeddedNomsValue(v: any, t: Type, ds: ?DataStore): Chunk {
|
||||
if (v instanceof Package) {
|
||||
// if (v.dependencies.length > 0) {
|
||||
// throw new Error('Not implemented');
|
||||
// }
|
||||
}
|
||||
|
||||
const w = new JsonArrayWriter(ds);
|
||||
w.writeTopLevel(t, v);
|
||||
return Chunk.fromString(typedTag + JSON.stringify(w.array));
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
// @flow
|
||||
|
||||
import {
|
||||
CompoundDesc,
|
||||
Field,
|
||||
makeCompoundType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
PrimitiveDesc,
|
||||
StructDesc,
|
||||
Type,
|
||||
UnresolvedDesc,
|
||||
} from './type.js';
|
||||
import {Package} from './package.js';
|
||||
import {invariant, notNull} from './assert.js';
|
||||
|
||||
/**
|
||||
* Goes through the type and returns a new type where all the empty refs have been replaced by
|
||||
* the package ref.
|
||||
*/
|
||||
export default function fixupType(t: Type, pkg: ?Package): Type {
|
||||
const desc = t.desc;
|
||||
|
||||
if (desc instanceof CompoundDesc) {
|
||||
let changed = false;
|
||||
const newTypes = desc.elemTypes.map(t => {
|
||||
const newT = fixupType(t, pkg);
|
||||
if (newT === t) {
|
||||
return t;
|
||||
}
|
||||
changed = true;
|
||||
return newT;
|
||||
});
|
||||
|
||||
return changed ? makeCompoundType(t.kind, ...newTypes) : t;
|
||||
}
|
||||
|
||||
if (desc instanceof UnresolvedDesc) {
|
||||
if (t.hasPackageRef) {
|
||||
return t;
|
||||
}
|
||||
|
||||
return makeType(notNull(pkg).ref, t.ordinal);
|
||||
}
|
||||
|
||||
if (desc instanceof StructDesc) {
|
||||
let changed = false;
|
||||
const fixField = f => {
|
||||
const newT = fixupType(f.t, pkg);
|
||||
if (newT === t) {
|
||||
return f;
|
||||
}
|
||||
changed = true;
|
||||
return new Field(f.name, newT, f.optional);
|
||||
};
|
||||
|
||||
const newFields = desc.fields.map(fixField);
|
||||
const newUnion = desc.union.map(fixField);
|
||||
return changed ? makeStructType(t.name, newFields, newUnion) : t;
|
||||
}
|
||||
|
||||
invariant(desc instanceof PrimitiveDesc);
|
||||
return t;
|
||||
}
|
||||
@@ -10,9 +10,9 @@ import {newStruct} from './struct.js';
|
||||
import {calcSplices} from './edit-distance.js';
|
||||
import {
|
||||
Field,
|
||||
makeCompoundType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
makeRefType,
|
||||
makeListType,
|
||||
numberType,
|
||||
stringType,
|
||||
valueType,
|
||||
@@ -20,9 +20,7 @@ import {
|
||||
import {flatten, flattenParallel} from './test-util.js';
|
||||
import {IndexedMetaSequence, MetaTuple} from './meta-sequence.js';
|
||||
import {invariant} from './assert.js';
|
||||
import {Kind} from './noms-kind.js';
|
||||
import {ListLeafSequence, newList, NomsList} from './list.js';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import type {Type} from './type.js';
|
||||
|
||||
const testListSize = 5000;
|
||||
@@ -52,7 +50,7 @@ suite('BuildList', () => {
|
||||
|
||||
test('LONG: set of n numbers, length', async () => {
|
||||
const nums = firstNNumbers(testListSize);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const s = await newList(nums, tr);
|
||||
assert.strictEqual(s.ref.toString(), listOfNRef);
|
||||
assert.strictEqual(testListSize, s.length);
|
||||
@@ -61,30 +59,26 @@ suite('BuildList', () => {
|
||||
test('LONG: list of ref, set of n numbers, length', async () => {
|
||||
const nums = firstNNumbers(testListSize);
|
||||
|
||||
const structTypeDef = makeStructType('num', [
|
||||
const structType = makeStructType('num', [
|
||||
new Field('n', numberType, false),
|
||||
], []);
|
||||
const pkg = new Package([structTypeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const structType = makeType(pkgRef, 0);
|
||||
const refOfStructType = makeCompoundType(Kind.Ref, structType);
|
||||
const tr = makeCompoundType(Kind.List, refOfStructType);
|
||||
const refOfStructType = makeRefType(structType);
|
||||
const tr = makeListType(refOfStructType);
|
||||
|
||||
const refs = nums.map(n => {
|
||||
const s = newStruct(structType, structTypeDef, {n});
|
||||
const s = newStruct(structType, {n});
|
||||
const r = s.ref;
|
||||
return new RefValue(r, refOfStructType);
|
||||
});
|
||||
|
||||
const s = await newList(refs, tr);
|
||||
assert.strictEqual(s.ref.toString(), 'sha1-f2e6c3aae6e8ac4c3776830e2d8141fc527c55c5');
|
||||
assert.strictEqual(s.ref.toString(), 'sha1-f2db6a2f8026ee6e12bb584cd38c813604774a69');
|
||||
assert.strictEqual(testListSize, s.length);
|
||||
});
|
||||
|
||||
test('LONG: toJS', async () => {
|
||||
const nums = firstNNumbers(5000);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const s = await newList(nums, tr);
|
||||
assert.strictEqual(s.ref.toString(), listOfNRef);
|
||||
assert.strictEqual(testListSize, s.length);
|
||||
@@ -104,7 +98,7 @@ suite('BuildList', () => {
|
||||
|
||||
test('LONG: insert', async () => {
|
||||
const nums = firstNNumbers(testListSize - 10);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
let s = await newList(nums, tr);
|
||||
|
||||
for (let i = testListSize - 10; i < testListSize; i++) {
|
||||
@@ -116,7 +110,7 @@ suite('BuildList', () => {
|
||||
|
||||
test('LONG: append', async () => {
|
||||
const nums = firstNNumbers(testListSize - 10);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
let s = await newList(nums, tr);
|
||||
|
||||
for (let i = testListSize - 10; i < testListSize; i++) {
|
||||
@@ -128,7 +122,7 @@ suite('BuildList', () => {
|
||||
|
||||
test('LONG: remove', async () => {
|
||||
const nums = firstNNumbers(testListSize + 10);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
let s = await newList(nums, tr);
|
||||
|
||||
let count = 10;
|
||||
@@ -141,7 +135,7 @@ suite('BuildList', () => {
|
||||
|
||||
test('LONG: splice', async () => {
|
||||
const nums = firstNNumbers(testListSize);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
let s = await newList(nums, tr);
|
||||
|
||||
const splice500At = async (idx: number) => {
|
||||
@@ -162,7 +156,7 @@ suite('BuildList', () => {
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
const nums = firstNNumbers(testListSize);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const s = await newList(nums, tr);
|
||||
const r = ds.writeValue(s).targetRef;
|
||||
const s2 = await ds.readValue(r);
|
||||
@@ -181,7 +175,7 @@ suite('ListLeafSequence', () => {
|
||||
test('isEmpty', () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.List, stringType);
|
||||
const tr = makeListType(stringType);
|
||||
const newList = items => new NomsList(tr, new ListLeafSequence(ds, tr, items));
|
||||
assert.isTrue(newList([]).isEmpty());
|
||||
assert.isFalse(newList(['z', 'x', 'a', 'b']).isEmpty());
|
||||
@@ -190,7 +184,7 @@ suite('ListLeafSequence', () => {
|
||||
test('get', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.List, stringType);
|
||||
const tr = makeListType(stringType);
|
||||
const l = new NomsList(tr, new ListLeafSequence(ds, tr, ['z', 'x', 'a', 'b']));
|
||||
assert.strictEqual('z', await l.get(0));
|
||||
assert.strictEqual('x', await l.get(1));
|
||||
@@ -201,7 +195,7 @@ suite('ListLeafSequence', () => {
|
||||
test('forEach', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const l = new NomsList(tr, new ListLeafSequence(ds, tr, [4, 2, 10, 16]));
|
||||
|
||||
const values = [];
|
||||
@@ -212,7 +206,7 @@ suite('ListLeafSequence', () => {
|
||||
test('iterator', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
|
||||
const test = async items => {
|
||||
const l = new NomsList(tr, new ListLeafSequence(ds, tr, items));
|
||||
@@ -228,7 +222,7 @@ suite('ListLeafSequence', () => {
|
||||
test('iteratorAt', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
|
||||
const test = async items => {
|
||||
const l = new NomsList(tr, new ListLeafSequence(ds, tr, items));
|
||||
@@ -247,7 +241,7 @@ suite('ListLeafSequence', () => {
|
||||
function testChunks(elemType: Type) {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.List, elemType);
|
||||
const tr = makeListType(elemType);
|
||||
const r1 = ds.writeValue('x');
|
||||
const r2 = ds.writeValue('a');
|
||||
const r3 = ds.writeValue('b');
|
||||
@@ -271,7 +265,7 @@ suite('CompoundList', () => {
|
||||
function build(): NomsList {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.List, stringType);
|
||||
const tr = makeListType(stringType);
|
||||
const l1 = new NomsList(tr, new ListLeafSequence(ds, tr, ['a', 'b']));
|
||||
const r1 = ds.writeValue(l1).targetRef;
|
||||
const l2 = new NomsList(tr, new ListLeafSequence(ds, tr, ['e', 'f']));
|
||||
@@ -406,7 +400,7 @@ suite('Diff List', () => {
|
||||
|
||||
const directDiff = calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]);
|
||||
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const l1 = await newList(nums1, tr);
|
||||
const l2 = await newList(nums2, tr);
|
||||
|
||||
@@ -425,7 +419,7 @@ suite('Diff List', () => {
|
||||
|
||||
const directDiff = calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]);
|
||||
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const l1 = await newList(nums1, tr);
|
||||
const l2 = await newList(nums2, tr);
|
||||
|
||||
@@ -444,7 +438,7 @@ suite('Diff List', () => {
|
||||
}
|
||||
|
||||
const directDiff = calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const l1 = await newList(nums1, tr);
|
||||
const l2 = await newList(nums2, tr);
|
||||
|
||||
@@ -457,7 +451,7 @@ suite('Diff List', () => {
|
||||
const nums2 = firstNNumbers(5000);
|
||||
|
||||
const directDiff = calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]);
|
||||
const tr = makeCompoundType(Kind.List, numberType);
|
||||
const tr = makeListType(numberType);
|
||||
const l1 = await newList(nums1, tr);
|
||||
const l2 = await newList(nums2, tr);
|
||||
|
||||
|
||||
@@ -11,9 +11,8 @@ import {
|
||||
boolType,
|
||||
Field,
|
||||
makeMapType,
|
||||
makeCompoundType,
|
||||
makeRefType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
numberType,
|
||||
stringType,
|
||||
valueType,
|
||||
@@ -21,10 +20,8 @@ import {
|
||||
import {flatten, flattenParallel} from './test-util.js';
|
||||
import {invariant} from './assert.js';
|
||||
import Chunk from './chunk.js';
|
||||
import {Kind} from './noms-kind.js';
|
||||
import {MapLeafSequence, newMap, NomsMap} from './map.js';
|
||||
import {MetaTuple, OrderedMetaSequence} from './meta-sequence.js';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import Ref from './ref.js';
|
||||
import type {Type} from './type.js';
|
||||
|
||||
@@ -80,7 +77,7 @@ suite('BuildMap', () => {
|
||||
kvs.push(i, i + 1);
|
||||
}
|
||||
|
||||
const tr = makeCompoundType(Kind.Map, numberType, numberType);
|
||||
const tr = makeMapType(numberType, numberType);
|
||||
const m = await newMap(kvs, tr);
|
||||
assert.strictEqual(m.ref.toString(), mapOfNRef);
|
||||
|
||||
@@ -102,24 +99,20 @@ suite('BuildMap', () => {
|
||||
kvs.push(i, i + 1);
|
||||
}
|
||||
|
||||
const structTypeDef = makeStructType('num', [
|
||||
const structType = makeStructType('num', [
|
||||
new Field('n', numberType, false),
|
||||
], []);
|
||||
const pkg = new Package([structTypeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const structType = makeType(pkgRef, 0);
|
||||
const refOfStructType = makeCompoundType(Kind.Ref, structType);
|
||||
const tr = makeCompoundType(Kind.Map, refOfStructType, refOfStructType);
|
||||
const refOfStructType = makeRefType(structType);
|
||||
const tr = makeMapType(refOfStructType, refOfStructType);
|
||||
|
||||
const kvRefs = kvs.map(n => {
|
||||
const s = newStruct(structType, structTypeDef, {n});
|
||||
const s = newStruct(structType, {n});
|
||||
const r = s.ref;
|
||||
return new RefValue(r, refOfStructType);
|
||||
});
|
||||
|
||||
const m = await newMap(kvRefs, tr);
|
||||
assert.strictEqual(m.ref.toString(), 'sha1-f440a024602218f2373063281d233f69e449a64a');
|
||||
assert.strictEqual(m.ref.toString(), 'sha1-d10e24f082d6f8270ef5809168f2f2466b126dc6');
|
||||
});
|
||||
|
||||
test('LONG: set', async () => {
|
||||
@@ -128,7 +121,7 @@ suite('BuildMap', () => {
|
||||
kvs.push(i, i + 1);
|
||||
}
|
||||
|
||||
const tr = makeCompoundType(Kind.Map, numberType, numberType);
|
||||
const tr = makeMapType(numberType, numberType);
|
||||
let m = await newMap(kvs, tr);
|
||||
for (let i = testMapSize - 10; i < testMapSize; i++) {
|
||||
m = await m.set(i, i + 1);
|
||||
@@ -144,7 +137,7 @@ suite('BuildMap', () => {
|
||||
kvs.push(i, i + 1);
|
||||
}
|
||||
|
||||
const tr = makeCompoundType(Kind.Map, numberType, numberType);
|
||||
const tr = makeMapType(numberType, numberType);
|
||||
let m = await newMap(kvs, tr);
|
||||
for (let i = 0; i < testMapSize; i++) {
|
||||
m = await m.set(i, i + 1);
|
||||
@@ -160,7 +153,7 @@ suite('BuildMap', () => {
|
||||
kvs.push(i, i + 1);
|
||||
}
|
||||
|
||||
const tr = makeCompoundType(Kind.Map, numberType, numberType);
|
||||
const tr = makeMapType(numberType, numberType);
|
||||
let m = await newMap(kvs, tr);
|
||||
for (let i = testMapSize; i < testMapSize + 10; i++) {
|
||||
m = await m.remove(i);
|
||||
@@ -179,7 +172,7 @@ suite('BuildMap', () => {
|
||||
kvs.push(i, i + 1);
|
||||
}
|
||||
|
||||
const tr = makeCompoundType(Kind.Map, numberType, numberType);
|
||||
const tr = makeMapType(numberType, numberType);
|
||||
const m = await newMap(kvs, tr);
|
||||
|
||||
const r = ds.writeValue(m).targetRef;
|
||||
@@ -203,7 +196,7 @@ suite('MapLeaf', () => {
|
||||
test('isEmpty/size', () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Map, stringType, boolType);
|
||||
const tr = makeMapType(stringType, boolType);
|
||||
const newMap = entries => new NomsMap(tr, new MapLeafSequence(ds, tr, entries));
|
||||
let m = newMap([]);
|
||||
assert.isTrue(m.isEmpty());
|
||||
@@ -216,7 +209,7 @@ suite('MapLeaf', () => {
|
||||
test('has', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Map, stringType, boolType);
|
||||
const tr = makeMapType(stringType, boolType);
|
||||
const m = new NomsMap(tr,
|
||||
new MapLeafSequence(ds, tr, [{key: 'a', value: false}, {key:'k', value:true}]));
|
||||
assert.isTrue(await m.has('a'));
|
||||
@@ -228,7 +221,7 @@ suite('MapLeaf', () => {
|
||||
test('first/last/get', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Map, stringType, numberType);
|
||||
const tr = makeMapType(stringType, numberType);
|
||||
const m = new NomsMap(tr,
|
||||
new MapLeafSequence(ds, tr, [{key: 'a', value: 4}, {key:'k', value:8}]));
|
||||
|
||||
@@ -244,7 +237,7 @@ suite('MapLeaf', () => {
|
||||
test('forEach', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Map, stringType, numberType);
|
||||
const tr = makeMapType(stringType, numberType);
|
||||
const m = new NomsMap(tr,
|
||||
new MapLeafSequence(ds, tr, [{key: 'a', value: 4}, {key:'k', value:8}]));
|
||||
|
||||
@@ -256,7 +249,7 @@ suite('MapLeaf', () => {
|
||||
test('iterator', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Map, stringType, numberType);
|
||||
const tr = makeMapType(stringType, numberType);
|
||||
|
||||
const test = async entries => {
|
||||
const m = new NomsMap(tr, new MapLeafSequence(ds, tr, entries));
|
||||
@@ -272,7 +265,7 @@ suite('MapLeaf', () => {
|
||||
test('LONG: iteratorAt', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Map, stringType, numberType);
|
||||
const tr = makeMapType(stringType, numberType);
|
||||
const build = entries => new NomsMap(tr, new MapLeafSequence(ds, tr, entries));
|
||||
|
||||
assert.deepEqual([], await flatten(build([]).iteratorAt('a')));
|
||||
@@ -297,7 +290,7 @@ suite('MapLeaf', () => {
|
||||
function testChunks(keyType: Type, valueType: Type) {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Map, keyType, valueType);
|
||||
const tr = makeMapType(keyType, valueType);
|
||||
const r1 = ds.writeValue('x');
|
||||
const r2 = ds.writeValue(true);
|
||||
const r3 = ds.writeValue('b');
|
||||
@@ -322,7 +315,7 @@ suite('MapLeaf', () => {
|
||||
|
||||
suite('CompoundMap', () => {
|
||||
function build(ds: DataStore): Array<NomsMap> {
|
||||
const tr = makeCompoundType(Kind.Map, stringType,
|
||||
const tr = makeMapType(stringType,
|
||||
boolType);
|
||||
const l1 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: false},
|
||||
{key:'b', value:false}]));
|
||||
@@ -492,7 +485,7 @@ suite('CompoundMap', () => {
|
||||
async function testRandomDiff(mapSize: number, inM1: number, inM2: number, inBoth: number) {
|
||||
invariant(inM1 + inM2 + inBoth <= 1);
|
||||
|
||||
const tr = makeCompoundType(Kind.Map, numberType, stringType);
|
||||
const tr = makeMapType(numberType, stringType);
|
||||
const kv1 = [], kv2 = [], added = [], removed = [], modified = [];
|
||||
|
||||
// Randomly populate kv1/kv2 which will be the contents of m1/m2 respectively, and record which
|
||||
|
||||
@@ -14,8 +14,7 @@ export const Kind: {
|
||||
Set: NomsKind,
|
||||
Struct: NomsKind,
|
||||
Type: NomsKind,
|
||||
Unresolved: NomsKind,
|
||||
Package: NomsKind,
|
||||
BackRef: NomsKind,
|
||||
} = {
|
||||
Bool: 0,
|
||||
Number: 1,
|
||||
@@ -28,8 +27,7 @@ export const Kind: {
|
||||
Set: 8,
|
||||
Struct: 9,
|
||||
Type: 10,
|
||||
Unresolved: 11,
|
||||
Package: 12,
|
||||
BackRef: 11,
|
||||
};
|
||||
|
||||
const kindToStringMap: { [key: number]: string } = Object.create(null);
|
||||
@@ -44,10 +42,9 @@ kindToStringMap[Kind.Ref] = 'Ref';
|
||||
kindToStringMap[Kind.Set] = 'Set';
|
||||
kindToStringMap[Kind.Struct] = 'Struct';
|
||||
kindToStringMap[Kind.Type] = 'Type';
|
||||
kindToStringMap[Kind.Unresolved] = 'Unresolved';
|
||||
kindToStringMap[Kind.Package] = 'Package';
|
||||
kindToStringMap[Kind.BackRef] = 'BackRef';
|
||||
|
||||
export function kindToString(kind: number): string {
|
||||
export function kindToString(kind: NomsKind): string {
|
||||
return kindToStringMap[kind];
|
||||
}
|
||||
|
||||
@@ -59,7 +56,6 @@ export function isPrimitiveKind(k: NomsKind): boolean {
|
||||
case Kind.Blob:
|
||||
case Kind.Value:
|
||||
case Kind.Type:
|
||||
case Kind.Package:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
|
||||
@@ -20,7 +20,6 @@ export {
|
||||
export {encodeNomsValue} from './encode.js';
|
||||
export {invariant, notNull} from './assert.js';
|
||||
export {isPrimitiveKind, Kind, kindToString} from './noms-kind.js';
|
||||
export {lookupPackage, Package, readPackage, registerPackage} from './package.js';
|
||||
export {newList, ListLeafSequence, NomsList} from './list.js';
|
||||
export {newMap, NomsMap, MapLeafSequence} from './map.js';
|
||||
export {newSet, NomsSet, SetLeafSequence} from './set.js';
|
||||
@@ -38,17 +37,14 @@ export {
|
||||
makeRefType,
|
||||
makeSetType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
makeUnresolvedType,
|
||||
numberType,
|
||||
packageType,
|
||||
PrimitiveDesc,
|
||||
stringType,
|
||||
StructDesc,
|
||||
Type,
|
||||
typeType,
|
||||
UnresolvedDesc,
|
||||
valueType,
|
||||
getTypeOfValue,
|
||||
} from './type.js';
|
||||
export {equals, less} from './compare.js';
|
||||
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
// @flow
|
||||
|
||||
import type Ref from './ref.js';
|
||||
import RefValue from './ref-value.js';
|
||||
import {invariant} from './assert.js';
|
||||
import type {Type} from './type.js';
|
||||
import {packageType, packageRefType} from './type.js';
|
||||
import {ValueBase} from './value.js';
|
||||
import type DataStore from './data-store.js';
|
||||
import {getRef} from './get-ref.js';
|
||||
import fixupType from './fixup-type.js';
|
||||
|
||||
export class Package extends ValueBase {
|
||||
types: Array<Type>;
|
||||
dependencies: Array<Ref>;
|
||||
_ref: Ref;
|
||||
|
||||
constructor(types: Array<Type>, dependencies: Array<Ref>) {
|
||||
super();
|
||||
this.types = types;
|
||||
this.dependencies = dependencies;
|
||||
this._ref = getRef(this, this.type);
|
||||
this.types = types.map(t => fixupType(t, this));
|
||||
}
|
||||
|
||||
get ref(): Ref {
|
||||
return this._ref;
|
||||
}
|
||||
|
||||
get type(): Type {
|
||||
return packageType;
|
||||
}
|
||||
|
||||
get chunks(): Array<RefValue> {
|
||||
const chunks = [];
|
||||
for (let i = 0; i < this.types.length; i++) {
|
||||
chunks.push(...this.types[i].chunks);
|
||||
}
|
||||
for (let i = 0; i < this.dependencies.length; i++) {
|
||||
chunks.push(new RefValue(this.dependencies[i], packageRefType));
|
||||
}
|
||||
return chunks;
|
||||
}
|
||||
}
|
||||
|
||||
const packageRegistry: {[key: string]: Package} = Object.create(null);
|
||||
|
||||
export function lookupPackage(r: Ref): ?Package {
|
||||
return packageRegistry[r.toString()];
|
||||
}
|
||||
|
||||
export function registerPackage(p: Package) {
|
||||
packageRegistry[p.ref.toString()] = p;
|
||||
}
|
||||
|
||||
const pendingPackages: {[key: string]: Promise<Package>} = Object.create(null);
|
||||
|
||||
export function readPackage(r: Ref, ds: DataStore): Promise<Package> {
|
||||
const refStr = r.toString();
|
||||
const p = pendingPackages[refStr];
|
||||
if (p) {
|
||||
return p;
|
||||
}
|
||||
|
||||
return pendingPackages[refStr] = ds.readValue(r).then(p => {
|
||||
invariant(p instanceof Package);
|
||||
registerPackage(p);
|
||||
delete pendingPackages[refStr];
|
||||
return p;
|
||||
});
|
||||
}
|
||||
@@ -11,21 +11,18 @@ import {newStruct} from './struct.js';
|
||||
import {
|
||||
boolType,
|
||||
Field,
|
||||
makeCompoundType,
|
||||
makeSetType,
|
||||
makeRefType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
numberType,
|
||||
stringType,
|
||||
valueType,
|
||||
} from './type.js';
|
||||
import {flatten, flattenParallel} from './test-util.js';
|
||||
import {invariant, notNull} from './assert.js';
|
||||
import {Kind} from './noms-kind.js';
|
||||
import {MetaTuple, OrderedMetaSequence} from './meta-sequence.js';
|
||||
import {newSet, NomsSet, SetLeafSequence} from './set.js';
|
||||
import {OrderedSequence} from './ordered-sequence.js';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import Ref from './ref.js';
|
||||
import type {Type} from './type.js';
|
||||
|
||||
@@ -59,7 +56,7 @@ function firstNNumbers(n: number): Array<number> {
|
||||
suite('BuildSet', () => {
|
||||
test('unique keys - strings', async () => {
|
||||
const strs = ['hello', 'world', 'hello'];
|
||||
const tr = makeCompoundType(Kind.Set, stringType);
|
||||
const tr = makeSetType(stringType);
|
||||
const s = await newSet(strs, tr);
|
||||
assert.strictEqual(2, s.size);
|
||||
assert.isTrue(await s.has('hello'));
|
||||
@@ -81,7 +78,7 @@ suite('BuildSet', () => {
|
||||
|
||||
test('LONG: set of n numbers', async () => {
|
||||
const nums = firstNNumbers(testSetSize);
|
||||
const tr = makeCompoundType(Kind.Set, numberType);
|
||||
const tr = makeSetType(numberType);
|
||||
const s = await newSet(nums, tr);
|
||||
assert.strictEqual(s.ref.toString(), setOfNRef);
|
||||
|
||||
@@ -94,30 +91,26 @@ suite('BuildSet', () => {
|
||||
test('LONG: set of ref, set of n numbers', async () => {
|
||||
const nums = firstNNumbers(testSetSize);
|
||||
|
||||
const structTypeDef = makeStructType('num', [
|
||||
const structType = makeStructType('num', [
|
||||
new Field('n', numberType, false),
|
||||
], []);
|
||||
const pkg = new Package([structTypeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const structType = makeType(pkgRef, 0);
|
||||
const refOfStructType = makeCompoundType(Kind.Ref, structType);
|
||||
const tr = makeCompoundType(Kind.Set, refOfStructType);
|
||||
const refOfStructType = makeRefType(structType);
|
||||
const tr = makeSetType(refOfStructType);
|
||||
|
||||
const refs = nums.map(n => {
|
||||
const s = newStruct(structType, structTypeDef, {n});
|
||||
const s = newStruct(structType, {n});
|
||||
const r = s.ref;
|
||||
return new RefValue(r, refOfStructType);
|
||||
});
|
||||
|
||||
const s = await newSet(refs, tr);
|
||||
assert.strictEqual(s.ref.toString(), 'sha1-4c2b0e159ae443ec99299b6ea266d9a408f7987d');
|
||||
assert.strictEqual(s.ref.toString(), 'sha1-b06811c4abafef5e2198c04a81d3a300a709fd02');
|
||||
});
|
||||
|
||||
|
||||
test('LONG: insert', async () => {
|
||||
const nums = firstNNumbers(testSetSize - 10);
|
||||
const tr = makeCompoundType(Kind.Set, numberType);
|
||||
const tr = makeSetType(numberType);
|
||||
let s = await newSet(nums, tr);
|
||||
for (let i = testSetSize - 10; i < testSetSize; i++) {
|
||||
s = await s.insert(i);
|
||||
@@ -129,7 +122,7 @@ suite('BuildSet', () => {
|
||||
|
||||
test('LONG: remove', async () => {
|
||||
const nums = firstNNumbers(testSetSize + 10);
|
||||
const tr = makeCompoundType(Kind.Set, numberType);
|
||||
const tr = makeSetType(numberType);
|
||||
let s = await newSet(nums, tr);
|
||||
let count = 10;
|
||||
while (count-- > 0) {
|
||||
@@ -145,7 +138,7 @@ suite('BuildSet', () => {
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
const nums = firstNNumbers(testSetSize);
|
||||
const tr = makeCompoundType(Kind.Set, numberType);
|
||||
const tr = makeSetType(numberType);
|
||||
const s = await newSet(nums, tr);
|
||||
const r = ds.writeValue(s).targetRef;
|
||||
const s2 = await ds.readValue(r);
|
||||
@@ -168,7 +161,7 @@ suite('SetLeaf', () => {
|
||||
test('isEmpty/size', () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, stringType);
|
||||
const tr = makeSetType(stringType);
|
||||
const newSet = items => new NomsSet(tr, new SetLeafSequence(ds, tr, items));
|
||||
let s = newSet([]);
|
||||
assert.isTrue(s.isEmpty());
|
||||
@@ -181,7 +174,7 @@ suite('SetLeaf', () => {
|
||||
test('first/last/has', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, stringType);
|
||||
const tr = makeSetType(stringType);
|
||||
const s = new NomsSet(tr, new SetLeafSequence(ds, tr, ['a', 'k']));
|
||||
|
||||
assert.strictEqual('a', await s.first());
|
||||
@@ -196,7 +189,7 @@ suite('SetLeaf', () => {
|
||||
test('forEach', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, stringType);
|
||||
const tr = makeSetType(stringType);
|
||||
const m = new NomsSet(tr, new SetLeafSequence(ds, tr, ['a', 'b']));
|
||||
|
||||
const values = [];
|
||||
@@ -207,7 +200,7 @@ suite('SetLeaf', () => {
|
||||
test('iterator', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, stringType);
|
||||
const tr = makeSetType(stringType);
|
||||
|
||||
const test = async items => {
|
||||
const m = new NomsSet(tr, new SetLeafSequence(ds, tr, items));
|
||||
@@ -223,7 +216,7 @@ suite('SetLeaf', () => {
|
||||
test('LONG: iteratorAt', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, stringType);
|
||||
const tr = makeSetType(stringType);
|
||||
const build = items => new NomsSet(tr, new SetLeafSequence(ds, tr, items));
|
||||
|
||||
assert.deepEqual([], await flatten(build([]).iteratorAt('a')));
|
||||
@@ -242,7 +235,7 @@ suite('SetLeaf', () => {
|
||||
function testChunks(elemType: Type) {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, elemType);
|
||||
const tr = makeSetType(elemType);
|
||||
const r1 = ds.writeValue('x');
|
||||
const r2 = ds.writeValue('a');
|
||||
const r3 = ds.writeValue('b');
|
||||
@@ -264,7 +257,7 @@ suite('SetLeaf', () => {
|
||||
|
||||
suite('CompoundSet', () => {
|
||||
function build(ds: DataStore, values: Array<string>): NomsSet {
|
||||
const tr = makeCompoundType(Kind.Set, stringType);
|
||||
const tr = makeSetType(stringType);
|
||||
assert.isTrue(values.length > 1 && Math.log2(values.length) % 1 === 0);
|
||||
|
||||
let tuples = [];
|
||||
@@ -498,7 +491,7 @@ suite('CompoundSet', () => {
|
||||
test('iterator at 0', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, numberType);
|
||||
const tr = makeSetType(numberType);
|
||||
|
||||
const test = async (expected, items) => {
|
||||
const set = new NomsSet(tr, new SetLeafSequence(ds, tr, items));
|
||||
@@ -522,7 +515,7 @@ suite('CompoundSet', () => {
|
||||
test('LONG: canned set diff', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const tr = makeCompoundType(Kind.Set, numberType);
|
||||
const tr = makeSetType(numberType);
|
||||
const s1 = await newSet(
|
||||
firstNNumbers(testSetSize), tr).then(s => ds.readValue(ds.writeValue(s).targetRef));
|
||||
|
||||
@@ -549,7 +542,7 @@ suite('CompoundSet', () => {
|
||||
async function testRandomDiff(setSize: number, inS1: number, inS2: number): Promise<void> {
|
||||
invariant(inS1 + inS2 <= 1);
|
||||
|
||||
const tr = makeCompoundType(Kind.Set, numberType);
|
||||
const tr = makeSetType(numberType);
|
||||
const nums1 = [], nums2 = [], added = [], removed = [];
|
||||
|
||||
// Randomly populate nums1/nums2 which will be the contents of s1/s2 respectively, and record
|
||||
|
||||
@@ -6,33 +6,27 @@ import {assert} from 'chai';
|
||||
import {
|
||||
boolType,
|
||||
Field,
|
||||
makeCompoundType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
numberType,
|
||||
makeStructType,
|
||||
makeRefType,
|
||||
stringType,
|
||||
valueType,
|
||||
StructDesc,
|
||||
} from './type.js';
|
||||
import {Kind} from './noms-kind.js';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import {suite, test} from 'mocha';
|
||||
import DataStore from './data-store.js';
|
||||
import {emptyRef} from './ref.js';
|
||||
import {invariant} from './assert.js';
|
||||
|
||||
suite('Struct', () => {
|
||||
test('equals', () => {
|
||||
const typeDef = makeStructType('S1', [
|
||||
const type = makeStructType('S1', [
|
||||
new Field('x', boolType, false),
|
||||
new Field('o', stringType, true),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const data1 = {x: true};
|
||||
const s1 = newStruct(type, typeDef, data1);
|
||||
const s2 = newStruct(type, typeDef, data1);
|
||||
const s1 = newStruct(type, data1);
|
||||
const s2 = newStruct(type, data1);
|
||||
|
||||
assert.isTrue(s1.equals(s2));
|
||||
});
|
||||
@@ -42,139 +36,102 @@ suite('Struct', () => {
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
const bt = boolType;
|
||||
const refOfBoolType = makeCompoundType(Kind.Ref, bt);
|
||||
const typeDef = makeStructType('S1', [
|
||||
const refOfBoolType = makeRefType(bt);
|
||||
const type = makeStructType('S1', [
|
||||
new Field('r', refOfBoolType, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const b = true;
|
||||
const r = ds.writeValue(b);
|
||||
const s1 = newStruct(type, typeDef, {r: r});
|
||||
assert.strictEqual(2, s1.chunks.length);
|
||||
assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef));
|
||||
assert.isTrue(r.equals(s1.chunks[1]));
|
||||
const s1 = newStruct(type, {r: r});
|
||||
assert.strictEqual(1, s1.chunks.length);
|
||||
assert.isTrue(r.equals(s1.chunks[0]));
|
||||
});
|
||||
|
||||
test('chunks optional', () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
const bt = boolType;
|
||||
const refOfBoolType = makeCompoundType(Kind.Ref, bt);
|
||||
const typeDef = makeStructType('S1', [
|
||||
const refOfBoolType = makeRefType(boolType);
|
||||
const type = makeStructType('S1', [
|
||||
new Field('r', refOfBoolType, true),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
const s1 = newStruct(type, {});
|
||||
|
||||
const s1 = newStruct(type, typeDef, {});
|
||||
|
||||
assert.strictEqual(1, s1.chunks.length);
|
||||
assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef));
|
||||
assert.strictEqual(0, s1.chunks.length);
|
||||
|
||||
const b = true;
|
||||
const r = ds.writeValue(b);
|
||||
const s2 = newStruct(type, typeDef, {r: r});
|
||||
assert.strictEqual(2, s2.chunks.length);
|
||||
assert.isTrue(pkgRef.equals(s2.chunks[0].targetRef));
|
||||
assert.isTrue(r.equals(s2.chunks[1]));
|
||||
const s2 = newStruct(type, {r: r});
|
||||
assert.strictEqual(1, s2.chunks.length);
|
||||
assert.isTrue(r.equals(s2.chunks[0]));
|
||||
});
|
||||
|
||||
test('chunks union', () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
const bt = boolType;
|
||||
const refOfBoolType = makeCompoundType(Kind.Ref, bt);
|
||||
const typeDef = makeStructType('S1', [], [
|
||||
const refOfBoolType = makeRefType(boolType);
|
||||
const type = makeStructType('S1', [], [
|
||||
new Field('r', refOfBoolType, false),
|
||||
new Field('s', stringType, false),
|
||||
]);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const s1 = newStruct(type, typeDef, {s: 'hi'});
|
||||
assert.strictEqual(1, s1.chunks.length);
|
||||
assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef));
|
||||
const s1 = newStruct(type, {s: 'hi'});
|
||||
assert.strictEqual(0, s1.chunks.length);
|
||||
|
||||
const b = true;
|
||||
const r = ds.writeValue(b);
|
||||
const s2 = newStruct(type, typeDef, {r: r});
|
||||
assert.strictEqual(2, s2.chunks.length);
|
||||
assert.isTrue(pkgRef.equals(s2.chunks[0].targetRef));
|
||||
assert.isTrue(r.equals(s2.chunks[1]));
|
||||
const s2 = newStruct(type, {r});
|
||||
assert.strictEqual(1, s2.chunks.length);
|
||||
assert.isTrue(r.equals(s2.chunks[0]));
|
||||
});
|
||||
|
||||
test('new', () => {
|
||||
const typeDef = makeStructType('S2', [
|
||||
const type = makeStructType('S2', [
|
||||
new Field('b', boolType, false),
|
||||
new Field('o', stringType, true),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const s1 = newStruct(type, typeDef, {b: true});
|
||||
const s1 = newStruct(type, {b: true});
|
||||
assert.strictEqual(true, s1.b);
|
||||
assert.strictEqual(s1.o, undefined);
|
||||
|
||||
const s2 = newStruct(type, typeDef, {b: false, o: 'hi'});
|
||||
const s2 = newStruct(type, {b: false, o: 'hi'});
|
||||
assert.strictEqual(false, s2.b);
|
||||
assert.strictEqual('hi', s2.o);
|
||||
|
||||
assert.throws(() => {
|
||||
newStruct(type, typeDef, {o: 'hi'}); // missing required field
|
||||
newStruct(type, {o: 'hi'}); // missing required field
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
newStruct(type, typeDef, {x: 'hi'}); // unknown field
|
||||
newStruct(type, {x: 'hi'}); // unknown field
|
||||
});
|
||||
|
||||
const s3 = newStruct(type, typeDef, {b: true, o: undefined});
|
||||
const s3 = newStruct(type, {b: true, o: undefined});
|
||||
assert.isTrue(s1.equals(s3));
|
||||
});
|
||||
|
||||
test('new union', () => {
|
||||
const typeDef = makeStructType('S3', [], [
|
||||
const type = makeStructType('S3', [], [
|
||||
new Field('b', boolType, false),
|
||||
new Field('o', stringType, false),
|
||||
]);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const s1 = newStruct(type, typeDef, {b: true});
|
||||
const s1 = newStruct(type, {b: true});
|
||||
assert.strictEqual(true, s1.b);
|
||||
assert.strictEqual(s1.o, undefined);
|
||||
});
|
||||
|
||||
test('struct set', () => {
|
||||
const typeDef = makeStructType('S3', [
|
||||
const type = makeStructType('S3', [
|
||||
new Field('b', boolType, false),
|
||||
new Field('o', stringType, true),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const s1 = newStruct(type, typeDef, {b: true});
|
||||
const s1 = newStruct(type, {b: true});
|
||||
const s2 = s1.setB(false);
|
||||
|
||||
// TODO: assert throws on set wrong type
|
||||
@@ -195,17 +152,12 @@ suite('Struct', () => {
|
||||
});
|
||||
|
||||
test('struct set union', () => {
|
||||
const typeDef = makeStructType('S3', [], [
|
||||
const type = makeStructType('S3', [], [
|
||||
new Field('b', boolType, false),
|
||||
new Field('s', stringType, false),
|
||||
]);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const s1 = newStruct(type, typeDef, {b: true});
|
||||
const s1 = newStruct(type, {b: true});
|
||||
const m1 = new StructMirror(s1);
|
||||
assert.strictEqual(0, m1.unionIndex);
|
||||
assert.strictEqual(true, m1.unionValue);
|
||||
@@ -223,50 +175,28 @@ suite('Struct', () => {
|
||||
});
|
||||
|
||||
test('type assertion on construct', () => {
|
||||
const typeDef = makeStructType('S3', [
|
||||
new Field('b', boolType, false),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
assert.throws(() => {
|
||||
newStruct(type, type, {b: true});
|
||||
});
|
||||
|
||||
assert.throws(() => {
|
||||
newStruct(typeDef, typeDef, {b: true});
|
||||
newStruct(boolType, {b: true});
|
||||
});
|
||||
});
|
||||
|
||||
test('named union', () => {
|
||||
|
||||
const pkg = new Package([
|
||||
makeStructType('StructWithUnions', [
|
||||
new Field('a', makeType(emptyRef, 1), false),
|
||||
new Field('d', makeType(emptyRef, 2), false),
|
||||
], []),
|
||||
makeStructType('', [], [
|
||||
new Field('b', numberType, false),
|
||||
new Field('c', stringType, false),
|
||||
]),
|
||||
makeStructType('', [], [
|
||||
new Field('e', numberType, false),
|
||||
new Field('f', stringType, false),
|
||||
]),
|
||||
const typeA = makeStructType('', [], [
|
||||
new Field('b', numberType, false),
|
||||
new Field('c', stringType, false),
|
||||
]);
|
||||
const typeD = makeStructType('', [], [
|
||||
new Field('e', numberType, false),
|
||||
new Field('f', stringType, false),
|
||||
]);
|
||||
const type = makeStructType('StructWithUnions', [
|
||||
new Field('a', typeA, false),
|
||||
new Field('d', typeD, false),
|
||||
], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const [typeDef, typeDefA, typeDefD] = pkg.types;
|
||||
const type = makeType(pkgRef, 0);
|
||||
const typeA = makeType(pkgRef, 1);
|
||||
const typeD = makeType(pkgRef, 2);
|
||||
|
||||
const StructWithUnions = createStructClass(type, typeDef);
|
||||
const A = createStructClass(typeA, typeDefA);
|
||||
const D = createStructClass(typeD, typeDefD);
|
||||
const StructWithUnions = createStructClass(type);
|
||||
const A = createStructClass(typeA);
|
||||
const D = createStructClass(typeD);
|
||||
|
||||
const s = new StructWithUnions({
|
||||
a: new A({b: 1}),
|
||||
@@ -291,24 +221,38 @@ suite('Struct', () => {
|
||||
});
|
||||
|
||||
test('type validation', () => {
|
||||
const typeDef = makeStructType('S1', [
|
||||
const type = makeStructType('S1', [
|
||||
new Field('x', boolType, false),
|
||||
new Field('o', stringType, true),
|
||||
], []);
|
||||
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
assert.throws(() => {
|
||||
newStruct(type, typeDef, {x: 1});
|
||||
newStruct(type, {x: 1});
|
||||
});
|
||||
assert.throws(() => {
|
||||
newStruct(type, typeDef, {o: 1});
|
||||
newStruct(type, {o: 1});
|
||||
});
|
||||
|
||||
newStruct(type, typeDef, {x: true, o: undefined});
|
||||
newStruct(type, typeDef, {x: true});
|
||||
newStruct(type, {x: true, o: undefined});
|
||||
newStruct(type, {x: true});
|
||||
});
|
||||
|
||||
test('type validation cyclic', () => {
|
||||
const type = makeStructType('S', [
|
||||
new Field('b', boolType, false),
|
||||
new Field('o', valueType /* placeholder */, true),
|
||||
], []);
|
||||
invariant(type.desc instanceof StructDesc);
|
||||
type.desc.fields[1].t = type;
|
||||
|
||||
newStruct(type, {b: true});
|
||||
newStruct(type, {b: true, o: newStruct(type, {b: false})});
|
||||
|
||||
assert.throws(() => {
|
||||
newStruct(type, {b: 1});
|
||||
});
|
||||
assert.throws(() => {
|
||||
newStruct(type, {b: true, o: 1});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -36,22 +36,18 @@ type StructData = {[key: string]: ?valueOrPrimitive};
|
||||
export default class Struct extends ValueBase {
|
||||
_data: StructData;
|
||||
_type: Type;
|
||||
_typeDef: Type;
|
||||
|
||||
|
||||
constructor(type: Type, typeDef: Type, data: StructData) {
|
||||
constructor(type: Type, data: StructData) {
|
||||
super();
|
||||
|
||||
invariant(type.kind === Kind.Unresolved);
|
||||
invariant(typeDef.kind === Kind.Struct);
|
||||
invariant(type.kind === Kind.Struct);
|
||||
|
||||
// TODO: Even in dev mode there are paths where the passed in data has already been validated.
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
validate(typeDef, data);
|
||||
validate(type, data);
|
||||
}
|
||||
|
||||
this._type = type;
|
||||
this._typeDef = typeDef;
|
||||
this._data = data;
|
||||
}
|
||||
|
||||
@@ -62,7 +58,6 @@ export default class Struct extends ValueBase {
|
||||
get chunks(): Array<RefValue> {
|
||||
const mirror = new StructMirror(this);
|
||||
const chunks = [];
|
||||
chunks.push(...this.type.chunks);
|
||||
|
||||
const add = field => {
|
||||
if (!field.present) {
|
||||
@@ -83,9 +78,9 @@ export default class Struct extends ValueBase {
|
||||
}
|
||||
}
|
||||
|
||||
function validate(typeDef: Type, data: StructData): void {
|
||||
function validate(type: Type, data: StructData): void {
|
||||
// TODO: Validate field values match field types.
|
||||
const {desc} = typeDef;
|
||||
const {desc} = type;
|
||||
invariant(desc instanceof StructDesc);
|
||||
const {fields} = desc;
|
||||
let dataCount = Object.keys(data).length;
|
||||
@@ -159,18 +154,16 @@ type FieldCallback = (f: StructFieldMirror) => void;
|
||||
|
||||
export class StructMirror<T: Struct> {
|
||||
_data: StructData;
|
||||
_type :Type;
|
||||
typeDef: Type;
|
||||
type :Type;
|
||||
|
||||
constructor(s: Struct) {
|
||||
this._data = s._data;
|
||||
this._type = s.type;
|
||||
this.typeDef = s._typeDef;
|
||||
this.type = s.type;
|
||||
}
|
||||
|
||||
get desc(): StructDesc {
|
||||
invariant(this.typeDef.desc instanceof StructDesc);
|
||||
return this.typeDef.desc;
|
||||
invariant(this.type.desc instanceof StructDesc);
|
||||
return this.type.desc;
|
||||
}
|
||||
|
||||
forEachField(cb: FieldCallback) {
|
||||
@@ -195,7 +188,7 @@ export class StructMirror<T: Struct> {
|
||||
}
|
||||
|
||||
get name(): string {
|
||||
return this.typeDef.name;
|
||||
return this.type.name;
|
||||
}
|
||||
|
||||
get(name: string): ?valueOrPrimitive {
|
||||
@@ -208,7 +201,7 @@ export class StructMirror<T: Struct> {
|
||||
|
||||
set(name: string, value: ?valueOrPrimitive): T {
|
||||
const data = addProperty(this, name, value);
|
||||
return newStruct(this._type, this.typeDef, data);
|
||||
return newStruct(this.type, data);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -218,7 +211,7 @@ function setterName(name) {
|
||||
return `set${name[0].toUpperCase()}${name.slice(1)}`;
|
||||
}
|
||||
|
||||
export function createStructClass<T: Struct>(type: Type, typeDef: Type): Class<T> {
|
||||
export function createStructClass<T: Struct>(type: Type): Class<T> {
|
||||
const k = type.ref.toString();
|
||||
if (cache[k]) {
|
||||
return cache[k];
|
||||
@@ -226,11 +219,11 @@ export function createStructClass<T: Struct>(type: Type, typeDef: Type): Class<T
|
||||
|
||||
const c: any = class extends Struct {
|
||||
constructor(data: StructData) {
|
||||
super(type, typeDef, data);
|
||||
super(type, data);
|
||||
}
|
||||
};
|
||||
|
||||
const {desc} = typeDef;
|
||||
const {desc} = type;
|
||||
invariant(desc instanceof StructDesc);
|
||||
|
||||
for (const fields of [desc.fields, desc.union]) {
|
||||
@@ -316,7 +309,7 @@ function addProperty(mirror: StructMirror, name: string, value: ?valueOrPrimitiv
|
||||
return data;
|
||||
}
|
||||
|
||||
export function newStruct<T: Struct>(type: Type, typeDef: Type, data: StructData): T {
|
||||
const c = createStructClass(type, typeDef);
|
||||
export function newStruct<T: Struct>(type: Type, data: StructData): T {
|
||||
const c = createStructClass(type);
|
||||
return new c(data);
|
||||
}
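The `typeDef` parameter is gone throughout: a struct type built with `makeStructType` is passed directly to `newStruct`/`createStructClass`. A minimal sketch of the resulting API, pieced together from the updated tests above (illustrative only, not part of this diff):

```js
// Sketch only -- mirrors the updated struct tests, not code added by this commit.
import {Field, boolType, stringType, makeStructType} from './type.js';
import {newStruct} from './struct.js';

// The struct type is self-contained: no Package, registerPackage or makeType(pkgRef, ordinal).
const type = makeStructType('S1', [
  new Field('x', boolType, false),
  new Field('o', stringType, true), // optional field
], []);

const s1 = newStruct(type, {x: true}); // the separate typeDef argument no longer exists
const s2 = s1.setX(false);             // generated setter, as exercised by the 'struct set' test
console.log(s1.equals(s2));            // false
```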
@@ -1,20 +1,17 @@
|
||||
// @flow
|
||||
|
||||
import MemoryStore from './memory-store.js';
|
||||
import {default as Ref, emptyRef} from './ref.js';
|
||||
import {assert} from 'chai';
|
||||
import {
|
||||
boolType,
|
||||
Field,
|
||||
makeCompoundType,
|
||||
makeMapType,
|
||||
makeSetType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
numberType,
|
||||
stringType,
|
||||
typeType,
|
||||
} from './type.js';
|
||||
import {Kind} from './noms-kind.js';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import {suite, test} from 'mocha';
|
||||
import DataStore from './data-store.js';
|
||||
|
||||
@@ -23,8 +20,8 @@ suite('Type', () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
const mapType = makeCompoundType(Kind.Map, stringType, numberType);
|
||||
const setType = makeCompoundType(Kind.Set, stringType);
|
||||
const mapType = makeMapType(stringType, numberType);
|
||||
const setType = makeSetType(stringType);
|
||||
const mahType = makeStructType('MahStruct', [
|
||||
new Field('Field1', stringType, false),
|
||||
new Field('Field2', boolType, true),
|
||||
@@ -34,80 +31,18 @@ suite('Type', () => {
|
||||
new Field('StringField', stringType, false),
|
||||
]);
|
||||
|
||||
const pkgRef = Ref.parse('sha1-0123456789abcdef0123456789abcdef01234567');
|
||||
const trType = makeType(pkgRef, 42);
|
||||
|
||||
const otherRef = ds.writeValue(otherType).targetRef;
|
||||
const mapRef = ds.writeValue(mapType).targetRef;
|
||||
const setRef = ds.writeValue(setType).targetRef;
|
||||
const mahRef = ds.writeValue(mahType).targetRef;
|
||||
const trRef = ds.writeValue(trType).targetRef;
|
||||
|
||||
assert.isTrue(otherType.equals(await ds.readValue(otherRef)));
|
||||
assert.isTrue(mapType.equals(await ds.readValue(mapRef)));
|
||||
assert.isTrue(setType.equals(await ds.readValue(setRef)));
|
||||
assert.isTrue(mahType.equals(await ds.readValue(mahRef)));
|
||||
assert.isTrue(trType.equals(await ds.readValue(trRef)));
|
||||
});
|
||||
|
||||
test('typeRef describe', async () => {
|
||||
const mapType = makeCompoundType(Kind.Map, stringType, numberType);
|
||||
const setType = makeCompoundType(Kind.Set, stringType);
|
||||
|
||||
assert.strictEqual('Bool', boolType.describe());
|
||||
assert.strictEqual('Number', numberType.describe());
|
||||
assert.strictEqual('String', stringType.describe());
|
||||
assert.strictEqual('Map<String, Number>', mapType.describe());
|
||||
assert.strictEqual('Set<String>', setType.describe());
|
||||
|
||||
const mahType = makeStructType('MahStruct',[
|
||||
new Field('Field1', stringType, false),
|
||||
new Field('Field2', boolType, true),
|
||||
], [
|
||||
]);
|
||||
assert.strictEqual('struct MahStruct {\n Field1: String\n Field2: optional Bool\n}',
|
||||
mahType.describe());
|
||||
|
||||
const otherType = makeStructType('MahOtherStruct',[
|
||||
new Field('Field1', stringType, false),
|
||||
new Field('Field2', boolType, true),
|
||||
], [
|
||||
new Field('NumberField', numberType, false),
|
||||
new Field('StringField', stringType, false),
|
||||
]);
|
||||
|
||||
const exp = `struct MahOtherStruct {\n Field1: String\n Field2: optional Bool\n union {\n NumberField: Number\n StringField: String\n }\n}`; // eslint-disable-line max-len
|
||||
assert.strictEqual(exp, otherType.describe());
|
||||
});
|
||||
|
||||
test('type with pkgRef', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
|
||||
const pkg = new Package([numberType], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
|
||||
const unresolvedType = makeType(pkgRef, 42);
|
||||
const unresolvedRef = ds.writeValue(unresolvedType).targetRef;
|
||||
|
||||
const v = await ds.readValue(unresolvedRef);
|
||||
assert.isNotNull(v);
|
||||
assert.isTrue(pkgRef.equals(v.chunks[0].targetRef));
|
||||
const p = await ds.readValue(pkgRef);
|
||||
assert.isNotNull(p);
|
||||
});
|
||||
|
||||
test('type Type', () => {
|
||||
assert.isTrue(boolType.type.equals(typeType));
|
||||
});
|
||||
|
||||
test('empty package ref', async () => {
|
||||
const ms = new MemoryStore();
|
||||
const ds = new DataStore(ms);
|
||||
const v = makeType(emptyRef, -1);
|
||||
const r = ds.writeValue(v).targetRef;
|
||||
const v2 = await ds.readValue(r);
|
||||
assert.isTrue(v.equals(v2));
|
||||
});
|
||||
});
|
||||
js/src/type.js
@@ -1,18 +1,16 @@
|
||||
// @flow
|
||||
|
||||
import type Ref from './ref.js';
|
||||
import {emptyRef} from './ref.js';
|
||||
import RefValue from './ref-value.js';
|
||||
import type {NomsKind} from './noms-kind.js';
|
||||
import {invariant} from './assert.js';
|
||||
import {isPrimitiveKind, Kind, kindToString} from './noms-kind.js';
|
||||
import {isPrimitiveKind, Kind} from './noms-kind.js';
|
||||
import {ValueBase} from './value.js';
|
||||
|
||||
export type TypeDesc = {
|
||||
export interface TypeDesc {
|
||||
kind: NomsKind;
|
||||
equals: (other: TypeDesc) => boolean;
|
||||
describe: () => string;
|
||||
};
|
||||
equals(other: TypeDesc): boolean;
|
||||
}
|
||||
|
||||
export class PrimitiveDesc {
|
||||
kind: NomsKind;
|
||||
@@ -24,37 +22,6 @@ export class PrimitiveDesc {
|
||||
equals(other: TypeDesc): boolean {
|
||||
return other instanceof PrimitiveDesc && other.kind === this.kind;
|
||||
}
|
||||
|
||||
describe(): string {
|
||||
return kindToString(this.kind);
|
||||
}
|
||||
}
|
||||
|
||||
export class UnresolvedDesc {
|
||||
_pkgRef: Ref;
|
||||
_ordinal: number;
|
||||
|
||||
constructor(pkgRef: Ref, ordinal: number) {
|
||||
this._pkgRef = pkgRef;
|
||||
this._ordinal = ordinal;
|
||||
}
|
||||
|
||||
get kind(): NomsKind {
|
||||
return Kind.Unresolved;
|
||||
}
|
||||
|
||||
equals(other: TypeDesc): boolean {
|
||||
if (other.kind !== this.kind) {
|
||||
return false;
|
||||
}
|
||||
invariant(other instanceof UnresolvedDesc);
|
||||
|
||||
return other._pkgRef.equals(this._pkgRef) && other._ordinal === this._ordinal;
|
||||
}
|
||||
|
||||
describe(): string {
|
||||
return `Unresolved(${this._pkgRef.toString()}, ${this._ordinal})`;
|
||||
}
|
||||
}
|
||||
|
||||
export class CompoundDesc {
|
||||
@@ -84,18 +51,15 @@ export class CompoundDesc {
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
describe(): string {
|
||||
const elemsDesc = this.elemTypes.map(e => e.describe()).join(', ');
|
||||
return `${kindToString(this.kind)}<${elemsDesc}>`;
|
||||
}
|
||||
}
|
||||
|
||||
export class StructDesc {
|
||||
name: string;
|
||||
fields: Array<Field>;
|
||||
union: Array<Field>;
|
||||
|
||||
constructor(fields: Array<Field>, union: Array<Field>) {
|
||||
constructor(name: string, fields: Array<Field>, union: Array<Field>) {
|
||||
this.name = name;
|
||||
this.fields = fields;
|
||||
this.union = union;
|
||||
}
|
||||
@@ -105,6 +69,10 @@ export class StructDesc {
|
||||
}
|
||||
|
||||
equals(other: TypeDesc): boolean {
|
||||
if (this === other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (other.kind !== this.kind) {
|
||||
return false;
|
||||
}
|
||||
@@ -128,24 +96,6 @@ export class StructDesc {
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
describe(): string {
|
||||
let out = '{\n';
|
||||
this.fields.forEach(f => {
|
||||
const optional = f.optional ? 'optional ' : '';
|
||||
out += ` ${f.name}: ${optional}${f.t.describe()}\n`;
|
||||
});
|
||||
|
||||
if (this.union.length > 0) {
|
||||
out += ' union {\n';
|
||||
this.union.forEach(f => {
|
||||
out += ` ${f.name}: ${f.t.describe()}\n`;
|
||||
});
|
||||
out += ' }\n';
|
||||
}
|
||||
|
||||
return out + '}';
|
||||
}
|
||||
}
|
||||
|
||||
export class Field {
|
||||
@@ -166,15 +116,11 @@ export class Field {
|
||||
}
|
||||
|
||||
export class Type extends ValueBase {
|
||||
_namespace: string;
|
||||
_name: string;
|
||||
_desc: TypeDesc;
|
||||
_ref: ?Ref;
|
||||
|
||||
constructor(name: string = '', namespace: string = '', desc: TypeDesc) {
|
||||
constructor(desc: TypeDesc) {
|
||||
super();
|
||||
this._name = name;
|
||||
this._namespace = namespace;
|
||||
this._desc = desc;
|
||||
}
|
||||
|
||||
@@ -183,21 +129,7 @@ export class Type extends ValueBase {
|
||||
}
|
||||
|
||||
get chunks(): Array<RefValue> {
|
||||
const chunks = [];
|
||||
if (this.unresolved) {
|
||||
if (this.hasPackageRef) {
|
||||
chunks.push(new RefValue(this.packageRef, packageType));
|
||||
}
|
||||
|
||||
return chunks;
|
||||
}
|
||||
|
||||
const desc = this._desc;
|
||||
if (desc instanceof CompoundDesc) {
|
||||
desc.elemTypes.forEach(et => chunks.push(...et.chunks()));
|
||||
}
|
||||
|
||||
return chunks;
|
||||
return [];
|
||||
}
|
||||
|
||||
get kind(): NomsKind {
|
||||
@@ -219,98 +151,23 @@ export class Type extends ValueBase {
|
||||
return this._desc;
|
||||
}
|
||||
|
||||
get unresolved(): boolean {
|
||||
return this._desc instanceof UnresolvedDesc;
|
||||
}
|
||||
|
||||
get hasPackageRef(): boolean {
|
||||
return this.unresolved && !this.packageRef.isEmpty();
|
||||
}
|
||||
|
||||
get packageRef(): Ref {
|
||||
invariant(this._desc instanceof UnresolvedDesc);
|
||||
return this._desc._pkgRef;
|
||||
}
|
||||
|
||||
get ordinal(): number {
|
||||
invariant(this._desc instanceof UnresolvedDesc);
|
||||
return this._desc._ordinal;
|
||||
}
|
||||
|
||||
get name(): string {
|
||||
return this._name;
|
||||
}
|
||||
|
||||
get namespace(): string {
|
||||
return this._namespace;
|
||||
}
|
||||
|
||||
get namespacedName(): string {
|
||||
let out = '';
|
||||
|
||||
if (this._namespace !== '') {
|
||||
out = this._namespace + '.';
|
||||
}
|
||||
if (this._name !== '') {
|
||||
out += this._name;
|
||||
}
|
||||
|
||||
return out;
|
||||
invariant(this._desc instanceof StructDesc);
|
||||
return this._desc.name;
|
||||
}
|
||||
|
||||
get elemTypes(): Array<Type> {
|
||||
invariant(this._desc instanceof CompoundDesc);
|
||||
return this._desc.elemTypes;
|
||||
}
|
||||
|
||||
describe(): string {
|
||||
let out = '';
|
||||
switch (this.kind) {
|
||||
case Kind.Struct:
|
||||
out += 'struct ';
|
||||
break;
|
||||
}
|
||||
if (this.name) {
|
||||
invariant(!this.namespace || (this.namespace && this.name));
|
||||
if (this.namespace) {
|
||||
out += this.namespace + '.';
|
||||
}
|
||||
if (this.name) {
|
||||
out += this.name;
|
||||
}
|
||||
out += ' ';
|
||||
|
||||
if (this.unresolved) {
|
||||
return out;
|
||||
}
|
||||
}
|
||||
|
||||
out += this.desc.describe();
|
||||
return out;
|
||||
}
|
||||
}
|
||||
|
||||
function buildType(n: string, desc: TypeDesc): Type {
|
||||
if (isPrimitiveKind(desc.kind)) {
|
||||
return new Type(n, '', desc);
|
||||
}
|
||||
|
||||
switch (desc.kind) {
|
||||
case Kind.List:
|
||||
case Kind.Ref:
|
||||
case Kind.Set:
|
||||
case Kind.Map:
|
||||
case Kind.Struct:
|
||||
case Kind.Unresolved:
|
||||
return new Type(n, '', desc);
|
||||
|
||||
default:
|
||||
throw new Error('Unrecognized Kind: ' + desc.kind);
|
||||
}
|
||||
function buildType(desc: TypeDesc): Type {
|
||||
return new Type(desc);
|
||||
}
|
||||
|
||||
function makePrimitiveType(k: NomsKind): Type {
|
||||
return buildType('', new PrimitiveDesc(k));
|
||||
return buildType(new PrimitiveDesc(k));
|
||||
}
|
||||
|
||||
export function makeCompoundType(k: NomsKind, ...elemTypes: Array<Type>): Type {
|
||||
@@ -322,35 +179,27 @@ export function makeCompoundType(k: NomsKind, ...elemTypes: Array<Type>): Type {
|
||||
invariant(elemTypes.length === 2, 'Map requires 2 element types');
|
||||
}
|
||||
|
||||
return buildType('', new CompoundDesc(k, elemTypes));
|
||||
return buildType(new CompoundDesc(k, elemTypes));
|
||||
}
|
||||
|
||||
export function makeListType(elemType: Type): Type {
|
||||
return buildType('', new CompoundDesc(Kind.List, [elemType]));
|
||||
return buildType(new CompoundDesc(Kind.List, [elemType]));
|
||||
}
|
||||
|
||||
export function makeSetType(elemType: Type): Type {
|
||||
return buildType('', new CompoundDesc(Kind.Set, [elemType]));
|
||||
return buildType(new CompoundDesc(Kind.Set, [elemType]));
|
||||
}
|
||||
|
||||
export function makeMapType(keyType: Type, valueType: Type): Type {
|
||||
return buildType('', new CompoundDesc(Kind.Map, [keyType, valueType]));
|
||||
return buildType(new CompoundDesc(Kind.Map, [keyType, valueType]));
|
||||
}
|
||||
|
||||
export function makeRefType(elemType: Type): Type {
|
||||
return buildType('', new CompoundDesc(Kind.Ref, [elemType]));
|
||||
return buildType(new CompoundDesc(Kind.Ref, [elemType]));
|
||||
}
|
||||
|
||||
export function makeStructType(name: string, fields: Array<Field>, choices: Array<Field>): Type {
|
||||
return buildType(name, new StructDesc(fields, choices));
|
||||
}
|
||||
|
||||
export function makeType(pkgRef: Ref, ordinal: number): Type {
|
||||
return new Type('', '', new UnresolvedDesc(pkgRef, ordinal));
|
||||
}
|
||||
|
||||
export function makeUnresolvedType(namespace: string, name: string): Type {
|
||||
return new Type(name, namespace, new UnresolvedDesc(emptyRef, -1));
|
||||
return buildType(new StructDesc(name, fields, choices));
|
||||
}
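Taken together with the helpers above, types that previously required `makeCompoundType(Kind.…, …)` or a registered `Package` plus `makeType(pkgRef, ordinal)` are now built directly. A short sketch based on the updated BuildSet tests earlier in this diff (illustrative only):

```js
// Sketch: the construction pattern used by the updated set tests.
import {Field, numberType, makeStructType, makeRefType, makeSetType} from './type.js';

const structType = makeStructType('num', [
  new Field('n', numberType, false),
], []);
const refOfStructType = makeRefType(structType); // was makeCompoundType(Kind.Ref, structType)
const setType = makeSetType(refOfStructType);    // was makeCompoundType(Kind.Set, refOfStructType)
```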
export const boolType = makePrimitiveType(Kind.Bool);
|
||||
@@ -358,15 +207,13 @@ export const numberType = makePrimitiveType(Kind.Number);
|
||||
export const stringType = makePrimitiveType(Kind.String);
|
||||
export const blobType = makePrimitiveType(Kind.Blob);
|
||||
export const typeType = makePrimitiveType(Kind.Type);
|
||||
export const packageType = makePrimitiveType(Kind.Package);
|
||||
export const valueType = makePrimitiveType(Kind.Value);
|
||||
|
||||
export const refOfValueType = makeCompoundType(Kind.Ref, valueType);
|
||||
export const listOfValueType = makeCompoundType(Kind.List, valueType);
|
||||
export const setOfValueType = makeCompoundType(Kind.Set, valueType);
|
||||
export const mapOfValueType = makeCompoundType(Kind.Map, valueType, valueType);
|
||||
|
||||
export const packageRefType = makeCompoundType(Kind.Ref, packageType);
|
||||
|
||||
/**
|
||||
* Gives the existing primitive Type value for a NomsKind.
|
||||
*/
|
||||
@@ -383,11 +230,44 @@ export function getPrimitiveType(k: NomsKind): Type {
|
||||
return blobType;
|
||||
case Kind.Type:
|
||||
return typeType;
|
||||
case Kind.Package:
|
||||
return packageType;
|
||||
case Kind.Value:
|
||||
return valueType;
|
||||
default:
|
||||
invariant(false, 'not reachable');
|
||||
}
|
||||
}
|
||||
|
||||
export function getTypeOfValue(v: any): Type {
|
||||
switch (typeof v) {
|
||||
case 'object':
|
||||
return v.type;
|
||||
case 'string':
|
||||
return stringType;
|
||||
case 'boolean':
|
||||
return boolType;
|
||||
case 'number':
|
||||
throw new Error('Encoding untagged numbers is not supported');
|
||||
default:
|
||||
throw new Error('Unknown type');
|
||||
}
|
||||
}
|
||||
|
||||
export class BackRefDesc {
|
||||
value: number;
|
||||
|
||||
constructor(value: number) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
get kind(): NomsKind {
|
||||
return Kind.BackRef;
|
||||
}
|
||||
|
||||
equals(other: TypeDesc): boolean {
|
||||
return other instanceof BackRefDesc && other.value === this.value;
|
||||
}
|
||||
|
||||
describe(): string {
|
||||
return `BackRef<${this.value}>`;
|
||||
}
|
||||
}
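With packages gone, cyclic struct types are expressed by patching the type in place, and back references (`BackRefDesc` above) are what break the cycle. A minimal sketch following the 'type validation cyclic' test earlier in this diff (illustrative only):

```js
// Sketch: mirrors the 'type validation cyclic' test in struct-test.js.
import {Field, StructDesc, boolType, valueType, makeStructType} from './type.js';
import {invariant} from './assert.js';

const type = makeStructType('S', [
  new Field('b', boolType, false),
  new Field('o', valueType /* placeholder */, true),
], []);
invariant(type.desc instanceof StructDesc);
type.desc.fields[1].t = type; // replace the placeholder so the struct type refers to itself
```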
@@ -7,7 +7,6 @@ import {newMap} from './map.js';
|
||||
import {newSet} from './set.js';
|
||||
import {newStruct} from './struct.js';
|
||||
import {suite, test} from 'mocha';
|
||||
import {Package, registerPackage} from './package.js';
|
||||
import validateType from './validate-type.js';
|
||||
import type {Type} from './type.js';
|
||||
import {
|
||||
@@ -19,10 +18,8 @@ import {
|
||||
makeMapType,
|
||||
makeSetType,
|
||||
makeStructType,
|
||||
makeType,
|
||||
mapOfValueType,
|
||||
numberType,
|
||||
packageType,
|
||||
setOfValueType,
|
||||
stringType,
|
||||
typeType,
|
||||
@@ -41,7 +38,6 @@ suite('validate type', () => {
|
||||
stringType,
|
||||
blobType,
|
||||
typeType,
|
||||
packageType,
|
||||
valueType,
|
||||
];
|
||||
|
||||
@@ -120,24 +116,12 @@ suite('validate type', () => {
|
||||
validateType(valueType, t);
|
||||
});
|
||||
|
||||
test('package', async () => {
|
||||
const pkg = new Package([], []);
|
||||
validateType(packageType, pkg);
|
||||
assertAll(packageType, pkg);
|
||||
|
||||
validateType(valueType, pkg);
|
||||
});
|
||||
|
||||
test('struct', async () => {
|
||||
const typeDef = makeStructType('Struct', [
|
||||
const type = makeStructType('Struct', [
|
||||
new Field('x', boolType, false),
|
||||
], []);
|
||||
const pkg = new Package([typeDef], []);
|
||||
registerPackage(pkg);
|
||||
const pkgRef = pkg.ref;
|
||||
const type = makeType(pkgRef, 0);
|
||||
|
||||
const v = newStruct(type, typeDef, {x: true});
|
||||
const v = newStruct(type, {x: true});
|
||||
validateType(type, v);
|
||||
assertAll(type, v);
|
||||
|
||||
|
||||
@@ -27,23 +27,17 @@ export default function validateType(t: Type, v: any): void {
|
||||
return;
|
||||
}
|
||||
|
||||
case Kind.Unresolved: {
|
||||
// Struct.
|
||||
assertSubtype(v, t);
|
||||
return;
|
||||
}
|
||||
|
||||
case Kind.Blob:
|
||||
case Kind.List:
|
||||
case Kind.Map:
|
||||
case Kind.Ref:
|
||||
case Kind.Set:
|
||||
case Kind.Blob:
|
||||
case Kind.Struct:
|
||||
case Kind.Type:
|
||||
case Kind.Package:
|
||||
assertSubtype(v, t);
|
||||
return;
|
||||
|
||||
case Kind.Struct:
|
||||
case Kind.BackRef:
|
||||
default:
|
||||
throw new Error('unreachable');
|
||||
}
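Since struct values no longer carry `Kind.Unresolved`, their validation now flows through the `Kind.Struct` case above. A minimal sketch of that case, taken from the updated validate-type test (illustrative only):

```js
// Sketch: the struct case from validate-type-test.js after this change.
import validateType from './validate-type.js';
import {Field, boolType, makeStructType} from './type.js';
import {newStruct} from './struct.js';

const type = makeStructType('Struct', [
  new Field('x', boolType, false),
], []);
const v = newStruct(type, {x: true});
validateType(type, v); // no error for a matching value
```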
nomdl/codegen/.gitignore
@@ -1 +0,0 @@
|
||||
codegen
|
||||
@@ -1,60 +0,0 @@
|
||||
There are some subtleties to updating the codegen, because the code depends on generated code.
|
||||
|
||||
## Build a working version
|
||||
|
||||
First step is to build a binary.
|
||||
|
||||
```
|
||||
cd nomdl/codegen/
|
||||
go build
|
||||
```
|
||||
|
||||
## Change templates
|
||||
|
||||
Not much to say here, but you can see the result without breaking things:
|
||||
|
||||
```
|
||||
./codegen --in=test/struct.noms
|
||||
```
|
||||
|
||||
This generates `test.noms.go` in the current directory. Iterate until it looks correct.
|
||||
|
||||
## Change system go files
|
||||
|
||||
There are a few files that are generated that codegen itself depends on.
|
||||
|
||||
1. `types/compound_blob_struct.noms.go`
|
||||
1. `datas/types.noms.go`
|
||||
|
||||
Both of these can be updated by running `go generate` in their respective directories.
|
||||
|
||||
There is also one more file that is generated, but it requires manual intervention.
|
||||
|
||||
### `types/package_set_of_ref.go`
|
||||
|
||||
This one is generated from `types/package_set_of_ref.noms`. However, it uses the symbol
|
||||
`Package` to refer to a `types.Package`. Currently we have no convenient way to make this work
|
||||
out of the box. However, it is pretty straightforward to make it work:
|
||||
|
||||
1. Open `nomdl/pkg/grammar.peg`
|
||||
2. Find `Number`
|
||||
3. At that line, add one more builtin type called `Package`.
|
||||
4. Run `go generate` in `nomdl/pkg`
|
||||
5. Run `go run ../nomdl/codegen/codegen.go --in=package_set_of_ref.noms` in `types/`.
|
||||
|
||||
Here is the diff:
|
||||
|
||||
```diff
|
||||
--- a/nomdl/pkg/grammar.peg
|
||||
+++ b/nomdl/pkg/grammar.peg
|
||||
@@ -159,7 +159,7 @@ CompoundType <- `List` _ `(` _ t:Type _ `)` _ {
|
||||
return types.MakeRefType(t.(*types.Type)), nil
|
||||
}
|
||||
|
||||
-PrimitiveType <- p:(`Number` / `Bool` / `String` / `Blob` / `Value` / `Type`) {
|
||||
+PrimitiveType <- p:(`Number` / `Bool` / `String` / `Blob` / `Value` / `Type` / `Package`) {
|
||||
return types.MakePrimitiveTypeByString(string(p.([]uint8))), nil
|
||||
}
|
||||
```
|
||||
|
||||
Once [#577](https://github.com/attic-labs/noms/issues/577) is fixed this will need no manual intervention.
|
||||
@@ -1,491 +0,0 @@
|
||||
// Package code provides Generator, which has methods for generating code snippets from a *types.Type.
|
||||
// Conceptually there are few type spaces here:
|
||||
//
|
||||
// - Def - MyStructDef, ListOfBoolDef; convenient Go types for working with data from a given Noms Value.
|
||||
// - Native - such as string, uint32
|
||||
// - Value - the generic types.Value
|
||||
// - Nom - types.String, types.Uint32, MyStruct, ListOfBool
|
||||
// - User - User-defined structs as well as native primitives. This uses Native when possible or Nom if not. These are to be used in APIs for generated types -- Getters and setters for maps and structs, etc.
|
||||
package code
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/attic-labs/noms/d"
|
||||
"github.com/attic-labs/noms/ref"
|
||||
"github.com/attic-labs/noms/types"
|
||||
)
|
||||
|
||||
// Resolver provides a single method for resolving an unresolved types.Type.
|
||||
type Resolver interface {
|
||||
Resolve(t *types.Type, pkg *types.Package) *types.Type
|
||||
}
|
||||
|
||||
// Generator provides methods for generating code snippets from both resolved and unresolved types.Types. In the latter case, it uses R to resolve the types.Type before generating code.
|
||||
type Generator struct {
|
||||
R Resolver
|
||||
TypesPackage string
|
||||
ImportedJS map[string]bool
|
||||
ImportedJSTypes map[string]bool
|
||||
AliasNames map[ref.Ref]string
|
||||
Package *types.Package
|
||||
}
|
||||
|
||||
// DefType returns a string containing the Go type that should be used as the 'Def' for the Noms type described by t.
|
||||
func (gen *Generator) DefType(t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
k := rt.Kind()
|
||||
switch k {
|
||||
case types.BlobKind:
|
||||
return fmt.Sprintf("%sBlob", gen.TypesPackage)
|
||||
case types.BoolKind, types.NumberKind, types.StringKind:
|
||||
return strings.ToLower(kindToString(k))
|
||||
case types.ListKind, types.MapKind, types.SetKind, types.StructKind:
|
||||
return gen.UserName(t) + "Def"
|
||||
case types.PackageKind:
|
||||
return fmt.Sprintf("%sPackage", gen.TypesPackage)
|
||||
case types.RefKind:
|
||||
return "ref.Ref"
|
||||
case types.ValueKind:
|
||||
return fmt.Sprintf("%sValue", gen.TypesPackage)
|
||||
case types.TypeKind:
|
||||
return fmt.Sprintf("%sType", gen.TypesPackage)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// UserType returns a string containing the Go type that should be used when the Noms type described by t needs to be returned by a generated getter or taken as a parameter to a generated setter.
|
||||
func (gen *Generator) UserType(t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
k := rt.Kind()
|
||||
switch k {
|
||||
case types.BlobKind:
|
||||
return fmt.Sprintf("%sBlob", gen.TypesPackage)
|
||||
case types.BoolKind, types.NumberKind, types.StringKind:
|
||||
return strings.ToLower(kindToString(k))
|
||||
case types.ListKind, types.MapKind, types.RefKind, types.SetKind, types.StructKind:
|
||||
return gen.UserName(t)
|
||||
case types.PackageKind:
|
||||
return fmt.Sprintf("%sPackage", gen.TypesPackage)
|
||||
case types.ValueKind:
|
||||
return fmt.Sprintf("%sValue", gen.TypesPackage)
|
||||
case types.TypeKind:
|
||||
return fmt.Sprintf("%sType", gen.TypesPackage)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// UserTypeJS returns a string containing the JS type that should be used when the Noms type described by t needs to be returned by a generated getter or taken as a parameter to a generated setter.
|
||||
func (gen *Generator) UserTypeJS(t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
k := rt.Kind()
|
||||
switch k {
|
||||
case types.BlobKind:
|
||||
return gen.ImportJSType("Blob")
|
||||
case types.BoolKind:
|
||||
return "boolean"
|
||||
case types.StringKind:
|
||||
return "string"
|
||||
case types.NumberKind:
|
||||
return gen.ImportJSType(strings.ToLower(kindToString(k)))
|
||||
case types.StructKind:
|
||||
if t.HasPackageRef() && gen.Package.Ref() != t.PackageRef() {
|
||||
return gen.importedUserNameJS(t)
|
||||
}
|
||||
return gen.UserName(t)
|
||||
case types.ListKind:
|
||||
return fmt.Sprintf("%s<%s>", gen.ImportJSType("NomsList"), gen.UserTypeJS(t.Desc.(types.CompoundDesc).ElemTypes[0]))
|
||||
case types.SetKind:
|
||||
return fmt.Sprintf("%s<%s>", gen.ImportJSType("NomsSet"), gen.UserTypeJS(t.Desc.(types.CompoundDesc).ElemTypes[0]))
|
||||
case types.RefKind:
|
||||
return fmt.Sprintf("%s<%s>", gen.ImportJSType("RefValue"), gen.UserTypeJS(t.Desc.(types.CompoundDesc).ElemTypes[0]))
|
||||
case types.MapKind:
|
||||
elemTypes := t.Desc.(types.CompoundDesc).ElemTypes
|
||||
return fmt.Sprintf("%s<%s, %s>", gen.ImportJSType("NomsMap"), gen.UserTypeJS(elemTypes[0]), gen.UserTypeJS(elemTypes[1]))
|
||||
case types.PackageKind:
|
||||
return gen.ImportJSType("Package")
|
||||
case types.ValueKind:
|
||||
return gen.ImportJSType("Value")
|
||||
case types.TypeKind:
|
||||
return gen.ImportJSType("Type")
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// DefToValue returns a string containing Go code to convert an instance of a Def type (named val) to a Noms types.Value of the type described by t.
|
||||
func (gen *Generator) DefToValue(val string, t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
switch rt.Kind() {
|
||||
case types.BlobKind, types.PackageKind, types.ValueKind, types.TypeKind:
|
||||
return val // No special Def representation
|
||||
case types.BoolKind, types.NumberKind, types.StringKind:
|
||||
return gen.NativeToValue(val, rt)
|
||||
case types.ListKind, types.MapKind, types.SetKind, types.StructKind:
|
||||
return fmt.Sprintf("%s.New()", val)
|
||||
case types.RefKind:
|
||||
return fmt.Sprintf("New%s(%s)", gen.UserName(rt), val)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// DefToUser returns a string containing Go code to convert an instance of a Def type (named val) to a User type described by t.
|
||||
func (gen *Generator) DefToUser(val string, t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
switch rt.Kind() {
|
||||
case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.TypeKind, types.ValueKind:
|
||||
return val
|
||||
case types.ListKind, types.MapKind, types.RefKind, types.SetKind, types.StructKind:
|
||||
return gen.DefToValue(val, rt)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// MayHaveChunks returns whether the type (t) may contain more chunks.
|
||||
func (gen *Generator) MayHaveChunks(t *types.Type) bool {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
switch rt.Kind() {
|
||||
case types.BlobKind, types.ListKind, types.MapKind, types.PackageKind, types.RefKind, types.SetKind, types.StructKind, types.TypeKind, types.ValueKind:
|
||||
return true
|
||||
case types.BoolKind, types.NumberKind, types.StringKind:
|
||||
return false
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// ValueToDef returns a string containing Go code to convert an instance of a types.Value (val) into the Def type appropriate for t.
|
||||
func (gen *Generator) ValueToDef(val string, t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
switch rt.Kind() {
|
||||
case types.BlobKind, types.PackageKind, types.TypeKind:
|
||||
return gen.ValueToUser(val, rt) // No special Def representation
|
||||
case types.BoolKind, types.NumberKind, types.StringKind:
|
||||
return gen.ValueToNative(val, rt)
|
||||
case types.ListKind, types.MapKind, types.SetKind, types.StructKind:
|
||||
return fmt.Sprintf("%s.Def()", gen.ValueToUser(val, t))
|
||||
case types.RefKind:
|
||||
return fmt.Sprintf("%s.TargetRef()", gen.ValueToUser(val, t))
|
||||
case types.ValueKind:
|
||||
return val // Value is already a Value
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// UserToDef returns a string containing Go code to convert an User value (val) into the Def type appropriate for t.
|
||||
func (gen *Generator) UserToDef(val string, t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
switch rt.Kind() {
|
||||
case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.TypeKind, types.ValueKind:
|
||||
return val
|
||||
case types.ListKind, types.MapKind, types.SetKind, types.StructKind:
|
||||
return fmt.Sprintf("%s.Def()", val)
|
||||
case types.RefKind:
|
||||
return fmt.Sprintf("%s.TargetRef()", val)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// NativeToValue returns a string containing Go code to convert an instance of a native type (named val) to a Noms types.Value of the type described by t.
|
||||
func (gen *Generator) NativeToValue(val string, t *types.Type) string {
|
||||
t = gen.R.Resolve(t, gen.Package)
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case types.BoolKind, types.NumberKind:
|
||||
return fmt.Sprintf("%s%s(%s)", gen.TypesPackage, kindToString(k), val)
|
||||
case types.StringKind:
|
||||
return fmt.Sprintf("%sNewString(%s)", gen.TypesPackage, val)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// ValueToNative returns a string containing Go code to convert an instance of a types.Value (val) into the native type appropriate for t.
|
||||
func (gen *Generator) ValueToNative(val string, t *types.Type) string {
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case types.BoolKind, types.NumberKind:
|
||||
n := kindToString(k)
|
||||
return fmt.Sprintf("%s(%s.(%s%s))", strings.ToLower(n), val, gen.TypesPackage, n)
|
||||
case types.StringKind:
|
||||
return fmt.Sprintf("%s.(%sString).String()", val, gen.TypesPackage)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// UserToValue returns a string containing Go code to convert an instance of a User type (named val) to a Noms types.Value of the type described by t. For Go primitive types, this will use NativeToValue(). For other types, their UserType is a Noms types.Value (or a wrapper around one), so this is more-or-less a pass-through.
|
||||
func (gen *Generator) UserToValue(val string, t *types.Type) string {
|
||||
t = gen.R.Resolve(t, gen.Package)
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case types.BlobKind, types.ListKind, types.MapKind, types.PackageKind, types.RefKind, types.SetKind, types.StructKind, types.TypeKind, types.ValueKind:
|
||||
return val
|
||||
case types.BoolKind, types.NumberKind, types.StringKind:
|
||||
return gen.NativeToValue(val, t)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// ValueToUser returns a string containing Go code to convert an instance of a types.Value (val) into the User type appropriate for t. For Go primitives, this will use ValueToNative().
|
||||
func (gen *Generator) ValueToUser(val string, t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
k := rt.Kind()
|
||||
switch k {
|
||||
case types.BlobKind:
|
||||
return fmt.Sprintf("%s.(%sBlob)", val, gen.TypesPackage)
|
||||
case types.BoolKind, types.NumberKind, types.StringKind:
|
||||
return gen.ValueToNative(val, rt)
|
||||
case types.ListKind, types.MapKind, types.RefKind, types.SetKind, types.StructKind:
|
||||
return fmt.Sprintf("%s.(%s)", val, gen.UserName(t))
|
||||
case types.PackageKind:
|
||||
return fmt.Sprintf("%s.(%sPackage)", val, gen.TypesPackage)
|
||||
case types.ValueKind:
|
||||
return val
|
||||
case types.TypeKind:
|
||||
return fmt.Sprintf("%s.(%sType)", val, gen.TypesPackage)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// UserZero returns a string containing Go code to create an uninitialized instance of the User type appropriate for t.
|
||||
func (gen *Generator) UserZero(t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
k := rt.Kind()
|
||||
switch k {
|
||||
case types.BlobKind:
|
||||
return fmt.Sprintf("%sNewEmptyBlob()", gen.TypesPackage)
|
||||
case types.BoolKind:
|
||||
return "false"
|
||||
case types.NumberKind:
|
||||
return fmt.Sprintf("%s(0)", strings.ToLower(kindToString(k)))
|
||||
case types.ListKind, types.MapKind, types.SetKind, types.StructKind:
|
||||
return fmt.Sprintf("New%s()", gen.UserName(rt))
|
||||
case types.PackageKind:
|
||||
return fmt.Sprintf("New%s()", gen.UserName(rt))
|
||||
case types.RefKind:
|
||||
return fmt.Sprintf("New%s(ref.Ref{})", gen.UserName(rt))
|
||||
case types.StringKind:
|
||||
return `""`
|
||||
case types.ValueKind:
|
||||
// TODO: This is where a null Value would have been useful.
|
||||
return fmt.Sprintf("%sBool(false)", gen.TypesPackage)
|
||||
case types.TypeKind:
|
||||
return fmt.Sprintf("%sType{R: ref.Ref{}}", gen.TypesPackage)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// ValueZero returns a string containing Go code to create an uninitialized instance of the Noms types.Value appropriate for t.
|
||||
func (gen *Generator) ValueZero(t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
k := rt.Kind()
|
||||
switch k {
|
||||
case types.BlobKind:
|
||||
return fmt.Sprintf("%sNewEmptyBlob()", gen.TypesPackage)
|
||||
case types.BoolKind:
|
||||
return fmt.Sprintf("%sBool(false)", gen.TypesPackage)
|
||||
case types.NumberKind:
|
||||
return fmt.Sprintf("%s%s(0)", gen.TypesPackage, kindToString(k))
|
||||
case types.ListKind, types.MapKind, types.RefKind, types.SetKind:
|
||||
return gen.UserZero(t)
|
||||
case types.PackageKind:
|
||||
return fmt.Sprintf("%sNewPackage()", gen.TypesPackage)
|
||||
case types.StringKind:
|
||||
return fmt.Sprintf(`%sNewString("")`, gen.TypesPackage)
|
||||
case types.StructKind:
|
||||
return fmt.Sprintf("New%s()", gen.UserName(rt))
|
||||
case types.ValueKind:
|
||||
// TODO: Use nil here
|
||||
return fmt.Sprintf("%sBool(false)", gen.TypesPackage)
|
||||
case types.TypeKind:
|
||||
return fmt.Sprintf("%sType{R: ref.Ref{}}", gen.TypesPackage)
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// UserName returns the name of the User type appropriate for t, taking into account Noms types imported from other packages.
|
||||
func (gen *Generator) UserName(t *types.Type) string {
|
||||
rt := gen.R.Resolve(t, gen.Package)
|
||||
k := rt.Kind()
|
||||
switch k {
|
||||
case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.ValueKind, types.TypeKind:
|
||||
return kindToString(k)
|
||||
case types.ListKind:
|
||||
return fmt.Sprintf("ListOf%s", gen.refToID(rt.Desc.(types.CompoundDesc).ElemTypes[0]))
|
||||
case types.MapKind:
|
||||
elemTypes := rt.Desc.(types.CompoundDesc).ElemTypes
|
||||
return fmt.Sprintf("MapOf%sTo%s", gen.refToID(elemTypes[0]), gen.refToID(elemTypes[1]))
|
||||
case types.RefKind:
|
||||
return fmt.Sprintf("RefOf%s", gen.refToID(rt.Desc.(types.CompoundDesc).ElemTypes[0]))
|
||||
case types.SetKind:
|
||||
return fmt.Sprintf("SetOf%s", gen.refToID(rt.Desc.(types.CompoundDesc).ElemTypes[0]))
|
||||
case types.StructKind:
|
||||
// We get an empty name when we have a struct that is used as union
|
||||
if rt.Name() == "" {
|
||||
choices := rt.Desc.(types.StructDesc).Union
|
||||
s := "__unionOf"
|
||||
for i, f := range choices {
|
||||
if i > 0 {
|
||||
s += "And"
|
||||
}
|
||||
s += strings.Title(f.Name) + "Of" + gen.refToID(f.T)
|
||||
}
|
||||
return s
|
||||
}
|
||||
return rt.Name()
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func (gen Generator) importedUserNameJS(t *types.Type) string {
|
||||
d.Chk.True(t.HasPackageRef())
|
||||
return fmt.Sprintf("%s.%s", gen.RefToAliasName(t.PackageRef()), gen.UserName(t))
|
||||
}
|
||||
|
||||
func (gen *Generator) refToID(t *types.Type) string {
|
||||
if !t.IsUnresolved() || !t.HasPackageRef() {
|
||||
return gen.UserName(t)
|
||||
}
|
||||
return gen.UserName(gen.R.Resolve(t, gen.Package))
|
||||
}
|
||||
|
||||
// RefToJSIdentfierName generates an identifier name representing a Ref, e.g. `sha1_abc1234`.
|
||||
func (gen *Generator) RefToJSIdentfierName(r ref.Ref) string {
|
||||
return strings.Replace(r.String(), "-", "_", 1)[0:12]
|
||||
}
|
||||
|
||||
// RefToAliasName is used to map the ref of an import to the alias name used in the noms file
|
||||
func (gen *Generator) RefToAliasName(r ref.Ref) string {
|
||||
// When we generate code from a Package stored in a DataStore we do not have the alias names.
|
||||
if n, ok := gen.AliasNames[r]; ok {
|
||||
return n
|
||||
}
|
||||
return fmt.Sprintf("_%s", gen.RefToJSIdentfierName(r))
|
||||
}
|
||||
|
||||
// ToTypesType returns a string containing Go code that instantiates a *types.Type instance equivalent to t.
|
||||
func (gen *Generator) ToTypesType(t *types.Type, inPackageDef bool) string {
|
||||
if t.IsUnresolved() {
|
||||
d.Chk.True(t.HasPackageRef())
|
||||
d.Chk.True(t.HasOrdinal(), "%s does not have an ordinal set", t.Name())
|
||||
if t.PackageRef() == gen.Package.Ref() && inPackageDef {
|
||||
return fmt.Sprintf(`%sMakeType(ref.Ref{}, %d)`, gen.TypesPackage, t.Ordinal())
|
||||
}
|
||||
return fmt.Sprintf(`%sMakeType(ref.Parse("%s"), %d)`, gen.TypesPackage, t.PackageRef().String(), t.Ordinal())
|
||||
}
|
||||
|
||||
if types.IsPrimitiveKind(t.Kind()) {
|
||||
return fmt.Sprintf("%sMakePrimitiveType(%s%sKind)", gen.TypesPackage, gen.TypesPackage, kindToString(t.Kind()))
|
||||
}
|
||||
|
||||
switch desc := t.Desc.(type) {
|
||||
case types.CompoundDesc:
|
||||
types := make([]string, len(desc.ElemTypes))
|
||||
for i, t := range desc.ElemTypes {
|
||||
types[i] = gen.ToTypesType(t, inPackageDef)
|
||||
}
|
||||
return fmt.Sprintf(`%sMakeCompoundType(%s%sKind, %s)`, gen.TypesPackage, gen.TypesPackage, kindToString(t.Kind()), strings.Join(types, ", "))
|
||||
case types.StructDesc:
|
||||
flatten := func(f []types.Field) string {
|
||||
out := make([]string, 0, len(f))
|
||||
for _, field := range f {
|
||||
out = append(out, fmt.Sprintf(`%sField{"%s", %s, %t},`, gen.TypesPackage, field.Name, gen.ToTypesType(field.T, inPackageDef), field.Optional))
|
||||
}
|
||||
return strings.Join(out, "\n")
|
||||
}
|
||||
fields := fmt.Sprintf("[]%sField{\n%s\n}", gen.TypesPackage, flatten(desc.Fields))
|
||||
choices := fmt.Sprintf("%sChoices{\n%s\n}", gen.TypesPackage, flatten(desc.Union))
|
||||
return fmt.Sprintf("%sMakeStructType(\"%s\",\n%s,\n%s,\n)", gen.TypesPackage, t.Name(), fields, choices)
|
||||
default:
|
||||
d.Chk.Fail("Unknown TypeDesc.", "%#v (%T)", desc, desc)
|
||||
}
|
||||
panic("Unreachable")
|
||||
}
|
||||
|
||||
func ind(i int) string {
|
||||
return strings.Repeat(" ", i)
|
||||
}
|
||||
|
||||
func firstToLower(s string) string {
|
||||
b := []rune(s)
|
||||
b[0] = unicode.ToLower(b[0])
|
||||
return string(b)
|
||||
}
|
||||
|
||||
// ToTypeValueJS returns a string containing JS code that instantiates a Type instance equivalent to t.
|
||||
func (gen *Generator) ToTypeValueJS(t *types.Type, inPackageDef bool, indent int) string {
|
||||
if t.IsUnresolved() {
|
||||
d.Chk.True(t.HasPackageRef())
|
||||
d.Chk.True(t.HasOrdinal(), "%s does not have an ordinal set", t.Name())
|
||||
if t.PackageRef() == gen.Package.Ref() {
|
||||
if inPackageDef {
|
||||
return fmt.Sprintf(`%s(%s, %d)`, gen.ImportJS("makeType"), gen.ImportJS("emptyRef"), t.Ordinal())
|
||||
} else {
|
||||
return fmt.Sprintf(`%s(_pkg.ref, %d)`, gen.ImportJS("makeType"), t.Ordinal())
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf(`%s(%s.parse('%s'), %d)`, gen.ImportJS("makeType"), gen.ImportJS("Ref"), t.PackageRef().String(), t.Ordinal())
|
||||
}
|
||||
|
||||
if types.IsPrimitiveKind(t.Kind()) {
|
||||
return gen.ImportJS(firstToLower(kindToString(t.Kind())) + "Type")
|
||||
}
|
||||
|
||||
switch desc := t.Desc.(type) {
|
||||
case types.CompoundDesc:
|
||||
types := make([]string, len(desc.ElemTypes))
|
||||
for i, t := range desc.ElemTypes {
|
||||
types[i] = gen.ToTypeValueJS(t, inPackageDef, 0)
|
||||
}
|
||||
return fmt.Sprintf(`%s(%s.%s, %s)`, gen.ImportJS("makeCompoundType"), gen.ImportJS("Kind"), kindToString(t.Kind()), strings.Join(types, ", "))
|
||||
case types.StructDesc:
|
||||
flatten := func(f []types.Field) string {
|
||||
out := make([]string, 0, len(f))
|
||||
for _, field := range f {
|
||||
out = append(out, fmt.Sprintf(`%snew %s('%s', %s, %t),`, ind(indent+1), gen.ImportJS("Field"), field.Name, gen.ToTypeValueJS(field.T, inPackageDef, 0), field.Optional))
|
||||
}
|
||||
return strings.Join(out, "\n")
|
||||
}
|
||||
fields := fmt.Sprintf("%s[\n%s\n%s]", ind(indent), flatten(desc.Fields), ind(indent))
|
||||
choices := fmt.Sprintf("%s[\n%s\n%s]", ind(indent), flatten(desc.Union), ind(indent))
|
||||
return fmt.Sprintf("%s('%s',\n%s,\n%s\n%s)", gen.ImportJS("makeStructType"), t.Name(), fields, choices, ind(indent-1))
|
||||
default:
|
||||
d.Chk.Fail("Unknown TypeDesc.", "%#v (%T)", desc, desc)
|
||||
}
|
||||
panic("Unreachable")
|
||||
}
|
||||
|
||||
// IsLast determines if |index| is the last index in |seq|.
|
||||
func (gen *Generator) IsLast(index int, seq interface{}) bool {
|
||||
return reflect.ValueOf(seq).Len() == index+1
|
||||
}
|
||||
|
||||
// ToTag replaces "-" characters in the first 12 characters of r's string representation with "_", so the result can be used in a Go identifier.
|
||||
// TODO: replace other illegal chars as well?
|
||||
func ToTag(r ref.Ref) string {
|
||||
return strings.Replace(r.String()[0:12], "-", "_", -1)
|
||||
}
|
||||
|
||||
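// kindToString returns the name of the given NomsKind, asserting that the kind is known.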
func kindToString(k types.NomsKind) (out string) {
|
||||
out = types.KindToString[k]
|
||||
d.Chk.NotEmpty(out, "Unknown NomsKind %d", k)
|
||||
return
|
||||
}
|
||||
|
||||
// ImportJS returns the name of the imported binding and registers it as imported so that we can later generate the right import declaration.
|
||||
func (gen *Generator) ImportJS(name string) string {
|
||||
if gen.ImportedJS == nil {
|
||||
gen.ImportedJS = map[string]bool{}
|
||||
}
|
||||
gen.ImportedJS[name] = true
|
||||
return fmt.Sprintf("_%s", name)
|
||||
}
|
||||
|
||||
// ImportJSType returns the name of the imported type and registers it as imported so that we can later generate the right import type declaration.
|
||||
func (gen *Generator) ImportJSType(name string) string {
|
||||
if gen.ImportedJSTypes == nil {
|
||||
gen.ImportedJSTypes = map[string]bool{}
|
||||
}
|
||||
gen.ImportedJSTypes[name] = true
|
||||
return fmt.Sprintf("_%s", name)
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
package code
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/attic-labs/noms/ref"
|
||||
"github.com/attic-labs/noms/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
type testResolver struct {
|
||||
assert *assert.Assertions
|
||||
deps map[ref.Ref]types.Package
|
||||
}
|
||||
|
||||
func (res *testResolver) Resolve(t *types.Type, pkg *types.Package) *types.Type {
|
||||
if !t.IsUnresolved() {
|
||||
return t
|
||||
}
|
||||
|
||||
if !t.HasPackageRef() {
|
||||
res.assert.Fail("Test does not handle local references")
|
||||
}
|
||||
|
||||
if t.PackageRef() == pkg.Ref() {
|
||||
return pkg.Types()[t.Ordinal()]
|
||||
}
|
||||
|
||||
dep, ok := res.deps[t.PackageRef()]
|
||||
res.assert.True(ok, "Package %s is referenced in %+v, but is not a dependency.", t.PackageRef().String(), t)
|
||||
return dep.Types()[t.Ordinal()]
|
||||
}
|
||||
|
||||
func TestUserName(t *testing.T) {
|
||||
assert := assert.New(t)
|
||||
|
||||
imported := types.NewPackage([]*types.Type{
|
||||
types.MakeStructType("S1", []types.Field{
|
||||
types.Field{"f", types.BoolType, false},
|
||||
}, []types.Field{}),
|
||||
}, []ref.Ref{})
|
||||
|
||||
res := testResolver{assert, map[ref.Ref]types.Package{imported.Ref(): imported}}
|
||||
|
||||
localStructName := "Local"
|
||||
resolved := types.MakeStructType(localStructName, []types.Field{
|
||||
types.Field{"a", types.NumberType, false},
|
||||
}, []types.Field{})
|
||||
|
||||
g := Generator{R: &res, Package: &imported}
|
||||
assert.Equal(localStructName, g.UserName(resolved))
|
||||
}
|
||||
@@ -1,495 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"flag"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"sort"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/attic-labs/noms/chunks"
|
||||
"github.com/attic-labs/noms/d"
|
||||
"github.com/attic-labs/noms/datas"
|
||||
"github.com/attic-labs/noms/dataset"
|
||||
"github.com/attic-labs/noms/nomdl/codegen/code"
|
||||
"github.com/attic-labs/noms/nomdl/pkg"
|
||||
"github.com/attic-labs/noms/ref"
|
||||
"github.com/attic-labs/noms/types"
|
||||
)
|
||||
|
||||
var (
|
||||
outDirFlag = flag.String("out-dir", ".", "Directory where generated code will be written")
|
||||
inFlag = flag.String("in", "", "The name of the noms file to read")
|
||||
pkgDSFlag = flag.String("package-ds", "", "The dataset to read/write packages from/to.")
|
||||
packageFlag = flag.String("package", "", "The name of the go package to write")
|
||||
|
||||
idRegexp = regexp.MustCompile(`[_\pL][_\pL\pN]*`)
|
||||
illegalRune = regexp.MustCompile(`[^_\pL\pN]`)
|
||||
)
|
||||
|
||||
const ext = ".noms"
|
||||
|
||||
type refSet map[ref.Ref]bool
|
||||
|
||||
func main() {
|
||||
flags := datas.NewFlags()
|
||||
flag.Parse()
|
||||
|
||||
ds, ok := flags.CreateDataStore()
|
||||
if !ok {
|
||||
ds = datas.NewDataStore(chunks.NewMemoryStore())
|
||||
}
|
||||
defer ds.Close()
|
||||
|
||||
if *pkgDSFlag != "" {
|
||||
if !ok {
|
||||
log.Print("Package dataset provided, but DataStore could not be opened.")
|
||||
flag.Usage()
|
||||
return
|
||||
}
|
||||
} else {
|
||||
log.Print("No package dataset provided; will be unable to process imports.")
|
||||
*pkgDSFlag = "default"
|
||||
}
|
||||
|
||||
pkgDS := dataset.NewDataset(ds, *pkgDSFlag)
|
||||
// Ensure that, if pkgDS has stuff in it, its head is a SetOfRefOfPackage.
|
||||
if h, ok := pkgDS.MaybeHead(); ok {
|
||||
d.Chk.IsType(types.NewSetOfRefOfPackage(), h.Get(datas.ValueField))
|
||||
}
|
||||
|
||||
localPkgs := refSet{}
|
||||
outDir, err := filepath.Abs(*outDirFlag)
|
||||
d.Chk.NoError(err, "Could not canonicalize -out-dir: %v", err)
|
||||
packageName := ""
|
||||
|
||||
if *inFlag != "" {
|
||||
out := getOutFileName(filepath.Base(*inFlag))
|
||||
p := parsePackageFile(packageName, *inFlag, pkgDS)
|
||||
localPkgs[p.Ref()] = true
|
||||
generate(packageName, *inFlag, filepath.Join(outDir, out), outDir, map[string]bool{}, p, localPkgs, pkgDS)
|
||||
return
|
||||
}
|
||||
|
||||
// Generate code from all .noms files in the current directory.
|
||||
nomsFiles, err := filepath.Glob("*" + ext)
|
||||
d.Chk.NoError(err)
|
||||
|
||||
written := map[string]bool{}
|
||||
packages := map[string]pkg.Parsed{}
|
||||
for _, inFile := range nomsFiles {
|
||||
p := parsePackageFile(packageName, inFile, pkgDS)
|
||||
localPkgs[p.Ref()] = true
|
||||
packages[inFile] = p
|
||||
}
|
||||
// Sort to have deterministic output.
keys := make([]string, 0, len(packages))
for inFile := range packages {
keys = append(keys, inFile)
}
sort.Strings(keys)
|
||||
for _, inFile := range keys {
|
||||
p := packages[inFile]
|
||||
pkgDS = generate(packageName, inFile, filepath.Join(outDir, getOutFileName(inFile)), outDir, written, p, localPkgs, pkgDS)
|
||||
}
|
||||
}
|
||||
|
||||
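// parsePackageFile parses the .noms file at in into a pkg.Parsed, resolving imports via pkgDS's store.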
func parsePackageFile(packageName string, in string, pkgDS dataset.Dataset) pkg.Parsed {
|
||||
inFile, err := os.Open(in)
|
||||
d.Chk.NoError(err)
|
||||
defer inFile.Close()
|
||||
|
||||
return pkg.ParseNomDL(packageName, inFile, filepath.Dir(in), pkgDS.Store())
|
||||
}
|
||||
|
||||
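// generate emits code for parsed and all of its dependencies, commits the resulting set of packages to pkgDS (retrying on optimistic lock failures), and returns the updated dataset.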
func generate(packageName, in, out, outDir string, written map[string]bool, parsed pkg.Parsed, localPkgs refSet, pkgDS dataset.Dataset) dataset.Dataset {
|
||||
// Generate code for all p's deps first.
|
||||
deps := generateDepCode(packageName, outDir, written, parsed.Package, localPkgs, pkgDS.Store())
|
||||
generateAndEmit(getBareFileName(in), out, written, deps, parsed)
|
||||
|
||||
// Since we're just building up a set of refs to all the packages in pkgDS, simply retrying is the logical response to commit failure.
|
||||
err := datas.ErrOptimisticLockFailed
|
||||
for ; err == datas.ErrOptimisticLockFailed; pkgDS, err = pkgDS.Commit(buildSetOfRefOfPackage(parsed, deps, pkgDS)) {
|
||||
}
|
||||
return pkgDS
|
||||
}
|
||||
|
||||
type depsMap map[ref.Ref]types.Package
|
||||
|
||||
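// generateDepCode recursively generates code for each dependency of p that has not been generated locally yet and returns the map of all transitive dependencies.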
func generateDepCode(packageName, outDir string, written map[string]bool, p types.Package, localPkgs refSet, vr types.ValueReader) depsMap {
|
||||
deps := depsMap{}
|
||||
for _, r := range p.Dependencies() {
|
||||
p := vr.ReadValue(r).(types.Package)
|
||||
pDeps := generateDepCode(packageName, outDir, written, p, localPkgs, vr)
|
||||
tag := code.ToTag(p.Ref())
|
||||
parsed := pkg.Parsed{Package: p, Name: packageName}
|
||||
if !localPkgs[parsed.Ref()] {
|
||||
generateAndEmit(tag, filepath.Join(outDir, tag+".js"), written, pDeps, parsed)
|
||||
localPkgs[parsed.Ref()] = true
|
||||
}
|
||||
for depRef, dep := range pDeps {
|
||||
deps[depRef] = dep
|
||||
}
|
||||
deps[r] = p
|
||||
}
|
||||
return deps
|
||||
}
|
||||
|
||||
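// generateAndEmit renders the code for p into a buffer and writes it to the file at out, creating the output directory if needed.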
func generateAndEmit(tag, out string, written map[string]bool, deps depsMap, p pkg.Parsed) {
|
||||
var buf bytes.Buffer
|
||||
gen := newCodeGen(&buf, tag, written, deps, p)
|
||||
gen.WritePackage()
|
||||
|
||||
d.Chk.NoError(os.MkdirAll(filepath.Dir(out), 0700))
|
||||
|
||||
outFile, err := os.OpenFile(out, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)
|
||||
d.Chk.NoError(err)
|
||||
defer outFile.Close()
|
||||
|
||||
io.Copy(outFile, &buf)
|
||||
}
|
||||
|
||||
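// buildSetOfRefOfPackage writes pkg and its deps into ds's store and returns the dataset's head set with refs to all of them inserted.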
func buildSetOfRefOfPackage(pkg pkg.Parsed, deps depsMap, ds dataset.Dataset) types.Set {
|
||||
// Can do better once generated collections implement types.Value.
|
||||
s := types.NewSetOfRefOfPackage()
|
||||
if h, ok := ds.MaybeHead(); ok {
|
||||
s = h.Get(datas.ValueField).(types.Set)
|
||||
}
|
||||
for _, dep := range deps {
|
||||
// Writing the deps into ds should be redundant at this point, but do it to be sure.
|
||||
// TODO: consider moving all dataset work over into nomdl/pkg BUG 409
|
||||
s = s.Insert(ds.Store().WriteValue(dep))
|
||||
}
|
||||
r := ds.Store().WriteValue(pkg.Package)
|
||||
return s.Insert(r)
|
||||
}
|
||||
|
||||
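// getOutFileName maps a .noms input file name to the name of the generated .noms.js file.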
func getOutFileName(in string) string {
|
||||
return in[:len(in)-len(ext)] + ".noms.js"
|
||||
}
|
||||
|
||||
func getBareFileName(in string) string {
|
||||
base := filepath.Base(in)
|
||||
return base[:len(base)-len(filepath.Ext(base))]
|
||||
}
|
||||
|
||||
type codeGen struct {
|
||||
w io.Writer
|
||||
pkg pkg.Parsed
|
||||
deps depsMap
|
||||
written map[string]bool
|
||||
toWrite []*types.Type
|
||||
generator *code.Generator
|
||||
templates *template.Template
|
||||
}
|
||||
|
||||
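// newCodeGen creates a codeGen that writes to w, wiring up a code.Generator for pkg and parsing the JS templates.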
func newCodeGen(w io.Writer, fileID string, written map[string]bool, deps depsMap, pkg pkg.Parsed) *codeGen {
|
||||
gen := &codeGen{w, pkg, deps, written, []*types.Type{}, nil, nil}
|
||||
gen.generator = &code.Generator{
|
||||
R: gen,
|
||||
AliasNames: pkg.AliasNames,
|
||||
Package: &pkg.Package,
|
||||
}
|
||||
gen.templates = gen.readTemplates()
|
||||
return gen
|
||||
}
|
||||
|
||||
func (gen *codeGen) readTemplates() *template.Template {
|
||||
_, thisfile, _, _ := runtime.Caller(1)
|
||||
glob := path.Join(path.Dir(thisfile), "js", "*.tmpl")
|
||||
return template.Must(template.New("").Funcs(
|
||||
template.FuncMap{
|
||||
"defToUser": gen.generator.DefToUser,
|
||||
"defToValue": gen.generator.DefToValue,
|
||||
"defType": gen.generator.DefType,
|
||||
"importJS": gen.generator.ImportJS,
|
||||
"importJsType": gen.generator.ImportJSType,
|
||||
"isLast": gen.generator.IsLast,
|
||||
"mayHaveChunks": gen.generator.MayHaveChunks,
|
||||
"refToAliasName": gen.generator.RefToAliasName,
|
||||
"refToJSIdentfierName": gen.generator.RefToJSIdentfierName,
|
||||
"title": strings.Title,
|
||||
"toTypesType": gen.generator.ToTypesType,
|
||||
"toTypeValueJS": gen.generator.ToTypeValueJS,
|
||||
"userToDef": gen.generator.UserToDef,
|
||||
"userToValue": gen.generator.UserToValue,
|
||||
"userType": gen.generator.UserType,
|
||||
"userTypeJS": gen.generator.UserTypeJS,
|
||||
"userZero": gen.generator.UserZero,
|
||||
"valueToDef": gen.generator.ValueToDef,
|
||||
"valueToUser": gen.generator.ValueToUser,
|
||||
"valueZero": gen.generator.ValueZero,
|
||||
}).ParseGlob(glob))
|
||||
}
|
||||
|
||||
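// Resolve maps an unresolved type reference to its definition, looking in the local package first and then in the generated dependencies.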
func (gen *codeGen) Resolve(t *types.Type, pkg *types.Package) *types.Type {
|
||||
if !t.IsUnresolved() {
|
||||
return t
|
||||
}
|
||||
if !t.HasPackageRef() {
|
||||
return gen.pkg.Types()[t.Ordinal()]
|
||||
}
|
||||
|
||||
if t.PackageRef() == pkg.Ref() {
|
||||
return pkg.Types()[t.Ordinal()]
|
||||
}
|
||||
|
||||
dep, ok := gen.deps[t.PackageRef()]
|
||||
d.Chk.True(ok, "Package %s is referenced in %+v, but is not a dependency.", t.PackageRef().String(), t)
|
||||
return dep.Types()[t.Ordinal()]
|
||||
}
|
||||
|
||||
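// WritePackage renders the package template, every top-level type, the using declarations, and any types queued by writeLater into a buffer, then emits the header with the accumulated imports followed by the buffered code.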
func (gen *codeGen) WritePackage() {
|
||||
pkgTypes := gen.pkg.Types()
|
||||
data := struct {
|
||||
PackageRef ref.Ref
|
||||
HasTypes bool
|
||||
Dependencies []ref.Ref
|
||||
Name string
|
||||
Types []*types.Type
|
||||
}{
|
||||
gen.pkg.Package.Ref(),
|
||||
len(pkgTypes) > 0,
|
||||
gen.pkg.Dependencies(),
|
||||
gen.pkg.Name,
|
||||
pkgTypes,
|
||||
}
|
||||
|
||||
// In JS we want to write the imports at the top of the file, but we do not know what we need to import until we have written everything. We therefore write to a buffer and, once everything is done, emit the imports first and then copy the buffer into the writer.
|
||||
var buf bytes.Buffer
|
||||
w := gen.w
|
||||
|
||||
gen.w = &buf
|
||||
|
||||
err := gen.templates.ExecuteTemplate(gen.w, "package.tmpl", data)
|
||||
d.Exp.NoError(err)
|
||||
|
||||
for i, t := range pkgTypes {
|
||||
gen.writeTopLevel(t, i)
|
||||
}
|
||||
|
||||
for _, t := range gen.pkg.UsingDeclarations {
|
||||
gen.write(t)
|
||||
}
|
||||
|
||||
for len(gen.toWrite) > 0 {
|
||||
t := gen.toWrite[0]
|
||||
gen.toWrite = gen.toWrite[1:]
|
||||
gen.write(t)
|
||||
}
|
||||
|
||||
gen.w = w
|
||||
gen.WriteHeader()
|
||||
io.Copy(w, &buf)
|
||||
}
|
||||
|
||||
func (gen *codeGen) WriteHeader() {
|
||||
importedJS := make([]string, 0, len(gen.generator.ImportedJS))
|
||||
importedJSTypes := make([]string, 0, len(gen.generator.ImportedJSTypes))
|
||||
for name := range gen.generator.ImportedJS {
|
||||
importedJS = append(importedJS, name)
|
||||
}
|
||||
for name := range gen.generator.ImportedJSTypes {
|
||||
if _, ok := gen.generator.ImportedJS[name]; !ok {
|
||||
importedJSTypes = append(importedJSTypes, name)
|
||||
}
|
||||
}
|
||||
sort.Strings(importedJS)
|
||||
sort.Strings(importedJSTypes)
|
||||
|
||||
pkgTypes := gen.pkg.Types()
|
||||
data := struct {
|
||||
PackageRef ref.Ref
|
||||
HasTypes bool
|
||||
Dependencies []ref.Ref
|
||||
Name string
|
||||
Types []*types.Type
|
||||
ImportedJS []string
|
||||
ImportedJSTypes []string
|
||||
AliasNames map[ref.Ref]string
|
||||
}{
|
||||
gen.pkg.Package.Ref(),
|
||||
len(pkgTypes) > 0,
|
||||
gen.pkg.Dependencies(),
|
||||
gen.pkg.Name,
|
||||
pkgTypes,
|
||||
importedJS,
|
||||
importedJSTypes,
|
||||
gen.pkg.AliasNames,
|
||||
}
|
||||
|
||||
err := gen.templates.ExecuteTemplate(gen.w, "header.tmpl", data)
|
||||
d.Exp.NoError(err)
|
||||
}
|
||||
|
||||
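// shouldBeWritten reports whether code still needs to be generated for t; named structs must never be written twice.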
func (gen *codeGen) shouldBeWritten(t *types.Type) bool {
|
||||
if t.IsUnresolved() {
|
||||
return false
|
||||
}
|
||||
if t.Kind() == types.StructKind {
|
||||
name := gen.generator.UserName(t)
|
||||
d.Chk.False(gen.written[name], "Multiple definitions of type named %s", name)
|
||||
return true
|
||||
}
|
||||
return !gen.written[gen.generator.UserName(t)]
|
||||
}
|
||||
|
||||
func (gen *codeGen) writeTopLevel(t *types.Type, ordinal int) {
|
||||
switch t.Kind() {
|
||||
case types.StructKind:
|
||||
gen.writeStruct(t, ordinal)
|
||||
default:
|
||||
gen.write(t)
|
||||
}
|
||||
}
|
||||
|
||||
// write generates the code for the given type.
|
||||
func (gen *codeGen) write(t *types.Type) {
|
||||
if !gen.shouldBeWritten(t) {
|
||||
return
|
||||
}
|
||||
k := t.Kind()
|
||||
switch k {
|
||||
case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.ValueKind, types.TypeKind:
|
||||
return
|
||||
case types.ListKind:
|
||||
gen.writeList(t)
|
||||
case types.MapKind:
|
||||
gen.writeMap(t)
|
||||
case types.RefKind:
|
||||
gen.writeRef(t)
|
||||
case types.SetKind:
|
||||
gen.writeSet(t)
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
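// writeLater queues t so that its code is generated after the type currently being written.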
func (gen *codeGen) writeLater(t *types.Type) {
|
||||
if !gen.shouldBeWritten(t) {
|
||||
return
|
||||
}
|
||||
gen.toWrite = append(gen.toWrite, t)
|
||||
}
|
||||
|
||||
func (gen *codeGen) writeTemplate(tmpl string, t *types.Type, data interface{}) {
|
||||
err := gen.templates.ExecuteTemplate(gen.w, tmpl, data)
|
||||
d.Exp.NoError(err)
|
||||
gen.written[gen.generator.UserName(t)] = true
|
||||
}
|
||||
|
||||
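// writeStruct renders struct.tmpl for t and queues the field and union element types for generation.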
func (gen *codeGen) writeStruct(t *types.Type, ordinal int) {
|
||||
d.Chk.True(ordinal >= 0)
|
||||
desc := t.Desc.(types.StructDesc)
|
||||
data := struct {
|
||||
PackageRef ref.Ref
|
||||
Name string
|
||||
Type *types.Type
|
||||
Ordinal int
|
||||
Fields []types.Field
|
||||
Choices []types.Field
|
||||
HasUnion bool
|
||||
UnionZeroType *types.Type
|
||||
}{
|
||||
gen.pkg.Package.Ref(),
|
||||
gen.generator.UserName(t),
|
||||
t,
|
||||
ordinal,
|
||||
desc.Fields,
|
||||
nil,
|
||||
len(desc.Union) != 0,
|
||||
types.NumberType,
|
||||
}
|
||||
|
||||
if data.HasUnion {
|
||||
data.Choices = desc.Union
|
||||
data.UnionZeroType = data.Choices[0].T
|
||||
}
|
||||
gen.writeTemplate("struct.tmpl", t, data)
|
||||
for _, f := range desc.Fields {
|
||||
gen.writeLater(f.T)
|
||||
}
|
||||
if data.HasUnion {
|
||||
for _, f := range desc.Union {
|
||||
gen.writeLater(f.T)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (gen *codeGen) writeList(t *types.Type) {
|
||||
elemTypes := t.Desc.(types.CompoundDesc).ElemTypes
|
||||
data := struct {
|
||||
PackageRef ref.Ref
|
||||
Name string
|
||||
Type *types.Type
|
||||
ElemType *types.Type
|
||||
}{
|
||||
gen.pkg.Package.Ref(),
|
||||
gen.generator.UserName(t),
|
||||
t,
|
||||
elemTypes[0],
|
||||
}
|
||||
gen.writeTemplate("list.tmpl", t, data)
|
||||
gen.writeLater(elemTypes[0])
|
||||
}
|
||||
|
||||
func (gen *codeGen) writeMap(t *types.Type) {
|
||||
elemTypes := t.Desc.(types.CompoundDesc).ElemTypes
|
||||
data := struct {
|
||||
PackageRef ref.Ref
|
||||
Name string
|
||||
Type *types.Type
|
||||
KeyType *types.Type
|
||||
ValueType *types.Type
|
||||
}{
|
||||
gen.pkg.Package.Ref(),
|
||||
gen.generator.UserName(t),
|
||||
t,
|
||||
elemTypes[0],
|
||||
elemTypes[1],
|
||||
}
|
||||
gen.writeTemplate("map.tmpl", t, data)
|
||||
gen.writeLater(elemTypes[0])
|
||||
gen.writeLater(elemTypes[1])
|
||||
}
|
||||
|
||||
func (gen *codeGen) writeRef(t *types.Type) {
|
||||
elemTypes := t.Desc.(types.CompoundDesc).ElemTypes
|
||||
data := struct {
|
||||
PackageRef ref.Ref
|
||||
Name string
|
||||
Type *types.Type
|
||||
ElemType *types.Type
|
||||
}{
|
||||
gen.pkg.Package.Ref(),
|
||||
gen.generator.UserName(t),
|
||||
t,
|
||||
elemTypes[0],
|
||||
}
|
||||
gen.writeTemplate("ref.tmpl", t, data)
|
||||
gen.writeLater(elemTypes[0])
|
||||
}
|
||||
|
||||
func (gen *codeGen) writeSet(t *types.Type) {
|
||||
elemTypes := t.Desc.(types.CompoundDesc).ElemTypes
|
||||
data := struct {
|
||||
PackageRef ref.Ref
|
||||
Name string
|
||||
Type *types.Type
|
||||
ElemType *types.Type
|
||||
}{
|
||||
gen.pkg.Package.Ref(),
|
||||
gen.generator.UserName(t),
|
||||
t,
|
||||
elemTypes[0],
|
||||
}
|
||||
gen.writeTemplate("set.tmpl", t, data)
|
||||
gen.writeLater(elemTypes[0])
|
||||
}
|
||||
@@ -1,118 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/attic-labs/noms/chunks"
|
||||
"github.com/attic-labs/noms/d"
|
||||
"github.com/attic-labs/noms/datas"
|
||||
"github.com/attic-labs/noms/dataset"
|
||||
"github.com/attic-labs/noms/nomdl/codegen/code"
|
||||
"github.com/attic-labs/noms/nomdl/pkg"
|
||||
"github.com/attic-labs/noms/ref"
|
||||
"github.com/attic-labs/noms/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
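// assertOutput generates code for the .noms file at inPath and asserts that the output matches the golden file at goldenPath.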
func assertOutput(inPath, goldenPath string, t *testing.T) {
|
||||
assert := assert.New(t)
|
||||
emptyDS := datas.NewDataStore(chunks.NewMemoryStore()) // Will be DataStore containing imports
|
||||
|
||||
depsDir, err := ioutil.TempDir("", "")
|
||||
assert.NoError(err)
|
||||
defer os.RemoveAll(depsDir)
|
||||
|
||||
inFile, err := os.Open(inPath)
|
||||
assert.NoError(err)
|
||||
defer inFile.Close()
|
||||
|
||||
goldenFile, err := os.Open(goldenPath)
|
||||
assert.NoError(err)
|
||||
defer goldenFile.Close()
|
||||
goldenBytes, err := ioutil.ReadAll(goldenFile)
|
||||
d.Chk.NoError(err)
|
||||
|
||||
var buf bytes.Buffer
|
||||
pkg := pkg.ParseNomDL("gen", inFile, filepath.Dir(inPath), emptyDS)
|
||||
written := map[string]bool{}
|
||||
gen := newCodeGen(&buf, getBareFileName(inPath), written, depsMap{}, pkg)
|
||||
gen.WritePackage()
|
||||
|
||||
bs := buf.Bytes()
|
||||
assert.Equal(string(goldenBytes), string(bs), "%s did not generate the same string", inPath)
|
||||
}
|
||||
|
||||
func TestGeneratedFiles(t *testing.T) {
|
||||
files, err := filepath.Glob("test/*.noms")
|
||||
d.Chk.NoError(err)
|
||||
assert.NotEmpty(t, files)
|
||||
for _, n := range files {
|
||||
_, file := filepath.Split(n)
|
||||
if file == "struct_with_imports.noms" {
|
||||
// We are not writing deps in this test so lookup by ref does not work.
|
||||
continue
|
||||
}
|
||||
if file == "struct_with_list.noms" || file == "struct_with_dup_list.noms" {
|
||||
// These two files race to write ListOfNumber
|
||||
continue
|
||||
}
|
||||
assertOutput(n, filepath.Join("test", "gen", file+".js"), t)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSkipDuplicateTypes(t *testing.T) {
|
||||
assert := assert.New(t)
|
||||
dir, err := ioutil.TempDir("", "codegen_test_")
|
||||
assert.NoError(err)
|
||||
defer os.RemoveAll(dir)
|
||||
|
||||
leaf1 := types.NewPackage([]*types.Type{
|
||||
types.MakeStructType("S1", []types.Field{
|
||||
types.Field{"f", types.MakeListType(types.NumberType), false},
|
||||
types.Field{"e", types.MakeType(ref.Ref{}, 0), false},
|
||||
}, []types.Field{}),
|
||||
}, []ref.Ref{})
|
||||
leaf2 := types.NewPackage([]*types.Type{
|
||||
types.MakeStructType("S2", []types.Field{
|
||||
types.Field{"f", types.MakeListType(types.NumberType), false},
|
||||
}, []types.Field{}),
|
||||
}, []ref.Ref{})
|
||||
|
||||
written := map[string]bool{}
|
||||
tag1 := code.ToTag(leaf1.Ref())
|
||||
leaf1Path := filepath.Join(dir, tag1+".js")
|
||||
generateAndEmit(tag1, leaf1Path, written, depsMap{}, pkg.Parsed{Package: leaf1, Name: "p"})
|
||||
|
||||
tag2 := code.ToTag(leaf2.Ref())
|
||||
leaf2Path := filepath.Join(dir, tag2+".js")
|
||||
generateAndEmit(tag2, leaf2Path, written, depsMap{}, pkg.Parsed{Package: leaf2, Name: "p"})
|
||||
|
||||
code, err := ioutil.ReadFile(leaf2Path)
|
||||
assert.NoError(err)
|
||||
assert.NotContains(string(code), "type ListOfNumber")
|
||||
}
|
||||
|
||||
func TestCommitNewPackages(t *testing.T) {
|
||||
assert := assert.New(t)
|
||||
ds := datas.NewDataStore(chunks.NewMemoryStore())
|
||||
pkgDS := dataset.NewDataset(ds, "packages")
|
||||
|
||||
dir, err := ioutil.TempDir("", "")
|
||||
assert.NoError(err)
|
||||
defer os.RemoveAll(dir)
|
||||
inFile := filepath.Join(dir, "in.noms")
|
||||
err = ioutil.WriteFile(inFile, []byte("struct Simple{a:Bool}"), 0600)
|
||||
assert.NoError(err)
|
||||
|
||||
p := parsePackageFile("name", inFile, pkgDS)
|
||||
localPkgs := refSet{p.Ref(): true}
|
||||
pkgDS = generate("name", inFile, filepath.Join(dir, "out.js"), dir, map[string]bool{}, p, localPkgs, pkgDS)
|
||||
s := pkgDS.Head().Get(datas.ValueField).(types.Set)
|
||||
assert.EqualValues(1, s.Len())
|
||||
tr := s.First().(types.Ref).TargetValue(ds).(types.Package).Types()[0]
|
||||
assert.EqualValues(types.StructKind, tr.Kind())
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
{{$name := .Name}}
|
||||
export type {{.Name}} ={{range $index, $id := .Ids}}
|
||||
{{$index}}{{if isLast $index $.Ids | not}} |{{else}};{{end}} // {{$id}}{{end}}
|
||||
@@ -1,12 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
{{if gt (len .ImportedJS) 0}}
|
||||
import {{"{"}}{{range $name := .ImportedJS}}
|
||||
{{$name}} as _{{$name}},{{end}}
|
||||
} from '@attic/noms';{{end}}{{if gt (len .ImportedJSTypes) 0}}
|
||||
import type {{"{"}}{{range $name := .ImportedJSTypes}}
|
||||
{{$name}} as _{{$name}},{{end}}
|
||||
} from '@attic/noms';{{end}}{{if gt (len .Dependencies) 0}}{{$aliasNames := .AliasNames}}
|
||||
{{range $i, $r := .Dependencies}}import * as {{refToAliasName $r}} from './{{refToJSIdentfierName $r}}.js';
|
||||
{{end}}{{end}}
|
||||
@@ -1,4 +0,0 @@
|
||||
|
||||
export function new{{userType .Type}}(values: Array<{{userTypeJS .ElemType}}>): Promise<{{importJsType "NomsList"}}<{{userTypeJS .ElemType}}>> {
|
||||
return {{importJS "newList"}}(values, {{importJS "makeListType"}}({{toTypeValueJS .ElemType false 0}}));
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
|
||||
export function new{{userType .Type}}(values: Array<any>): Promise<{{importJsType "NomsMap"}}<{{userTypeJS .KeyType}}, {{userTypeJS .ValueType}}>> {
|
||||
return {{importJS "newMap"}}(values, {{importJS "makeMapType"}}({{toTypeValueJS .KeyType false 0}}, {{toTypeValueJS .ValueType false 0}}));
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{{if .HasTypes}}
|
||||
const _pkg = new {{importJS "Package"}}([{{range $i, $t := .Types}}
|
||||
{{toTypeValueJS $t true 2}},{{end}}
|
||||
], [{{range $deps := .Dependencies}}
|
||||
{{importJS "Ref"}}.parse('{{$deps}}'),{{end}}
|
||||
]);
|
||||
{{importJS "registerPackage"}}(_pkg);{{range $i, $t := .Types}}
|
||||
const {{userType $t}}$type = {{importJS "makeType"}}(_pkg.ref, {{$i}});
|
||||
const {{userType $t}}$typeDef = _pkg.types[{{$i}}];{{end}}
|
||||
{{end}}
|
||||
@@ -1 +0,0 @@
|
||||
{{/* Refs in JS needs no codegen */}}
|
||||
@@ -1,4 +0,0 @@
|
||||
|
||||
export function new{{userType .Type}}(values: Array<{{userTypeJS .ElemType}}>): Promise<{{importJsType "NomsSet"}}<{{userTypeJS .ElemType}}>> {
|
||||
return {{importJS "newSet"}}(values, {{importJS "makeSetType"}}({{toTypeValueJS .ElemType false 0}}));
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
{{$name := .Name}}
|
||||
type {{.Name}}$Data = {{"{"}}{{range $field := .Fields}}
|
||||
{{.Name}}{{if .Optional}}?{{end}}: {{userTypeJS .T}};{{end}}
|
||||
};
|
||||
|
||||
interface {{.Name}}$Interface extends {{importJsType "Struct"}} {
|
||||
constructor(data: {{.Name}}$Data): void;{{range $field := .Fields}}
|
||||
{{.Name}}: {{if .Optional}}?{{end}}{{userTypeJS .T}}; // readonly
|
||||
set{{title .Name}}(value: {{if .Optional}}?{{end}}{{userTypeJS .T}}): {{$name}}$Interface;{{end}}{{range $field := .Choices}}
|
||||
{{.Name}}: ?{{userTypeJS .T}}; // readonly
|
||||
set{{title .Name}}(value: {{userTypeJS .T}}): {{$name}}$Interface;{{end}}
|
||||
}
|
||||
|
||||
export const {{.Name}}: Class<{{.Name}}$Interface> = {{importJS "createStructClass"}}({{userType .Type}}$type, {{userType .Type}}$typeDef);
|
||||
@@ -1 +0,0 @@
|
||||
../../../js/.babelrc
|
||||
@@ -1 +0,0 @@
|
||||
module.exports = require('@attic/eslintrc');
|
||||
@@ -1 +0,0 @@
|
||||
../../../js/.flowconfig
|
||||
@@ -1 +0,0 @@
|
||||
node_modules
|
||||
@@ -1,2 +0,0 @@
|
||||
alias a = import "./clobber_a/a.noms"
|
||||
alias b = import "./clobber_b/b.noms"
|
||||
@@ -1,3 +0,0 @@
|
||||
struct A {
|
||||
A: List<List<Blob>>
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
alias a = import "../clobber_a/a.noms"
|
||||
@@ -1,8 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import * as a from './sha1_89fa11c.js';
|
||||
import * as b from './sha1_f2ea794.js';
|
||||
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Package as _Package,
|
||||
createStructClass as _createStructClass,
|
||||
emptyRef as _emptyRef,
|
||||
makeEnumType as _makeEnumType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
registerPackage as _registerPackage,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeEnumType('Handedness', 'right', 'left', 'switch'),
|
||||
_makeStructType('EnumStruct',
|
||||
[
|
||||
new _Field('hand', _makeType(_emptyRef, 0), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const Handedness$type = _makeType(_pkg.ref, 0);
|
||||
const Handedness$typeDef = _pkg.types[0];
|
||||
const EnumStruct$type = _makeType(_pkg.ref, 1);
|
||||
const EnumStruct$typeDef = _pkg.types[1];
|
||||
|
||||
|
||||
export type Handedness =
|
||||
0 | // right
|
||||
1 | // left
|
||||
2; // switch
|
||||
|
||||
type EnumStruct$Data = {
|
||||
hand: Handedness;
|
||||
};
|
||||
|
||||
interface EnumStruct$Interface extends _Struct {
|
||||
constructor(data: EnumStruct$Data): void;
|
||||
hand: Handedness; // readonly
|
||||
setHand(value: Handedness): EnumStruct$Interface;
|
||||
}
|
||||
|
||||
export const EnumStruct: Class<EnumStruct$Interface> = _createStructClass(EnumStruct$type, EnumStruct$typeDef);
|
||||
@@ -1,18 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
makeListType as _makeListType,
|
||||
newList as _newList,
|
||||
numberType as _numberType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsList as _NomsList,
|
||||
number as _number,
|
||||
} from '@attic/noms';
|
||||
|
||||
|
||||
export function newListOfNumber(values: Array<_number>): Promise<_NomsList<_number>> {
|
||||
return _newList(values, _makeListType(_numberType));
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
boolType as _boolType,
|
||||
makeMapType as _makeMapType,
|
||||
newMap as _newMap,
|
||||
stringType as _stringType,
|
||||
valueType as _valueType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsMap as _NomsMap,
|
||||
Value as _Value,
|
||||
} from '@attic/noms';
|
||||
|
||||
|
||||
export function newMapOfBoolToString(values: Array<any>): Promise<_NomsMap<boolean, string>> {
|
||||
return _newMap(values, _makeMapType(_boolType, _stringType));
|
||||
}
|
||||
|
||||
export function newMapOfStringToValue(values: Array<any>): Promise<_NomsMap<string, _Value>> {
|
||||
return _newMap(values, _makeMapType(_stringType, _valueType));
|
||||
}
|
||||
@@ -1,67 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Kind as _Kind,
|
||||
Package as _Package,
|
||||
createStructClass as _createStructClass,
|
||||
makeCompoundType as _makeCompoundType,
|
||||
makeListType as _makeListType,
|
||||
makeSetType as _makeSetType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
newList as _newList,
|
||||
newSet as _newSet,
|
||||
numberType as _numberType,
|
||||
registerPackage as _registerPackage,
|
||||
stringType as _stringType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsList as _NomsList,
|
||||
NomsSet as _NomsSet,
|
||||
RefValue as _RefValue,
|
||||
Struct as _Struct,
|
||||
number as _number,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('StructWithRef',
|
||||
[
|
||||
new _Field('r', _makeCompoundType(_Kind.Ref, _makeCompoundType(_Kind.Set, _numberType)), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const StructWithRef$type = _makeType(_pkg.ref, 0);
|
||||
const StructWithRef$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type StructWithRef$Data = {
|
||||
r: _RefValue<_NomsSet<_number>>;
|
||||
};
|
||||
|
||||
interface StructWithRef$Interface extends _Struct {
|
||||
constructor(data: StructWithRef$Data): void;
|
||||
r: _RefValue<_NomsSet<_number>>; // readonly
|
||||
setR(value: _RefValue<_NomsSet<_number>>): StructWithRef$Interface;
|
||||
}
|
||||
|
||||
export const StructWithRef: Class<StructWithRef$Interface> = _createStructClass(StructWithRef$type, StructWithRef$typeDef);
|
||||
|
||||
export function newListOfRefOfNumber(values: Array<_RefValue<_number>>): Promise<_NomsList<_RefValue<_number>>> {
|
||||
return _newList(values, _makeListType(_makeCompoundType(_Kind.Ref, _numberType)));
|
||||
}
|
||||
|
||||
export function newListOfString(values: Array<string>): Promise<_NomsList<string>> {
|
||||
return _newList(values, _makeListType(_stringType));
|
||||
}
|
||||
|
||||
export function newSetOfNumber(values: Array<_number>): Promise<_NomsSet<_number>> {
|
||||
return _newSet(values, _makeSetType(_numberType));
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
boolType as _boolType,
|
||||
makeSetType as _makeSetType,
|
||||
newSet as _newSet,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsSet as _NomsSet,
|
||||
} from '@attic/noms';
|
||||
|
||||
|
||||
export function newSetOfBool(values: Array<boolean>): Promise<_NomsSet<boolean>> {
|
||||
return _newSet(values, _makeSetType(_boolType));
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Package as _Package,
|
||||
boolType as _boolType,
|
||||
createStructClass as _createStructClass,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
registerPackage as _registerPackage,
|
||||
stringType as _stringType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('S',
|
||||
[
|
||||
new _Field('s', _stringType, false),
|
||||
new _Field('b', _boolType, false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const S$type = _makeType(_pkg.ref, 0);
|
||||
const S$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type S$Data = {
|
||||
s: string;
|
||||
b: boolean;
|
||||
};
|
||||
|
||||
interface S$Interface extends _Struct {
|
||||
constructor(data: S$Data): void;
|
||||
s: string; // readonly
|
||||
setS(value: string): S$Interface;
|
||||
b: boolean; // readonly
|
||||
setB(value: boolean): S$Interface;
|
||||
}
|
||||
|
||||
export const S: Class<S$Interface> = _createStructClass(S$type, S$typeDef);
|
||||
@@ -1,70 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Package as _Package,
|
||||
Ref as _Ref,
|
||||
createStructClass as _createStructClass,
|
||||
emptyRef as _emptyRef,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
registerPackage as _registerPackage,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
import * as _sha1_068bb32 from './sha1_068bb32.js';
|
||||
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('D',
|
||||
[
|
||||
new _Field('structField', _makeType(_Ref.parse('sha1-068bb32c733bd940a0d758715bf05082f4c12fcb'), 0), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
_makeStructType('DUser',
|
||||
[
|
||||
new _Field('Dfield', _makeType(_emptyRef, 0), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
_Ref.parse('sha1-068bb32c733bd940a0d758715bf05082f4c12fcb'),
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const D$type = _makeType(_pkg.ref, 0);
|
||||
const D$typeDef = _pkg.types[0];
|
||||
const DUser$type = _makeType(_pkg.ref, 1);
|
||||
const DUser$typeDef = _pkg.types[1];
|
||||
|
||||
|
||||
type D$Data = {
|
||||
structField: _sha1_068bb32.S;
|
||||
};
|
||||
|
||||
interface D$Interface extends _Struct {
|
||||
constructor(data: D$Data): void;
|
||||
structField: _sha1_068bb32.S; // readonly
|
||||
setStructField(value: _sha1_068bb32.S): D$Interface;
|
||||
}
|
||||
|
||||
export const D: Class<D$Interface> = _createStructClass(D$type, D$typeDef);
|
||||
|
||||
type DUser$Data = {
|
||||
Dfield: D;
|
||||
};
|
||||
|
||||
interface DUser$Interface extends _Struct {
|
||||
constructor(data: DUser$Data): void;
|
||||
Dfield: D; // readonly
|
||||
setDfield(value: D): DUser$Interface;
|
||||
}
|
||||
|
||||
export const DUser: Class<DUser$Interface> = _createStructClass(DUser$type, DUser$typeDef);
|
||||
@@ -1,58 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Kind as _Kind,
|
||||
Package as _Package,
|
||||
blobType as _blobType,
|
||||
createStructClass as _createStructClass,
|
||||
makeCompoundType as _makeCompoundType,
|
||||
makeListType as _makeListType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
newList as _newList,
|
||||
registerPackage as _registerPackage,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
Blob as _Blob,
|
||||
NomsList as _NomsList,
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('A',
|
||||
[
|
||||
new _Field('A', _makeCompoundType(_Kind.List, _makeCompoundType(_Kind.List, _blobType)), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const A$type = _makeType(_pkg.ref, 0);
|
||||
const A$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type A$Data = {
|
||||
A: _NomsList<_NomsList<_Blob>>;
|
||||
};
|
||||
|
||||
interface A$Interface extends _Struct {
|
||||
constructor(data: A$Data): void;
|
||||
A: _NomsList<_NomsList<_Blob>>; // readonly
|
||||
setA(value: _NomsList<_NomsList<_Blob>>): A$Interface;
|
||||
}
|
||||
|
||||
export const A: Class<A$Interface> = _createStructClass(A$type, A$typeDef);
|
||||
|
||||
export function newListOfListOfBlob(values: Array<_NomsList<_Blob>>): Promise<_NomsList<_NomsList<_Blob>>> {
|
||||
return _newList(values, _makeListType(_makeCompoundType(_Kind.List, _blobType)));
|
||||
}
|
||||
|
||||
export function newListOfBlob(values: Array<_Blob>): Promise<_NomsList<_Blob>> {
|
||||
return _newList(values, _makeListType(_blobType));
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import * as _sha1_9c6e87c from './sha1_89fa11c.js';
|
||||
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Package as _Package,
|
||||
boolType as _boolType,
|
||||
createStructClass as _createStructClass,
|
||||
makeListType as _makeListType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
newList as _newList,
|
||||
registerPackage as _registerPackage,
|
||||
stringType as _stringType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsList as _NomsList,
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('Struct',
|
||||
[
|
||||
new _Field('s', _stringType, false),
|
||||
new _Field('b', _boolType, false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const Struct$type = _makeType(_pkg.ref, 0);
|
||||
const Struct$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type Struct$Data = {
|
||||
s: string;
|
||||
b: boolean;
|
||||
};
|
||||
|
||||
interface Struct$Interface extends _Struct {
|
||||
constructor(data: Struct$Data): void;
|
||||
s: string; // readonly
|
||||
setS(value: string): Struct$Interface;
|
||||
b: boolean; // readonly
|
||||
setB(value: boolean): Struct$Interface;
|
||||
}
|
||||
|
||||
export const Struct: Class<Struct$Interface> = _createStructClass(Struct$type, Struct$typeDef);
|
||||
|
||||
export function newListOfStruct(values: Array<Struct>): Promise<_NomsList<Struct>> {
|
||||
return _newList(values, _makeListType(_makeType(_pkg.ref, 0)));
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Package as _Package,
|
||||
boolType as _boolType,
|
||||
createStructClass as _createStructClass,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
registerPackage as _registerPackage,
|
||||
stringType as _stringType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('OptionalStruct',
|
||||
[
|
||||
new _Field('s', _stringType, true),
|
||||
new _Field('b', _boolType, true),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const OptionalStruct$type = _makeType(_pkg.ref, 0);
|
||||
const OptionalStruct$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type OptionalStruct$Data = {
|
||||
s?: string;
|
||||
b?: boolean;
|
||||
};
|
||||
|
||||
interface OptionalStruct$Interface extends _Struct {
|
||||
constructor(data: OptionalStruct$Data): void;
|
||||
s: ?string; // readonly
|
||||
setS(value: ?string): OptionalStruct$Interface;
|
||||
b: ?boolean; // readonly
|
||||
setB(value: ?boolean): OptionalStruct$Interface;
|
||||
}
|
||||
|
||||
export const OptionalStruct: Class<OptionalStruct$Interface> = _createStructClass(OptionalStruct$type, OptionalStruct$typeDef);
|
||||
@@ -1,67 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Package as _Package,
|
||||
blobType as _blobType,
|
||||
boolType as _boolType,
|
||||
createStructClass as _createStructClass,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
numberType as _numberType,
|
||||
registerPackage as _registerPackage,
|
||||
stringType as _stringType,
|
||||
valueType as _valueType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
Blob as _Blob,
|
||||
Struct as _Struct,
|
||||
Value as _Value,
|
||||
number as _number,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('StructPrimitives',
|
||||
[
|
||||
new _Field('number', _numberType, false),
|
||||
new _Field('bool', _boolType, false),
|
||||
new _Field('string', _stringType, false),
|
||||
new _Field('blob', _blobType, false),
|
||||
new _Field('value', _valueType, false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const StructPrimitives$type = _makeType(_pkg.ref, 0);
|
||||
const StructPrimitives$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type StructPrimitives$Data = {
|
||||
number: _number;
|
||||
bool: boolean;
|
||||
string: string;
|
||||
blob: _Blob;
|
||||
value: _Value;
|
||||
};
|
||||
|
||||
interface StructPrimitives$Interface extends _Struct {
|
||||
constructor(data: StructPrimitives$Data): void;
|
||||
number: _number; // readonly
|
||||
setNumber(value: _number): StructPrimitives$Interface;
|
||||
bool: boolean; // readonly
|
||||
setBool(value: boolean): StructPrimitives$Interface;
|
||||
string: string; // readonly
|
||||
setString(value: string): StructPrimitives$Interface;
|
||||
blob: _Blob; // readonly
|
||||
setBlob(value: _Blob): StructPrimitives$Interface;
|
||||
value: _Value; // readonly
|
||||
setValue(value: _Value): StructPrimitives$Interface;
|
||||
}
|
||||
|
||||
export const StructPrimitives: Class<StructPrimitives$Interface> = _createStructClass(StructPrimitives$type, StructPrimitives$typeDef);
|
||||
@@ -1,53 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Kind as _Kind,
|
||||
Package as _Package,
|
||||
createStructClass as _createStructClass,
|
||||
emptyRef as _emptyRef,
|
||||
makeCompoundType as _makeCompoundType,
|
||||
makeListType as _makeListType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
newList as _newList,
|
||||
registerPackage as _registerPackage,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsList as _NomsList,
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('Tree',
|
||||
[
|
||||
new _Field('children', _makeCompoundType(_Kind.List, _makeType(_emptyRef, 0)), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const Tree$type = _makeType(_pkg.ref, 0);
|
||||
const Tree$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type Tree$Data = {
|
||||
children: _NomsList<Tree>;
|
||||
};
|
||||
|
||||
interface Tree$Interface extends _Struct {
|
||||
constructor(data: Tree$Data): void;
|
||||
children: _NomsList<Tree>; // readonly
|
||||
setChildren(value: _NomsList<Tree>): Tree$Interface;
|
||||
}
|
||||
|
||||
export const Tree: Class<Tree$Interface> = _createStructClass(Tree$type, Tree$typeDef);
|
||||
|
||||
export function newListOfTree(values: Array<Tree>): Promise<_NomsList<Tree>> {
|
||||
return _newList(values, _makeListType(_makeType(_pkg.ref, 0)));
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Kind as _Kind,
|
||||
Package as _Package,
|
||||
createStructClass as _createStructClass,
|
||||
makeCompoundType as _makeCompoundType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
numberType as _numberType,
|
||||
registerPackage as _registerPackage,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsList as _NomsList,
|
||||
Struct as _Struct,
|
||||
number as _number,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('StructWithDupList',
|
||||
[
|
||||
new _Field('l', _makeCompoundType(_Kind.List, _numberType), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const StructWithDupList$type = _makeType(_pkg.ref, 0);
|
||||
const StructWithDupList$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type StructWithDupList$Data = {
|
||||
l: _NomsList<_number>;
|
||||
};
|
||||
|
||||
interface StructWithDupList$Interface extends _Struct {
|
||||
constructor(data: StructWithDupList$Data): void;
|
||||
l: _NomsList<_number>; // readonly
|
||||
setL(value: _NomsList<_number>): StructWithDupList$Interface;
|
||||
}
|
||||
|
||||
export const StructWithDupList: Class<StructWithDupList$Interface> = _createStructClass(StructWithDupList$type, StructWithDupList$typeDef);
|
||||
@@ -1,54 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Package as _Package,
|
||||
Ref as _Ref,
|
||||
createStructClass as _createStructClass,
|
||||
makeListType as _makeListType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
newList as _newList,
|
||||
registerPackage as _registerPackage,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsList as _NomsList,
|
||||
Struct as _Struct,
|
||||
} from '@attic/noms';
|
||||
import * as dep from './sha1_6574913.js';
|
||||
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('ImportUser',
|
||||
[
|
||||
new _Field('importedStruct', _makeType(_Ref.parse('sha1-65749135e74064eca6e7a34f04c95ac0768fa788'), 0), false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
_Ref.parse('sha1-65749135e74064eca6e7a34f04c95ac0768fa788'),
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const ImportUser$type = _makeType(_pkg.ref, 0);
|
||||
const ImportUser$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type ImportUser$Data = {
|
||||
importedStruct: dep.D;
|
||||
};
|
||||
|
||||
interface ImportUser$Interface extends _Struct {
|
||||
constructor(data: ImportUser$Data): void;
|
||||
importedStruct: dep.D; // readonly
|
||||
setImportedStruct(value: dep.D): ImportUser$Interface;
|
||||
}
|
||||
|
||||
export const ImportUser: Class<ImportUser$Interface> = _createStructClass(ImportUser$type, ImportUser$typeDef);
|
||||
|
||||
export function newListOfD(values: Array<dep.D>): Promise<_NomsList<dep.D>> {
|
||||
return _newList(values, _makeListType(_makeType(_Ref.parse('sha1-65749135e74064eca6e7a34f04c95ac0768fa788'), 0)));
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Kind as _Kind,
|
||||
Package as _Package,
|
||||
boolType as _boolType,
|
||||
createStructClass as _createStructClass,
|
||||
makeCompoundType as _makeCompoundType,
|
||||
makeListType as _makeListType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
newList as _newList,
|
||||
numberType as _numberType,
|
||||
registerPackage as _registerPackage,
|
||||
stringType as _stringType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
NomsList as _NomsList,
|
||||
Struct as _Struct,
|
||||
number as _number,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('StructWithList',
|
||||
[
|
||||
new _Field('l', _makeCompoundType(_Kind.List, _numberType), false),
|
||||
new _Field('b', _boolType, false),
|
||||
new _Field('s', _stringType, false),
|
||||
new _Field('i', _numberType, false),
|
||||
],
|
||||
[
|
||||
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const StructWithList$type = _makeType(_pkg.ref, 0);
|
||||
const StructWithList$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type StructWithList$Data = {
|
||||
l: _NomsList<_number>;
|
||||
b: boolean;
|
||||
s: string;
|
||||
i: _number;
|
||||
};
|
||||
|
||||
interface StructWithList$Interface extends _Struct {
|
||||
constructor(data: StructWithList$Data): void;
|
||||
l: _NomsList<_number>; // readonly
|
||||
setL(value: _NomsList<_number>): StructWithList$Interface;
|
||||
b: boolean; // readonly
|
||||
setB(value: boolean): StructWithList$Interface;
|
||||
s: string; // readonly
|
||||
setS(value: string): StructWithList$Interface;
|
||||
i: _number; // readonly
|
||||
setI(value: _number): StructWithList$Interface;
|
||||
}
|
||||
|
||||
export const StructWithList: Class<StructWithList$Interface> = _createStructClass(StructWithList$type, StructWithList$typeDef);
|
||||
|
||||
export function newListOfNumber(values: Array<_number>): Promise<_NomsList<_number>> {
|
||||
return _newList(values, _makeListType(_numberType));
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
// This file was generated by nomdl/codegen.
|
||||
// @flow
|
||||
/* eslint-disable */
|
||||
|
||||
import {
|
||||
Field as _Field,
|
||||
Kind as _Kind,
|
||||
Package as _Package,
|
||||
blobType as _blobType,
|
||||
createStructClass as _createStructClass,
|
||||
makeCompoundType as _makeCompoundType,
|
||||
makeSetType as _makeSetType,
|
||||
makeStructType as _makeStructType,
|
||||
makeType as _makeType,
|
||||
newSet as _newSet,
|
||||
numberType as _numberType,
|
||||
registerPackage as _registerPackage,
|
||||
stringType as _stringType,
|
||||
valueType as _valueType,
|
||||
} from '@attic/noms';
|
||||
import type {
|
||||
Blob as _Blob,
|
||||
NomsSet as _NomsSet,
|
||||
Struct as _Struct,
|
||||
Value as _Value,
|
||||
number as _number,
|
||||
} from '@attic/noms';
|
||||
|
||||
const _pkg = new _Package([
|
||||
_makeStructType('StructWithUnionField',
|
||||
[
|
||||
new _Field('a', _numberType, false),
|
||||
],
|
||||
[
|
||||
new _Field('b', _numberType, false),
|
||||
new _Field('c', _stringType, false),
|
||||
new _Field('d', _blobType, false),
|
||||
new _Field('e', _valueType, false),
|
||||
new _Field('f', _makeCompoundType(_Kind.Set, _numberType), false),
|
||||
]
|
||||
),
|
||||
], [
|
||||
]);
|
||||
_registerPackage(_pkg);
|
||||
const StructWithUnionField$type = _makeType(_pkg.ref, 0);
|
||||
const StructWithUnionField$typeDef = _pkg.types[0];
|
||||
|
||||
|
||||
type StructWithUnionField$Data = {
|
||||
a: _number;
|
||||
};
|
||||
|
||||
interface StructWithUnionField$Interface extends _Struct {
|
||||
constructor(data: StructWithUnionField$Data): void;
|
||||
a: _number; // readonly
|
||||
setA(value: _number): StructWithUnionField$Interface;
|
||||
b: ?_number; // readonly
|
||||
setB(value: _number): StructWithUnionField$Interface;
|
||||
c: ?string; // readonly
|
||||
setC(value: string): StructWithUnionField$Interface;
|
||||
d: ?_Blob; // readonly
|
||||
setD(value: _Blob): StructWithUnionField$Interface;
|
||||
e: ?_Value; // readonly
|
||||
setE(value: _Value): StructWithUnionField$Interface;
|
||||
f: ?_NomsSet<_number>; // readonly
|
||||
setF(value: _NomsSet<_number>): StructWithUnionField$Interface;
|
||||
}
|
||||
|
||||
export const StructWithUnionField: Class<StructWithUnionField$Interface> = _createStructClass(StructWithUnionField$type, StructWithUnionField$typeDef);
|
||||
|
||||
export function newSetOfNumber(values: Array<_number>): Promise<_NomsSet<_number>> {
|
||||
return _newSet(values, _makeSetType(_numberType));
|
||||
}
|
||||
@@ -1,99 +0,0 @@
// This file was generated by nomdl/codegen.
// @flow
/* eslint-disable */

import {
  Field as _Field,
  Package as _Package,
  createStructClass as _createStructClass,
  emptyRef as _emptyRef,
  makeStructType as _makeStructType,
  makeType as _makeType,
  numberType as _numberType,
  registerPackage as _registerPackage,
  stringType as _stringType,
} from '@attic/noms';
import type {
  Struct as _Struct,
  number as _number,
} from '@attic/noms';

const _pkg = new _Package([
  _makeStructType('StructWithUnions',
    [
      new _Field('a', _makeType(_emptyRef, 1), false),
      new _Field('d', _makeType(_emptyRef, 2), false),
    ],
    [

    ]
  ),
  _makeStructType('',
    [

    ],
    [
      new _Field('b', _numberType, false),
      new _Field('c', _stringType, false),
    ]
  ),
  _makeStructType('',
    [

    ],
    [
      new _Field('e', _numberType, false),
      new _Field('f', _stringType, false),
    ]
  ),
], [
]);
_registerPackage(_pkg);
const StructWithUnions$type = _makeType(_pkg.ref, 0);
const StructWithUnions$typeDef = _pkg.types[0];
const __unionOfBOfNumberAndCOfString$type = _makeType(_pkg.ref, 1);
const __unionOfBOfNumberAndCOfString$typeDef = _pkg.types[1];
const __unionOfEOfNumberAndFOfString$type = _makeType(_pkg.ref, 2);
const __unionOfEOfNumberAndFOfString$typeDef = _pkg.types[2];


type StructWithUnions$Data = {
  a: __unionOfBOfNumberAndCOfString;
  d: __unionOfEOfNumberAndFOfString;
};

interface StructWithUnions$Interface extends _Struct {
  constructor(data: StructWithUnions$Data): void;
  a: __unionOfBOfNumberAndCOfString; // readonly
  setA(value: __unionOfBOfNumberAndCOfString): StructWithUnions$Interface;
  d: __unionOfEOfNumberAndFOfString; // readonly
  setD(value: __unionOfEOfNumberAndFOfString): StructWithUnions$Interface;
}

export const StructWithUnions: Class<StructWithUnions$Interface> = _createStructClass(StructWithUnions$type, StructWithUnions$typeDef);

type __unionOfBOfNumberAndCOfString$Data = {
};

interface __unionOfBOfNumberAndCOfString$Interface extends _Struct {
  constructor(data: __unionOfBOfNumberAndCOfString$Data): void;
  b: ?_number; // readonly
  setB(value: _number): __unionOfBOfNumberAndCOfString$Interface;
  c: ?string; // readonly
  setC(value: string): __unionOfBOfNumberAndCOfString$Interface;
}

export const __unionOfBOfNumberAndCOfString: Class<__unionOfBOfNumberAndCOfString$Interface> = _createStructClass(__unionOfBOfNumberAndCOfString$type, __unionOfBOfNumberAndCOfString$typeDef);

type __unionOfEOfNumberAndFOfString$Data = {
};

interface __unionOfEOfNumberAndFOfString$Interface extends _Struct {
  constructor(data: __unionOfEOfNumberAndFOfString$Data): void;
  e: ?_number; // readonly
  setE(value: _number): __unionOfEOfNumberAndFOfString$Interface;
  f: ?string; // readonly
  setF(value: string): __unionOfEOfNumberAndFOfString$Interface;
}

export const __unionOfEOfNumberAndFOfString: Class<__unionOfEOfNumberAndFOfString$Interface> = _createStructClass(__unionOfEOfNumberAndFOfString$type, __unionOfEOfNumberAndFOfString$typeDef);
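A hypothetical construction sketch for the nested unions above (not part of the diff); each anonymous union is materialized as its own generated helper struct:

// Hypothetical sketch, not part of the removed file.
const swu = new StructWithUnions({
  a: new __unionOfBOfNumberAndCOfString({b: 1}),
  d: new __unionOfEOfNumberAndFOfString({f: 'hi'}),
});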
@@ -1,14 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';
import {newListOfNumber} from './gen/list_number.noms.js';
import {makeListType, numberType} from '@attic/noms';

suite('list_number.noms', () => {
  test('constructor', async () => {
    const l = await newListOfNumber([0, 1, 2, 3]);
    assert.equal(l.length, 4);
    assert.isTrue(l.type.equals(makeListType(numberType)));
  });
});
@@ -1 +0,0 @@
using List<Number>
@@ -1,13 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';
import {newMapOfBoolToString} from './gen/map.noms.js';
import {makeMapType, boolType, stringType} from '@attic/noms';

suite('map.noms', () => {
  test('constructor', async () => {
    const s = await newMapOfBoolToString([true, 'yes', false, 'no']);
    assert.isTrue(s.type.equals(makeMapType(boolType, stringType)));
  });
});
@@ -1,2 +0,0 @@
using Map<Bool, String>
using Map<String, Value>
@@ -1,34 +0,0 @@
{
  "name": "nomdl-codegen-test",
  "version": "0.0.1",
  "description": "Tests for generated js code",
  "main": "index.js",
  "devDependencies": {
    "@attic/eslintrc": "^1.0.0",
    "@attic/noms": "^13.0.0",
    "@attic/webpack-config": "^2.1.0",
    "babel-cli": "6.6.5",
    "babel-core": "6.7.2",
    "babel-generator": "6.7.2",
    "babel-plugin-syntax-async-functions": "6.5.0",
    "babel-plugin-syntax-flow": "6.5.0",
    "babel-plugin-transform-async-to-generator": "6.7.0",
    "babel-plugin-transform-class-properties": "6.6.0",
    "babel-plugin-transform-es2015-destructuring": "6.6.5",
    "babel-plugin-transform-es2015-modules-commonjs": "6.7.0",
    "babel-plugin-transform-es2015-parameters": "6.7.0",
    "babel-plugin-transform-runtime": "^6.6.0",
    "babel-preset-es2015": "6.6.0",
    "babel-preset-react": "6.5.0",
    "chai": "3.5.0",
    "chokidar": "1.4.3",
    "commander": "2.9.0",
    "flow-bin": "0.23.0",
    "fs-extra": "0.26.7",
    "mocha": "2.4.5"
  },
  "scripts": {
    "pretest": "eslint . && flow .",
    "test": "mocha --ui tdd --reporter dot --compilers js:babel-core/register ./*-test.js"
  }
}
@@ -1,21 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';

import {newSet, makeSetType, numberType, DataStore, MemoryStore} from '@attic/noms';
import type {NomsSet} from '@attic/noms';
import {StructWithRef} from './gen/ref.noms.js';

suite('ref.noms', () => {
  test('constructor', async () => {
    const ds = new DataStore(new MemoryStore());
    const set: NomsSet<number> = await newSet([0, 1, 2, 3], makeSetType(numberType));
    const r = ds.writeValue(set);
    const struct = new StructWithRef({r});

    assert.isTrue(struct.r.equals(r));
    const set2 = await ds.readValue(r.targetRef);
    assert.isTrue(set.equals(set2));
  });
});
@@ -1,6 +0,0 @@
using Ref<List<String>>
using List<Ref<Number>>

struct StructWithRef {
  r: Ref<Set<Number>>
}
@@ -1,9 +0,0 @@
package test

//go:generate rm -rf /tmp/depGenTest

//go:generate go run ../codegen.go -ldb=/tmp/depGenTest -package-ds=testDeps -in=../testDeps/leafDep/leafDep.noms -out-dir=../testDeps/leafDep

//go:generate go run ../codegen.go -out-dir=gen -ldb=/tmp/depGenTest -package-ds=testDeps

//go:generate rm -rf /tmp/depGenTest
@@ -1,13 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';
import {newSetOfBool} from './gen/set.noms.js';
import {makeSetType, boolType} from '@attic/noms';

suite('set.noms', () => {
  test('constructor', async () => {
    const s = await newSetOfBool([true]);
    assert.isTrue(s.type.equals(makeSetType(boolType)));
  });
});
@@ -1 +0,0 @@
using Set<Bool>
@@ -1,25 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';

import {OptionalStruct} from './gen/struct_optional.noms.js';

suite('struct_optional.noms', () => {
  test('constructor', async () => {
    const os = new OptionalStruct({});
    assert.isUndefined(os.s);
    assert.isUndefined(os.b);

    const os2 = os.setS('hi');
    assert.equal(os2.s, 'hi');
    assert.isUndefined(os2.b);

    const os3 = os2.setB(true);
    assert.equal(os3.s, 'hi');
    assert.equal(os3.b, true);

    const os4 = os2.setB(undefined).setS(undefined);
    assert.isTrue(os4.equals(os));
  });
});
@@ -1,40 +0,0 @@
// @flow

import {assert} from 'chai'; //eslint-disable-line
import {suite, test} from 'mocha';

import {newBlob} from '@attic/noms';
import {StructPrimitives} from './gen/struct_primitives.noms.js';

suite('struct-primitives.noms', () => {
  test('constructor', async () => {
    const s: StructPrimitives = new StructPrimitives({ //eslint-disable-line
      number: 9,
      bool: true,
      string: 'hi',
      blob: await newBlob(new Uint8Array([0, 1, 2, 3])),
      value: 123,
    });

    let s2;
    assert.equal(s.number, 9);
    s2 = s.setNumber(99);
    assert.equal(s2.number, 99);

    assert.equal(s.bool, true);
    s2 = s.setBool(false);
    assert.equal(s2.bool, false);

    assert.equal(s.string, 'hi');
    s2 = s.setString('bye');
    assert.equal(s2.string, 'bye');

    assert.isTrue(s.blob.equals(await newBlob(new Uint8Array([0, 1, 2, 3]))));
    s2 = s.setBlob(await newBlob(new Uint8Array([4, 5, 6, 7])));
    assert.isTrue(s2.blob.equals(await newBlob(new Uint8Array([4, 5, 6, 7]))));

    assert.equal(s.value, 123);
    s2 = s.setValue('x');
    assert.equal(s2.value, 'x');
  });
});
@@ -1,24 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';
import {Tree, newListOfTree} from './gen/struct_recursive.noms.js';
import {newList, makeListType} from '@attic/noms';

suite('struct_recursive.noms', () => {
  test('constructor', async () => {
    const t: Tree = new Tree({children: await newListOfTree([
      new Tree({children: await newListOfTree([])}),
      new Tree({children: await newListOfTree([])}),
    ])});
    assert.equal(t.children.length, 2);

    const listOfTreeType = makeListType(t.type);
    const t2: Tree = new Tree({children: await newList([
      new Tree({children: await newList([], listOfTreeType)}),
      new Tree({children: await newList([], listOfTreeType)}),
    ], listOfTreeType)});

    assert.isTrue(t.equals(t2));
  });
});
@@ -1,20 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';

import {Kind} from '@attic/noms';
import {Struct} from './gen/struct.noms.js';

suite('struct.noms', () => {
  test('constructor', () => {
    const s: Struct = new Struct({s: 'hi', b: true});
    assert.equal(s.s, 'hi');
    assert.equal(s.b, true);
  });

  test('type', () => {
    const s: Struct = new Struct({s: 'hi', b: true});
    assert.equal(s.type.kind, Kind.Unresolved);
  });
});
@@ -1,30 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';

import {StructWithUnionField} from './gen/struct_with_union_field.noms.js';

suite('struct_optional.noms', () => {
  test('constructor', async () => {
    const swuf = new StructWithUnionField({a: 1, b: 2});
    assert.equal(swuf.a, 1);
    assert.equal(swuf.b, 2);
    assert.isUndefined(swuf.c);
    assert.isUndefined(swuf.d);
    assert.isUndefined(swuf.e);
    assert.isUndefined(swuf.f);

    const swuf2 = swuf.setC('hi');
    assert.equal(swuf2.a, 1);
    assert.isUndefined(swuf2.b);
    assert.equal(swuf2.c, 'hi');
    assert.isUndefined(swuf2.d);
    assert.isUndefined(swuf2.e);
    assert.isUndefined(swuf2.f);

    assert.throws(() => {
      swuf.setC(undefined);
    });
  });
});
@@ -1,29 +0,0 @@
// @flow

import {assert} from 'chai';
import {suite, test} from 'mocha';

import {
  StructWithUnions,
  __unionOfBOfNumberAndCOfString,
  __unionOfEOfNumberAndFOfString,
} from './gen/struct_with_unions.noms.js';

suite('struct_optional.noms', () => {
  test('constructor', async () => {
    // TODO: This needs to be cleaner.
    const swu = new StructWithUnions({
      a: new __unionOfBOfNumberAndCOfString({b: 1}),
      d: new __unionOfEOfNumberAndFOfString({f: 'hi'}),
    });
    assert.equal(swu.a.b, 1);
    assert.equal(swu.d.f, 'hi');

    const swu2 = swu.setA(swu.a.setC('bye'));
    const swu3 = new StructWithUnions({
      a: new __unionOfBOfNumberAndCOfString({c: 'bye'}),
      d: new __unionOfEOfNumberAndFOfString({f: 'hi'}),
    });
    assert.isTrue(swu2.equals(swu3));
  });
});
@@ -1,6 +0,0 @@
struct Struct {
  s: String
  b: Bool
}

using List<Struct>
@@ -1,4 +0,0 @@
struct OptionalStruct {
  s: optional String
  b: optional Bool
}
@@ -1,7 +0,0 @@
struct StructPrimitives {
  number: Number
  bool: Bool
  string: String
  blob: Blob
  value: Value
}
@@ -1,3 +0,0 @@
struct Tree {
  children: List<Tree>
}
@@ -1,3 +0,0 @@
struct StructWithDupList {
  l: List<Number>
}
@@ -1,7 +0,0 @@
alias dep = import "../testDeps/dep.noms"

struct ImportUser {
  importedStruct: dep.D
}

using List<dep.D>
@@ -1,6 +0,0 @@
struct StructWithList {
  l: List<Number>
  b: Bool
  s: String
  i: Number
}
@@ -1,10 +0,0 @@
struct StructWithUnionField {
  a: Number
  union {
    b: Number
    c: String
    d: Blob
    e: Value
    f: Set<Number>
  }
}
@@ -1,10 +0,0 @@
struct StructWithUnions {
  a: union {
    b: Number
    c: String
  }
  d: union {
    e: Number
    f: String
  }
}
@@ -1,9 +0,0 @@
alias leaf = import "sha1-068bb32c733bd940a0d758715bf05082f4c12fcb"

struct D {
  structField: leaf.S
}

struct DUser {
  Dfield: D
}
Some files were not shown because too many files have changed in this diff.