From 3ff6ee6add3490621a8886608cc8423dba3cf7ca Mon Sep 17 00:00:00 2001
From: Erik Arvidsson
Date: Wed, 27 Apr 2016 20:39:51 -0700
Subject: [PATCH] Inline struct type declaration into chunk (#1324)

The struct type definition is now inlined into the chunk. To break cycles,
we use back references.

- Removes unresolved type refs
- Removes packages

Fixes #1164
Fixes #1165
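For illustration, a minimal Go sketch of the pattern this change enables
(it mirrors the new datas/commit.go code in the diff below; the `Node` type
and the `makeNodeType` helper are invented for the example). A struct type
that refers to itself is now built directly: the struct type is created
first with a placeholder field, and that field's type is then patched to
point back at the struct. In memory the cycle is an ordinary pointer cycle;
on the wire it is encoded as a back reference instead of a package ref plus
ordinal.

    package main

    import (
        "fmt"

        "github.com/attic-labs/noms/types"
    )

    // makeNodeType builds struct Node { value: Value, children: List<Node> }.
    // The children field starts out nil and is patched once the struct type
    // exists, which is how the cycle is closed.
    func makeNodeType() *types.Type {
        fields := []types.Field{
            types.Field{Name: "value", T: types.ValueType},
            types.Field{Name: "children", T: nil}, // patched below
        }
        nodeType := types.MakeStructType("Node", fields, []types.Field{})
        nodeType.Desc.(types.StructDesc).Fields[1].T = types.MakeListType(nodeType)
        return nodeType
    }

    func main() {
        fmt.Println(makeNodeType().Describe())
    }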
---
 .travis.yml | 6 -
 clients/csv/exporter/exporter_test.go | 12 +-
 clients/csv/importer/importer.go | 2 +-
 clients/csv/read.go | 22 +-
 clients/csv/read_test.go | 12 +-
 clients/csv/write.go | 10 +-
 datas/commit.go | 33 +-
 datas/datastore_test.go | 2 +-
 dataset/dataset_test.go | 2 +-
 js/package.json | 2 +-
 js/src/compare.js | 2 -
 js/src/data-store.js | 39 +-
 js/src/decode-test.js | 177 +++---
 js/src/decode.js | 223 +++----
 js/src/defs-test.js | 66 +--
 js/src/defs.js | 34 +-
 js/src/describe-type.js | 16 -
 js/src/encode-human-readable-test.js | 122 ++++
 js/src/encode-human-readable.js | 186 +++++++
 js/src/encode-test.js | 126 ++---
 js/src/encode.js | 244 +++-----
 js/src/fixup-type.js | 64 ---
 js/src/list-test.js | 56 +-
 js/src/map-test.js | 47 +-
 js/src/noms-kind.js | 12 +-
 js/src/noms.js | 6 +-
 js/src/package.js | 71 ---
 js/src/set-test.js | 49 +-
 js/src/struct-test.js | 212 +++----
 js/src/struct.js | 39 +-
 js/src/type-test.js | 73 +--
 js/src/type.js | 240 ++------
 js/src/validate-type-test.js | 20 +-
 js/src/validate-type.js | 12 +-
 nomdl/codegen/.gitignore | 1 -
 nomdl/codegen/README.md | 60 --
 nomdl/codegen/code/generate.go | 491 -----------------
 nomdl/codegen/code/generate_test.go | 52 --
 nomdl/codegen/codegen.go | 495 -----------------
 nomdl/codegen/codegen_test.go | 118 ----
 nomdl/codegen/js/enum.tmpl | 3 -
 nomdl/codegen/js/header.tmpl | 12 -
 nomdl/codegen/js/list.tmpl | 4 -
 nomdl/codegen/js/map.tmpl | 4 -
 nomdl/codegen/js/package.tmpl | 10 -
 nomdl/codegen/js/ref.tmpl | 1 -
 nomdl/codegen/js/set.tmpl | 4 -
 nomdl/codegen/js/struct.tmpl | 14 -
 nomdl/codegen/test/.babelrc | 1 -
 nomdl/codegen/test/.eslintrc.js | 1 -
 nomdl/codegen/test/.flowconfig | 1 -
 nomdl/codegen/test/.gitignore | 1 -
 nomdl/codegen/test/clobber.noms | 2 -
 nomdl/codegen/test/clobber_a/a.noms | 3 -
 nomdl/codegen/test/clobber_b/b.noms | 1 -
 nomdl/codegen/test/gen/clobber.noms.js | 8 -
 nomdl/codegen/test/gen/enum_struct.noms.js | 53 --
 nomdl/codegen/test/gen/list_number.noms.js | 18 -
 nomdl/codegen/test/gen/map.noms.js | 24 -
 nomdl/codegen/test/gen/ref.noms.js | 67 ---
 nomdl/codegen/test/gen/set.noms.js | 17 -
 nomdl/codegen/test/gen/sha1_068bb32.js | 49 --
 nomdl/codegen/test/gen/sha1_6574913.js | 70 ---
 nomdl/codegen/test/gen/sha1_89fa11c.js | 58 --
 nomdl/codegen/test/gen/sha1_f2ea794.js | 7 -
 nomdl/codegen/test/gen/struct.noms.js | 56 --
 .../codegen/test/gen/struct_optional.noms.js | 49 --
 .../test/gen/struct_primitives.noms.js | 67 ---
 .../codegen/test/gen/struct_recursive.noms.js | 53 --
 .../test/gen/struct_with_dup_list.noms.js | 48 --
 .../test/gen/struct_with_imports.noms.js | 54 --
 .../codegen/test/gen/struct_with_list.noms.js | 68 ---
 .../test/gen/struct_with_union_field.noms.js | 73 ---
 .../test/gen/struct_with_unions.noms.js | 99 ----
 nomdl/codegen/test/list-number-test.js | 14 -
 nomdl/codegen/test/list_number.noms | 1 -
 nomdl/codegen/test/map-test.js | 13 -
 nomdl/codegen/test/map.noms | 2 -
 nomdl/codegen/test/package.json | 34 --
 nomdl/codegen/test/ref-test.js | 21 -
 nomdl/codegen/test/ref.noms | 6 -
 nomdl/codegen/test/rungen.go | 9 -
 nomdl/codegen/test/set-test.js | 13 -
 nomdl/codegen/test/set.noms | 1 -
 nomdl/codegen/test/struct-optional-test.js | 25 -
 nomdl/codegen/test/struct-primitives-test.js | 40 --
 nomdl/codegen/test/struct-recursive-test.js | 24 -
 nomdl/codegen/test/struct-test.js | 20 -
 .../test/struct-with-union-field-test.js | 30 -
 nomdl/codegen/test/struct-with-unions-test.js | 29 -
 nomdl/codegen/test/struct.noms | 6 -
 nomdl/codegen/test/struct_optional.noms | 4 -
 nomdl/codegen/test/struct_primitives.noms | 7 -
 nomdl/codegen/test/struct_recursive.noms | 3 -
 nomdl/codegen/test/struct_with_dup_list.noms | 3 -
 nomdl/codegen/test/struct_with_imports.noms | 7 -
 nomdl/codegen/test/struct_with_list.noms | 6 -
 .../codegen/test/struct_with_union_field.noms | 10 -
 nomdl/codegen/test/struct_with_unions.noms | 10 -
 nomdl/codegen/testDeps/dep.noms | 9 -
 nomdl/codegen/testDeps/leafDep/leafDep.noms | 4 -
 .../codegen/testDeps/leafDep/leafDep.noms.js | 49 --
 nomdl/pkg/grammar.peg | 32 +-
 nomdl/pkg/grammar.peg.go | 519 ++++++++----------
 nomdl/pkg/imports.go | 46 +-
 nomdl/pkg/imports_test.go | 215 --------
 nomdl/pkg/parse.go | 220 ++------
 nomdl/pkg/parse_test.go | 330 +++--------
 nomdl/pkg/unresolved_desc.go | 27 +
 types/compound_list_test.go | 13 +-
 types/compound_map_test.go | 14 +-
 types/compound_set_test.go | 12 +-
 types/decode_noms_value.go | 204 +++----
 types/decode_noms_value_test.go | 123 ++---
 types/encode_human_readable.go | 224 ++++----
 types/encode_human_readable_test.go | 174 +++---
 types/encode_noms_value.go | 188 +++----
 types/encode_noms_value_test.go | 153 +++---
 types/equals_test.go | 9 -
 types/fixup_type.go | 38 --
 types/noms_kind.go | 5 +-
 types/package.go | 113 ----
 types/package_test.go | 16 -
 types/primitives.go | 8 +-
 types/struct.go | 31 +-
 types/struct_test.go | 90 +--
 types/type.go | 151 ++---
 types/type_desc.go | 36 +-
 types/type_test.go | 64 +--
 types/value_store.go | 6 -
 types/value_store_test.go | 38 --
 types/write_value.go | 15 -
 132 files changed, 1975 insertions(+), 5995 deletions(-)
 delete mode 100644 js/src/describe-type.js
 create mode 100644 js/src/encode-human-readable-test.js
 create mode 100644 js/src/encode-human-readable.js
 delete mode 100644 js/src/fixup-type.js
 delete mode 100644 js/src/package.js
 delete mode 100644 nomdl/codegen/.gitignore
 delete mode 100644 nomdl/codegen/README.md
 delete mode 100644 nomdl/codegen/code/generate.go
 delete mode 100644 nomdl/codegen/code/generate_test.go
 delete mode 100644 nomdl/codegen/codegen.go
 delete mode 100644 nomdl/codegen/codegen_test.go
 delete mode 100644 nomdl/codegen/js/enum.tmpl
 delete mode 100644 nomdl/codegen/js/header.tmpl
 delete mode 100644 nomdl/codegen/js/list.tmpl
 delete mode 100644 nomdl/codegen/js/map.tmpl
 delete mode 100644 nomdl/codegen/js/package.tmpl
 delete mode 100644 nomdl/codegen/js/ref.tmpl
 delete mode 100644 nomdl/codegen/js/set.tmpl
 delete mode 100644 nomdl/codegen/js/struct.tmpl
 delete mode 120000 nomdl/codegen/test/.babelrc
 delete mode 100644 nomdl/codegen/test/.eslintrc.js
 delete mode 120000 nomdl/codegen/test/.flowconfig
 delete mode 100644 nomdl/codegen/test/.gitignore
 delete mode 100644 nomdl/codegen/test/clobber.noms
 delete mode 100644 nomdl/codegen/test/clobber_a/a.noms
 delete mode 100644 nomdl/codegen/test/clobber_b/b.noms
 delete mode 100644 nomdl/codegen/test/gen/clobber.noms.js
 delete mode 100644 nomdl/codegen/test/gen/enum_struct.noms.js
 delete mode 100644 nomdl/codegen/test/gen/list_number.noms.js
 delete mode 100644 nomdl/codegen/test/gen/map.noms.js
 delete mode 100644 nomdl/codegen/test/gen/ref.noms.js
 delete mode 100644 nomdl/codegen/test/gen/set.noms.js
 delete mode 100644 nomdl/codegen/test/gen/sha1_068bb32.js
 delete mode 100644 nomdl/codegen/test/gen/sha1_6574913.js
 delete mode 100644 nomdl/codegen/test/gen/sha1_89fa11c.js
 delete mode 100644 nomdl/codegen/test/gen/sha1_f2ea794.js
 delete mode 100644 nomdl/codegen/test/gen/struct.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_optional.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_primitives.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_recursive.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_with_dup_list.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_with_imports.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_with_list.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_with_union_field.noms.js
 delete mode 100644 nomdl/codegen/test/gen/struct_with_unions.noms.js
 delete mode 100644 nomdl/codegen/test/list-number-test.js
 delete mode 100644 nomdl/codegen/test/list_number.noms
 delete mode 100644 nomdl/codegen/test/map-test.js
 delete mode 100644 nomdl/codegen/test/map.noms
 delete mode 100644 nomdl/codegen/test/package.json
 delete mode 100644 nomdl/codegen/test/ref-test.js
 delete mode 100644 nomdl/codegen/test/ref.noms
 delete mode 100644 nomdl/codegen/test/rungen.go
 delete mode 100644 nomdl/codegen/test/set-test.js
 delete mode 100644 nomdl/codegen/test/set.noms
 delete mode 100644 nomdl/codegen/test/struct-optional-test.js
 delete mode 100644 nomdl/codegen/test/struct-primitives-test.js
 delete mode 100644 nomdl/codegen/test/struct-recursive-test.js
 delete mode 100644 nomdl/codegen/test/struct-test.js
 delete mode 100644 nomdl/codegen/test/struct-with-union-field-test.js
 delete mode 100644 nomdl/codegen/test/struct-with-unions-test.js
 delete mode 100644 nomdl/codegen/test/struct.noms
 delete mode 100644 nomdl/codegen/test/struct_optional.noms
 delete mode 100644 nomdl/codegen/test/struct_primitives.noms
 delete mode 100644 nomdl/codegen/test/struct_recursive.noms
 delete mode 100644 nomdl/codegen/test/struct_with_dup_list.noms
 delete mode 100644 nomdl/codegen/test/struct_with_imports.noms
 delete mode 100644 nomdl/codegen/test/struct_with_list.noms
 delete mode 100644 nomdl/codegen/test/struct_with_union_field.noms
 delete mode 100644 nomdl/codegen/test/struct_with_unions.noms
 delete mode 100644 nomdl/codegen/testDeps/dep.noms
 delete mode 100644 nomdl/codegen/testDeps/leafDep/leafDep.noms
 delete mode 100644 nomdl/codegen/testDeps/leafDep/leafDep.noms.js
 delete mode 100644 nomdl/pkg/imports_test.go
 create mode 100644 nomdl/pkg/unresolved_desc.go
 delete mode 100644 types/fixup_type.go
 delete mode 100644 types/package.go
 delete mode 100644 types/package_test.go

diff --git a/.travis.yml b/.travis.yml
index 6947dcced5..7ff800a8ce 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -20,11 +20,6 @@ before_script:
 - ./build.py
 - npm test
 - popd
-- pushd nomdl/codegen/test
-- npm prune
-- npm install
-- npm test
-- popd
 script:
 - export GODEBUG=invalidptr=0
 - export GO15VENDOREXPERIMENT=1
@@ -40,7 +35,6 @@ cache:
   directories:
   - js/node_modules
   - clients/splore/node_modules
-  - nomdl/codegen/test/node_modules
 deploy:
   provider: script
   script: tools/publish-js-sdk.py
diff --git a/clients/csv/exporter/exporter_test.go b/clients/csv/exporter/exporter_test.go
index f42b0cf14e..79687e5f7b 100644
--- a/clients/csv/exporter/exporter_test.go
+++ b/clients/csv/exporter/exporter_test.go
@@ -11,7 +11,6 @@ import (
 	"github.com/attic-labs/noms/d"
 	"github.com/attic-labs/noms/datas"
 	"github.com/attic-labs/noms/dataset"
-	"github.com/attic-labs/noms/ref"
 	"github.com/attic-labs/noms/types"
 	"github.com/stretchr/testify/suite"
 )
@@ -49,11
+48,8 @@ func (s *testSuite) TestCSVExporter() { }) } - typeDef := types.MakeStructType(structName, f, []types.Field{}) - pkg := types.NewPackage([]*types.Type{typeDef}, []ref.Ref{}) - pkgRef := types.RegisterPackage(&pkg) - typeRef := types.MakeType(pkgRef, 0) - structFields := typeDef.Desc.(types.StructDesc).Fields + typ := types.MakeStructType(structName, f, []types.Field{}) + structFields := typ.Desc.(types.StructDesc).Fields // Build data rows structs := make([]types.Value, len(payload)) @@ -62,10 +58,10 @@ func (s *testSuite) TestCSVExporter() { for j, v := range row { fields[structFields[j].Name] = types.NewString(v) } - structs[i] = types.NewStruct(typeRef, typeDef, fields) + structs[i] = types.NewStruct(typ, fields) } - listType := types.MakeListType(typeRef) + listType := types.MakeListType(typ) ds.Commit(types.NewTypedList(listType, structs...)) ds.Store().Close() diff --git a/clients/csv/importer/importer.go b/clients/csv/importer/importer.go index e46259232f..d9f276a9cd 100644 --- a/clients/csv/importer/importer.go +++ b/clients/csv/importer/importer.go @@ -90,7 +90,7 @@ func main() { kinds = csv.StringsToKinds(strings.Split(*columnTypes, ",")) } - value, _, _ := csv.Read(r, *name, headers, kinds, ds.Store()) + value, _ := csv.Read(r, *name, headers, kinds, ds.Store()) _, err = ds.Commit(value) d.Exp.NoError(err) } diff --git a/clients/csv/read.go b/clients/csv/read.go index dfc930950b..001b7e451b 100644 --- a/clients/csv/read.go +++ b/clients/csv/read.go @@ -5,7 +5,6 @@ import ( "io" "github.com/attic-labs/noms/d" - "github.com/attic-labs/noms/ref" "github.com/attic-labs/noms/types" ) @@ -66,7 +65,7 @@ func ReportValidFieldTypes(r *csv.Reader, headers []string) []KindSlice { } // MakeStructTypeFromHeaders creates a struct type from the headers using |kinds| as the type of each field. If |kinds| is empty, default to strings. -func MakeStructTypeFromHeaders(headers []string, structName string, kinds KindSlice) (typeRef, typeDef *types.Type) { +func MakeStructTypeFromHeaders(headers []string, structName string, kinds KindSlice) *types.Type { useStringType := len(kinds) == 0 d.Chk.True(useStringType || len(headers) == len(kinds)) fields := make([]types.Field, len(headers)) @@ -82,24 +81,19 @@ func MakeStructTypeFromHeaders(headers []string, structName string, kinds KindSl Optional: false, } } - typeDef = types.MakeStructType(structName, fields, []types.Field{}) - pkg := types.NewPackage([]*types.Type{typeDef}, []ref.Ref{}) - pkgRef := types.RegisterPackage(&pkg) - typeRef = types.MakeType(pkgRef, 0) - - return + return types.MakeStructType(structName, fields, []types.Field{}) } // Read takes a CSV reader and reads it into a typed List of structs. Each row gets read into a struct named structName, described by headers. If the original data contained headers it is expected that the input reader has already read those and are pointing at the first data row. // If kinds is non-empty, it will be used to type the fields in the generated structs; otherwise, they will be left as string-fields. // In addition to the list, Read returns the typeRef for the structs in the list, and last the typeDef of the structs. 
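// Sketch of a hypothetical caller (not part of this patch): the new Read
// signature returns the typed List together with a single *types.Type for the
// row struct; there is no longer a separate typeDef or package registration.
// The clients/csv import path and the readRows helper below are assumptions.
//
//	package csvexample
//
//	import (
//		stdcsv "encoding/csv"
//		"strings"
//
//		"github.com/attic-labs/noms/clients/csv"
//		"github.com/attic-labs/noms/types"
//	)
//
//	// readRows reads two rows into a List of "Row" structs and returns the
//	// row struct type alongside the list.
//	func readRows(vrw types.ValueReadWriter) (types.List, *types.Type) {
//		r := stdcsv.NewReader(strings.NewReader("a,1\nb,2\n"))
//		headers := []string{"A", "B"}
//		kinds := csv.KindSlice{types.StringKind, types.NumberKind}
//		return csv.Read(r, "Row", headers, kinds, vrw)
//	}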
-func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, typeRef, typeDef *types.Type) { - typeRef, typeDef = MakeStructTypeFromHeaders(headers, structName, kinds) +func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, t *types.Type) { + t = MakeStructTypeFromHeaders(headers, structName, kinds) valueChan := make(chan types.Value, 128) // TODO: Make this a function param? - listType := types.MakeListType(typeRef) + listType := types.MakeListType(t) listChan := types.NewStreamingTypedList(listType, vrw, valueChan) - structFields := typeDef.Desc.(types.StructDesc).Fields + structFields := t.Desc.(types.StructDesc).Fields for { row, err := r.Read() @@ -117,8 +111,8 @@ func Read(r *csv.Reader, structName string, headers []string, kinds KindSlice, v fields[f.Name] = StringToType(v, f.T.Kind()) } } - valueChan <- types.NewStruct(typeRef, typeDef, fields) + valueChan <- types.NewStruct(t, fields) } - return <-listChan, typeRef, typeDef + return <-listChan, t } diff --git a/clients/csv/read_test.go b/clients/csv/read_test.go index 66f1a33f91..3fd8cb99ea 100644 --- a/clients/csv/read_test.go +++ b/clients/csv/read_test.go @@ -22,13 +22,13 @@ b,2,false headers := []string{"A", "B", "C"} kinds := KindSlice{types.StringKind, types.NumberKind, types.BoolKind} - l, typeRef, typeDef := Read(r, "test", headers, kinds, ds) + l, typ := Read(r, "test", headers, kinds, ds) assert.Equal(uint64(2), l.Len()) - assert.True(typeRef.IsUnresolved()) + assert.Equal(types.StructKind, typ.Kind()) - desc, ok := typeDef.Desc.(types.StructDesc) + desc, ok := typ.Desc.(types.StructDesc) assert.True(ok) assert.Len(desc.Fields, 3) assert.Equal("A", desc.Fields[0].Name) @@ -53,13 +53,13 @@ func testTrailingHelper(t *testing.T, dataString string) { headers := []string{"A", "B"} kinds := KindSlice{types.StringKind, types.StringKind} - l, typeRef, typeDef := Read(r, "test", headers, kinds, ds) + l, typ := Read(r, "test", headers, kinds, ds) assert.Equal(uint64(3), l.Len()) - assert.True(typeRef.IsUnresolved()) + assert.Equal(types.StructKind, typ.Kind()) - desc, ok := typeDef.Desc.(types.StructDesc) + desc, ok := typ.Desc.(types.StructDesc) assert.True(ok) assert.Len(desc.Fields, 2) assert.Equal("A", desc.Fields[0].Name) diff --git a/clients/csv/write.go b/clients/csv/write.go index 448d4b84ea..4770808d2e 100644 --- a/clients/csv/write.go +++ b/clients/csv/write.go @@ -14,15 +14,7 @@ func ValueToListAndElemDesc(v types.Value, vr types.ValueReader) (types.List, ty d.Exp.Equal(types.ListKind, v.Type().Kind(), "Dataset must be List<>, found: %s", v.Type().Describe()) - u := v.Type().Desc.(types.CompoundDesc).ElemTypes[0] - d.Exp.Equal(types.UnresolvedKind, u.Kind(), - "List<> must be UnresolvedKind, found: %s", u.Describe()) - - pkg := types.ReadPackage(u.PackageRef(), vr) - d.Exp.Equal(types.PackageKind, pkg.Type().Kind(), - "Failed to read package: %s", pkg.Type().Describe()) - - t := pkg.Types()[u.Ordinal()] + t := v.Type().Desc.(types.CompoundDesc).ElemTypes[0] d.Exp.Equal(types.StructKind, t.Kind(), "Did not find Struct: %s", t.Describe()) return v.(types.List), t.Desc.(types.StructDesc) } diff --git a/datas/commit.go b/datas/commit.go index 44c904859c..dfe2ffd558 100644 --- a/datas/commit.go +++ b/datas/commit.go @@ -1,12 +1,8 @@ package datas -import ( - "github.com/attic-labs/noms/ref" - "github.com/attic-labs/noms/types" -) +import "github.com/attic-labs/noms/types" -var __typeForCommit 
*types.Type -var __typeDef *types.Type +var commitType *types.Type const ( ParentsField = "parents" @@ -16,16 +12,17 @@ const ( func init() { structName := "Commit" - fieldTypes := []types.Field{ - types.Field{Name: ValueField, T: types.MakePrimitiveType(types.ValueKind)}, - types.Field{Name: ParentsField, T: types.MakeSetType(types.MakeRefType(types.MakeType(ref.Ref{}, 0)))}, - } + // struct Commit { + // value: Value + // parents: Set> + // } - typeDef := types.MakeStructType(structName, fieldTypes, []types.Field{}) - pkg := types.NewPackage([]*types.Type{typeDef}, []ref.Ref{}) - __typeDef = pkg.Types()[0] - pkgRef := types.RegisterPackage(&pkg) - __typeForCommit = types.MakeType(pkgRef, 0) + fieldTypes := []types.Field{ + types.Field{Name: ValueField, T: types.ValueType}, + types.Field{Name: ParentsField, T: nil}, + } + commitType = types.MakeStructType(structName, fieldTypes, []types.Field{}) + commitType.Desc.(types.StructDesc).Fields[1].T = types.MakeSetType(types.MakeRefType(commitType)) } func NewCommit() types.Struct { @@ -34,11 +31,11 @@ func NewCommit() types.Struct { ParentsField: NewSetOfRefOfCommit(), } - return types.NewStruct(__typeForCommit, __typeDef, initialFields) + return types.NewStruct(commitType, initialFields) } func typeForMapOfStringToRefOfCommit() *types.Type { - return types.MakeMapType(types.StringType, types.MakeRefType(__typeForCommit)) + return types.MakeMapType(types.StringType, types.MakeRefType(commitType)) } func NewMapOfStringToRefOfCommit() types.Map { @@ -46,7 +43,7 @@ func NewMapOfStringToRefOfCommit() types.Map { } func typeForSetOfRefOfCommit() *types.Type { - return types.MakeSetType(types.MakeRefType(__typeForCommit)) + return types.MakeSetType(types.MakeRefType(commitType)) } func NewSetOfRefOfCommit() types.Set { diff --git a/datas/datastore_test.go b/datas/datastore_test.go index 35f9b903f4..e6e13a0aef 100644 --- a/datas/datastore_test.go +++ b/datas/datastore_test.go @@ -10,7 +10,7 @@ import ( ) // writesOnCommit allows tests to adjust for how many writes dataStoreCommon performs on Commit() -const writesOnCommit = 3 +const writesOnCommit = 2 func TestLocalDataStore(t *testing.T) { suite.Run(t, &LocalDataStoreSuite{}) diff --git a/dataset/dataset_test.go b/dataset/dataset_test.go index 809aa8527a..040b54c4a0 100644 --- a/dataset/dataset_test.go +++ b/dataset/dataset_test.go @@ -30,7 +30,7 @@ func TestDatasetCommitTracker(t *testing.T) { assert.False(ds2.Head().Get(datas.ValueField).Equals(ds1Commit)) assert.False(ds1.Head().Get(datas.ValueField).Equals(ds2Commit)) - assert.Equal("sha1-59bf8cf4ce01e5630fe93de07464ad2a02c232ab", cs.Root().String()) + assert.Equal("sha1-7c7a614a758ea33792755c5e91d745fc2503b602", cs.Root().String()) } func newDS(id string, cs *chunks.MemoryStore) Dataset { diff --git a/js/package.json b/js/package.json index e3c0c27b5a..530cbbd13f 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "@attic/noms", - "version": "13.0.0", + "version": "14.0.0", "description": "Noms JS SDK", "repository": "https://github.com/attic-labs/noms", "main": "dist/commonjs/noms.js", diff --git a/js/src/compare.js b/js/src/compare.js index 45d52df6b4..fc13f8db68 100644 --- a/js/src/compare.js +++ b/js/src/compare.js @@ -94,8 +94,6 @@ export function getCompareFunction(t: Type): (v1: any, v2: any) => number { case Kind.Set: case Kind.Struct: case Kind.Type: - case Kind.Unresolved: - case Kind.Package: return compareObjects; case Kind.Bool: diff --git a/js/src/data-store.js b/js/src/data-store.js index 
01ef982cae..58fc201d1c 100644 --- a/js/src/data-store.js +++ b/js/src/data-store.js @@ -1,7 +1,7 @@ // @flow import Chunk from './chunk.js'; -import {default as Ref, emptyRef} from './ref.js'; +import Ref from './ref.js'; import RefValue from './ref-value.js'; import {newStruct} from './struct.js'; import type {ChunkStore} from './chunk-store.js'; @@ -10,27 +10,24 @@ import type {NomsSet} from './set.js'; import type {valueOrPrimitive} from './value.js'; import { Field, - makeCompoundType, makeRefType, makeStructType, - makeType, + makeSetType, + makeMapType, Type, stringType, boolType, valueType, + StructDesc, } from './type.js'; -import {Kind} from './noms-kind.js'; import {newMap} from './map.js'; import {newSet} from './set.js'; -import {Package, registerPackage} from './package.js'; import {decodeNomsValue} from './decode.js'; import {invariant} from './assert.js'; import {encodeNomsValue} from './encode.js'; import type {Commit} from './commit.js'; type DatasTypes = { - commitTypeDef: Type, - datasPackage: Package, commitType: Type, commitSetType: Type, refOfCommitType: Type, @@ -48,23 +45,19 @@ function getEmptyCommitMap(): Promise>> { let datasTypes: DatasTypes; export function getDatasTypes(): DatasTypes { if (!datasTypes) { - const datasPackage = new Package([ - makeStructType('Commit', [ - new Field('value', valueType, false), - new Field('parents', makeCompoundType(Kind.Set, - makeCompoundType(Kind.Ref, makeType(emptyRef, 0))), false), - ], []), + // struct Commit { + // value: Value + // parents: Set> + // } + const commitType = makeStructType('Commit', [ + new Field('value', valueType, false), ], []); - registerPackage(datasPackage); - const [commitTypeDef] = datasPackage.types; - - const commitType = makeType(datasPackage.ref, 0); - const refOfCommitType = makeCompoundType(Kind.Ref, commitType); - const commitSetType = makeCompoundType(Kind.Set, refOfCommitType); - const commitMapType = makeCompoundType(Kind.Map, stringType, refOfCommitType); + const refOfCommitType = makeRefType(commitType); + const commitSetType = makeSetType(refOfCommitType); + invariant(commitType.desc instanceof StructDesc); + commitType.desc.fields.push(new Field('parents', commitSetType, false)); + const commitMapType = makeMapType(stringType, refOfCommitType); datasTypes = { - commitTypeDef, - datasPackage, commitType, refOfCommitType, commitSetType, @@ -212,7 +205,7 @@ export function newCommit(value: valueOrPrimitive, parents: Array = []): Pr const types = getDatasTypes(); const parentRefs = parents.map(r => new RefValue(r, types.refOfCommitType)); return newSet(parentRefs, types.commitSetType).then(parents => - newStruct(types.commitType, types.commitTypeDef, {value, parents})); + newStruct(types.commitType, {value, parents})); } class CacheEntry { diff --git a/js/src/decode-test.js b/js/src/decode-test.js index 39d65ce953..0d58f40d41 100644 --- a/js/src/decode-test.js +++ b/js/src/decode-test.js @@ -13,14 +13,17 @@ import {decodeNomsValue, JsonArrayReader} from './decode.js'; import { boolType, Field, - makeCompoundType, makeStructType, - makeType, + makeListType, + makeMapType, + makeSetType, + makeRefType, numberType, stringType, Type, typeType, valueType, + StructDesc, } from './type.js'; import {encode as encodeBase64} from './base64.js'; import {IndexedMetaSequence, MetaTuple, OrderedMetaSequence} from './meta-sequence.js'; @@ -30,7 +33,6 @@ import {ListLeafSequence, NomsList} from './list.js'; import {MapLeafSequence, NomsMap} from './map.js'; import {NomsBlob, newBlob} from './blob.js'; 
import {NomsSet, SetLeafSequence} from './set.js'; -import {registerPackage, Package} from './package.js'; import {suite, test} from 'mocha'; suite('Decode', () => { @@ -63,19 +65,17 @@ suite('Decode', () => { const ds = new DataStore(ms); function doTest(expected: Type, a: Array) { const r = new JsonArrayReader(a, ds); - const tr = r.readTypeAsTag(); + const tr = r.readTypeAsTag([]); assert.isTrue(expected.equals(tr)); } doTest(boolType, [Kind.Bool, true]); doTest(typeType, [Kind.Type, Kind.Bool]); - doTest(makeCompoundType(Kind.List, boolType), + doTest(makeListType(boolType), [Kind.List, Kind.Bool, true, false]); - const pkgRef = Ref.parse('sha1-a9993e364706816aba3e25717850c26c9cd0d89d'); - doTest(makeType(pkgRef, 42), [Kind.Unresolved, pkgRef.toString(), '42']); - - doTest(typeType, [Kind.Type, Kind.Type, pkgRef.toString()]); + doTest(makeStructType('S', [new Field('x', boolType, false)], []), + [Kind.Struct, 'S', ['x', Kind.Bool, false], []]); }); test('read primitives', async () => { @@ -104,10 +104,10 @@ suite('Decode', () => { const ds = new DataStore(ms); const a = [Kind.List, Kind.Number, false, ['0', '1', '2', '3']]; const r = new JsonArrayReader(a, ds); - const v:NomsList = await r.readTopLevelValue(); + const v: NomsList = await r.readTopLevelValue(); invariant(v instanceof NomsList); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const l = new NomsList(tr, new ListLeafSequence(ds, tr, [0, 1, 2, 3])); assert.isTrue(l.equals(v)); }); @@ -119,10 +119,10 @@ suite('Decode', () => { const a = [Kind.List, Kind.Value, false, [Kind.Number, '1', Kind.String, 'hi', Kind.Bool, true]]; const r = new JsonArrayReader(a, ds); - const v:NomsList = await r.readTopLevelValue(); + const v: NomsList = await r.readTopLevelValue(); invariant(v instanceof NomsList); - const tr = makeCompoundType(Kind.List, valueType); + const tr = makeListType(valueType); assert.isTrue(v.type.equals(tr)); assert.strictEqual(1, await v.get(0)); assert.strictEqual('hi', await v.get(1)); @@ -137,7 +137,7 @@ suite('Decode', () => { const v = await r.readTopLevelValue(); invariant(v instanceof NomsList); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const l = new NomsList(tr, new ListLeafSequence(ds, tr, [0, 1, 2])); assert.isTrue(l.equals(v)); }); @@ -145,7 +145,7 @@ suite('Decode', () => { test('read compound list', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const ltr = makeCompoundType(Kind.List, numberType); + const ltr = makeListType(numberType); const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0]))).targetRef; const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [1, 2]))).targetRef; const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [3, 4, 5]))).targetRef; @@ -169,11 +169,10 @@ suite('Decode', () => { const ds = new DataStore(ms); const a = [Kind.Map, Kind.Number, Kind.Number, false, ['0', '1', '2', '3']]; const r = new JsonArrayReader(a, ds); - const v:NomsMap = await r.readTopLevelValue(); + const v: NomsMap = await r.readTopLevelValue(); invariant(v instanceof NomsMap); - const t = makeCompoundType(Kind.Map, numberType, - numberType); + const t = makeMapType(numberType, numberType); const m = new NomsMap(t, new MapLeafSequence(ds, t, [{key: 0, value: 1}, {key: 2, value: 3}])); assert.isTrue(v.equals(m)); }); @@ -185,11 +184,11 @@ suite('Decode', () => { ['sha1-0000000000000000000000000000000000000001', '2', 
'sha1-0000000000000000000000000000000000000002', '4']]; const r = new JsonArrayReader(a, ds); - const v:NomsMap, number> = await r.readTopLevelValue(); + const v: NomsMap, number> = await r.readTopLevelValue(); invariant(v instanceof NomsMap); - const refOfValueType = makeCompoundType(Kind.Ref, valueType); - const mapType = makeCompoundType(Kind.Map, refOfValueType, numberType); + const refOfValueType = makeRefType(valueType); + const mapType = makeMapType(refOfValueType, numberType); const rv1 = new RefValue(new Ref('sha1-0000000000000000000000000000000000000001'), refOfValueType); const rv2 = new RefValue(new Ref('sha1-0000000000000000000000000000000000000002'), @@ -204,11 +203,10 @@ suite('Decode', () => { const ds = new DataStore(ms); const a = [Kind.Value, Kind.Map, Kind.Number, Kind.Number, false, ['0', '1', '2', '3']]; const r = new JsonArrayReader(a, ds); - const v:NomsMap = await r.readTopLevelValue(); + const v: NomsMap = await r.readTopLevelValue(); invariant(v instanceof NomsMap); - const t = makeCompoundType(Kind.Map, numberType, - numberType); + const t = makeMapType(numberType, numberType); const m = new NomsMap(t, new MapLeafSequence(ds, t, [{key: 0, value: 1}, {key: 2, value: 3}])); assert.isTrue(v.equals(m)); }); @@ -218,10 +216,10 @@ suite('Decode', () => { const ds = new DataStore(ms); const a = [Kind.Set, Kind.Number, false, ['0', '1', '2', '3']]; const r = new JsonArrayReader(a, ds); - const v:NomsSet = await r.readTopLevelValue(); + const v: NomsSet = await r.readTopLevelValue(); invariant(v instanceof NomsSet); - const t = makeCompoundType(Kind.Set, numberType); + const t = makeSetType(numberType); const s = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3])); assert.isTrue(v.equals(s)); }); @@ -229,7 +227,7 @@ suite('Decode', () => { test('read compound set', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const ltr = makeCompoundType(Kind.Set, numberType); + const ltr = makeSetType(numberType); const r1 = ds.writeValue(new NomsSet(ltr, new SetLeafSequence(ds, ltr, [0]))).targetRef; const r2 = ds.writeValue(new NomsSet(ltr, new SetLeafSequence(ds, ltr, [1, 2]))).targetRef; const r3 = ds.writeValue(new NomsSet(ltr, new SetLeafSequence(ds, ltr, [3, 4, 5]))).targetRef; @@ -253,10 +251,10 @@ suite('Decode', () => { const ds = new DataStore(ms); const a = [Kind.Value, Kind.Set, Kind.Number, false, ['0', '1', '2', '3']]; const r = new JsonArrayReader(a, ds); - const v:NomsSet = await r.readTopLevelValue(); + const v: NomsSet = await r.readTopLevelValue(); invariant(v instanceof NomsSet); - const t = makeCompoundType(Kind.Set, numberType); + const t = makeSetType(numberType); const s = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3])); assert.isTrue(v.equals(s)); }); @@ -281,10 +279,11 @@ suite('Decode', () => { new Field('b', boolType, false), ], []); - const pkg = new Package([tr], []); - registerPackage(pkg); - - const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', 'hi', true]; + const a = [Kind.Struct, 'A1', [ + 'x', Kind.Number, false, + 's', Kind.String, false, + 'b', Kind.Bool, false, + ], [], '42', 'hi', true]; const r = new JsonArrayReader(a, ds); const v = await r.readTopLevelValue(); @@ -305,10 +304,8 @@ suite('Decode', () => { new Field('s', stringType, false), ]); - const pkg = new Package([tr], []); - registerPackage(pkg); - - const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', '1', 'hi']; + const a = [Kind.Struct, 'A2', ['x', Kind.Number, false], + ['b', Kind.Bool, false, 's', Kind.String, false], '42', 
'1', 'hi']; const r = new JsonArrayReader(a, ds); const v = await r.readTopLevelValue(); @@ -327,10 +324,9 @@ suite('Decode', () => { new Field('b', boolType, true), ], []); - const pkg = new Package([tr], []); - registerPackage(pkg); - - const a = [Kind.Unresolved, pkg.ref.toString(), '0', '42', false, true, false]; + const a = [Kind.Struct, 'A3', + ['x', Kind.Number, false, 's', Kind.String, true, 'b', Kind.Bool, true], [], + '42', false, true, false]; const r = new JsonArrayReader(a, ds); const v = await r.readTopLevelValue(); @@ -343,17 +339,18 @@ suite('Decode', () => { test('test read struct with list', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const ltr = makeCompoundType(Kind.List, numberType); + const ltr = makeListType(numberType); const tr = makeStructType('A4', [ new Field('b', boolType, false), new Field('l', ltr, false), new Field('s', stringType, false), ], []); - const pkg = new Package([tr], []); - registerPackage(pkg); - - const a = [Kind.Unresolved, pkg.ref.toString(), '0', true, false, ['0', '1', '2'], 'hi']; + const a = [Kind.Struct, 'A4', [ + 'b', Kind.Bool, false, + 'l', Kind.List, Kind.Number, false, + 's', Kind.String, false, + ], [], true, false, ['0', '1', '2'], 'hi']; const r = new JsonArrayReader(a, ds); const v = await r.readTopLevelValue(); @@ -373,10 +370,9 @@ suite('Decode', () => { new Field('s', stringType, false), ], []); - const pkg = new Package([tr], []); - registerPackage(pkg); - - const a = [Kind.Unresolved, pkg.ref.toString(), '0', true, Kind.Number, '42', 'hi']; + const a = [Kind.Struct, 'A5', + ['b', Kind.Bool, false, 'v', Kind.Value, false, 's', Kind.String, false], [], + true, Kind.Number, '42', 'hi']; const r = new JsonArrayReader(a, ds); const v = await r.readTopLevelValue(); @@ -396,10 +392,9 @@ suite('Decode', () => { new Field('b', boolType, false), ], []); - const pkg = new Package([tr], []); - registerPackage(pkg); - - const a = [Kind.Value, Kind.Unresolved, pkg.ref.toString(), '0', '42', 'hi', true]; + const a = [Kind.Value, Kind.Struct, 'A1', + ['x', Kind.Number, false, 's', Kind.String, false, 'b', Kind.Bool, false], [], + '42', 'hi', true]; const r = new JsonArrayReader(a, ds); const v = await r.readTopLevelValue(); @@ -418,14 +413,12 @@ suite('Decode', () => { new Field('i', numberType, false), ], []); - const pkg = new Package([tr], []); - registerPackage(pkg); - - const a = [Kind.Value, Kind.Map, Kind.String, Kind.Unresolved, pkg.ref.toString(), '0', false, - ['bar', false, '2', 'baz', false, '1', 'foo', true, '3']]; + const a = [Kind.Value, Kind.Map, Kind.String, + Kind.Struct, 's', ['b', Kind.Bool, false, 'i', Kind.Number, false], [], + false, ['bar', false, '2', 'baz', false, '1', 'foo', true, '3']]; const r = new JsonArrayReader(a, ds); - const v:NomsMap = await r.readTopLevelValue(); + const v: NomsMap = await r.readTopLevelValue(); invariant(v instanceof NomsMap); assert.strictEqual(3, v.size); @@ -439,9 +432,9 @@ suite('Decode', () => { const ds = new DataStore(ms); const chunk = Chunk.fromString( `t [${Kind.Value}, ${Kind.Set}, ${Kind.Number}, false, ["0", "1", "2", "3"]]`); - const v:NomsSet = await decodeNomsValue(chunk, new DataStore(new MemoryStore())); + const v: NomsSet = await decodeNomsValue(chunk, new DataStore(new MemoryStore())); - const t = makeCompoundType(Kind.Set, numberType); + const t = makeSetType(numberType); const s:NomsSet = new NomsSet(t, new SetLeafSequence(ds, t, [0, 1, 2, 3])); assert.isTrue(v.equals(s)); }); @@ -452,28 +445,23 @@ suite('Decode', () => { const 
makeChunk = a => Chunk.fromString(`t ${JSON.stringify(a)}`); - // Package containing Commit def - const packageArray = [Kind.Package, [Kind.Struct, 'Commit', [ - 'value', Kind.Value, false, - 'parents', Kind.Set, [ - Kind.Ref, [ - Kind.Unresolved, 'sha1-0000000000000000000000000000000000000000','0', - ], - ], false, - ], []],[]]; - const pkgChunk = makeChunk(packageArray); - const pkgRef = pkgChunk.ref; - ms.put(pkgChunk); + // struct Commit { + // value: Value + // parents: Set> + // } // Commit value const commitChunk = makeChunk( - [Kind.Unresolved, pkgRef.toString(), '0', Kind.Number, '1', false, []]); + [Kind.Struct, 'Commit', + ['value', Kind.Value, false, 'parents', Kind.Set, Kind.Ref, Kind.BackRef, 0, false], [], + Kind.Number, '1', false, []]); const commitRef = commitChunk.ref; ms.put(commitChunk); // Root - const rootChunk = makeChunk([Kind.Map, Kind.String, Kind.Ref, Kind.Unresolved, - pkgRef.toString(), '0', false, ['counter', commitRef.toString()]]); + const rootChunk = makeChunk([Kind.Map, Kind.String, Kind.Ref, Kind.Struct, 'Commit', + ['value', Kind.Value, false, 'parents', Kind.Set, Kind.Ref, Kind.BackRef, 0, false], [], + false, ['counter', commitRef.toString()]]); const rootRef = rootChunk.ref; ms.put(rootChunk); @@ -542,10 +530,41 @@ suite('Decode', () => { const reader = v.getReader(); assert.deepEqual(await reader.read(), {done: false, value: stringToUint8Array('hi')}); - // console.log(stringToUint8Array('world')); const x = await reader.read(); - // console.log(x); assert.deepEqual(x, {done: false, value: stringToUint8Array('world')}); assert.deepEqual(await reader.read(), {done: true}); }); + + test('recursive struct', async () => { + const ms = new MemoryStore(); + const ds = new DataStore(ms); + + // struct A { + // b: struct B { + // a: List + // b: List + // } + // } + + const ta = makeStructType('A', [], []); + const tb = makeStructType('B', [], []); + invariant(ta.desc instanceof StructDesc); + ta.desc.fields.push(new Field('b', tb, false)); + + invariant(tb.desc instanceof StructDesc); + const {fields} = tb.desc; + fields.push(new Field('a', makeListType(ta), false), new Field('b', makeListType(tb), false)); + + const a = [Kind.Struct, 'A', + ['b', Kind.Struct, 'B', [ + 'a', Kind.List, Kind.BackRef, 1, false, + 'b', Kind.List, Kind.BackRef, 0, false, + ], [], false], [], + false, [], false, []]; + const r = new JsonArrayReader(a, ds); + const v = await r.readTopLevelValue(); + + assert.isTrue(v.type.equals(ta)); + assert.isTrue(v.b.type.equals(tb)); + }); }); diff --git a/js/src/decode.js b/js/src/decode.js index 27e22da981..6103cdd9af 100644 --- a/js/src/decode.js +++ b/js/src/decode.js @@ -14,18 +14,15 @@ import { getPrimitiveType, makeCompoundType, makeStructType, - makeType, - makeUnresolvedType, StructDesc, Type, typeType, numberType, } from './type.js'; import {indexTypeForMetaSequence, MetaTuple, newMetaSequenceFromData} from './meta-sequence.js'; -import {invariant, notNull} from './assert.js'; +import {invariant} from './assert.js'; import {isPrimitiveKind, Kind} from './noms-kind.js'; import {ListLeafSequence, NomsList} from './list.js'; -import {lookupPackage, Package, readPackage} from './package.js'; import {NomsMap, MapLeafSequence} from './map.js'; import {NomsSet, SetLeafSequence} from './set.js'; import {IndexedMetaSequence} from './meta-sequence.js'; @@ -33,14 +30,6 @@ import {IndexedMetaSequence} from './meta-sequence.js'; const typedTag = 't '; const blobTag = 'b '; -class UnresolvedPackage { - pkgRef: Ref; - - constructor(pkgRef: Ref) { 
- this.pkgRef = pkgRef; - } -} - export class JsonArrayReader { _a: Array; _i: number; @@ -84,6 +73,12 @@ export class JsonArrayReader { return v; } + readUint8(): number { + const v = this.read(); + invariant((v & 0xff) === v); + return v; + } + readFloat(): number { const next = this.read(); invariant(typeof next === 'string'); @@ -111,26 +106,27 @@ export class JsonArrayReader { return Ref.parse(next); } - readTypeAsTag(): Type { + readTypeAsTag(backRefs: Type[]): Type { const kind = this.readKind(); switch (kind) { case Kind.List: case Kind.Set: case Kind.Ref: { - const elemType = this.readTypeAsTag(); + const elemType = this.readTypeAsTag(backRefs); return makeCompoundType(kind, elemType); } case Kind.Map: { - const keyType = this.readTypeAsTag(); - const valueType = this.readTypeAsTag(); + const keyType = this.readTypeAsTag(backRefs); + const valueType = this.readTypeAsTag(backRefs); return makeCompoundType(kind, keyType, valueType); } case Kind.Type: return typeType; - case Kind.Unresolved: { - const pkgRef = this.readRef(); - const ordinal = this.readOrdinal(); - return makeType(pkgRef, ordinal); + case Kind.Struct: + return this.readStructType(backRefs); + case Kind.BackRef: { + const i = this.readUint8(); + return backRefs[backRefs.length - 1 - i]; } } @@ -147,100 +143,65 @@ export class JsonArrayReader { return new BlobLeafSequence(this._ds, bytes); } - readSequence(t: Type, pkg: ?Package): Array { + readSequence(t: Type): Array { const elemType = t.elemTypes[0]; const list = []; while (!this.atEnd()) { - const v = this.readValueWithoutTag(elemType, pkg); + const v = this.readValueWithoutTag(elemType); list.push(v); } return list; } - readListLeafSequence(t: Type, pkg: ?Package): ListLeafSequence { - const seq = this.readSequence(t, pkg); + readListLeafSequence(t: Type): ListLeafSequence { + const seq = this.readSequence(t); return new ListLeafSequence(this._ds, t, seq); } - readSetLeafSequence(t: Type, pkg: ?Package): SetLeafSequence { - const seq = this.readSequence(t, pkg); + readSetLeafSequence(t: Type): SetLeafSequence { + const seq = this.readSequence(t); return new SetLeafSequence(this._ds, t, seq); } - readMapLeafSequence(t: Type, pkg: ?Package): MapLeafSequence { + readMapLeafSequence(t: Type): MapLeafSequence { const keyType = t.elemTypes[0]; const valueType = t.elemTypes[1]; const entries = []; while (!this.atEnd()) { - const k = this.readValueWithoutTag(keyType, pkg); - const v = this.readValueWithoutTag(valueType, pkg); + const k = this.readValueWithoutTag(keyType); + const v = this.readValueWithoutTag(valueType); entries.push({key: k, value: v}); } return new MapLeafSequence(this._ds, t, entries); } - readMetaSequence(t: Type, pkg: ?Package): any { + readMetaSequence(t: Type): any { const data: Array = []; const indexType = indexTypeForMetaSequence(t); while (!this.atEnd()) { const ref = this.readRef(); - const v = this.readValueWithoutTag(indexType, pkg); - const numLeaves = this.readValueWithoutTag(numberType, pkg); + const v = this.readValueWithoutTag(indexType); + const numLeaves = this.readValueWithoutTag(numberType); data.push(new MetaTuple(ref, v, numLeaves)); } return newMetaSequenceFromData(this._ds, t, data); } - readPackage(t: Type, pkg: ?Package): Package { - const r2 = new JsonArrayReader(this.readArray(), this._ds); - const types = []; - while (!r2.atEnd()) { - types.push(r2.readTypeAsValue(pkg)); - } - - const r3 = new JsonArrayReader(this.readArray(), this._ds); - const deps = []; - while (!r3.atEnd()) { - deps.push(r3.readRef()); - } - - return new 
Package(types, deps); - } - readRefValue(t: Type): RefValue { const ref = this.readRef(); return new RefValue(ref, t); } readTopLevelValue(): Promise { - return new Promise((resolve, reject) => { - const t = this.readTypeAsTag(); - const doRead = () => { - const i = this._i; - - try { - const v = this.readValueWithoutTag(t); - resolve(v); - } catch (ex) { - if (ex instanceof UnresolvedPackage) { - readPackage(ex.pkgRef, this._ds).then(() => { - this._i = i; - doRead(); - }); - } else { - reject(ex); - } - } - }; - - doRead(); - }); + const t = this.readTypeAsTag([]); + const v = this.readValueWithoutTag(t); + return Promise.resolve(v); } - readValueWithoutTag(t: Type, pkg: ?Package = null): any { + readValueWithoutTag(t: Type): any { // TODO: Verify read values match tagged kinds. switch (t.kind) { case Kind.Blob: { @@ -248,7 +209,7 @@ export class JsonArrayReader { let sequence; if (isMeta) { const r2 = new JsonArrayReader(this.readArray(), this._ds); - sequence = r2.readMetaSequence(t, pkg); + sequence = r2.readMetaSequence(t); invariant(sequence instanceof IndexedMetaSequence); } else { sequence = this.readBlobLeafSequence(); @@ -262,67 +223,45 @@ export class JsonArrayReader { case Kind.String: return this.readString(); case Kind.Value: { - const t2 = this.readTypeAsTag(); - return this.readValueWithoutTag(t2, pkg); + const t2 = this.readTypeAsTag([]); + return this.readValueWithoutTag(t2); } case Kind.List: { const isMeta = this.readBool(); const r2 = new JsonArrayReader(this.readArray(), this._ds); const sequence = isMeta ? - r2.readMetaSequence(t, pkg) : - r2.readListLeafSequence(t, pkg); + r2.readMetaSequence(t) : + r2.readListLeafSequence(t); return new NomsList(t, sequence); } case Kind.Map: { const isMeta = this.readBool(); const r2 = new JsonArrayReader(this.readArray(), this._ds); const sequence = isMeta ? - r2.readMetaSequence(t, pkg) : - r2.readMapLeafSequence(t, pkg); + r2.readMetaSequence(t) : + r2.readMapLeafSequence(t); return new NomsMap(t, sequence); } - case Kind.Package: - return this.readPackage(t, pkg); case Kind.Ref: return this.readRefValue(t); case Kind.Set: { const isMeta = this.readBool(); const r2 = new JsonArrayReader(this.readArray(), this._ds); const sequence = isMeta ? 
- r2.readMetaSequence(t, pkg) : - r2.readSetLeafSequence(t, pkg); + r2.readMetaSequence(t) : + r2.readSetLeafSequence(t); return new NomsSet(t, sequence); } case Kind.Struct: - throw new Error('Not allowed'); + return this.readStruct(t); case Kind.Type: - return this.readTypeAsValue(pkg); - case Kind.Unresolved: - return this.readUnresolvedKindToValue(t, pkg); + return this.readTypeAsValue([]); } throw new Error('Unreached'); } - readUnresolvedKindToValue(t: Type, pkg: ?Package = null): any { - const pkgRef = t.packageRef; - const ordinal = t.ordinal; - if (!pkgRef.isEmpty()) { - pkg = lookupPackage(pkgRef); - if (!pkg) { - throw new UnresolvedPackage(pkgRef); - } - invariant(pkg); - } - - pkg = notNull(pkg); - const typeDef = pkg.types[ordinal]; - - invariant(typeDef.kind === Kind.Struct); - return this.readStruct(typeDef, t, pkg); - } - - readTypeAsValue(pkg: ?Package): Type { + readTypeAsValue(backRefs: Type[]): Type { const k = this.readKind(); switch (k) { @@ -333,48 +272,24 @@ export class JsonArrayReader { const r2 = new JsonArrayReader(this.readArray(), this._ds); const elemTypes: Array = []; while (!r2.atEnd()) { - elemTypes.push(r2.readTypeAsValue()); + elemTypes.push(r2.readTypeAsValue(backRefs)); } return makeCompoundType(k, ...elemTypes); } - case Kind.Struct: { - const name = this.readString(); - const readFields = () => { - const fields: Array = []; - const fieldReader = new JsonArrayReader(this.readArray(), this._ds); - while (!fieldReader.atEnd()) { - const fieldName = fieldReader.readString(); - const fieldType = fieldReader.readTypeAsValue(pkg); - const optional = fieldReader.readBool(); - fields.push(new Field(fieldName, fieldType, optional)); - } - return fields; - }; + case Kind.Struct: + return this.readStructType(backRefs); - const fields = readFields(); - const choices = readFields(); - return makeStructType(name, fields, choices); - } - case Kind.Unresolved: { - const pkgRef = this.readRef(); - const ordinal = this.readOrdinal(); - if (ordinal === -1) { - const namespace = this.readString(); - const name = this.readString(); - return makeUnresolvedType(namespace, name); - } - - return makeType(pkgRef, ordinal); - } + case Kind.BackRef: + throw new Error('not reachable'); } invariant(isPrimitiveKind(k)); return getPrimitiveType(k); } - readStruct(typeDef: Type, type: Type, pkg: Package): T { - const desc = typeDef.desc; + readStruct(type: Type): T { + const desc = type.desc; invariant(desc instanceof StructDesc); const data: {[key: string]: any} = Object.create(null); @@ -384,11 +299,11 @@ export class JsonArrayReader { if (field.optional) { const b = this.readBool(); if (b) { - const v = this.readValueWithoutTag(field.t, pkg); + const v = this.readValueWithoutTag(field.t); data[field.name] = v; } } else { - const v = this.readValueWithoutTag(field.t, pkg); + const v = this.readValueWithoutTag(field.t); data[field.name] = v; } } @@ -397,11 +312,39 @@ export class JsonArrayReader { if (desc.union.length > 0) { unionIndex = this.readUint(); const unionField = desc.union[unionIndex]; - const v = this.readValueWithoutTag(unionField.t, pkg); + const v = this.readValueWithoutTag(unionField.t); data[unionField.name] = v; } - return newStruct(type, typeDef, data); + return newStruct(type, data); + } + + readStructType(backRefs: Type[]): Type { + const name = this.readString(); + const fields = []; + const choices = []; + const structType = makeStructType(name, fields, choices); + backRefs = backRefs.concat(structType); // needs to be a copy. 
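// Illustrative note on the backRefs stack used here: while this struct's
// fields are being decoded, the struct type itself is the last element of
// `backRefs`. A field type written as [Kind.BackRef, i] is resolved by
// readTypeAsTag as backRefs[backRefs.length - 1 - i], so i === 0 refers to
// the innermost enclosing struct, i === 1 to the next one out, and so on.
// That is how a chunk such as the Commit example above, whose parents field
// is Set<Ref<BackRef<0>>>, points back at its own struct type without any
// package lookup.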
+ const readFields = () => { + const fields: Array = []; + const fieldReader = new JsonArrayReader(this.readArray(), this._ds); + while (!fieldReader.atEnd()) { + const fieldName = fieldReader.readString(); + const fieldType = fieldReader.readTypeAsTag(backRefs); + const optional = fieldReader.readBool(); + fields.push(new Field(fieldName, fieldType, optional)); + } + return fields; + }; + + const newFields = readFields(); + const newChoices = readFields(); + + // Update the existing structType to keep object identity. + invariant(structType.desc instanceof StructDesc); + structType.desc.fields = newFields; + structType.desc.union = newChoices; + return structType; } } diff --git a/js/src/defs-test.js b/js/src/defs-test.js index 26ab78b9b5..c9afc0da7f 100644 --- a/js/src/defs-test.js +++ b/js/src/defs-test.js @@ -2,7 +2,6 @@ import {suite, test} from 'mocha'; import {assert} from 'chai'; -import {Package, registerPackage} from './package.js'; import { boolType, Field, @@ -10,16 +9,16 @@ import { makeMapType, makeSetType, makeStructType, - makeType, - numberType, stringType, + numberType, + valueType, + StructDesc, } from './type.js'; import {defToNoms} from './defs.js'; import {newList} from './list.js'; import {newStruct} from './struct.js'; import {newSet} from './set.js'; import {newMap} from './map.js'; -import {emptyRef} from './ref.js'; import {ValueBase} from './value.js'; import {invariant} from './assert.js'; @@ -92,17 +91,12 @@ suite('defs', () => { }); test('struct', async () => { - let typeDef; - const pkg = new Package([ - typeDef = makeStructType('Struct', [ - new Field('b', boolType, false), - new Field('s', stringType, false), - ], []), + const type = makeStructType('Struct', [ + new Field('b', boolType, false), + new Field('s', stringType, false), ], []); - registerPackage(pkg); - const type = makeType(pkg.ref, 0); - const s1 = newStruct(type, typeDef, { + const s1 = newStruct(type, { b: true, s: 'hi', }); @@ -115,17 +109,12 @@ suite('defs', () => { }); test('struct with list', async () => { - let typeDef; const listOfNumberType = makeListType(numberType); - const pkg = new Package([ - typeDef = makeStructType('StructWithList', [ - new Field('l', listOfNumberType, false), - ], []), + const type = makeStructType('StructWithList', [ + new Field('l', listOfNumberType, false), ], []); - registerPackage(pkg); - const type = makeType(pkg.ref, 0); - const s1 = newStruct(type, typeDef, { + const s1 = newStruct(type, { l: await newList([0, 1, 2, 3], listOfNumberType), }); @@ -138,55 +127,44 @@ suite('defs', () => { }); test('list of struct', async () => { - let typeDef; - const pkg = new Package([ - typeDef = makeStructType('Struct', [ - new Field('i', numberType, false), - ], []), + const structType = makeStructType('Struct', [ + new Field('i', numberType, false), ], []); - registerPackage(pkg); - const structType = makeType(pkg.ref, 0); const listType = makeListType(structType); const l1 = await newList([ - newStruct(structType, typeDef, {i: 1}), - newStruct(structType, typeDef, {i: 2}), + newStruct(structType, {i: 1}), + newStruct(structType, {i: 2}), ], listType); const l2 = await defToNoms([{i: 1}, {i: 2}], listType); invariant(l2 instanceof ValueBase); assert.isTrue(l1.equals(l2)); - const l3 = await defToNoms([newStruct(structType, typeDef, {i: 1}), {i: 2}], listType); + const l3 = await defToNoms([newStruct(structType, {i: 1}), {i: 2}], listType); invariant(l3 instanceof ValueBase); assert.isTrue(l1.equals(l3)); }); test('recursive struct', async () => { - const pkg = new 
Package([ - makeStructType('Struct', [ - new Field('children', makeListType(makeType(emptyRef, 0)), false), - ], []), + const type = makeStructType('Struct', [ + new Field('children', valueType /* placeholder */, false), ], []); - registerPackage(pkg); - const type = makeType(pkg.ref, 0); - const typeDef = makeStructType('Struct', [ - new Field('children', makeListType(makeType(pkg.ref, 0)), false), - ], []); - const listType = makeListType(type); + invariant(type.desc instanceof StructDesc); + type.desc.fields[0].t = listType; const a = await newList([], listType); const b = await newList([], listType); - const x = newStruct(type, typeDef, { + const x = newStruct(type, { children: a, }); - const y = newStruct(type, typeDef, { + const y = newStruct(type, { children: b, }); const c = await newList([x, y], listType); - const t1 = newStruct(type, typeDef, { + const t1 = newStruct(type, { children: c, }); diff --git a/js/src/defs.js b/js/src/defs.js index 7a76072c30..f3ed0fa54a 100644 --- a/js/src/defs.js +++ b/js/src/defs.js @@ -2,23 +2,21 @@ import type {valueOrPrimitive} from './value.js'; import {ValueBase} from './value.js'; -import {Type, CompoundDesc, StructDesc, makeType} from './type.js'; +import {Type, CompoundDesc, StructDesc} from './type.js'; import type {Field} from './type.js'; -import {invariant, notNull} from './assert.js'; +import {invariant} from './assert.js'; import {Kind} from './noms-kind.js'; import {newList} from './list.js'; import {newSet} from './set.js'; import {newMap} from './map.js'; import {newBlob} from './blob.js'; -import {lookupPackage} from './package.js'; -import type {Package} from './package.js'; import type Struct from './struct.js'; import {newStruct} from './struct.js'; type StructDefType = {[name: string]: DefType}; type DefType = number | string | boolean | Array | StructDefType | Uint8Array | ValueBase; -export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise { +export async function defToNoms(v: DefType, t: Type): Promise { switch (typeof v) { case 'number': case 'boolean': @@ -41,7 +39,7 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise defToNoms(e, vt, pkg))); + const vs = await Promise.all(v.map(e => defToNoms(e, vt))); return newList(vs, t); } @@ -49,7 +47,7 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise defToNoms(e, vt, pkg))); + const vs = await Promise.all(v.map(e => defToNoms(e, vt))); return newSet(vs, t); } @@ -57,7 +55,7 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise defToNoms(e, ets[i % 2], pkg))); + const vs = await Promise.all(v.map((e, i) => defToNoms(e, ets[i % 2]))); return newMap(vs, t); } @@ -65,16 +63,9 @@ export async function defToNoms(v: DefType, t: Type, pkg: ?Package): Promise(data: StructDefType, type: Type, typeDef: Type, - pkg: ?Package): Promise { - const {desc} = typeDef; +async function structDefToNoms(data: StructDefType, type: Type): Promise { + const {desc} = type; invariant(desc instanceof StructDesc); const keys = []; const ps: Array> = []; @@ -92,7 +82,7 @@ async function structDefToNoms(data: StructDefType, type: Type, typeD const v = data[f.name]; if (v !== undefined) { keys.push(f.name); - ps.push(defToNoms(v, f.t, pkg)); + ps.push(defToNoms(v, f.t)); } }; desc.fields.forEach(add); @@ -103,5 +93,5 @@ async function structDefToNoms(data: StructDefType, type: Type, typeD for (let i = 0; i < keys.length; i++) { newData[keys[i]] = vals[i]; } - return newStruct(type, typeDef, newData); + return 
newStruct(type, newData); } diff --git a/js/src/describe-type.js b/js/src/describe-type.js deleted file mode 100644 index bebb96ee72..0000000000 --- a/js/src/describe-type.js +++ /dev/null @@ -1,16 +0,0 @@ -// @flow - -import {ValueBase} from './value.js'; - -export default function describeType(v: any): string { - const t = typeof v; - if (t === 'object') { - if (v === null) { - return 'null'; - } - if (v instanceof ValueBase) { - return v.type.describe(); - } - } - return t; -} diff --git a/js/src/encode-human-readable-test.js b/js/src/encode-human-readable-test.js new file mode 100644 index 0000000000..92104a4c10 --- /dev/null +++ b/js/src/encode-human-readable-test.js @@ -0,0 +1,122 @@ +// @flow + +import {assert} from 'chai'; +import {suite, test} from 'mocha'; + +import {TypeWriter} from './encode-human-readable.js'; +import {invariant} from './assert.js'; +import { + blobType, + boolType, + Field, + numberType, + makeRefType, + makeListType, + makeMapType, + makeSetType, + makeStructType, + stringType, + valueType, + Type, + StructDesc, +} from './type.js'; + +suite('Encode human readable types', () => { + function assertWriteType(expected: string, t: Type) { + let actual = ''; + const w = { + write(s: string) { + actual += s; + }, + }; + const tw = new TypeWriter(w); + tw.writeType(t); + assert.equal(actual, expected); + } + + test('primitives', () => { + assertWriteType('Bool', boolType); + assertWriteType('Blob', blobType); + assertWriteType('String', stringType); + assertWriteType('Number', numberType); + }); + + test('compound', () => { + assertWriteType('List', makeListType(numberType)); + assertWriteType('Set', makeSetType(numberType)); + assertWriteType('Ref', makeRefType(numberType)); + assertWriteType('Map', makeMapType(numberType, stringType)); + }); + + test('struct', () => { + const type = makeStructType('S1', [ + new Field('x', numberType, false), + new Field('y', numberType, true), + ], []); + assertWriteType('struct S1 {\n x: Number\n y: optional Number\n}', type); + }); + + test('struct with union', () => { + const type = makeStructType('S1', [], [ + new Field('x', numberType, false), + new Field('y', numberType, true), + ]); + assertWriteType('struct S1 {\n union {\n x: Number\n y: optional Number\n }\n}', type); + }); + + test('list of struct', () => { + const type = makeStructType('S3', [ + new Field('x', numberType, false), + ], []); + assertWriteType('List', makeListType(type)); + }); + + test('recursive struct', () => { + // struct A { + // b: A + // c: List + // d: struct D { + // e: D + // f: A + // } + // } + + const a = makeStructType('A', [ + new Field('b', valueType /* placeholder */, false), + new Field('c', valueType /* placeholder */, false), + new Field('d', valueType /* placeholder */, false), + ], []); + const d = makeStructType('D', [ + new Field('e', valueType /* placeholder */, false), + new Field('f', a, false), + ], []); + const aDesc = a.desc; + invariant(aDesc instanceof StructDesc); + const dDesc = d.desc; + invariant(dDesc instanceof StructDesc); + aDesc.fields[0].t = a; + aDesc.fields[2].t = d; + dDesc.fields[0].t = d; + dDesc.fields[1].t = a; + aDesc.fields[1].t = makeListType(a); + + + assertWriteType(`struct A { + b: BackRef<0> + c: List> + d: struct D { + e: BackRef<0> + f: BackRef<1> + } +}`, a); + + assertWriteType(`struct D { + e: BackRef<0> + f: struct A { + b: BackRef<0> + c: List> + d: BackRef<1> + } +}`, d); + }); +}); diff --git a/js/src/encode-human-readable.js b/js/src/encode-human-readable.js new file mode 100644 index 
0000000000..a00e4827b4 --- /dev/null +++ b/js/src/encode-human-readable.js @@ -0,0 +1,186 @@ +// @flow + +import {StructDesc, BackRefDesc, CompoundDesc} from './type.js'; +import type {Field, Type} from './type.js'; +import {Kind, kindToString} from './noms-kind.js'; +import type {NomsKind} from './noms-kind.js'; +import {invariant} from './assert.js'; +import type {Value} from './value.js'; +import {ValueBase} from './value.js'; + +export interface StringWriter { + write(s: string): void; +} + +class Writer { + ind: number; + w: StringWriter; + lineLength: number; + + constructor(w: StringWriter) { + this.ind = 0; + this.w = w; + this.lineLength = 0; + } + + maybeWriteIndentation() { + if (this.lineLength === 0) { + for (let i = 0; i < this.ind; i++) { + this.w.write(' '); + } + this.lineLength = 2 * this.ind; + } + } + + write(s: string) { + this.maybeWriteIndentation(); + this.w.write(s); + this.lineLength += s.length; + } + + indent() { + this.ind++; + } + + outdent() { + this.ind--; + } + + newLine() { + this.write('\n'); + this.lineLength = 0; + } + + writeKind(k: NomsKind) { + this.write(kindToString(k)); + } +} + +export class TypeWriter { + _w: Writer; + + constructor(w: StringWriter) { + this._w = new Writer(w); + } + + writeType(t: Type) { + this._writeType(t, []); + } + + _writeType(t: Type, backRefs: Type[]) { + switch (t.kind) { + case Kind.Blob: + case Kind.Bool: + case Kind.Number: + case Kind.String: + case Kind.Type: + case Kind.Value: + this._w.writeKind(t.kind); + break; + case Kind.List: + case Kind.Ref: + case Kind.Set: + this._w.writeKind(t.kind); + this._w.write('<'); + invariant(t.desc instanceof CompoundDesc); + this._writeType(t.desc.elemTypes[0], backRefs); + this._w.write('>'); + break; + case Kind.Map: { + this._w.writeKind(t.kind); + this._w.write('<'); + invariant(t.desc instanceof CompoundDesc); + const [keyType, valueType] = t.desc.elemTypes; + this._writeType(keyType, backRefs); + this._w.write(', '); + this._writeType(valueType, backRefs); + this._w.write('>'); + break; + } + case Kind.Struct: + this._writeStructType(t, backRefs); + break; + case Kind.BackRef: + invariant(t.desc instanceof BackRefDesc); + this._writeBackRef(t.desc.value); + break; + default: + throw new Error('unreachable'); + } + } + + _writeBackRef(i: number) { + this._w.write(`BackRef<${i}>`); + } + + _writeStructType(t: Type, backRefs: Type[]) { + const idx = backRefs.indexOf(t); + if (idx !== -1) { + this._writeBackRef(backRefs.length - idx - 1); + return; + } + backRefs = backRefs.concat(t); + + const desc = t.desc; + invariant(desc instanceof StructDesc); + this._w.write('struct '); + this._w.write(desc.name); + this._w.write(' {'); + this._w.indent(); + let i = 0; + const writeField = (f: Field) => { + if (i === 0) { + this._w.newLine(); + } + this._w.write(f.name); + this._w.write(': '); + if (f.optional) { + this._w.write('optional '); + } + this._writeType(f.t, backRefs); + this._w.newLine(); + i++; + }; + desc.fields.forEach(writeField); + + if (desc.union.length > 0) { + if (i === 0) { + this._w.newLine(); + i++; + } + this._w.write('union {'); + this._w.indent(); + i = 0; + desc.union.forEach(writeField); + this._w.outdent(); + this._w.write('}'); + this._w.newLine(); + } + this._w.outdent(); + this._w.write('}'); + } +} + +export function describeType(t: Type) { + let s = ''; + const w = new TypeWriter({ + write(s2: string) { + s += s2; + }, + }); + w.writeType(t); + return s; +} + +export function describeTypeOfValue(v: Value) { + const t = typeof v; + if (t === 'object') 
{ + if (v === null) { + return 'null'; + } + if (v instanceof ValueBase) { + return describeType(v.type); + } + } + return t; +} diff --git a/js/src/encode-test.js b/js/src/encode-test.js index 50594a9aed..b875fb7709 100644 --- a/js/src/encode-test.js +++ b/js/src/encode-test.js @@ -4,7 +4,7 @@ import {assert} from 'chai'; import {suite, test} from 'mocha'; import MemoryStore from './memory-store.js'; -import {default as Ref, emptyRef} from './ref.js'; +import Ref from './ref.js'; import RefValue from './ref-value.js'; import {newStruct} from './struct.js'; import type {NomsKind} from './noms-kind.js'; @@ -18,20 +18,20 @@ import { makeMapType, makeSetType, makeStructType, - makeType, numberType, stringType, Type, valueType, + StructDesc, } from './type.js'; import {IndexedMetaSequence, MetaTuple, OrderedMetaSequence} from './meta-sequence.js'; import {Kind} from './noms-kind.js'; import {newList, ListLeafSequence, NomsList} from './list.js'; import {newMap, MapLeafSequence, NomsMap} from './map.js'; import {newSet, NomsSet, SetLeafSequence} from './set.js'; -import {Package, registerPackage} from './package.js'; import {newBlob} from './blob.js'; import DataStore from './data-store.js'; +import {invariant} from './assert.js'; suite('Encode', () => { test('write primitives', () => { @@ -189,16 +189,11 @@ suite('Encode', () => { const ds = new DataStore(ms); const w = new JsonArrayWriter(ds); - const typeDef = makeStructType('S', [], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - - const v = newStruct(type, typeDef, {}); + const type = makeStructType('S', [], []); + const v = newStruct(type, {}); w.writeTopLevel(type, v); - assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0'], w.array); + assert.deepEqual([Kind.Struct, 'S', [], []], w.array); }); test('write struct', async() => { @@ -206,19 +201,16 @@ suite('Encode', () => { const ds = new DataStore(ms); const w = new JsonArrayWriter(ds); - const typeDef = makeStructType('S', [ + const type = makeStructType('S', [ new Field('x', numberType, false), new Field('b', boolType, false), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - const v = newStruct(type, typeDef, {x: 42, b: true}); + const v = newStruct(type, {x: 42, b: true}); w.writeTopLevel(type, v); - assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', true], w.array); + assert.deepEqual([Kind.Struct, 'S', ['x', Kind.Number, false, 'b', Kind.Bool, false], [], + '42', true], w.array); }); test('write struct optional field', async() => { @@ -226,23 +218,21 @@ suite('Encode', () => { const ds = new DataStore(ms); let w = new JsonArrayWriter(ds); - const typeDef = makeStructType('S', [ + const type = makeStructType('S', [ new Field('x', numberType, true), new Field('b', boolType, false), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - let v = newStruct(type, typeDef, {x: 42, b: true}); + let v = newStruct(type, {x: 42, b: true}); w.writeTopLevel(type, v); - assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', true, '42', true], w.array); + assert.deepEqual([Kind.Struct, 'S', + ['x', Kind.Number, true, 'b', Kind.Bool, false], [], true, '42', true], w.array); - v = newStruct(type, typeDef, {b: true}); + v = newStruct(type, {b: true}); w = new JsonArrayWriter(ds); w.writeTopLevel(type, v); - 
assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, true], w.array); + assert.deepEqual([Kind.Struct, 'S', ['x', Kind.Number, true, 'b', Kind.Bool, false], [], + false, true], w.array); }); test('write struct with union', async() => { @@ -250,25 +240,25 @@ suite('Encode', () => { const ds = new DataStore(ms); let w = new JsonArrayWriter(ds); - const typeDef = makeStructType('S', [ + const type = makeStructType('S', [ new Field('x', numberType, false), ], [ new Field('b', boolType, false), new Field('s', stringType, false), ]); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - let v = newStruct(type, typeDef, {x: 42, s: 'hi'}); + let v = newStruct(type, {x: 42, s: 'hi'}); w.writeTopLevel(type, v); - assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', '1', 'hi'], w.array); + assert.deepEqual([Kind.Struct, 'S', + ['x', Kind.Number, false], ['b', Kind.Bool, false, 's', Kind.String, false], '42', '1', 'hi'], + w.array); - v = newStruct(type, typeDef, {x: 42, b: true}); + v = newStruct(type, {x: 42, b: true}); w = new JsonArrayWriter(ds); w.writeTopLevel(type, v); - assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', '42', '0', true], w.array); + assert.deepEqual([Kind.Struct, 'S', + ['x', Kind.Number, false], ['b', Kind.Bool, false, 's', Kind.String, false], '42', '0', true], + w.array); }); test('write struct with list', async() => { @@ -277,23 +267,20 @@ suite('Encode', () => { let w = new JsonArrayWriter(ds); const ltr = makeCompoundType(Kind.List, stringType); - const typeDef = makeStructType('S', [ + const type = makeStructType('S', [ new Field('l', ltr, false), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - let v = newStruct(type, typeDef, {l: new NomsList(ltr, - new ListLeafSequence(ds, ltr, ['a', 'b']))}); + let v = newStruct(type, {l: new NomsList(ltr, new ListLeafSequence(ds, ltr, ['a', 'b']))}); w.writeTopLevel(type, v); - assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, ['a', 'b']], w.array); + assert.deepEqual([Kind.Struct, 'S', + ['l', Kind.List, Kind.String, false], [], false, ['a', 'b']], w.array); - v = newStruct(type, typeDef, {l: new NomsList(ltr, new ListLeafSequence(ds, ltr, []))}); + v = newStruct(type, {l: new NomsList(ltr, new ListLeafSequence(ds, ltr, []))}); w = new JsonArrayWriter(ds); w.writeTopLevel(type, v); - assert.deepEqual([Kind.Unresolved, pkgRef.toString(), '0', false, []], w.array); + assert.deepEqual([Kind.Struct, 'S', ['l', Kind.List, Kind.String, false], [], false, []], + w.array); }); test('write struct with struct', async () => { @@ -301,30 +288,24 @@ suite('Encode', () => { const ds = new DataStore(ms); const w = new JsonArrayWriter(ds); - const s2TypeDef = makeStructType('S2', [ + const s2Type = makeStructType('S2', [ new Field('x', numberType, false), ], []); - let sTypeDef = makeStructType('S', [ - new Field('s', makeType(emptyRef, 0), false), + const sType = makeStructType('S', [ + new Field('s', s2Type, false), ], []); - const pkg = new Package([s2TypeDef, sTypeDef], []); - registerPackage(pkg); - sTypeDef = pkg.types[1]; - const pkgRef = pkg.ref; - const s2Type = makeType(pkgRef, 0); - const sType = makeType(pkgRef, 1); - - const v = newStruct(sType, sTypeDef, {s: newStruct(s2Type, s2TypeDef, {x: 42})}); + const v = newStruct(sType, {s: newStruct(s2Type, {x: 42})}); w.writeTopLevel(sType, v); - assert.deepEqual([Kind.Unresolved, 
pkgRef.toString(), '1', '42'], w.array); + assert.deepEqual([Kind.Struct, 'S', + ['s', Kind.Struct, 'S2', ['x', Kind.Number, false], [], false], [], '42'], w.array); }); test('write compound list', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); const w = new JsonArrayWriter(ds); - const ltr = makeCompoundType(Kind.List, numberType); + const ltr = makeListType(numberType); const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0]))).targetRef; const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [1, 2]))).targetRef; const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [3, 4, 5]))).targetRef; @@ -347,7 +328,7 @@ suite('Encode', () => { const test = (expected: Array, v: Type) => { const w = new JsonArrayWriter(ds); w.writeTopLevel(v.type, v); - assert.deepEqual(expected, w.array); + assert.deepEqual(w.array, expected); }; test([Kind.Type, Kind.Number], numberType); @@ -366,18 +347,29 @@ suite('Encode', () => { new Field('v', valueType, false), ])); - const pkgRef = Ref.parse('sha1-0123456789abcdef0123456789abcdef01234567'); - test([Kind.Type, Kind.Unresolved, pkgRef.toString(), '123'], makeType(pkgRef, 123)); - test([Kind.Type, Kind.Struct, 'S', - ['e', Kind.Unresolved, pkgRef.toString(), '123', false, 'x', Kind.Number, false], []], + ['e', Kind.Bool, true, 'x', Kind.Number, false], []], makeStructType('S', [ - new Field('e', makeType(pkgRef, 123), false), + new Field('e', boolType, true), new Field('x', numberType, false), ], [])); - // test([Kind.Type, Kind.Unresolved, new Ref().toString(), -1, 'ns', 'n'], - // makeUnresolvedType('ns', 'n')); + + // struct A6 { + // v: Number + // cs: List + // } + + const st = makeStructType('A6', [ + new Field('v', numberType, false), + new Field('cs', valueType /* placeholder */, false), + ], []); + const lt = makeListType(st); + invariant(st.desc instanceof StructDesc); + st.desc.fields[1].t = lt; + + test([Kind.Type, Kind.Struct, 'A6', + ['v', Kind.Number, false, 'cs', Kind.List, Kind.BackRef, 0, false], []], st); }); test('top level blob', async () => { diff --git a/js/src/encode.js b/js/src/encode.js index 410f803c57..4db4c74aac 100644 --- a/js/src/encode.js +++ b/js/src/encode.js @@ -2,24 +2,22 @@ import Chunk from './chunk.js'; import type Ref from './ref.js'; -import {emptyRef} from './ref.js'; import RefValue from './ref-value.js'; import {default as Struct, StructMirror} from './struct.js'; import type DataStore from './data-store.js'; import type {NomsKind} from './noms-kind.js'; import {encode as encodeBase64} from './base64.js'; -import {boolType, stringType, StructDesc, Type, typeType, numberType} from './type.js'; +import {StructDesc, Type, numberType, getTypeOfValue} from './type.js'; import {indexTypeForMetaSequence, MetaTuple} from './meta-sequence.js'; -import {invariant, notNull} from './assert.js'; +import {invariant} from './assert.js'; import {isPrimitiveKind, Kind} from './noms-kind.js'; import {ListLeafSequence, NomsList} from './list.js'; -import {lookupPackage, Package} from './package.js'; import {MapLeafSequence, NomsMap} from './map.js'; import {NomsSet, SetLeafSequence} from './set.js'; import {Sequence} from './sequence.js'; import {setEncodeNomsValue} from './get-ref.js'; import {NomsBlob, BlobLeafSequence} from './blob.js'; -import describeType from './describe-type.js'; +import {describeTypeOfValue} from './encode-human-readable.js'; const typedTag = 't '; @@ -52,6 +50,10 @@ export class JsonArrayWriter { this.write(n.toFixed(0)); } + 
writeUint8(n: number) { + this.write(n); + } + writeKind(k: NomsKind) { this.write(k); } @@ -60,40 +62,30 @@ export class JsonArrayWriter { this.write(r.toString()); } - writeTypeAsTag(t: Type) { + writeTypeAsTag(t: Type, backRefs: Type[]) { const k = t.kind; - this.writeKind(k); switch (k) { - case Kind.Struct: - throw new Error('Unreachable'); case Kind.List: case Kind.Map: case Kind.Ref: - case Kind.Set: { - t.elemTypes.forEach(elemType => this.writeTypeAsTag(elemType)); + case Kind.Set: + this.writeKind(k); + t.elemTypes.forEach(elemType => this.writeTypeAsTag(elemType, backRefs)); break; - } - case Kind.Unresolved: { - const pkgRef = t.packageRef; - invariant(!pkgRef.isEmpty()); - this.writeRef(pkgRef); - this.writeInt(t.ordinal); - - const pkg = lookupPackage(pkgRef); - if (pkg && this._ds) { - this._ds.writeValue(pkg); - } + case Kind.Struct: + this.writeStructType(t, backRefs); break; - } + default: + this.writeKind(k); } } writeTopLevel(t: Type, v: any) { - this.writeTypeAsTag(t); + this.writeTypeAsTag(t, []); this.writeValue(v, t); } - maybeWriteMetaSequence(v: Sequence, t: Type, pkg: ?Package): boolean { + maybeWriteMetaSequence(v: Sequence, t: Type): boolean { if (!v.isMeta) { this.write(false); return false; @@ -110,21 +102,21 @@ export class JsonArrayWriter { this._ds.writeValue(child); } w2.writeRef(tuple.ref); - w2.writeValue(tuple.value, indexType, pkg); - w2.writeValue(tuple.numLeaves, numberType, pkg); + w2.writeValue(tuple.value, indexType); + w2.writeValue(tuple.numLeaves, numberType); } this.write(w2.array); return true; } - writeValue(v: any, t: Type, pkg: ?Package) { + writeValue(v: any, t: Type) { switch (t.kind) { case Kind.Blob: { invariant(v instanceof NomsBlob || v instanceof Sequence, - `Failed to write Blob. Invalid type: ${describeType(v)}`); + `Failed to write Blob. Invalid type: ${describeTypeOfValue(v)}`); const sequence: Sequence = v instanceof NomsBlob ? v.sequence : v; - if (this.maybeWriteMetaSequence(sequence, t, pkg)) { + if (this.maybeWriteMetaSequence(sequence, t)) { break; } @@ -133,41 +125,42 @@ export class JsonArrayWriter { break; } case Kind.Bool: - invariant(typeof v === 'boolean', `Failed to write Bool. Invalid type: ${describeType(v)}`); + invariant(typeof v === 'boolean', + `Failed to write Bool. Invalid type: ${describeTypeOfValue(v)}`); this.write(v); break; case Kind.String: invariant(typeof v === 'string', - `Failed to write String. Invalid type: ${describeType(v)}`); + `Failed to write String. Invalid type: ${describeTypeOfValue(v)}`); this.write(v); break; case Kind.Number: invariant(typeof v === 'number', - `Failed to write ${t.describe()}. Invalid type: ${describeType(v)}`); - this.writeFloat(v); // TODO: Verify value fits in type + `Failed to write Number. Invalid type: ${describeTypeOfValue(v)}`); + this.writeFloat(v); break; case Kind.List: { invariant(v instanceof NomsList || v instanceof Sequence, - `Failed to write List. Invalid type: ${describeType(v)}`); + `Failed to write List. Invalid type: ${describeTypeOfValue(v)}`); const sequence: Sequence = v instanceof NomsList ? 
v.sequence : v; - if (this.maybeWriteMetaSequence(sequence, t, pkg)) { + if (this.maybeWriteMetaSequence(sequence, t)) { break; } invariant(sequence instanceof ListLeafSequence); const w2 = new JsonArrayWriter(this._ds); const elemType = t.elemTypes[0]; - sequence.items.forEach(sv => w2.writeValue(sv, elemType, pkg)); + sequence.items.forEach(sv => w2.writeValue(sv, elemType)); this.write(w2.array); break; } case Kind.Map: { invariant(v instanceof NomsMap || v instanceof Sequence, - `Failed to write Map. Invalid type: ${describeType(v)}`); + `Failed to write Map. Invalid type: ${describeTypeOfValue(v)}`); const sequence: Sequence = v instanceof NomsMap ? v.sequence : v; - if (this.maybeWriteMetaSequence(sequence, t, pkg)) { + if (this.maybeWriteMetaSequence(sequence, t)) { break; } @@ -176,35 +169,24 @@ export class JsonArrayWriter { const keyType = t.elemTypes[0]; const valueType = t.elemTypes[1]; sequence.items.forEach(entry => { - w2.writeValue(entry.key, keyType, pkg); - w2.writeValue(entry.value, valueType, pkg); + w2.writeValue(entry.key, keyType); + w2.writeValue(entry.value, valueType); }); this.write(w2.array); break; } - case Kind.Package: { - invariant(v instanceof Package, - `Failed to write Package. Invalid type: ${describeType(v)}`); - const w2 = new JsonArrayWriter(this._ds); - v.types.forEach(type => w2.writeValue(type, typeType, v)); - this.write(w2.array); - const w3 = new JsonArrayWriter(this._ds); - v.dependencies.forEach(ref => w3.writeRef(ref)); - this.write(w3.array); - break; - } case Kind.Ref: { invariant(v instanceof RefValue, - `Failed to write Ref. Invalid type: ${describeType(v)}`); + `Failed to write Ref. Invalid type: ${describeTypeOfValue(v)}`); this.writeRef(v.targetRef); break; } case Kind.Set: { invariant(v instanceof NomsSet || v instanceof Sequence, - `Failed to write Set. Invalid type: ${describeType(v)}`); + `Failed to write Set. Invalid type: ${describeTypeOfValue(v)}`); const sequence: Sequence = v instanceof NomsSet ? v.sequence : v; - if (this.maybeWriteMetaSequence(sequence, t, pkg)) { + if (this.maybeWriteMetaSequence(sequence, t)) { break; } @@ -215,166 +197,118 @@ export class JsonArrayWriter { sequence.items.forEach(v => { elems.push(v); }); - elems.forEach(elem => w2.writeValue(elem, elemType, pkg)); + elems.forEach(elem => w2.writeValue(elem, elemType)); this.write(w2.array); break; } case Kind.Type: { invariant(v instanceof Type, - `Failed to write Type. Invalid type: ${describeType(v)}`); - this.writeTypeAsValue(v, pkg); - break; - } - case Kind.Unresolved: { - if (t.hasPackageRef) { - pkg = lookupPackage(t.packageRef); - } - pkg = notNull(pkg); - this.writeUnresolvedKindValue(v, t, pkg); + `Failed to write Type. 
Invalid type: ${describeTypeOfValue(v)}`); + this.writeTypeAsValue(v, []); break; } case Kind.Value: { const valueType = getTypeOfValue(v); - this.writeTypeAsTag(valueType); - this.writeValue(v, valueType, pkg); + this.writeTypeAsTag(valueType, []); + this.writeValue(v, valueType); break; } + case Kind.Struct: + this.writeStruct(v); + break; default: throw new Error(`Not implemented: ${t.kind} ${v}`); } } - writeTypeAsValue(t: Type, pkg: ?Package) { + writeTypeAsValue(t: Type, backRefs: Type[]) { const k = t.kind; - this.writeKind(k); switch (k) { case Kind.List: case Kind.Map: case Kind.Ref: case Kind.Set: { + this.writeKind(k); const w2 = new JsonArrayWriter(this._ds); - t.elemTypes.forEach(elem => w2.writeTypeAsValue(elem, pkg)); + t.elemTypes.forEach(elem => w2.writeTypeAsValue(elem, backRefs)); this.write(w2.array); break; } case Kind.Struct: { - const desc = t.desc; - invariant(desc instanceof StructDesc); - this.write(t.name); - const fieldWriter = new JsonArrayWriter(this._ds); - desc.fields.forEach(field => { - fieldWriter.write(field.name); - fieldWriter.writeTypeAsValue(field.t, pkg); - fieldWriter.write(field.optional); - }); - this.write(fieldWriter.array); - const choiceWriter = new JsonArrayWriter(this._ds); - desc.union.forEach(choice => { - choiceWriter.write(choice.name); - choiceWriter.writeTypeAsValue(choice.t, pkg); - choiceWriter.write(choice.optional); - }); - this.write(choiceWriter.array); - break; - } - case Kind.Unresolved: { - const pkgRef = t.packageRef; - // When we compute the ref for the package the first time it does not have a ref. - const isCurrentPackage = pkg && pkg.ref && pkg.ref.equals(pkgRef); - if (isCurrentPackage) { - this.writeRef(emptyRef); - } else { - this.writeRef(pkgRef); - } - const ordinal = t.ordinal; - this.writeInt(ordinal); - if (ordinal === -1) { - this.write(t.namespace); - this.write(t.name); - } - - if (!isCurrentPackage) { - const pkg = lookupPackage(pkgRef); - if (this._ds && pkg) { - this._ds.writeValue(pkg); - } - } - - break; - } - - default: { - invariant(isPrimitiveKind(k)); - } - } - } - - writeUnresolvedKindValue(v: any, t: Type, pkg: Package) { - const typeDef = pkg.types[t.ordinal]; - switch (typeDef.kind) { - case Kind.Struct: { - invariant(v instanceof Struct, - `Failed to write ${typeDef.describe()}. Invalid type: ${describeType(v)}`); - this.writeStruct(v, t, typeDef, pkg); + this.writeStructType(t, backRefs); break; } default: - throw new Error('Not reached'); + invariant(isPrimitiveKind(k)); + this.writeKind(k); } } + writeStructType(t: Type, backRefs: Type[]) { + const i = backRefs.indexOf(t); + if (i !== -1) { + this.writeBackRef(backRefs.length - i - 1); + return; + } + + + backRefs = backRefs.concat(t); // we want a new array here. + const desc = t.desc; + invariant(desc instanceof StructDesc); + this.writeKind(t.kind); + this.write(t.name); + const fieldWriter = new JsonArrayWriter(this._ds); + desc.fields.forEach(field => { + fieldWriter.write(field.name); + fieldWriter.writeTypeAsTag(field.t, backRefs); + fieldWriter.write(field.optional); + }); + this.write(fieldWriter.array); + const choiceWriter = new JsonArrayWriter(this._ds); + desc.union.forEach(choice => { + choiceWriter.write(choice.name); + choiceWriter.writeTypeAsTag(choice.t, backRefs); + choiceWriter.write(choice.optional); + }); + this.write(choiceWriter.array); + } + + writeBackRef(i: number) { + this.write(Kind.BackRef); + this.writeUint8(i); + } + writeBlob(seq: BlobLeafSequence) { // HACK: The items property is declared as Array in Flow. 
invariant(seq.items instanceof Uint8Array); this.write(encodeBase64(seq.items)); } - writeStruct(s: Struct, type: Type, typeDef: Type, pkg: Package) { + writeStruct(s: Struct) { const mirror = new StructMirror(s); mirror.forEachField(field => { if (field.optional) { if (field.present) { this.writeBoolean(true); - this.writeValue(field.value, field.type, pkg); + this.writeValue(field.value, field.type); } else { this.writeBoolean(false); } } else { invariant(field.present); - this.writeValue(field.value, field.type, pkg); + this.writeValue(field.value, field.type); } }); if (mirror.hasUnion) { const {unionField} = mirror; this.writeInt(mirror.unionIndex); - this.writeValue(unionField.value, unionField.type, pkg); + this.writeValue(unionField.value, unionField.type); } } } -function getTypeOfValue(v: any): Type { - switch (typeof v) { - case 'object': - return v.type; - case 'string': - return stringType; - case 'boolean': - return boolType; - case 'number': - throw new Error('Encoding untagged numbers is not supported'); - default: - throw new Error('Unknown type'); - } -} - function encodeEmbeddedNomsValue(v: any, t: Type, ds: ?DataStore): Chunk { - if (v instanceof Package) { - // if (v.dependencies.length > 0) { - // throw new Error('Not implemented'); - // } - } - const w = new JsonArrayWriter(ds); w.writeTopLevel(t, v); return Chunk.fromString(typedTag + JSON.stringify(w.array)); diff --git a/js/src/fixup-type.js b/js/src/fixup-type.js deleted file mode 100644 index ac131a432e..0000000000 --- a/js/src/fixup-type.js +++ /dev/null @@ -1,64 +0,0 @@ -// @flow - -import { - CompoundDesc, - Field, - makeCompoundType, - makeStructType, - makeType, - PrimitiveDesc, - StructDesc, - Type, - UnresolvedDesc, -} from './type.js'; -import {Package} from './package.js'; -import {invariant, notNull} from './assert.js'; - -/** - * Goes through the type and returns a new type where all the empty refs have been replaced by - * the package ref. - */ -export default function fixupType(t: Type, pkg: ?Package): Type { - const desc = t.desc; - - if (desc instanceof CompoundDesc) { - let changed = false; - const newTypes = desc.elemTypes.map(t => { - const newT = fixupType(t, pkg); - if (newT === t) { - return t; - } - changed = true; - return newT; - }); - - return changed ? makeCompoundType(t.kind, ...newTypes) : t; - } - - if (desc instanceof UnresolvedDesc) { - if (t.hasPackageRef) { - return t; - } - - return makeType(notNull(pkg).ref, t.ordinal); - } - - if (desc instanceof StructDesc) { - let changed = false; - const fixField = f => { - const newT = fixupType(f.t, pkg); - if (newT === t) { - return f; - } - changed = true; - return new Field(f.name, newT, f.optional); - }; - - const newFields = desc.fields.map(fixField); - const newUnion = desc.union.map(fixField); - return changed ? 
makeStructType(t.name, newFields, newUnion) : t; - } - - invariant(desc instanceof PrimitiveDesc); - return t; -} diff --git a/js/src/list-test.js b/js/src/list-test.js index 92e3e23b78..a9e00aeee9 100644 --- a/js/src/list-test.js +++ b/js/src/list-test.js @@ -10,9 +10,9 @@ import {newStruct} from './struct.js'; import {calcSplices} from './edit-distance.js'; import { Field, - makeCompoundType, makeStructType, - makeType, + makeRefType, + makeListType, numberType, stringType, valueType, @@ -20,9 +20,7 @@ import { import {flatten, flattenParallel} from './test-util.js'; import {IndexedMetaSequence, MetaTuple} from './meta-sequence.js'; import {invariant} from './assert.js'; -import {Kind} from './noms-kind.js'; import {ListLeafSequence, newList, NomsList} from './list.js'; -import {Package, registerPackage} from './package.js'; import type {Type} from './type.js'; const testListSize = 5000; @@ -52,7 +50,7 @@ suite('BuildList', () => { test('LONG: set of n numbers, length', async () => { const nums = firstNNumbers(testListSize); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const s = await newList(nums, tr); assert.strictEqual(s.ref.toString(), listOfNRef); assert.strictEqual(testListSize, s.length); @@ -61,30 +59,26 @@ suite('BuildList', () => { test('LONG: list of ref, set of n numbers, length', async () => { const nums = firstNNumbers(testListSize); - const structTypeDef = makeStructType('num', [ + const structType = makeStructType('num', [ new Field('n', numberType, false), ], []); - const pkg = new Package([structTypeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const structType = makeType(pkgRef, 0); - const refOfStructType = makeCompoundType(Kind.Ref, structType); - const tr = makeCompoundType(Kind.List, refOfStructType); + const refOfStructType = makeRefType(structType); + const tr = makeListType(refOfStructType); const refs = nums.map(n => { - const s = newStruct(structType, structTypeDef, {n}); + const s = newStruct(structType, {n}); const r = s.ref; return new RefValue(r, refOfStructType); }); const s = await newList(refs, tr); - assert.strictEqual(s.ref.toString(), 'sha1-f2e6c3aae6e8ac4c3776830e2d8141fc527c55c5'); + assert.strictEqual(s.ref.toString(), 'sha1-f2db6a2f8026ee6e12bb584cd38c813604774a69'); assert.strictEqual(testListSize, s.length); }); test('LONG: toJS', async () => { const nums = firstNNumbers(5000); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const s = await newList(nums, tr); assert.strictEqual(s.ref.toString(), listOfNRef); assert.strictEqual(testListSize, s.length); @@ -104,7 +98,7 @@ suite('BuildList', () => { test('LONG: insert', async () => { const nums = firstNNumbers(testListSize - 10); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); let s = await newList(nums, tr); for (let i = testListSize - 10; i < testListSize; i++) { @@ -116,7 +110,7 @@ suite('BuildList', () => { test('LONG: append', async () => { const nums = firstNNumbers(testListSize - 10); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); let s = await newList(nums, tr); for (let i = testListSize - 10; i < testListSize; i++) { @@ -128,7 +122,7 @@ suite('BuildList', () => { test('LONG: remove', async () => { const nums = firstNNumbers(testListSize + 10); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); let s = await newList(nums, tr); let count = 10; @@ 
-141,7 +135,7 @@ suite('BuildList', () => { test('LONG: splice', async () => { const nums = firstNNumbers(testListSize); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); let s = await newList(nums, tr); const splice500At = async (idx: number) => { @@ -162,7 +156,7 @@ suite('BuildList', () => { const ds = new DataStore(ms); const nums = firstNNumbers(testListSize); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const s = await newList(nums, tr); const r = ds.writeValue(s).targetRef; const s2 = await ds.readValue(r); @@ -181,7 +175,7 @@ suite('ListLeafSequence', () => { test('isEmpty', () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.List, stringType); + const tr = makeListType(stringType); const newList = items => new NomsList(tr, new ListLeafSequence(ds, tr, items)); assert.isTrue(newList([]).isEmpty()); assert.isFalse(newList(['z', 'x', 'a', 'b']).isEmpty()); @@ -190,7 +184,7 @@ suite('ListLeafSequence', () => { test('get', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.List, stringType); + const tr = makeListType(stringType); const l = new NomsList(tr, new ListLeafSequence(ds, tr, ['z', 'x', 'a', 'b'])); assert.strictEqual('z', await l.get(0)); assert.strictEqual('x', await l.get(1)); @@ -201,7 +195,7 @@ suite('ListLeafSequence', () => { test('forEach', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const l = new NomsList(tr, new ListLeafSequence(ds, tr, [4, 2, 10, 16])); const values = []; @@ -212,7 +206,7 @@ suite('ListLeafSequence', () => { test('iterator', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const test = async items => { const l = new NomsList(tr, new ListLeafSequence(ds, tr, items)); @@ -228,7 +222,7 @@ suite('ListLeafSequence', () => { test('iteratorAt', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const test = async items => { const l = new NomsList(tr, new ListLeafSequence(ds, tr, items)); @@ -247,7 +241,7 @@ suite('ListLeafSequence', () => { function testChunks(elemType: Type) { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.List, elemType); + const tr = makeListType(elemType); const r1 = ds.writeValue('x'); const r2 = ds.writeValue('a'); const r3 = ds.writeValue('b'); @@ -271,7 +265,7 @@ suite('CompoundList', () => { function build(): NomsList { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.List, stringType); + const tr = makeListType(stringType); const l1 = new NomsList(tr, new ListLeafSequence(ds, tr, ['a', 'b'])); const r1 = ds.writeValue(l1).targetRef; const l2 = new NomsList(tr, new ListLeafSequence(ds, tr, ['e', 'f'])); @@ -406,7 +400,7 @@ suite('Diff List', () => { const directDiff = calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const l1 = await newList(nums1, tr); const l2 = await newList(nums2, tr); @@ -425,7 +419,7 @@ suite('Diff List', () => { const directDiff = 
calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const l1 = await newList(nums1, tr); const l2 = await newList(nums2, tr); @@ -444,7 +438,7 @@ suite('Diff List', () => { } const directDiff = calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const l1 = await newList(nums1, tr); const l2 = await newList(nums2, tr); @@ -457,7 +451,7 @@ suite('Diff List', () => { const nums2 = firstNNumbers(5000); const directDiff = calcSplices(nums1.length, nums2.length, (i, j) => nums1[i] === nums2[j]); - const tr = makeCompoundType(Kind.List, numberType); + const tr = makeListType(numberType); const l1 = await newList(nums1, tr); const l2 = await newList(nums2, tr); diff --git a/js/src/map-test.js b/js/src/map-test.js index 504f30658e..5d1d825723 100644 --- a/js/src/map-test.js +++ b/js/src/map-test.js @@ -11,9 +11,8 @@ import { boolType, Field, makeMapType, - makeCompoundType, + makeRefType, makeStructType, - makeType, numberType, stringType, valueType, @@ -21,10 +20,8 @@ import { import {flatten, flattenParallel} from './test-util.js'; import {invariant} from './assert.js'; import Chunk from './chunk.js'; -import {Kind} from './noms-kind.js'; import {MapLeafSequence, newMap, NomsMap} from './map.js'; import {MetaTuple, OrderedMetaSequence} from './meta-sequence.js'; -import {Package, registerPackage} from './package.js'; import Ref from './ref.js'; import type {Type} from './type.js'; @@ -80,7 +77,7 @@ suite('BuildMap', () => { kvs.push(i, i + 1); } - const tr = makeCompoundType(Kind.Map, numberType, numberType); + const tr = makeMapType(numberType, numberType); const m = await newMap(kvs, tr); assert.strictEqual(m.ref.toString(), mapOfNRef); @@ -102,24 +99,20 @@ suite('BuildMap', () => { kvs.push(i, i + 1); } - const structTypeDef = makeStructType('num', [ + const structType = makeStructType('num', [ new Field('n', numberType, false), ], []); - const pkg = new Package([structTypeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const structType = makeType(pkgRef, 0); - const refOfStructType = makeCompoundType(Kind.Ref, structType); - const tr = makeCompoundType(Kind.Map, refOfStructType, refOfStructType); + const refOfStructType = makeRefType(structType); + const tr = makeMapType(refOfStructType, refOfStructType); const kvRefs = kvs.map(n => { - const s = newStruct(structType, structTypeDef, {n}); + const s = newStruct(structType, {n}); const r = s.ref; return new RefValue(r, refOfStructType); }); const m = await newMap(kvRefs, tr); - assert.strictEqual(m.ref.toString(), 'sha1-f440a024602218f2373063281d233f69e449a64a'); + assert.strictEqual(m.ref.toString(), 'sha1-d10e24f082d6f8270ef5809168f2f2466b126dc6'); }); test('LONG: set', async () => { @@ -128,7 +121,7 @@ suite('BuildMap', () => { kvs.push(i, i + 1); } - const tr = makeCompoundType(Kind.Map, numberType, numberType); + const tr = makeMapType(numberType, numberType); let m = await newMap(kvs, tr); for (let i = testMapSize - 10; i < testMapSize; i++) { m = await m.set(i, i + 1); @@ -144,7 +137,7 @@ suite('BuildMap', () => { kvs.push(i, i + 1); } - const tr = makeCompoundType(Kind.Map, numberType, numberType); + const tr = makeMapType(numberType, numberType); let m = await newMap(kvs, tr); for (let i = 0; i < testMapSize; i++) { m = await m.set(i, i + 1); @@ -160,7 +153,7 @@ suite('BuildMap', () => { 
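// Illustrative sketch, not part of the patch: newMap takes a flat
// [k, v, k, v, ...] array plus a map type built with makeMapType (which these
// tests now use in place of makeCompoundType(Kind.Map, ...)). Assumes the
// imports already present in this file and an enclosing async test.
const pairTr = makeMapType(stringType, numberType);
const pairs = await newMap(['a', 1, 'b', 2], pairTr);
assert.strictEqual(2, pairs.size);
assert.isTrue(await pairs.has('a'));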
kvs.push(i, i + 1); } - const tr = makeCompoundType(Kind.Map, numberType, numberType); + const tr = makeMapType(numberType, numberType); let m = await newMap(kvs, tr); for (let i = testMapSize; i < testMapSize + 10; i++) { m = await m.remove(i); @@ -179,7 +172,7 @@ suite('BuildMap', () => { kvs.push(i, i + 1); } - const tr = makeCompoundType(Kind.Map, numberType, numberType); + const tr = makeMapType(numberType, numberType); const m = await newMap(kvs, tr); const r = ds.writeValue(m).targetRef; @@ -203,7 +196,7 @@ suite('MapLeaf', () => { test('isEmpty/size', () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Map, stringType, boolType); + const tr = makeMapType(stringType, boolType); const newMap = entries => new NomsMap(tr, new MapLeafSequence(ds, tr, entries)); let m = newMap([]); assert.isTrue(m.isEmpty()); @@ -216,7 +209,7 @@ suite('MapLeaf', () => { test('has', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Map, stringType, boolType); + const tr = makeMapType(stringType, boolType); const m = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: false}, {key:'k', value:true}])); assert.isTrue(await m.has('a')); @@ -228,7 +221,7 @@ suite('MapLeaf', () => { test('first/last/get', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Map, stringType, numberType); + const tr = makeMapType(stringType, numberType); const m = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: 4}, {key:'k', value:8}])); @@ -244,7 +237,7 @@ suite('MapLeaf', () => { test('forEach', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Map, stringType, numberType); + const tr = makeMapType(stringType, numberType); const m = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: 4}, {key:'k', value:8}])); @@ -256,7 +249,7 @@ suite('MapLeaf', () => { test('iterator', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Map, stringType, numberType); + const tr = makeMapType(stringType, numberType); const test = async entries => { const m = new NomsMap(tr, new MapLeafSequence(ds, tr, entries)); @@ -272,7 +265,7 @@ suite('MapLeaf', () => { test('LONG: iteratorAt', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Map, stringType, numberType); + const tr = makeMapType(stringType, numberType); const build = entries => new NomsMap(tr, new MapLeafSequence(ds, tr, entries)); assert.deepEqual([], await flatten(build([]).iteratorAt('a'))); @@ -297,7 +290,7 @@ suite('MapLeaf', () => { function testChunks(keyType: Type, valueType: Type) { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Map, keyType, valueType); + const tr = makeMapType(keyType, valueType); const r1 = ds.writeValue('x'); const r2 = ds.writeValue(true); const r3 = ds.writeValue('b'); @@ -322,7 +315,7 @@ suite('MapLeaf', () => { suite('CompoundMap', () => { function build(ds: DataStore): Array { - const tr = makeCompoundType(Kind.Map, stringType, + const tr = makeMapType(stringType, boolType); const l1 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: false}, {key:'b', value:false}])); @@ -492,7 +485,7 @@ suite('CompoundMap', () => { async function testRandomDiff(mapSize: number, inM1: number, inM2: number, inBoth: number) { invariant(inM1 + 
inM2 + inBoth <= 1); - const tr = makeCompoundType(Kind.Map, numberType, stringType); + const tr = makeMapType(numberType, stringType); const kv1 = [], kv2 = [], added = [], removed = [], modified = []; // Randomly populate kv1/kv2 which will be the contents of m1/m2 respectively, and record which diff --git a/js/src/noms-kind.js b/js/src/noms-kind.js index 1dd56c6df8..40c7760edb 100644 --- a/js/src/noms-kind.js +++ b/js/src/noms-kind.js @@ -14,8 +14,7 @@ export const Kind: { Set: NomsKind, Struct: NomsKind, Type: NomsKind, - Unresolved: NomsKind, - Package: NomsKind, + BackRef: NomsKind, } = { Bool: 0, Number: 1, @@ -28,8 +27,7 @@ export const Kind: { Set: 8, Struct: 9, Type: 10, - Unresolved: 11, - Package: 12, + BackRef: 11, }; const kindToStringMap: { [key: number]: string } = Object.create(null); @@ -44,10 +42,9 @@ kindToStringMap[Kind.Ref] = 'Ref'; kindToStringMap[Kind.Set] = 'Set'; kindToStringMap[Kind.Struct] = 'Struct'; kindToStringMap[Kind.Type] = 'Type'; -kindToStringMap[Kind.Unresolved] = 'Unresolved'; -kindToStringMap[Kind.Package] = 'Package'; +kindToStringMap[Kind.BackRef] = 'BackRef'; -export function kindToString(kind: number): string { +export function kindToString(kind: NomsKind): string { return kindToStringMap[kind]; } @@ -59,7 +56,6 @@ export function isPrimitiveKind(k: NomsKind): boolean { case Kind.Blob: case Kind.Value: case Kind.Type: - case Kind.Package: return true; default: return false; diff --git a/js/src/noms.js b/js/src/noms.js index 94b2a80b5f..2dfdf65036 100644 --- a/js/src/noms.js +++ b/js/src/noms.js @@ -20,7 +20,6 @@ export { export {encodeNomsValue} from './encode.js'; export {invariant, notNull} from './assert.js'; export {isPrimitiveKind, Kind, kindToString} from './noms-kind.js'; -export {lookupPackage, Package, readPackage, registerPackage} from './package.js'; export {newList, ListLeafSequence, NomsList} from './list.js'; export {newMap, NomsMap, MapLeafSequence} from './map.js'; export {newSet, NomsSet, SetLeafSequence} from './set.js'; @@ -38,17 +37,14 @@ export { makeRefType, makeSetType, makeStructType, - makeType, - makeUnresolvedType, numberType, - packageType, PrimitiveDesc, stringType, StructDesc, Type, typeType, - UnresolvedDesc, valueType, + getTypeOfValue, } from './type.js'; export {equals, less} from './compare.js'; diff --git a/js/src/package.js b/js/src/package.js deleted file mode 100644 index 5f229de719..0000000000 --- a/js/src/package.js +++ /dev/null @@ -1,71 +0,0 @@ -// @flow - -import type Ref from './ref.js'; -import RefValue from './ref-value.js'; -import {invariant} from './assert.js'; -import type {Type} from './type.js'; -import {packageType, packageRefType} from './type.js'; -import {ValueBase} from './value.js'; -import type DataStore from './data-store.js'; -import {getRef} from './get-ref.js'; -import fixupType from './fixup-type.js'; - -export class Package extends ValueBase { - types: Array; - dependencies: Array; - _ref: Ref; - - constructor(types: Array, dependencies: Array) { - super(); - this.types = types; - this.dependencies = dependencies; - this._ref = getRef(this, this.type); - this.types = types.map(t => fixupType(t, this)); - } - - get ref(): Ref { - return this._ref; - } - - get type(): Type { - return packageType; - } - - get chunks(): Array { - const chunks = []; - for (let i = 0; i < this.types.length; i++) { - chunks.push(...this.types[i].chunks); - } - for (let i = 0; i < this.dependencies.length; i++) { - chunks.push(new RefValue(this.dependencies[i], packageRefType)); - } - return chunks; - } -} 
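// --- Illustrative aside, not part of the patch ------------------------------
// With js/src/package.js deleted, a self-referential struct type is now built
// directly: pass a placeholder for the cyclic field, then patch the StructDesc
// in place, exactly as the tests elsewhere in this diff do. The 'Node' type is
// hypothetical; only exports already shown in this diff are assumed.
import {Field, makeStructType, stringType, valueType, StructDesc} from './type.js';
import {invariant} from './assert.js';
import {newStruct} from './struct.js';

// struct Node { label: String, next: optional Node }
const nodeType = makeStructType('Node', [
  new Field('label', stringType, false),
  new Field('next', valueType /* placeholder */, true),
], []);
invariant(nodeType.desc instanceof StructDesc);
nodeType.desc.fields[1].t = nodeType; // close the cycle with a back reference

// No registerPackage/lookupPackage step is needed any more; newStruct now
// takes only (type, data).
const tail = newStruct(nodeType, {label: 'b'});
const head = newStruct(nodeType, {label: 'a', next: tail});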
- -const packageRegistry: {[key: string]: Package} = Object.create(null); - -export function lookupPackage(r: Ref): ?Package { - return packageRegistry[r.toString()]; -} - -export function registerPackage(p: Package) { - packageRegistry[p.ref.toString()] = p; -} - -const pendingPackages: {[key: string]: Promise} = Object.create(null); - -export function readPackage(r: Ref, ds: DataStore): Promise { - const refStr = r.toString(); - const p = pendingPackages[refStr]; - if (p) { - return p; - } - - return pendingPackages[refStr] = ds.readValue(r).then(p => { - invariant(p instanceof Package); - registerPackage(p); - delete pendingPackages[refStr]; - return p; - }); -} diff --git a/js/src/set-test.js b/js/src/set-test.js index a2a15060d6..00fc94b04e 100644 --- a/js/src/set-test.js +++ b/js/src/set-test.js @@ -11,21 +11,18 @@ import {newStruct} from './struct.js'; import { boolType, Field, - makeCompoundType, makeSetType, + makeRefType, makeStructType, - makeType, numberType, stringType, valueType, } from './type.js'; import {flatten, flattenParallel} from './test-util.js'; import {invariant, notNull} from './assert.js'; -import {Kind} from './noms-kind.js'; import {MetaTuple, OrderedMetaSequence} from './meta-sequence.js'; import {newSet, NomsSet, SetLeafSequence} from './set.js'; import {OrderedSequence} from './ordered-sequence.js'; -import {Package, registerPackage} from './package.js'; import Ref from './ref.js'; import type {Type} from './type.js'; @@ -59,7 +56,7 @@ function firstNNumbers(n: number): Array { suite('BuildSet', () => { test('unique keys - strings', async () => { const strs = ['hello', 'world', 'hello']; - const tr = makeCompoundType(Kind.Set, stringType); + const tr = makeSetType(stringType); const s = await newSet(strs, tr); assert.strictEqual(2, s.size); assert.isTrue(await s.has('hello')); @@ -81,7 +78,7 @@ suite('BuildSet', () => { test('LONG: set of n numbers', async () => { const nums = firstNNumbers(testSetSize); - const tr = makeCompoundType(Kind.Set, numberType); + const tr = makeSetType(numberType); const s = await newSet(nums, tr); assert.strictEqual(s.ref.toString(), setOfNRef); @@ -94,30 +91,26 @@ suite('BuildSet', () => { test('LONG: set of ref, set of n numbers', async () => { const nums = firstNNumbers(testSetSize); - const structTypeDef = makeStructType('num', [ + const structType = makeStructType('num', [ new Field('n', numberType, false), ], []); - const pkg = new Package([structTypeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const structType = makeType(pkgRef, 0); - const refOfStructType = makeCompoundType(Kind.Ref, structType); - const tr = makeCompoundType(Kind.Set, refOfStructType); + const refOfStructType = makeRefType(structType); + const tr = makeSetType(refOfStructType); const refs = nums.map(n => { - const s = newStruct(structType, structTypeDef, {n}); + const s = newStruct(structType, {n}); const r = s.ref; return new RefValue(r, refOfStructType); }); const s = await newSet(refs, tr); - assert.strictEqual(s.ref.toString(), 'sha1-4c2b0e159ae443ec99299b6ea266d9a408f7987d'); + assert.strictEqual(s.ref.toString(), 'sha1-b06811c4abafef5e2198c04a81d3a300a709fd02'); }); test('LONG: insert', async () => { const nums = firstNNumbers(testSetSize - 10); - const tr = makeCompoundType(Kind.Set, numberType); + const tr = makeSetType(numberType); let s = await newSet(nums, tr); for (let i = testSetSize - 10; i < testSetSize; i++) { s = await s.insert(i); @@ -129,7 +122,7 @@ suite('BuildSet', () => { test('LONG: remove', async () => { const 
nums = firstNNumbers(testSetSize + 10); - const tr = makeCompoundType(Kind.Set, numberType); + const tr = makeSetType(numberType); let s = await newSet(nums, tr); let count = 10; while (count-- > 0) { @@ -145,7 +138,7 @@ suite('BuildSet', () => { const ds = new DataStore(ms); const nums = firstNNumbers(testSetSize); - const tr = makeCompoundType(Kind.Set, numberType); + const tr = makeSetType(numberType); const s = await newSet(nums, tr); const r = ds.writeValue(s).targetRef; const s2 = await ds.readValue(r); @@ -168,7 +161,7 @@ suite('SetLeaf', () => { test('isEmpty/size', () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, stringType); + const tr = makeSetType(stringType); const newSet = items => new NomsSet(tr, new SetLeafSequence(ds, tr, items)); let s = newSet([]); assert.isTrue(s.isEmpty()); @@ -181,7 +174,7 @@ suite('SetLeaf', () => { test('first/last/has', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, stringType); + const tr = makeSetType(stringType); const s = new NomsSet(tr, new SetLeafSequence(ds, tr, ['a', 'k'])); assert.strictEqual('a', await s.first()); @@ -196,7 +189,7 @@ suite('SetLeaf', () => { test('forEach', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, stringType); + const tr = makeSetType(stringType); const m = new NomsSet(tr, new SetLeafSequence(ds, tr, ['a', 'b'])); const values = []; @@ -207,7 +200,7 @@ suite('SetLeaf', () => { test('iterator', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, stringType); + const tr = makeSetType(stringType); const test = async items => { const m = new NomsSet(tr, new SetLeafSequence(ds, tr, items)); @@ -223,7 +216,7 @@ suite('SetLeaf', () => { test('LONG: iteratorAt', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, stringType); + const tr = makeSetType(stringType); const build = items => new NomsSet(tr, new SetLeafSequence(ds, tr, items)); assert.deepEqual([], await flatten(build([]).iteratorAt('a'))); @@ -242,7 +235,7 @@ suite('SetLeaf', () => { function testChunks(elemType: Type) { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, elemType); + const tr = makeSetType(elemType); const r1 = ds.writeValue('x'); const r2 = ds.writeValue('a'); const r3 = ds.writeValue('b'); @@ -264,7 +257,7 @@ suite('SetLeaf', () => { suite('CompoundSet', () => { function build(ds: DataStore, values: Array): NomsSet { - const tr = makeCompoundType(Kind.Set, stringType); + const tr = makeSetType(stringType); assert.isTrue(values.length > 1 && Math.log2(values.length) % 1 === 0); let tuples = []; @@ -498,7 +491,7 @@ suite('CompoundSet', () => { test('iterator at 0', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, numberType); + const tr = makeSetType(numberType); const test = async (expected, items) => { const set = new NomsSet(tr, new SetLeafSequence(ds, tr, items)); @@ -522,7 +515,7 @@ suite('CompoundSet', () => { test('LONG: canned set diff', async () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const tr = makeCompoundType(Kind.Set, numberType); + const tr = makeSetType(numberType); const s1 = await newSet( firstNNumbers(testSetSize), tr).then(s => ds.readValue(ds.writeValue(s).targetRef)); @@ 
-549,7 +542,7 @@ suite('CompoundSet', () => { async function testRandomDiff(setSize: number, inS1: number, inS2: number): Promise { invariant(inS1 + inS2 <= 1); - const tr = makeCompoundType(Kind.Set, numberType); + const tr = makeSetType(numberType); const nums1 = [], nums2 = [], added = [], removed = []; // Randomly populate nums1/nums2 which will be the contents of s1/s2 respectively, and record diff --git a/js/src/struct-test.js b/js/src/struct-test.js index a4b8a70f43..b1d2178c22 100644 --- a/js/src/struct-test.js +++ b/js/src/struct-test.js @@ -6,33 +6,27 @@ import {assert} from 'chai'; import { boolType, Field, - makeCompoundType, - makeStructType, - makeType, numberType, + makeStructType, + makeRefType, stringType, + valueType, + StructDesc, } from './type.js'; -import {Kind} from './noms-kind.js'; -import {Package, registerPackage} from './package.js'; import {suite, test} from 'mocha'; import DataStore from './data-store.js'; -import {emptyRef} from './ref.js'; +import {invariant} from './assert.js'; suite('Struct', () => { test('equals', () => { - const typeDef = makeStructType('S1', [ + const type = makeStructType('S1', [ new Field('x', boolType, false), new Field('o', stringType, true), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - const data1 = {x: true}; - const s1 = newStruct(type, typeDef, data1); - const s2 = newStruct(type, typeDef, data1); + const s1 = newStruct(type, data1); + const s2 = newStruct(type, data1); assert.isTrue(s1.equals(s2)); }); @@ -42,139 +36,102 @@ suite('Struct', () => { const ds = new DataStore(ms); const bt = boolType; - const refOfBoolType = makeCompoundType(Kind.Ref, bt); - const typeDef = makeStructType('S1', [ + const refOfBoolType = makeRefType(bt); + const type = makeStructType('S1', [ new Field('r', refOfBoolType, false), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - const b = true; const r = ds.writeValue(b); - const s1 = newStruct(type, typeDef, {r: r}); - assert.strictEqual(2, s1.chunks.length); - assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef)); - assert.isTrue(r.equals(s1.chunks[1])); + const s1 = newStruct(type, {r: r}); + assert.strictEqual(1, s1.chunks.length); + assert.isTrue(r.equals(s1.chunks[0])); }); test('chunks optional', () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const bt = boolType; - const refOfBoolType = makeCompoundType(Kind.Ref, bt); - const typeDef = makeStructType('S1', [ + const refOfBoolType = makeRefType(boolType); + const type = makeStructType('S1', [ new Field('r', refOfBoolType, true), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); + const s1 = newStruct(type, {}); - const s1 = newStruct(type, typeDef, {}); - - assert.strictEqual(1, s1.chunks.length); - assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef)); + assert.strictEqual(0, s1.chunks.length); const b = true; const r = ds.writeValue(b); - const s2 = newStruct(type, typeDef, {r: r}); - assert.strictEqual(2, s2.chunks.length); - assert.isTrue(pkgRef.equals(s2.chunks[0].targetRef)); - assert.isTrue(r.equals(s2.chunks[1])); + const s2 = newStruct(type, {r: r}); + assert.strictEqual(1, s2.chunks.length); + assert.isTrue(r.equals(s2.chunks[0])); }); test('chunks union', () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const bt = boolType; - 
const refOfBoolType = makeCompoundType(Kind.Ref, bt); - const typeDef = makeStructType('S1', [], [ + const refOfBoolType = makeRefType(boolType); + const type = makeStructType('S1', [], [ new Field('r', refOfBoolType, false), new Field('s', stringType, false), ]); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - - const s1 = newStruct(type, typeDef, {s: 'hi'}); - assert.strictEqual(1, s1.chunks.length); - assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef)); + const s1 = newStruct(type, {s: 'hi'}); + assert.strictEqual(0, s1.chunks.length); const b = true; const r = ds.writeValue(b); - const s2 = newStruct(type, typeDef, {r: r}); - assert.strictEqual(2, s2.chunks.length); - assert.isTrue(pkgRef.equals(s2.chunks[0].targetRef)); - assert.isTrue(r.equals(s2.chunks[1])); + const s2 = newStruct(type, {r}); + assert.strictEqual(1, s2.chunks.length); + assert.isTrue(r.equals(s2.chunks[0])); }); test('new', () => { - const typeDef = makeStructType('S2', [ + const type = makeStructType('S2', [ new Field('b', boolType, false), new Field('o', stringType, true), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - - const s1 = newStruct(type, typeDef, {b: true}); + const s1 = newStruct(type, {b: true}); assert.strictEqual(true, s1.b); assert.strictEqual(s1.o, undefined); - const s2 = newStruct(type, typeDef, {b: false, o: 'hi'}); + const s2 = newStruct(type, {b: false, o: 'hi'}); assert.strictEqual(false, s2.b); assert.strictEqual('hi', s2.o); assert.throws(() => { - newStruct(type, typeDef, {o: 'hi'}); // missing required field + newStruct(type, {o: 'hi'}); // missing required field }); assert.throws(() => { - newStruct(type, typeDef, {x: 'hi'}); // unknown field + newStruct(type, {x: 'hi'}); // unknown field }); - const s3 = newStruct(type, typeDef, {b: true, o: undefined}); + const s3 = newStruct(type, {b: true, o: undefined}); assert.isTrue(s1.equals(s3)); }); test('new union', () => { - const typeDef = makeStructType('S3', [], [ + const type = makeStructType('S3', [], [ new Field('b', boolType, false), new Field('o', stringType, false), ]); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - - const s1 = newStruct(type, typeDef, {b: true}); + const s1 = newStruct(type, {b: true}); assert.strictEqual(true, s1.b); assert.strictEqual(s1.o, undefined); }); test('struct set', () => { - const typeDef = makeStructType('S3', [ + const type = makeStructType('S3', [ new Field('b', boolType, false), new Field('o', stringType, true), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - - const s1 = newStruct(type, typeDef, {b: true}); + const s1 = newStruct(type, {b: true}); const s2 = s1.setB(false); // TODO: assert throws on set wrong type @@ -195,17 +152,12 @@ suite('Struct', () => { }); test('struct set union', () => { - const typeDef = makeStructType('S3', [], [ + const type = makeStructType('S3', [], [ new Field('b', boolType, false), new Field('s', stringType, false), ]); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - - const s1 = newStruct(type, typeDef, {b: true}); + const s1 = newStruct(type, {b: true}); const m1 = new StructMirror(s1); assert.strictEqual(0, m1.unionIndex); 
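// Illustrative sketch, not part of the patch: StructMirror now reads union
// info straight from the type's own StructDesc (no separate typeDef). The
// 'Pet' type is hypothetical; it assumes only imports already used in this
// test file.
const petType = makeStructType('Pet', [], [
  new Field('dog', boolType, false),
  new Field('nickname', stringType, false),
]);
const pet = newStruct(petType, {nickname: 'Rex'});
const petMirror = new StructMirror(pet);
// The second union arm ('nickname') is the one set, so unionIndex is 1 and
// unionValue holds its value.
assert.strictEqual(1, petMirror.unionIndex);
assert.strictEqual('Rex', petMirror.unionValue);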
assert.strictEqual(true, m1.unionValue); @@ -223,50 +175,28 @@ suite('Struct', () => { }); test('type assertion on construct', () => { - const typeDef = makeStructType('S3', [ - new Field('b', boolType, false), - ], []); - - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - assert.throws(() => { - newStruct(type, type, {b: true}); - }); - - assert.throws(() => { - newStruct(typeDef, typeDef, {b: true}); + newStruct(boolType, {b: true}); }); }); test('named union', () => { - - const pkg = new Package([ - makeStructType('StructWithUnions', [ - new Field('a', makeType(emptyRef, 1), false), - new Field('d', makeType(emptyRef, 2), false), - ], []), - makeStructType('', [], [ - new Field('b', numberType, false), - new Field('c', stringType, false), - ]), - makeStructType('', [], [ - new Field('e', numberType, false), - new Field('f', stringType, false), - ]), + const typeA = makeStructType('', [], [ + new Field('b', numberType, false), + new Field('c', stringType, false), + ]); + const typeD = makeStructType('', [], [ + new Field('e', numberType, false), + new Field('f', stringType, false), + ]); + const type = makeStructType('StructWithUnions', [ + new Field('a', typeA, false), + new Field('d', typeD, false), ], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const [typeDef, typeDefA, typeDefD] = pkg.types; - const type = makeType(pkgRef, 0); - const typeA = makeType(pkgRef, 1); - const typeD = makeType(pkgRef, 2); - const StructWithUnions = createStructClass(type, typeDef); - const A = createStructClass(typeA, typeDefA); - const D = createStructClass(typeD, typeDefD); + const StructWithUnions = createStructClass(type); + const A = createStructClass(typeA); + const D = createStructClass(typeD); const s = new StructWithUnions({ a: new A({b: 1}), @@ -291,24 +221,38 @@ suite('Struct', () => { }); test('type validation', () => { - const typeDef = makeStructType('S1', [ + const type = makeStructType('S1', [ new Field('x', boolType, false), new Field('o', stringType, true), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - assert.throws(() => { - newStruct(type, typeDef, {x: 1}); + newStruct(type, {x: 1}); }); assert.throws(() => { - newStruct(type, typeDef, {o: 1}); + newStruct(type, {o: 1}); }); - newStruct(type, typeDef, {x: true, o: undefined}); - newStruct(type, typeDef, {x: true}); + newStruct(type, {x: true, o: undefined}); + newStruct(type, {x: true}); + }); + + test('type validation cyclic', () => { + const type = makeStructType('S', [ + new Field('b', boolType, false), + new Field('o', valueType /* placeholder */, true), + ], []); + invariant(type.desc instanceof StructDesc); + type.desc.fields[1].t = type; + + newStruct(type, {b: true}); + newStruct(type, {b: true, o: newStruct(type, {b: false})}); + + assert.throws(() => { + newStruct(type, {b: 1}); + }); + assert.throws(() => { + newStruct(type, {b: true, o: 1}); + }); }); }); diff --git a/js/src/struct.js b/js/src/struct.js index 3417c55b5a..bd927eace9 100644 --- a/js/src/struct.js +++ b/js/src/struct.js @@ -36,22 +36,18 @@ type StructData = {[key: string]: ?valueOrPrimitive}; export default class Struct extends ValueBase { _data: StructData; _type: Type; - _typeDef: Type; - - constructor(type: Type, typeDef: Type, data: StructData) { + constructor(type: Type, data: StructData) { super(); - invariant(type.kind === Kind.Unresolved); - invariant(typeDef.kind === 
Kind.Struct); + invariant(type.kind === Kind.Struct); // TODO: Even in dev mode there are paths where the passed in data has already been validated. if (process.env.NODE_ENV !== 'production') { - validate(typeDef, data); + validate(type, data); } this._type = type; - this._typeDef = typeDef; this._data = data; } @@ -62,7 +58,6 @@ export default class Struct extends ValueBase { get chunks(): Array { const mirror = new StructMirror(this); const chunks = []; - chunks.push(...this.type.chunks); const add = field => { if (!field.present) { @@ -83,9 +78,9 @@ export default class Struct extends ValueBase { } } -function validate(typeDef: Type, data: StructData): void { +function validate(type: Type, data: StructData): void { // TODO: Validate field values match field types. - const {desc} = typeDef; + const {desc} = type; invariant(desc instanceof StructDesc); const {fields} = desc; let dataCount = Object.keys(data).length; @@ -159,18 +154,16 @@ type FieldCallback = (f: StructFieldMirror) => void; export class StructMirror { _data: StructData; - _type :Type; - typeDef: Type; + type :Type; constructor(s: Struct) { this._data = s._data; - this._type = s.type; - this.typeDef = s._typeDef; + this.type = s.type; } get desc(): StructDesc { - invariant(this.typeDef.desc instanceof StructDesc); - return this.typeDef.desc; + invariant(this.type.desc instanceof StructDesc); + return this.type.desc; } forEachField(cb: FieldCallback) { @@ -195,7 +188,7 @@ export class StructMirror { } get name(): string { - return this.typeDef.name; + return this.type.name; } get(name: string): ?valueOrPrimitive { @@ -208,7 +201,7 @@ export class StructMirror { set(name: string, value: ?valueOrPrimitive): T { const data = addProperty(this, name, value); - return newStruct(this._type, this.typeDef, data); + return newStruct(this.type, data); } } @@ -218,7 +211,7 @@ function setterName(name) { return `set${name[0].toUpperCase()}${name.slice(1)}`; } -export function createStructClass(type: Type, typeDef: Type): Class { +export function createStructClass(type: Type): Class { const k = type.ref.toString(); if (cache[k]) { return cache[k]; @@ -226,11 +219,11 @@ export function createStructClass(type: Type, typeDef: Type): Class(type: Type, typeDef: Type, data: StructData): T { - const c = createStructClass(type, typeDef); +export function newStruct(type: Type, data: StructData): T { + const c = createStructClass(type); return new c(data); } diff --git a/js/src/type-test.js b/js/src/type-test.js index 5593f63e70..dcda34786d 100644 --- a/js/src/type-test.js +++ b/js/src/type-test.js @@ -1,20 +1,17 @@ // @flow import MemoryStore from './memory-store.js'; -import {default as Ref, emptyRef} from './ref.js'; import {assert} from 'chai'; import { boolType, Field, - makeCompoundType, + makeMapType, + makeSetType, makeStructType, - makeType, numberType, stringType, typeType, } from './type.js'; -import {Kind} from './noms-kind.js'; -import {Package, registerPackage} from './package.js'; import {suite, test} from 'mocha'; import DataStore from './data-store.js'; @@ -23,8 +20,8 @@ suite('Type', () => { const ms = new MemoryStore(); const ds = new DataStore(ms); - const mapType = makeCompoundType(Kind.Map, stringType, numberType); - const setType = makeCompoundType(Kind.Set, stringType); + const mapType = makeMapType(stringType, numberType); + const setType = makeSetType(stringType); const mahType = makeStructType('MahStruct', [ new Field('Field1', stringType, false), new Field('Field2', boolType, true), @@ -34,80 +31,18 @@ suite('Type', () 
=> { new Field('StringField', stringType, false), ]); - const pkgRef = Ref.parse('sha1-0123456789abcdef0123456789abcdef01234567'); - const trType = makeType(pkgRef, 42); - const otherRef = ds.writeValue(otherType).targetRef; const mapRef = ds.writeValue(mapType).targetRef; const setRef = ds.writeValue(setType).targetRef; const mahRef = ds.writeValue(mahType).targetRef; - const trRef = ds.writeValue(trType).targetRef; assert.isTrue(otherType.equals(await ds.readValue(otherRef))); assert.isTrue(mapType.equals(await ds.readValue(mapRef))); assert.isTrue(setType.equals(await ds.readValue(setRef))); assert.isTrue(mahType.equals(await ds.readValue(mahRef))); - assert.isTrue(trType.equals(await ds.readValue(trRef))); - }); - - test('typeRef describe', async () => { - const mapType = makeCompoundType(Kind.Map, stringType, numberType); - const setType = makeCompoundType(Kind.Set, stringType); - - assert.strictEqual('Bool', boolType.describe()); - assert.strictEqual('Number', numberType.describe()); - assert.strictEqual('String', stringType.describe()); - assert.strictEqual('Map', mapType.describe()); - assert.strictEqual('Set', setType.describe()); - - const mahType = makeStructType('MahStruct',[ - new Field('Field1', stringType, false), - new Field('Field2', boolType, true), - ], [ - ]); - assert.strictEqual('struct MahStruct {\n Field1: String\n Field2: optional Bool\n}', - mahType.describe()); - - const otherType = makeStructType('MahOtherStruct',[ - new Field('Field1', stringType, false), - new Field('Field2', boolType, true), - ], [ - new Field('NumberField', numberType, false), - new Field('StringField', stringType, false), - ]); - - const exp = `struct MahOtherStruct {\n Field1: String\n Field2: optional Bool\n union {\n NumberField: Number\n StringField: String\n }\n}`; // eslint-disable-line max-len - assert.strictEqual(exp, otherType.describe()); - }); - - test('type with pkgRef', async () => { - const ms = new MemoryStore(); - const ds = new DataStore(ms); - - const pkg = new Package([numberType], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - - const unresolvedType = makeType(pkgRef, 42); - const unresolvedRef = ds.writeValue(unresolvedType).targetRef; - - const v = await ds.readValue(unresolvedRef); - assert.isNotNull(v); - assert.isTrue(pkgRef.equals(v.chunks[0].targetRef)); - const p = await ds.readValue(pkgRef); - assert.isNotNull(p); }); test('type Type', () => { assert.isTrue(boolType.type.equals(typeType)); }); - - test('empty package ref', async () => { - const ms = new MemoryStore(); - const ds = new DataStore(ms); - const v = makeType(emptyRef, -1); - const r = ds.writeValue(v).targetRef; - const v2 = await ds.readValue(r); - assert.isTrue(v.equals(v2)); - }); }); diff --git a/js/src/type.js b/js/src/type.js index b72f04d057..d8a7648a45 100644 --- a/js/src/type.js +++ b/js/src/type.js @@ -1,18 +1,16 @@ // @flow import type Ref from './ref.js'; -import {emptyRef} from './ref.js'; import RefValue from './ref-value.js'; import type {NomsKind} from './noms-kind.js'; import {invariant} from './assert.js'; -import {isPrimitiveKind, Kind, kindToString} from './noms-kind.js'; +import {isPrimitiveKind, Kind} from './noms-kind.js'; import {ValueBase} from './value.js'; -export type TypeDesc = { +export interface TypeDesc { kind: NomsKind; - equals: (other: TypeDesc) => boolean; - describe: () => string; -}; + equals(other: TypeDesc): boolean; +} export class PrimitiveDesc { kind: NomsKind; @@ -24,37 +22,6 @@ export class PrimitiveDesc { equals(other: TypeDesc): boolean { 
return other instanceof PrimitiveDesc && other.kind === this.kind; } - - describe(): string { - return kindToString(this.kind); - } -} - -export class UnresolvedDesc { - _pkgRef: Ref; - _ordinal: number; - - constructor(pkgRef: Ref, ordinal: number) { - this._pkgRef = pkgRef; - this._ordinal = ordinal; - } - - get kind(): NomsKind { - return Kind.Unresolved; - } - - equals(other: TypeDesc): boolean { - if (other.kind !== this.kind) { - return false; - } - invariant(other instanceof UnresolvedDesc); - - return other._pkgRef.equals(this._pkgRef) && other._ordinal === this._ordinal; - } - - describe(): string { - return `Unresolved(${this._pkgRef.toString()}, ${this._ordinal})`; - } } export class CompoundDesc { @@ -84,18 +51,15 @@ export class CompoundDesc { return false; } - - describe(): string { - const elemsDesc = this.elemTypes.map(e => e.describe()).join(', '); - return `${kindToString(this.kind)}<${elemsDesc}>`; - } } export class StructDesc { + name: string; fields: Array; union: Array; - constructor(fields: Array, union: Array) { + constructor(name: string, fields: Array, union: Array) { + this.name = name; this.fields = fields; this.union = union; } @@ -105,6 +69,10 @@ export class StructDesc { } equals(other: TypeDesc): boolean { + if (this === other) { + return true; + } + if (other.kind !== this.kind) { return false; } @@ -128,24 +96,6 @@ export class StructDesc { return true; } - - describe(): string { - let out = '{\n'; - this.fields.forEach(f => { - const optional = f.optional ? 'optional ' : ''; - out += ` ${f.name}: ${optional}${f.t.describe()}\n`; - }); - - if (this.union.length > 0) { - out += ' union {\n'; - this.union.forEach(f => { - out += ` ${f.name}: ${f.t.describe()}\n`; - }); - out += ' }\n'; - } - - return out + '}'; - } } export class Field { @@ -166,15 +116,11 @@ export class Field { } export class Type extends ValueBase { - _namespace: string; - _name: string; _desc: TypeDesc; _ref: ?Ref; - constructor(name: string = '', namespace: string = '', desc: TypeDesc) { + constructor(desc: TypeDesc) { super(); - this._name = name; - this._namespace = namespace; this._desc = desc; } @@ -183,21 +129,7 @@ export class Type extends ValueBase { } get chunks(): Array { - const chunks = []; - if (this.unresolved) { - if (this.hasPackageRef) { - chunks.push(new RefValue(this.packageRef, packageType)); - } - - return chunks; - } - - const desc = this._desc; - if (desc instanceof CompoundDesc) { - desc.elemTypes.forEach(et => chunks.push(...et.chunks())); - } - - return chunks; + return []; } get kind(): NomsKind { @@ -219,98 +151,23 @@ export class Type extends ValueBase { return this._desc; } - get unresolved(): boolean { - return this._desc instanceof UnresolvedDesc; - } - - get hasPackageRef(): boolean { - return this.unresolved && !this.packageRef.isEmpty(); - } - - get packageRef(): Ref { - invariant(this._desc instanceof UnresolvedDesc); - return this._desc._pkgRef; - } - - get ordinal(): number { - invariant(this._desc instanceof UnresolvedDesc); - return this._desc._ordinal; - } - get name(): string { - return this._name; - } - - get namespace(): string { - return this._namespace; - } - - get namespacedName(): string { - let out = ''; - - if (this._namespace !== '') { - out = this._namespace + '.'; - } - if (this._name !== '') { - out += this._name; - } - - return out; + invariant(this._desc instanceof StructDesc); + return this._desc.name; } get elemTypes(): Array { invariant(this._desc instanceof CompoundDesc); return this._desc.elemTypes; } - - describe(): string { 
- let out = ''; - switch (this.kind) { - case Kind.Struct: - out += 'struct '; - break; - } - if (this.name) { - invariant(!this.namespace || (this.namespace && this.name)); - if (this.namespace) { - out += this.namespace + '.'; - } - if (this.name) { - out += this.name; - } - out += ' '; - - if (this.unresolved) { - return out; - } - } - - out += this.desc.describe(); - return out; - } } -function buildType(n: string, desc: TypeDesc): Type { - if (isPrimitiveKind(desc.kind)) { - return new Type(n, '', desc); - } - - switch (desc.kind) { - case Kind.List: - case Kind.Ref: - case Kind.Set: - case Kind.Map: - case Kind.Struct: - case Kind.Unresolved: - return new Type(n, '', desc); - - default: - throw new Error('Unrecognized Kind: ' + desc.kind); - } +function buildType(desc: TypeDesc): Type { + return new Type(desc); } function makePrimitiveType(k: NomsKind): Type { - return buildType('', new PrimitiveDesc(k)); + return buildType(new PrimitiveDesc(k)); } export function makeCompoundType(k: NomsKind, ...elemTypes: Array): Type { @@ -322,35 +179,27 @@ export function makeCompoundType(k: NomsKind, ...elemTypes: Array): Type { invariant(elemTypes.length === 2, 'Map requires 2 element types'); } - return buildType('', new CompoundDesc(k, elemTypes)); + return buildType(new CompoundDesc(k, elemTypes)); } export function makeListType(elemType: Type): Type { - return buildType('', new CompoundDesc(Kind.List, [elemType])); + return buildType(new CompoundDesc(Kind.List, [elemType])); } export function makeSetType(elemType: Type): Type { - return buildType('', new CompoundDesc(Kind.Set, [elemType])); + return buildType(new CompoundDesc(Kind.Set, [elemType])); } export function makeMapType(keyType: Type, valueType: Type): Type { - return buildType('', new CompoundDesc(Kind.Map, [keyType, valueType])); + return buildType(new CompoundDesc(Kind.Map, [keyType, valueType])); } export function makeRefType(elemType: Type): Type { - return buildType('', new CompoundDesc(Kind.Ref, [elemType])); + return buildType(new CompoundDesc(Kind.Ref, [elemType])); } export function makeStructType(name: string, fields: Array, choices: Array): Type { - return buildType(name, new StructDesc(fields, choices)); -} - -export function makeType(pkgRef: Ref, ordinal: number): Type { - return new Type('', '', new UnresolvedDesc(pkgRef, ordinal)); -} - -export function makeUnresolvedType(namespace: string, name: string): Type { - return new Type(name, namespace, new UnresolvedDesc(emptyRef, -1)); + return buildType(new StructDesc(name, fields, choices)); } export const boolType = makePrimitiveType(Kind.Bool); @@ -358,15 +207,13 @@ export const numberType = makePrimitiveType(Kind.Number); export const stringType = makePrimitiveType(Kind.String); export const blobType = makePrimitiveType(Kind.Blob); export const typeType = makePrimitiveType(Kind.Type); -export const packageType = makePrimitiveType(Kind.Package); export const valueType = makePrimitiveType(Kind.Value); + export const refOfValueType = makeCompoundType(Kind.Ref, valueType); export const listOfValueType = makeCompoundType(Kind.List, valueType); export const setOfValueType = makeCompoundType(Kind.Set, valueType); export const mapOfValueType = makeCompoundType(Kind.Map, valueType, valueType); -export const packageRefType = makeCompoundType(Kind.Ref, packageType); - /** * Gives the existing primitive Type value for a NomsKind. 
*/ @@ -383,11 +230,44 @@ export function getPrimitiveType(k: NomsKind): Type { return blobType; case Kind.Type: return typeType; - case Kind.Package: - return packageType; case Kind.Value: return valueType; default: invariant(false, 'not reachable'); } } + +export function getTypeOfValue(v: any): Type { + switch (typeof v) { + case 'object': + return v.type; + case 'string': + return stringType; + case 'boolean': + return boolType; + case 'number': + throw new Error('Encoding untagged numbers is not supported'); + default: + throw new Error('Unknown type'); + } +} + +export class BackRefDesc { + value: number; + + constructor(value: number) { + this.value = value; + } + + get kind(): NomsKind { + return Kind.BackRef; + } + + equals(other: TypeDesc): boolean { + return other instanceof BackRefDesc && other.value === this.value; + } + + describe(): string { + return `BackRef<${this.value}>`; + } +} diff --git a/js/src/validate-type-test.js b/js/src/validate-type-test.js index 15fbc92ec3..8f51cda536 100644 --- a/js/src/validate-type-test.js +++ b/js/src/validate-type-test.js @@ -7,7 +7,6 @@ import {newMap} from './map.js'; import {newSet} from './set.js'; import {newStruct} from './struct.js'; import {suite, test} from 'mocha'; -import {Package, registerPackage} from './package.js'; import validateType from './validate-type.js'; import type {Type} from './type.js'; import { @@ -19,10 +18,8 @@ import { makeMapType, makeSetType, makeStructType, - makeType, mapOfValueType, numberType, - packageType, setOfValueType, stringType, typeType, @@ -41,7 +38,6 @@ suite('validate type', () => { stringType, blobType, typeType, - packageType, valueType, ]; @@ -120,24 +116,12 @@ suite('validate type', () => { validateType(valueType, t); }); - test('package', async () => { - const pkg = new Package([], []); - validateType(packageType, pkg); - assertAll(packageType, pkg); - - validateType(valueType, pkg); - }); - test('struct', async () => { - const typeDef = makeStructType('Struct', [ + const type = makeStructType('Struct', [ new Field('x', boolType, false), ], []); - const pkg = new Package([typeDef], []); - registerPackage(pkg); - const pkgRef = pkg.ref; - const type = makeType(pkgRef, 0); - const v = newStruct(type, typeDef, {x: true}); + const v = newStruct(type, {x: true}); validateType(type, v); assertAll(type, v); diff --git a/js/src/validate-type.js b/js/src/validate-type.js index 9e8df4c280..c9f583b801 100644 --- a/js/src/validate-type.js +++ b/js/src/validate-type.js @@ -27,23 +27,17 @@ export default function validateType(t: Type, v: any): void { return; } - case Kind.Unresolved: { - // Struct. - assertSubtype(v, t); - return; - } - + case Kind.Blob: case Kind.List: case Kind.Map: case Kind.Ref: case Kind.Set: - case Kind.Blob: + case Kind.Struct: case Kind.Type: - case Kind.Package: assertSubtype(v, t); return; - case Kind.Struct: + case Kind.BackRef: default: throw new Error('unreachable'); } diff --git a/nomdl/codegen/.gitignore b/nomdl/codegen/.gitignore deleted file mode 100644 index e3a114014b..0000000000 --- a/nomdl/codegen/.gitignore +++ /dev/null @@ -1 +0,0 @@ -codegen diff --git a/nomdl/codegen/README.md b/nomdl/codegen/README.md deleted file mode 100644 index 1b23467433..0000000000 --- a/nomdl/codegen/README.md +++ /dev/null @@ -1,60 +0,0 @@ -To update the codegen there are some subtleties because the code depends on generated code. - -## Build a working version - -First step is to build a binary. 
- -``` -cd nomdl/codegen/ -go build -``` - -## Change templates - -Not much to say here but you can see the result without breaking things - -``` -./codegen --in=test/struct.noms -``` - -This generates `test.noms.go` in the current directory. Iterate until it looks correct. - -## Change system go files - -There are a few files that are generated that codegen itself depends on. - -1. `types/compound_blob_struct.noms.go` -1. `datas/types.noms.go` - -Both of these can be updated by running `go generate` in their respective directories - -There is also one more file that is generated but it requires manual intervention - -### `types/package_set_of_ref.go` - -This one is generated from `types/package_set_of_ref.noms`. However, it uses the symbol -`Package` to refer to a `types.Package`. Currently we have no convenient way to make this work -out of the box. However, it is pretty straight forward to make it work. - -1. Open `nomdl/pkg/grammar.pg` -2. Find `Number` -3. At that line, add one more builtin type called `Package`. -4. Run `go generate` `in nomdl/pkg` -5. Run `go run ../nomdl/codegen/codegen.go --in=package_set_of_ref.noms` in `types/`. - -Here is the diff: - -```diff ---- a/nomdl/pkg/grammar.peg -+++ b/nomdl/pkg/grammar.peg -@@ -159,7 +159,7 @@ CompoundType <- `List` _ `(` _ t:Type _ `)` _ { - return types.MakeRefType(t.(*types.Type)), nil - } - --PrimitiveType <- p:(`Number` / `Bool` / `String` / `Blob` / `Value` / `Type`) { -+PrimitiveType <- p:(`Number` / `Bool` / `String` / `Blob` / `Value` / `Type` / `Package`) { - return types.MakePrimitiveTypeByString(string(p.([]uint8))), nil - } - ``` - - Once [#577](https://github.com/attic-labs/noms/issues/577) is fixed this will need no manual intervention. diff --git a/nomdl/codegen/code/generate.go b/nomdl/codegen/code/generate.go deleted file mode 100644 index b3bb66ca89..0000000000 --- a/nomdl/codegen/code/generate.go +++ /dev/null @@ -1,491 +0,0 @@ -// Package code provides Generator, which has methods for generating code snippets from a *types.Type. -// Conceptually there are few type spaces here: -// -// - Def - MyStructDef, ListOfBoolDef; convenient Go types for working with data from a given Noms Value. -// - Native - such as string, uint32 -// - Value - the generic types.Value -// - Nom - types.String, types.Uint32, MyStruct, ListOfBool -// - User - User defined structs as well as native primitves. This uses Native when possible or Nom if not. These are to be used in APIs for generated types -- Getters and setters for maps and structs, etc. -package code - -import ( - "fmt" - "reflect" - "strings" - "unicode" - - "github.com/attic-labs/noms/d" - "github.com/attic-labs/noms/ref" - "github.com/attic-labs/noms/types" -) - -// Resolver provides a single method for resolving an unresolved types.Type. -type Resolver interface { - Resolve(t *types.Type, pkg *types.Package) *types.Type -} - -// Generator provides methods for generating code snippets from both resolved and unresolved types.Types. In the latter case, it uses R to resolve the types.Type before generating code. -type Generator struct { - R Resolver - TypesPackage string - ImportedJS map[string]bool - ImportedJSTypes map[string]bool - AliasNames map[ref.Ref]string - Package *types.Package -} - -// DefType returns a string containing the Go type that should be used as the 'Def' for the Noms type described by t. 
-func (gen *Generator) DefType(t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - k := rt.Kind() - switch k { - case types.BlobKind: - return fmt.Sprintf("%sBlob", gen.TypesPackage) - case types.BoolKind, types.NumberKind, types.StringKind: - return strings.ToLower(kindToString(k)) - case types.ListKind, types.MapKind, types.SetKind, types.StructKind: - return gen.UserName(t) + "Def" - case types.PackageKind: - return fmt.Sprintf("%sPackage", gen.TypesPackage) - case types.RefKind: - return "ref.Ref" - case types.ValueKind: - return fmt.Sprintf("%sValue", gen.TypesPackage) - case types.TypeKind: - return fmt.Sprintf("%sType", gen.TypesPackage) - } - panic("unreachable") -} - -// UserType returns a string containing the Go type that should be used when the Noms type described by t needs to be returned by a generated getter or taken as a parameter to a generated setter. -func (gen *Generator) UserType(t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - k := rt.Kind() - switch k { - case types.BlobKind: - return fmt.Sprintf("%sBlob", gen.TypesPackage) - case types.BoolKind, types.NumberKind, types.StringKind: - return strings.ToLower(kindToString(k)) - case types.ListKind, types.MapKind, types.RefKind, types.SetKind, types.StructKind: - return gen.UserName(t) - case types.PackageKind: - return fmt.Sprintf("%sPackage", gen.TypesPackage) - case types.ValueKind: - return fmt.Sprintf("%sValue", gen.TypesPackage) - case types.TypeKind: - return fmt.Sprintf("%sType", gen.TypesPackage) - } - panic("unreachable") -} - -// UserTypeJS returns a string containing the JS type that should be used when the Noms type described by t needs to be returned by a generated getter or taken as a parameter to a generated setter. -func (gen *Generator) UserTypeJS(t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - k := rt.Kind() - switch k { - case types.BlobKind: - return gen.ImportJSType("Blob") - case types.BoolKind: - return "boolean" - case types.StringKind: - return "string" - case types.NumberKind: - return gen.ImportJSType(strings.ToLower(kindToString(k))) - case types.StructKind: - if t.HasPackageRef() && gen.Package.Ref() != t.PackageRef() { - return gen.importedUserNameJS(t) - } - return gen.UserName(t) - case types.ListKind: - return fmt.Sprintf("%s<%s>", gen.ImportJSType("NomsList"), gen.UserTypeJS(t.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.SetKind: - return fmt.Sprintf("%s<%s>", gen.ImportJSType("NomsSet"), gen.UserTypeJS(t.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.RefKind: - return fmt.Sprintf("%s<%s>", gen.ImportJSType("RefValue"), gen.UserTypeJS(t.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.MapKind: - elemTypes := t.Desc.(types.CompoundDesc).ElemTypes - return fmt.Sprintf("%s<%s, %s>", gen.ImportJSType("NomsMap"), gen.UserTypeJS(elemTypes[0]), gen.UserTypeJS(elemTypes[1])) - case types.PackageKind: - return gen.ImportJSType("Package") - case types.ValueKind: - return gen.ImportJSType("Value") - case types.TypeKind: - return gen.ImportJSType("Type") - } - panic("unreachable") -} - -// DefToValue returns a string containing Go code to convert an instance of a Def type (named val) to a Noms types.Value of the type described by t. 
-func (gen *Generator) DefToValue(val string, t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - switch rt.Kind() { - case types.BlobKind, types.PackageKind, types.ValueKind, types.TypeKind: - return val // No special Def representation - case types.BoolKind, types.NumberKind, types.StringKind: - return gen.NativeToValue(val, rt) - case types.ListKind, types.MapKind, types.SetKind, types.StructKind: - return fmt.Sprintf("%s.New()", val) - case types.RefKind: - return fmt.Sprintf("New%s(%s)", gen.UserName(rt), val) - } - panic("unreachable") -} - -// DefToUser returns a string containing Go code to convert an instance of a Def type (named val) to a User type described by t. -func (gen *Generator) DefToUser(val string, t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - switch rt.Kind() { - case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.TypeKind, types.ValueKind: - return val - case types.ListKind, types.MapKind, types.RefKind, types.SetKind, types.StructKind: - return gen.DefToValue(val, rt) - } - panic("unreachable") -} - -// MayHaveChunks returns whether the type (t) may contain more chunks. -func (gen *Generator) MayHaveChunks(t *types.Type) bool { - rt := gen.R.Resolve(t, gen.Package) - switch rt.Kind() { - case types.BlobKind, types.ListKind, types.MapKind, types.PackageKind, types.RefKind, types.SetKind, types.StructKind, types.TypeKind, types.ValueKind: - return true - case types.BoolKind, types.NumberKind, types.StringKind: - return false - } - panic("unreachable") -} - -// ValueToDef returns a string containing Go code to convert an instance of a types.Value (val) into the Def type appropriate for t. -func (gen *Generator) ValueToDef(val string, t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - switch rt.Kind() { - case types.BlobKind, types.PackageKind, types.TypeKind: - return gen.ValueToUser(val, rt) // No special Def representation - case types.BoolKind, types.NumberKind, types.StringKind: - return gen.ValueToNative(val, rt) - case types.ListKind, types.MapKind, types.SetKind, types.StructKind: - return fmt.Sprintf("%s.Def()", gen.ValueToUser(val, t)) - case types.RefKind: - return fmt.Sprintf("%s.TargetRef()", gen.ValueToUser(val, t)) - case types.ValueKind: - return val // Value is already a Value - } - panic("unreachable") -} - -// UserToDef returns a string containing Go code to convert an User value (val) into the Def type appropriate for t. -func (gen *Generator) UserToDef(val string, t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - switch rt.Kind() { - case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.TypeKind, types.ValueKind: - return val - case types.ListKind, types.MapKind, types.SetKind, types.StructKind: - return fmt.Sprintf("%s.Def()", val) - case types.RefKind: - return fmt.Sprintf("%s.TargetRef()", val) - } - panic("unreachable") -} - -// NativeToValue returns a string containing Go code to convert an instance of a native type (named val) to a Noms types.Value of the type described by t. 
-func (gen *Generator) NativeToValue(val string, t *types.Type) string { - t = gen.R.Resolve(t, gen.Package) - k := t.Kind() - switch k { - case types.BoolKind, types.NumberKind: - return fmt.Sprintf("%s%s(%s)", gen.TypesPackage, kindToString(k), val) - case types.StringKind: - return fmt.Sprintf("%sNewString(%s)", gen.TypesPackage, val) - } - panic("unreachable") -} - -// ValueToNative returns a string containing Go code to convert an instance of a types.Value (val) into the native type appropriate for t. -func (gen *Generator) ValueToNative(val string, t *types.Type) string { - k := t.Kind() - switch k { - case types.BoolKind, types.NumberKind: - n := kindToString(k) - return fmt.Sprintf("%s(%s.(%s%s))", strings.ToLower(n), val, gen.TypesPackage, n) - case types.StringKind: - return fmt.Sprintf("%s.(%sString).String()", val, gen.TypesPackage) - } - panic("unreachable") -} - -// UserToValue returns a string containing Go code to convert an instance of a User type (named val) to a Noms types.Value of the type described by t. For Go primitive types, this will use NativeToValue(). For other types, their UserType is a Noms types.Value (or a wrapper around one), so this is more-or-less a pass-through. -func (gen *Generator) UserToValue(val string, t *types.Type) string { - t = gen.R.Resolve(t, gen.Package) - k := t.Kind() - switch k { - case types.BlobKind, types.ListKind, types.MapKind, types.PackageKind, types.RefKind, types.SetKind, types.StructKind, types.TypeKind, types.ValueKind: - return val - case types.BoolKind, types.NumberKind, types.StringKind: - return gen.NativeToValue(val, t) - } - panic("unreachable") -} - -// ValueToUser returns a string containing Go code to convert an instance of a types.Value (val) into the User type appropriate for t. For Go primitives, this will use ValueToNative(). -func (gen *Generator) ValueToUser(val string, t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - k := rt.Kind() - switch k { - case types.BlobKind: - return fmt.Sprintf("%s.(%sBlob)", val, gen.TypesPackage) - case types.BoolKind, types.NumberKind, types.StringKind: - return gen.ValueToNative(val, rt) - case types.ListKind, types.MapKind, types.RefKind, types.SetKind, types.StructKind: - return fmt.Sprintf("%s.(%s)", val, gen.UserName(t)) - case types.PackageKind: - return fmt.Sprintf("%s.(%sPackage)", val, gen.TypesPackage) - case types.ValueKind: - return val - case types.TypeKind: - return fmt.Sprintf("%s.(%sType)", val, gen.TypesPackage) - } - panic("unreachable") -} - -// UserZero returns a string containing Go code to create an uninitialized instance of the User type appropriate for t. -func (gen *Generator) UserZero(t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - k := rt.Kind() - switch k { - case types.BlobKind: - return fmt.Sprintf("%sNewEmptyBlob()", gen.TypesPackage) - case types.BoolKind: - return "false" - case types.NumberKind: - return fmt.Sprintf("%s(0)", strings.ToLower(kindToString(k))) - case types.ListKind, types.MapKind, types.SetKind, types.StructKind: - return fmt.Sprintf("New%s()", gen.UserName(rt)) - case types.PackageKind: - return fmt.Sprintf("New%s()", gen.UserName(rt)) - case types.RefKind: - return fmt.Sprintf("New%s(ref.Ref{})", gen.UserName(rt)) - case types.StringKind: - return `""` - case types.ValueKind: - // TODO: This is where a null Value would have been useful. 
- return fmt.Sprintf("%sBool(false)", gen.TypesPackage) - case types.TypeKind: - return fmt.Sprintf("%sType{R: ref.Ref{}}", gen.TypesPackage) - } - panic("unreachable") -} - -// ValueZero returns a string containing Go code to create an uninitialized instance of the Noms types.Value appropriate for t. -func (gen *Generator) ValueZero(t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - k := rt.Kind() - switch k { - case types.BlobKind: - return fmt.Sprintf("%sNewEmptyBlob()", gen.TypesPackage) - case types.BoolKind: - return fmt.Sprintf("%sBool(false)", gen.TypesPackage) - case types.NumberKind: - return fmt.Sprintf("%s%s(0)", gen.TypesPackage, kindToString(k)) - case types.ListKind, types.MapKind, types.RefKind, types.SetKind: - return gen.UserZero(t) - case types.PackageKind: - return fmt.Sprintf("%sNewPackage()", gen.TypesPackage) - case types.StringKind: - return fmt.Sprintf(`%sNewString("")`, gen.TypesPackage) - case types.StructKind: - return fmt.Sprintf("New%s()", gen.UserName(rt)) - case types.ValueKind: - // TODO: Use nil here - return fmt.Sprintf("%sBool(false)", gen.TypesPackage) - case types.TypeKind: - return fmt.Sprintf("%sType{R: ref.Ref{}}", gen.TypesPackage) - } - panic("unreachable") -} - -// UserName returns the name of the User type appropriate for t, taking into account Noms types imported from other packages. -func (gen *Generator) UserName(t *types.Type) string { - rt := gen.R.Resolve(t, gen.Package) - k := rt.Kind() - switch k { - case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.ValueKind, types.TypeKind: - return kindToString(k) - case types.ListKind: - return fmt.Sprintf("ListOf%s", gen.refToID(rt.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.MapKind: - elemTypes := rt.Desc.(types.CompoundDesc).ElemTypes - return fmt.Sprintf("MapOf%sTo%s", gen.refToID(elemTypes[0]), gen.refToID(elemTypes[1])) - case types.RefKind: - return fmt.Sprintf("RefOf%s", gen.refToID(rt.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.SetKind: - return fmt.Sprintf("SetOf%s", gen.refToID(rt.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.StructKind: - // We get an empty name when we have a struct that is used as union - if rt.Name() == "" { - choices := rt.Desc.(types.StructDesc).Union - s := "__unionOf" - for i, f := range choices { - if i > 0 { - s += "And" - } - s += strings.Title(f.Name) + "Of" + gen.refToID(f.T) - } - return s - } - return rt.Name() - } - panic("unreachable") -} - -func (gen Generator) importedUserNameJS(t *types.Type) string { - d.Chk.True(t.HasPackageRef()) - return fmt.Sprintf("%s.%s", gen.RefToAliasName(t.PackageRef()), gen.UserName(t)) -} - -func (gen *Generator) refToID(t *types.Type) string { - if !t.IsUnresolved() || !t.HasPackageRef() { - return gen.UserName(t) - } - return gen.UserName(gen.R.Resolve(t, gen.Package)) -} - -// RefToJSIdentfierName generates an identifier name representing a Ref. ie. `sha1_abc1234`. -func (gen *Generator) RefToJSIdentfierName(r ref.Ref) string { - return strings.Replace(r.String(), "-", "_", 1)[0:12] -} - -// RefToAliasName is used to map the ref of an import to the alias name used in the noms file -func (gen *Generator) RefToAliasName(r ref.Ref) string { - // When we generate code from a Package stored in a DataStore we do not have the alias names. 
- if n, ok := gen.AliasNames[r]; ok { - return n - } - return fmt.Sprintf("_%s", gen.RefToJSIdentfierName(r)) -} - -// ToTypesType returns a string containing Go code that instantiates a *types.Type instance equivalent to t. -func (gen *Generator) ToTypesType(t *types.Type, inPackageDef bool) string { - if t.IsUnresolved() { - d.Chk.True(t.HasPackageRef()) - d.Chk.True(t.HasOrdinal(), "%s does not have an ordinal set", t.Name()) - if t.PackageRef() == gen.Package.Ref() && inPackageDef { - return fmt.Sprintf(`%sMakeType(ref.Ref{}, %d)`, gen.TypesPackage, t.Ordinal()) - } - return fmt.Sprintf(`%sMakeType(ref.Parse("%s"), %d)`, gen.TypesPackage, t.PackageRef().String(), t.Ordinal()) - } - - if types.IsPrimitiveKind(t.Kind()) { - return fmt.Sprintf("%sMakePrimitiveType(%s%sKind)", gen.TypesPackage, gen.TypesPackage, kindToString(t.Kind())) - } - - switch desc := t.Desc.(type) { - case types.CompoundDesc: - types := make([]string, len(desc.ElemTypes)) - for i, t := range desc.ElemTypes { - types[i] = gen.ToTypesType(t, inPackageDef) - } - return fmt.Sprintf(`%sMakeCompoundType(%s%sKind, %s)`, gen.TypesPackage, gen.TypesPackage, kindToString(t.Kind()), strings.Join(types, ", ")) - case types.StructDesc: - flatten := func(f []types.Field) string { - out := make([]string, 0, len(f)) - for _, field := range f { - out = append(out, fmt.Sprintf(`%sField{"%s", %s, %t},`, gen.TypesPackage, field.Name, gen.ToTypesType(field.T, inPackageDef), field.Optional)) - } - return strings.Join(out, "\n") - } - fields := fmt.Sprintf("[]%sField{\n%s\n}", gen.TypesPackage, flatten(desc.Fields)) - choices := fmt.Sprintf("%sChoices{\n%s\n}", gen.TypesPackage, flatten(desc.Union)) - return fmt.Sprintf("%sMakeStructType(\"%s\",\n%s,\n%s,\n)", gen.TypesPackage, t.Name(), fields, choices) - default: - d.Chk.Fail("Unknown TypeDesc.", "%#v (%T)", desc, desc) - } - panic("Unreachable") -} - -func ind(i int) string { - return strings.Repeat(" ", i) -} - -func firstToLower(s string) string { - b := []rune(s) - b[0] = unicode.ToLower(b[0]) - return string(b) -} - -// ToTypeValueJS returns a string containing JS code that instantiates a Type instance equivalent to t for JavaScript. 
-func (gen *Generator) ToTypeValueJS(t *types.Type, inPackageDef bool, indent int) string { - if t.IsUnresolved() { - d.Chk.True(t.HasPackageRef()) - d.Chk.True(t.HasOrdinal(), "%s does not have an ordinal set", t.Name()) - if t.PackageRef() == gen.Package.Ref() { - if inPackageDef { - return fmt.Sprintf(`%s(%s, %d)`, gen.ImportJS("makeType"), gen.ImportJS("emptyRef"), t.Ordinal()) - } else { - return fmt.Sprintf(`%s(_pkg.ref, %d)`, gen.ImportJS("makeType"), t.Ordinal()) - } - } - return fmt.Sprintf(`%s(%s.parse('%s'), %d)`, gen.ImportJS("makeType"), gen.ImportJS("Ref"), t.PackageRef().String(), t.Ordinal()) - } - - if types.IsPrimitiveKind(t.Kind()) { - return gen.ImportJS(firstToLower(kindToString(t.Kind())) + "Type") - } - - switch desc := t.Desc.(type) { - case types.CompoundDesc: - types := make([]string, len(desc.ElemTypes)) - for i, t := range desc.ElemTypes { - types[i] = gen.ToTypeValueJS(t, inPackageDef, 0) - } - return fmt.Sprintf(`%s(%s.%s, %s)`, gen.ImportJS("makeCompoundType"), gen.ImportJS("Kind"), kindToString(t.Kind()), strings.Join(types, ", ")) - case types.StructDesc: - flatten := func(f []types.Field) string { - out := make([]string, 0, len(f)) - for _, field := range f { - out = append(out, fmt.Sprintf(`%snew %s('%s', %s, %t),`, ind(indent+1), gen.ImportJS("Field"), field.Name, gen.ToTypeValueJS(field.T, inPackageDef, 0), field.Optional)) - } - return strings.Join(out, "\n") - } - fields := fmt.Sprintf("%s[\n%s\n%s]", ind(indent), flatten(desc.Fields), ind(indent)) - choices := fmt.Sprintf("%s[\n%s\n%s]", ind(indent), flatten(desc.Union), ind(indent)) - return fmt.Sprintf("%s('%s',\n%s,\n%s\n%s)", gen.ImportJS("makeStructType"), t.Name(), fields, choices, ind(indent-1)) - default: - d.Chk.Fail("Unknown TypeDesc.", "%#v (%T)", desc, desc) - } - panic("Unreachable") -} - -// IsLast determines if |index| is the last index in |seq|. -func (gen *Generator) IsLast(index int, seq interface{}) bool { - return reflect.ValueOf(seq).Len() == index+1 -} - -// ToTag replaces "-" characters in s with "_", so it can be used in a Go identifier. -// TODO: replace other illegal chars as well? -func ToTag(r ref.Ref) string { - return strings.Replace(r.String()[0:12], "-", "_", -1) -} - -func kindToString(k types.NomsKind) (out string) { - out = types.KindToString[k] - d.Chk.NotEmpty(out, "Unknown NomsKind %d", k) - return -} - -// ImportJS returns the name of the imported binding as well as registers the binding as imported so that we can later generate the right import declaration. -func (gen *Generator) ImportJS(name string) string { - if gen.ImportedJS == nil { - gen.ImportedJS = map[string]bool{} - } - gen.ImportedJS[name] = true - return fmt.Sprintf("_%s", name) -} - -// ImportJSType returns the name of the imported type as well as registers the type as imported so that we can later generate the right import type declaration. 
-func (gen *Generator) ImportJSType(name string) string { - if gen.ImportedJSTypes == nil { - gen.ImportedJSTypes = map[string]bool{} - } - gen.ImportedJSTypes[name] = true - return fmt.Sprintf("_%s", name) -} diff --git a/nomdl/codegen/code/generate_test.go b/nomdl/codegen/code/generate_test.go deleted file mode 100644 index 58171b4445..0000000000 --- a/nomdl/codegen/code/generate_test.go +++ /dev/null @@ -1,52 +0,0 @@ -package code - -import ( - "testing" - - "github.com/attic-labs/noms/ref" - "github.com/attic-labs/noms/types" - "github.com/stretchr/testify/assert" -) - -type testResolver struct { - assert *assert.Assertions - deps map[ref.Ref]types.Package -} - -func (res *testResolver) Resolve(t *types.Type, pkg *types.Package) *types.Type { - if !t.IsUnresolved() { - return t - } - - if !t.HasPackageRef() { - res.assert.Fail("Test does not handle local references") - } - - if t.PackageRef() == pkg.Ref() { - return pkg.Types()[t.Ordinal()] - } - - dep, ok := res.deps[t.PackageRef()] - res.assert.True(ok, "Package %s is referenced in %+v, but is not a dependency.", t.PackageRef().String(), t) - return dep.Types()[t.Ordinal()] -} - -func TestUserName(t *testing.T) { - assert := assert.New(t) - - imported := types.NewPackage([]*types.Type{ - types.MakeStructType("S1", []types.Field{ - types.Field{"f", types.BoolType, false}, - }, []types.Field{}), - }, []ref.Ref{}) - - res := testResolver{assert, map[ref.Ref]types.Package{imported.Ref(): imported}} - - localStructName := "Local" - resolved := types.MakeStructType(localStructName, []types.Field{ - types.Field{"a", types.NumberType, false}, - }, []types.Field{}) - - g := Generator{R: &res, Package: &imported} - assert.Equal(localStructName, g.UserName(resolved)) -} diff --git a/nomdl/codegen/codegen.go b/nomdl/codegen/codegen.go deleted file mode 100644 index 24864c998a..0000000000 --- a/nomdl/codegen/codegen.go +++ /dev/null @@ -1,495 +0,0 @@ -package main - -import ( - "bytes" - "flag" - "io" - "log" - "os" - "path" - "path/filepath" - "regexp" - "runtime" - "sort" - "strings" - "text/template" - - "github.com/attic-labs/noms/chunks" - "github.com/attic-labs/noms/d" - "github.com/attic-labs/noms/datas" - "github.com/attic-labs/noms/dataset" - "github.com/attic-labs/noms/nomdl/codegen/code" - "github.com/attic-labs/noms/nomdl/pkg" - "github.com/attic-labs/noms/ref" - "github.com/attic-labs/noms/types" -) - -var ( - outDirFlag = flag.String("out-dir", ".", "Directory where generated code will be written") - inFlag = flag.String("in", "", "The name of the noms file to read") - pkgDSFlag = flag.String("package-ds", "", "The dataset to read/write packages from/to.") - packageFlag = flag.String("package", "", "The name of the go package to write") - - idRegexp = regexp.MustCompile(`[_\pL][_\pL\pN]*`) - illegalRune = regexp.MustCompile(`[^_\pL\pN]`) -) - -const ext = ".noms" - -type refSet map[ref.Ref]bool - -func main() { - flags := datas.NewFlags() - flag.Parse() - - ds, ok := flags.CreateDataStore() - if !ok { - ds = datas.NewDataStore(chunks.NewMemoryStore()) - } - defer ds.Close() - - if *pkgDSFlag != "" { - if !ok { - log.Print("Package dataset provided, but DataStore could not be opened.") - flag.Usage() - return - } - } else { - log.Print("No package dataset provided; will be unable to process imports.") - *pkgDSFlag = "default" - } - - pkgDS := dataset.NewDataset(ds, *pkgDSFlag) - // Ensure that, if pkgDS has stuff in it, its head is a SetOfRefOfPackage. 
- if h, ok := pkgDS.MaybeHead(); ok { - d.Chk.IsType(types.NewSetOfRefOfPackage(), h.Get(datas.ValueField)) - } - - localPkgs := refSet{} - outDir, err := filepath.Abs(*outDirFlag) - d.Chk.NoError(err, "Could not canonicalize -out-dir: %v", err) - packageName := "" - - if *inFlag != "" { - out := getOutFileName(filepath.Base(*inFlag)) - p := parsePackageFile(packageName, *inFlag, pkgDS) - localPkgs[p.Ref()] = true - generate(packageName, *inFlag, filepath.Join(outDir, out), outDir, map[string]bool{}, p, localPkgs, pkgDS) - return - } - - // Generate code from all .noms file in the current directory - nomsFiles, err := filepath.Glob("*" + ext) - d.Chk.NoError(err) - - written := map[string]bool{} - packages := map[string]pkg.Parsed{} - for _, inFile := range nomsFiles { - p := parsePackageFile(packageName, inFile, pkgDS) - localPkgs[p.Ref()] = true - packages[inFile] = p - } - // Sort to have deterministic output. - keys := make([]string, 0, len(packages)) - sort.Strings(keys) - for inFile := range packages { - keys = append(keys, inFile) - } - for _, inFile := range keys { - p := packages[inFile] - pkgDS = generate(packageName, inFile, filepath.Join(outDir, getOutFileName(inFile)), outDir, written, p, localPkgs, pkgDS) - } -} - -func parsePackageFile(packageName string, in string, pkgDS dataset.Dataset) pkg.Parsed { - inFile, err := os.Open(in) - d.Chk.NoError(err) - defer inFile.Close() - - return pkg.ParseNomDL(packageName, inFile, filepath.Dir(in), pkgDS.Store()) -} - -func generate(packageName, in, out, outDir string, written map[string]bool, parsed pkg.Parsed, localPkgs refSet, pkgDS dataset.Dataset) dataset.Dataset { - // Generate code for all p's deps first. - deps := generateDepCode(packageName, outDir, written, parsed.Package, localPkgs, pkgDS.Store()) - generateAndEmit(getBareFileName(in), out, written, deps, parsed) - - // Since we're just building up a set of refs to all the packages in pkgDS, simply retrying is the logical response to commit failure. - err := datas.ErrOptimisticLockFailed - for ; err == datas.ErrOptimisticLockFailed; pkgDS, err = pkgDS.Commit(buildSetOfRefOfPackage(parsed, deps, pkgDS)) { - } - return pkgDS -} - -type depsMap map[ref.Ref]types.Package - -func generateDepCode(packageName, outDir string, written map[string]bool, p types.Package, localPkgs refSet, vr types.ValueReader) depsMap { - deps := depsMap{} - for _, r := range p.Dependencies() { - p := vr.ReadValue(r).(types.Package) - pDeps := generateDepCode(packageName, outDir, written, p, localPkgs, vr) - tag := code.ToTag(p.Ref()) - parsed := pkg.Parsed{Package: p, Name: packageName} - if !localPkgs[parsed.Ref()] { - generateAndEmit(tag, filepath.Join(outDir, tag+".js"), written, pDeps, parsed) - localPkgs[parsed.Ref()] = true - } - for depRef, dep := range pDeps { - deps[depRef] = dep - } - deps[r] = p - } - return deps -} - -func generateAndEmit(tag, out string, written map[string]bool, deps depsMap, p pkg.Parsed) { - var buf bytes.Buffer - gen := newCodeGen(&buf, tag, written, deps, p) - gen.WritePackage() - - d.Chk.NoError(os.MkdirAll(filepath.Dir(out), 0700)) - - outFile, err := os.OpenFile(out, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600) - d.Chk.NoError(err) - defer outFile.Close() - - io.Copy(outFile, &buf) -} - -func buildSetOfRefOfPackage(pkg pkg.Parsed, deps depsMap, ds dataset.Dataset) types.Set { - // Can do better once generated collections implement types.Value. 
- s := types.NewSetOfRefOfPackage() - if h, ok := ds.MaybeHead(); ok { - s = h.Get(datas.ValueField).(types.Set) - } - for _, dep := range deps { - // Writing the deps into ds should be redundant at this point, but do it to be sure. - // TODO: consider moving all dataset work over into nomdl/pkg BUG 409 - s = s.Insert(ds.Store().WriteValue(dep)) - } - r := ds.Store().WriteValue(pkg.Package) - return s.Insert(r) -} - -func getOutFileName(in string) string { - return in[:len(in)-len(ext)] + ".noms.js" -} - -func getBareFileName(in string) string { - base := filepath.Base(in) - return base[:len(base)-len(filepath.Ext(base))] -} - -type codeGen struct { - w io.Writer - pkg pkg.Parsed - deps depsMap - written map[string]bool - toWrite []*types.Type - generator *code.Generator - templates *template.Template -} - -func newCodeGen(w io.Writer, fileID string, written map[string]bool, deps depsMap, pkg pkg.Parsed) *codeGen { - gen := &codeGen{w, pkg, deps, written, []*types.Type{}, nil, nil} - gen.generator = &code.Generator{ - R: gen, - AliasNames: pkg.AliasNames, - Package: &pkg.Package, - } - gen.templates = gen.readTemplates() - return gen -} - -func (gen *codeGen) readTemplates() *template.Template { - _, thisfile, _, _ := runtime.Caller(1) - glob := path.Join(path.Dir(thisfile), "js", "*.tmpl") - return template.Must(template.New("").Funcs( - template.FuncMap{ - "defToUser": gen.generator.DefToUser, - "defToValue": gen.generator.DefToValue, - "defType": gen.generator.DefType, - "importJS": gen.generator.ImportJS, - "importJsType": gen.generator.ImportJSType, - "isLast": gen.generator.IsLast, - "mayHaveChunks": gen.generator.MayHaveChunks, - "refToAliasName": gen.generator.RefToAliasName, - "refToJSIdentfierName": gen.generator.RefToJSIdentfierName, - "title": strings.Title, - "toTypesType": gen.generator.ToTypesType, - "toTypeValueJS": gen.generator.ToTypeValueJS, - "userToDef": gen.generator.UserToDef, - "userToValue": gen.generator.UserToValue, - "userType": gen.generator.UserType, - "userTypeJS": gen.generator.UserTypeJS, - "userZero": gen.generator.UserZero, - "valueToDef": gen.generator.ValueToDef, - "valueToUser": gen.generator.ValueToUser, - "valueZero": gen.generator.ValueZero, - }).ParseGlob(glob)) -} - -func (gen *codeGen) Resolve(t *types.Type, pkg *types.Package) *types.Type { - if !t.IsUnresolved() { - return t - } - if !t.HasPackageRef() { - return gen.pkg.Types()[t.Ordinal()] - } - - if t.PackageRef() == pkg.Ref() { - return pkg.Types()[t.Ordinal()] - } - - dep, ok := gen.deps[t.PackageRef()] - d.Chk.True(ok, "Package %s is referenced in %+v, but is not a dependency.", t.PackageRef().String(), t) - return dep.Types()[t.Ordinal()] -} - -func (gen *codeGen) WritePackage() { - pkgTypes := gen.pkg.Types() - data := struct { - PackageRef ref.Ref - HasTypes bool - Dependencies []ref.Ref - Name string - Types []*types.Type - }{ - gen.pkg.Package.Ref(), - len(pkgTypes) > 0, - gen.pkg.Dependencies(), - gen.pkg.Name, - pkgTypes, - } - - // In JS we want to write the imports at the top of the file but we do not know what we need to import until we have written everything. We therefore write to a buffer and when everything is done we can write the imports and write the buffer into the writer. 
- var buf bytes.Buffer - w := gen.w - - gen.w = &buf - - err := gen.templates.ExecuteTemplate(gen.w, "package.tmpl", data) - d.Exp.NoError(err) - - for i, t := range pkgTypes { - gen.writeTopLevel(t, i) - } - - for _, t := range gen.pkg.UsingDeclarations { - gen.write(t) - } - - for len(gen.toWrite) > 0 { - t := gen.toWrite[0] - gen.toWrite = gen.toWrite[1:] - gen.write(t) - } - - gen.w = w - gen.WriteHeader() - io.Copy(w, &buf) -} - -func (gen *codeGen) WriteHeader() { - importedJS := make([]string, 0, len(gen.generator.ImportedJS)) - importedJSTypes := make([]string, 0, len(gen.generator.ImportedJSTypes)) - for name := range gen.generator.ImportedJS { - importedJS = append(importedJS, name) - } - for name := range gen.generator.ImportedJSTypes { - if _, ok := gen.generator.ImportedJS[name]; !ok { - importedJSTypes = append(importedJSTypes, name) - } - } - sort.Strings(importedJS) - sort.Strings(importedJSTypes) - - pkgTypes := gen.pkg.Types() - data := struct { - PackageRef ref.Ref - HasTypes bool - Dependencies []ref.Ref - Name string - Types []*types.Type - ImportedJS []string - ImportedJSTypes []string - AliasNames map[ref.Ref]string - }{ - gen.pkg.Package.Ref(), - len(pkgTypes) > 0, - gen.pkg.Dependencies(), - gen.pkg.Name, - pkgTypes, - importedJS, - importedJSTypes, - gen.pkg.AliasNames, - } - - err := gen.templates.ExecuteTemplate(gen.w, "header.tmpl", data) - d.Exp.NoError(err) -} - -func (gen *codeGen) shouldBeWritten(t *types.Type) bool { - if t.IsUnresolved() { - return false - } - if t.Kind() == types.StructKind { - name := gen.generator.UserName(t) - d.Chk.False(gen.written[name], "Multiple definitions of type named %s", name) - return true - } - return !gen.written[gen.generator.UserName(t)] -} - -func (gen *codeGen) writeTopLevel(t *types.Type, ordinal int) { - switch t.Kind() { - case types.StructKind: - gen.writeStruct(t, ordinal) - default: - gen.write(t) - } -} - -// write generates the code for the given type. 
-func (gen *codeGen) write(t *types.Type) { - if !gen.shouldBeWritten(t) { - return - } - k := t.Kind() - switch k { - case types.BlobKind, types.BoolKind, types.NumberKind, types.PackageKind, types.StringKind, types.ValueKind, types.TypeKind: - return - case types.ListKind: - gen.writeList(t) - case types.MapKind: - gen.writeMap(t) - case types.RefKind: - gen.writeRef(t) - case types.SetKind: - gen.writeSet(t) - default: - panic("unreachable") - } -} - -func (gen *codeGen) writeLater(t *types.Type) { - if !gen.shouldBeWritten(t) { - return - } - gen.toWrite = append(gen.toWrite, t) -} - -func (gen *codeGen) writeTemplate(tmpl string, t *types.Type, data interface{}) { - err := gen.templates.ExecuteTemplate(gen.w, tmpl, data) - d.Exp.NoError(err) - gen.written[gen.generator.UserName(t)] = true -} - -func (gen *codeGen) writeStruct(t *types.Type, ordinal int) { - d.Chk.True(ordinal >= 0) - desc := t.Desc.(types.StructDesc) - data := struct { - PackageRef ref.Ref - Name string - Type *types.Type - Ordinal int - Fields []types.Field - Choices []types.Field - HasUnion bool - UnionZeroType *types.Type - }{ - gen.pkg.Package.Ref(), - gen.generator.UserName(t), - t, - ordinal, - desc.Fields, - nil, - len(desc.Union) != 0, - types.NumberType, - } - - if data.HasUnion { - data.Choices = desc.Union - data.UnionZeroType = data.Choices[0].T - } - gen.writeTemplate("struct.tmpl", t, data) - for _, f := range desc.Fields { - gen.writeLater(f.T) - } - if data.HasUnion { - for _, f := range desc.Union { - gen.writeLater(f.T) - } - } -} - -func (gen *codeGen) writeList(t *types.Type) { - elemTypes := t.Desc.(types.CompoundDesc).ElemTypes - data := struct { - PackageRef ref.Ref - Name string - Type *types.Type - ElemType *types.Type - }{ - gen.pkg.Package.Ref(), - gen.generator.UserName(t), - t, - elemTypes[0], - } - gen.writeTemplate("list.tmpl", t, data) - gen.writeLater(elemTypes[0]) -} - -func (gen *codeGen) writeMap(t *types.Type) { - elemTypes := t.Desc.(types.CompoundDesc).ElemTypes - data := struct { - PackageRef ref.Ref - Name string - Type *types.Type - KeyType *types.Type - ValueType *types.Type - }{ - gen.pkg.Package.Ref(), - gen.generator.UserName(t), - t, - elemTypes[0], - elemTypes[1], - } - gen.writeTemplate("map.tmpl", t, data) - gen.writeLater(elemTypes[0]) - gen.writeLater(elemTypes[1]) -} - -func (gen *codeGen) writeRef(t *types.Type) { - elemTypes := t.Desc.(types.CompoundDesc).ElemTypes - data := struct { - PackageRef ref.Ref - Name string - Type *types.Type - ElemType *types.Type - }{ - gen.pkg.Package.Ref(), - gen.generator.UserName(t), - t, - elemTypes[0], - } - gen.writeTemplate("ref.tmpl", t, data) - gen.writeLater(elemTypes[0]) -} - -func (gen *codeGen) writeSet(t *types.Type) { - elemTypes := t.Desc.(types.CompoundDesc).ElemTypes - data := struct { - PackageRef ref.Ref - Name string - Type *types.Type - ElemType *types.Type - }{ - gen.pkg.Package.Ref(), - gen.generator.UserName(t), - t, - elemTypes[0], - } - gen.writeTemplate("set.tmpl", t, data) - gen.writeLater(elemTypes[0]) -} diff --git a/nomdl/codegen/codegen_test.go b/nomdl/codegen/codegen_test.go deleted file mode 100644 index 5ae76c3af3..0000000000 --- a/nomdl/codegen/codegen_test.go +++ /dev/null @@ -1,118 +0,0 @@ -package main - -import ( - "bytes" - "io/ioutil" - "os" - "path/filepath" - "testing" - - "github.com/attic-labs/noms/chunks" - "github.com/attic-labs/noms/d" - "github.com/attic-labs/noms/datas" - "github.com/attic-labs/noms/dataset" - "github.com/attic-labs/noms/nomdl/codegen/code" - 
"github.com/attic-labs/noms/nomdl/pkg" - "github.com/attic-labs/noms/ref" - "github.com/attic-labs/noms/types" - "github.com/stretchr/testify/assert" -) - -func assertOutput(inPath, goldenPath string, t *testing.T) { - assert := assert.New(t) - emptyDS := datas.NewDataStore(chunks.NewMemoryStore()) // Will be DataStore containing imports - - depsDir, err := ioutil.TempDir("", "") - assert.NoError(err) - defer os.RemoveAll(depsDir) - - inFile, err := os.Open(inPath) - assert.NoError(err) - defer inFile.Close() - - goldenFile, err := os.Open(goldenPath) - assert.NoError(err) - defer goldenFile.Close() - goldenBytes, err := ioutil.ReadAll(goldenFile) - d.Chk.NoError(err) - - var buf bytes.Buffer - pkg := pkg.ParseNomDL("gen", inFile, filepath.Dir(inPath), emptyDS) - written := map[string]bool{} - gen := newCodeGen(&buf, getBareFileName(inPath), written, depsMap{}, pkg) - gen.WritePackage() - - bs := buf.Bytes() - assert.Equal(string(goldenBytes), string(bs), "%s did not generate the same string", inPath) -} - -func TestGeneratedFiles(t *testing.T) { - files, err := filepath.Glob("test/*.noms") - d.Chk.NoError(err) - assert.NotEmpty(t, files) - for _, n := range files { - _, file := filepath.Split(n) - if file == "struct_with_imports.noms" { - // We are not writing deps in this test so lookup by ref does not work. - continue - } - if file == "struct_with_list.noms" || file == "struct_with_dup_list.noms" { - // These two files race to write ListOfNumber - continue - } - assertOutput(n, filepath.Join("test", "gen", file+".js"), t) - } -} - -func TestSkipDuplicateTypes(t *testing.T) { - assert := assert.New(t) - dir, err := ioutil.TempDir("", "codegen_test_") - assert.NoError(err) - defer os.RemoveAll(dir) - - leaf1 := types.NewPackage([]*types.Type{ - types.MakeStructType("S1", []types.Field{ - types.Field{"f", types.MakeListType(types.NumberType), false}, - types.Field{"e", types.MakeType(ref.Ref{}, 0), false}, - }, []types.Field{}), - }, []ref.Ref{}) - leaf2 := types.NewPackage([]*types.Type{ - types.MakeStructType("S2", []types.Field{ - types.Field{"f", types.MakeListType(types.NumberType), false}, - }, []types.Field{}), - }, []ref.Ref{}) - - written := map[string]bool{} - tag1 := code.ToTag(leaf1.Ref()) - leaf1Path := filepath.Join(dir, tag1+".js") - generateAndEmit(tag1, leaf1Path, written, depsMap{}, pkg.Parsed{Package: leaf1, Name: "p"}) - - tag2 := code.ToTag(leaf2.Ref()) - leaf2Path := filepath.Join(dir, tag2+".js") - generateAndEmit(tag2, leaf2Path, written, depsMap{}, pkg.Parsed{Package: leaf2, Name: "p"}) - - code, err := ioutil.ReadFile(leaf2Path) - assert.NoError(err) - assert.NotContains(string(code), "type ListOfNumber") -} - -func TestCommitNewPackages(t *testing.T) { - assert := assert.New(t) - ds := datas.NewDataStore(chunks.NewMemoryStore()) - pkgDS := dataset.NewDataset(ds, "packages") - - dir, err := ioutil.TempDir("", "") - assert.NoError(err) - defer os.RemoveAll(dir) - inFile := filepath.Join(dir, "in.noms") - err = ioutil.WriteFile(inFile, []byte("struct Simple{a:Bool}"), 0600) - assert.NoError(err) - - p := parsePackageFile("name", inFile, pkgDS) - localPkgs := refSet{p.Ref(): true} - pkgDS = generate("name", inFile, filepath.Join(dir, "out.js"), dir, map[string]bool{}, p, localPkgs, pkgDS) - s := pkgDS.Head().Get(datas.ValueField).(types.Set) - assert.EqualValues(1, s.Len()) - tr := s.First().(types.Ref).TargetValue(ds).(types.Package).Types()[0] - assert.EqualValues(types.StructKind, tr.Kind()) -} diff --git a/nomdl/codegen/js/enum.tmpl b/nomdl/codegen/js/enum.tmpl 
deleted file mode 100644 index a2a3b864c8..0000000000 --- a/nomdl/codegen/js/enum.tmpl +++ /dev/null @@ -1,3 +0,0 @@ -{{$name := .Name}} -export type {{.Name}} ={{range $index, $id := .Ids}} - {{$index}}{{if isLast $index $.Ids | not}} |{{else}};{{end}} // {{$id}}{{end}} diff --git a/nomdl/codegen/js/header.tmpl b/nomdl/codegen/js/header.tmpl deleted file mode 100644 index abf4bab832..0000000000 --- a/nomdl/codegen/js/header.tmpl +++ /dev/null @@ -1,12 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ -{{if (len .ImportedJS) ge 0}} -import {{"{"}}{{range $name := .ImportedJS}} - {{$name}} as _{{$name}},{{end}} -} from '@attic/noms';{{end}}{{if (len .ImportedJSTypes) ge 0}} -import type {{"{"}}{{range $name := .ImportedJSTypes}} - {{$name}} as _{{$name}},{{end}} -} from '@attic/noms';{{end}}{{if (len .Dependencies) ge 0}}{{$aliasNames := .AliasNames}} -{{range $i, $r := .Dependencies}}import * as {{refToAliasName $r}} from './{{refToJSIdentfierName $r}}.js'; -{{end}}{{end}} diff --git a/nomdl/codegen/js/list.tmpl b/nomdl/codegen/js/list.tmpl deleted file mode 100644 index 9094ed8f32..0000000000 --- a/nomdl/codegen/js/list.tmpl +++ /dev/null @@ -1,4 +0,0 @@ - -export function new{{userType .Type}}(values: Array<{{userTypeJS .ElemType}}>): Promise<{{importJsType "NomsList"}}<{{userTypeJS .ElemType}}>> { - return {{importJS "newList"}}(values, {{importJS "makeListType"}}({{toTypeValueJS .ElemType false 0}})); -} diff --git a/nomdl/codegen/js/map.tmpl b/nomdl/codegen/js/map.tmpl deleted file mode 100644 index 7594bc9989..0000000000 --- a/nomdl/codegen/js/map.tmpl +++ /dev/null @@ -1,4 +0,0 @@ - -export function new{{userType .Type}}(values: Array): Promise<{{importJsType "NomsMap"}}<{{userTypeJS .KeyType}}, {{userTypeJS .ValueType}}>> { - return {{importJS "newMap"}}(values, {{importJS "makeMapType"}}({{toTypeValueJS .KeyType false 0}}, {{toTypeValueJS .ValueType false 0}})); -} diff --git a/nomdl/codegen/js/package.tmpl b/nomdl/codegen/js/package.tmpl deleted file mode 100644 index 2a253647e5..0000000000 --- a/nomdl/codegen/js/package.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -{{if .HasTypes}} -const _pkg = new {{importJS "Package"}}([{{range $i, $t := .Types}} - {{toTypeValueJS $t true 2}},{{end}} -], [{{range $deps := .Dependencies}} - {{importJS "Ref"}}.parse('{{$deps}}'),{{end}} -]); -{{importJS "registerPackage"}}(_pkg);{{range $i, $t := .Types}} -const {{userType $t}}$type = {{importJS "makeType"}}(_pkg.ref, {{$i}}); -const {{userType $t}}$typeDef = _pkg.types[{{$i}}];{{end}} -{{end}} diff --git a/nomdl/codegen/js/ref.tmpl b/nomdl/codegen/js/ref.tmpl deleted file mode 100644 index ea38e9ef4f..0000000000 --- a/nomdl/codegen/js/ref.tmpl +++ /dev/null @@ -1 +0,0 @@ -{{/* Refs in JS needs no codegen */}} \ No newline at end of file diff --git a/nomdl/codegen/js/set.tmpl b/nomdl/codegen/js/set.tmpl deleted file mode 100644 index 4f8d668968..0000000000 --- a/nomdl/codegen/js/set.tmpl +++ /dev/null @@ -1,4 +0,0 @@ - -export function new{{userType .Type}}(values: Array<{{userTypeJS .ElemType}}>): Promise<{{importJsType "NomsSet"}}<{{userTypeJS .ElemType}}>> { - return {{importJS "newSet"}}(values, {{importJS "makeSetType"}}({{toTypeValueJS .ElemType false 0}})); -} diff --git a/nomdl/codegen/js/struct.tmpl b/nomdl/codegen/js/struct.tmpl deleted file mode 100644 index 0eb2a31931..0000000000 --- a/nomdl/codegen/js/struct.tmpl +++ /dev/null @@ -1,14 +0,0 @@ -{{$name := .Name}} -type {{.Name}}$Data = {{"{"}}{{range $field := .Fields}} - {{.Name}}{{if 
.Optional}}?{{end}}: {{userTypeJS .T}};{{end}} -}; - -interface {{.Name}}$Interface extends {{importJsType "Struct"}} { - constructor(data: {{.Name}}$Data): void;{{range $field := .Fields}} - {{.Name}}: {{if .Optional}}?{{end}}{{userTypeJS .T}}; // readonly - set{{title .Name}}(value: {{if .Optional}}?{{end}}{{userTypeJS .T}}): {{$name}}$Interface;{{end}}{{range $field := .Choices}} - {{.Name}}: ?{{userTypeJS .T}}; // readonly - set{{title .Name}}(value: {{userTypeJS .T}}): {{$name}}$Interface;{{end}} -} - -export const {{.Name}}: Class<{{.Name}}$Interface> = {{importJS "createStructClass"}}({{userType .Type}}$type, {{userType .Type}}$typeDef); diff --git a/nomdl/codegen/test/.babelrc b/nomdl/codegen/test/.babelrc deleted file mode 120000 index 2036edcccd..0000000000 --- a/nomdl/codegen/test/.babelrc +++ /dev/null @@ -1 +0,0 @@ -../../../js/.babelrc \ No newline at end of file diff --git a/nomdl/codegen/test/.eslintrc.js b/nomdl/codegen/test/.eslintrc.js deleted file mode 100644 index b7cfbed4a6..0000000000 --- a/nomdl/codegen/test/.eslintrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('@attic/eslintrc'); diff --git a/nomdl/codegen/test/.flowconfig b/nomdl/codegen/test/.flowconfig deleted file mode 120000 index 69e272d83e..0000000000 --- a/nomdl/codegen/test/.flowconfig +++ /dev/null @@ -1 +0,0 @@ -../../../js/.flowconfig \ No newline at end of file diff --git a/nomdl/codegen/test/.gitignore b/nomdl/codegen/test/.gitignore deleted file mode 100644 index 3c3629e647..0000000000 --- a/nomdl/codegen/test/.gitignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/nomdl/codegen/test/clobber.noms b/nomdl/codegen/test/clobber.noms deleted file mode 100644 index 11727ddf14..0000000000 --- a/nomdl/codegen/test/clobber.noms +++ /dev/null @@ -1,2 +0,0 @@ -alias a = import "./clobber_a/a.noms" -alias b = import "./clobber_b/b.noms" diff --git a/nomdl/codegen/test/clobber_a/a.noms b/nomdl/codegen/test/clobber_a/a.noms deleted file mode 100644 index de234c8f4f..0000000000 --- a/nomdl/codegen/test/clobber_a/a.noms +++ /dev/null @@ -1,3 +0,0 @@ -struct A { - A: List> -} diff --git a/nomdl/codegen/test/clobber_b/b.noms b/nomdl/codegen/test/clobber_b/b.noms deleted file mode 100644 index 7fec88086a..0000000000 --- a/nomdl/codegen/test/clobber_b/b.noms +++ /dev/null @@ -1 +0,0 @@ -alias a = import "../clobber_a/a.noms" diff --git a/nomdl/codegen/test/gen/clobber.noms.js b/nomdl/codegen/test/gen/clobber.noms.js deleted file mode 100644 index 8cd474100b..0000000000 --- a/nomdl/codegen/test/gen/clobber.noms.js +++ /dev/null @@ -1,8 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import * as a from './sha1_89fa11c.js'; -import * as b from './sha1_f2ea794.js'; - - diff --git a/nomdl/codegen/test/gen/enum_struct.noms.js b/nomdl/codegen/test/gen/enum_struct.noms.js deleted file mode 100644 index 4c20049c21..0000000000 --- a/nomdl/codegen/test/gen/enum_struct.noms.js +++ /dev/null @@ -1,53 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - createStructClass as _createStructClass, - emptyRef as _emptyRef, - makeEnumType as _makeEnumType, - makeStructType as _makeStructType, - makeType as _makeType, - registerPackage as _registerPackage, -} from '@attic/noms'; -import type { - Struct as _Struct, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeEnumType('Handedness', 'right', 'left', 'switch'), - _makeStructType('EnumStruct', - [ - new _Field('hand', _makeType(_emptyRef, 0), false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const Handedness$type = _makeType(_pkg.ref, 0); -const Handedness$typeDef = _pkg.types[0]; -const EnumStruct$type = _makeType(_pkg.ref, 1); -const EnumStruct$typeDef = _pkg.types[1]; - - -export type Handedness = - 0 | // right - 1 | // left - 2; // switch - -type EnumStruct$Data = { - hand: Handedness; -}; - -interface EnumStruct$Interface extends _Struct { - constructor(data: EnumStruct$Data): void; - hand: Handedness; // readonly - setHand(value: Handedness): EnumStruct$Interface; -} - -export const EnumStruct: Class = _createStructClass(EnumStruct$type, EnumStruct$typeDef); diff --git a/nomdl/codegen/test/gen/list_number.noms.js b/nomdl/codegen/test/gen/list_number.noms.js deleted file mode 100644 index f9e742c27d..0000000000 --- a/nomdl/codegen/test/gen/list_number.noms.js +++ /dev/null @@ -1,18 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - makeListType as _makeListType, - newList as _newList, - numberType as _numberType, -} from '@attic/noms'; -import type { - NomsList as _NomsList, - number as _number, -} from '@attic/noms'; - - -export function newListOfNumber(values: Array<_number>): Promise<_NomsList<_number>> { - return _newList(values, _makeListType(_numberType)); -} diff --git a/nomdl/codegen/test/gen/map.noms.js b/nomdl/codegen/test/gen/map.noms.js deleted file mode 100644 index 4b8cc709e9..0000000000 --- a/nomdl/codegen/test/gen/map.noms.js +++ /dev/null @@ -1,24 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - boolType as _boolType, - makeMapType as _makeMapType, - newMap as _newMap, - stringType as _stringType, - valueType as _valueType, -} from '@attic/noms'; -import type { - NomsMap as _NomsMap, - Value as _Value, -} from '@attic/noms'; - - -export function newMapOfBoolToString(values: Array): Promise<_NomsMap> { - return _newMap(values, _makeMapType(_boolType, _stringType)); -} - -export function newMapOfStringToValue(values: Array): Promise<_NomsMap> { - return _newMap(values, _makeMapType(_stringType, _valueType)); -} diff --git a/nomdl/codegen/test/gen/ref.noms.js b/nomdl/codegen/test/gen/ref.noms.js deleted file mode 100644 index b2eee23a06..0000000000 --- a/nomdl/codegen/test/gen/ref.noms.js +++ /dev/null @@ -1,67 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Kind as _Kind, - Package as _Package, - createStructClass as _createStructClass, - makeCompoundType as _makeCompoundType, - makeListType as _makeListType, - makeSetType as _makeSetType, - makeStructType as _makeStructType, - makeType as _makeType, - newList as _newList, - newSet as _newSet, - numberType as _numberType, - registerPackage as _registerPackage, - stringType as _stringType, -} from '@attic/noms'; -import type { - NomsList as _NomsList, - NomsSet as _NomsSet, - RefValue as _RefValue, - Struct as _Struct, - number as _number, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('StructWithRef', - [ - new _Field('r', _makeCompoundType(_Kind.Ref, _makeCompoundType(_Kind.Set, _numberType)), false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const StructWithRef$type = _makeType(_pkg.ref, 0); -const StructWithRef$typeDef = _pkg.types[0]; - - -type StructWithRef$Data = { - r: _RefValue<_NomsSet<_number>>; -}; - -interface StructWithRef$Interface extends _Struct { - constructor(data: StructWithRef$Data): void; - r: _RefValue<_NomsSet<_number>>; // readonly - setR(value: _RefValue<_NomsSet<_number>>): StructWithRef$Interface; -} - -export const StructWithRef: Class = _createStructClass(StructWithRef$type, StructWithRef$typeDef); - -export function newListOfRefOfNumber(values: Array<_RefValue<_number>>): Promise<_NomsList<_RefValue<_number>>> { - return _newList(values, _makeListType(_makeCompoundType(_Kind.Ref, _numberType))); -} - -export function newListOfString(values: Array): Promise<_NomsList> { - return _newList(values, _makeListType(_stringType)); -} - -export function newSetOfNumber(values: Array<_number>): Promise<_NomsSet<_number>> { - return _newSet(values, _makeSetType(_numberType)); -} diff --git a/nomdl/codegen/test/gen/set.noms.js b/nomdl/codegen/test/gen/set.noms.js deleted file mode 100644 index 61a6861419..0000000000 --- a/nomdl/codegen/test/gen/set.noms.js +++ /dev/null @@ -1,17 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - boolType as _boolType, - makeSetType as _makeSetType, - newSet as _newSet, -} from '@attic/noms'; -import type { - NomsSet as _NomsSet, -} from '@attic/noms'; - - -export function newSetOfBool(values: Array): Promise<_NomsSet> { - return _newSet(values, _makeSetType(_boolType)); -} diff --git a/nomdl/codegen/test/gen/sha1_068bb32.js b/nomdl/codegen/test/gen/sha1_068bb32.js deleted file mode 100644 index 44746785ba..0000000000 --- a/nomdl/codegen/test/gen/sha1_068bb32.js +++ /dev/null @@ -1,49 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - boolType as _boolType, - createStructClass as _createStructClass, - makeStructType as _makeStructType, - makeType as _makeType, - registerPackage as _registerPackage, - stringType as _stringType, -} from '@attic/noms'; -import type { - Struct as _Struct, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('S', - [ - new _Field('s', _stringType, false), - new _Field('b', _boolType, false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const S$type = _makeType(_pkg.ref, 0); -const S$typeDef = _pkg.types[0]; - - -type S$Data = { - s: string; - b: boolean; -}; - -interface S$Interface extends _Struct { - constructor(data: S$Data): void; - s: string; // readonly - setS(value: string): S$Interface; - b: boolean; // readonly - setB(value: boolean): S$Interface; -} - -export const S: Class = _createStructClass(S$type, S$typeDef); diff --git a/nomdl/codegen/test/gen/sha1_6574913.js b/nomdl/codegen/test/gen/sha1_6574913.js deleted file mode 100644 index 8fdd77a639..0000000000 --- a/nomdl/codegen/test/gen/sha1_6574913.js +++ /dev/null @@ -1,70 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - Ref as _Ref, - createStructClass as _createStructClass, - emptyRef as _emptyRef, - makeStructType as _makeStructType, - makeType as _makeType, - registerPackage as _registerPackage, -} from '@attic/noms'; -import type { - Struct as _Struct, -} from '@attic/noms'; -import * as _sha1_068bb32 from './sha1_068bb32.js'; - - -const _pkg = new _Package([ - _makeStructType('D', - [ - new _Field('structField', _makeType(_Ref.parse('sha1-068bb32c733bd940a0d758715bf05082f4c12fcb'), 0), false), - ], - [ - - ] - ), - _makeStructType('DUser', - [ - new _Field('Dfield', _makeType(_emptyRef, 0), false), - ], - [ - - ] - ), -], [ - _Ref.parse('sha1-068bb32c733bd940a0d758715bf05082f4c12fcb'), -]); -_registerPackage(_pkg); -const D$type = _makeType(_pkg.ref, 0); -const D$typeDef = _pkg.types[0]; -const DUser$type = _makeType(_pkg.ref, 1); -const DUser$typeDef = _pkg.types[1]; - - -type D$Data = { - structField: _sha1_068bb32.S; -}; - -interface D$Interface extends _Struct { - constructor(data: D$Data): void; - structField: _sha1_068bb32.S; // readonly - setStructField(value: _sha1_068bb32.S): D$Interface; -} - -export const D: Class = _createStructClass(D$type, D$typeDef); - -type DUser$Data = { - Dfield: D; -}; - -interface DUser$Interface extends _Struct { - constructor(data: DUser$Data): void; - Dfield: D; // readonly - setDfield(value: D): DUser$Interface; -} - -export const DUser: Class = _createStructClass(DUser$type, DUser$typeDef); diff --git a/nomdl/codegen/test/gen/sha1_89fa11c.js b/nomdl/codegen/test/gen/sha1_89fa11c.js deleted file mode 100644 index 407ada68ff..0000000000 --- a/nomdl/codegen/test/gen/sha1_89fa11c.js +++ /dev/null @@ -1,58 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Kind as _Kind, - Package as _Package, - blobType as _blobType, - createStructClass as _createStructClass, - makeCompoundType as _makeCompoundType, - makeListType as _makeListType, - makeStructType as _makeStructType, - makeType as _makeType, - newList as _newList, - registerPackage as _registerPackage, -} from '@attic/noms'; -import type { - Blob as _Blob, - NomsList as _NomsList, - Struct as _Struct, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('A', - [ - new _Field('A', _makeCompoundType(_Kind.List, _makeCompoundType(_Kind.List, _blobType)), false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const A$type = _makeType(_pkg.ref, 0); -const A$typeDef = _pkg.types[0]; - - -type A$Data = { - A: _NomsList<_NomsList<_Blob>>; -}; - -interface A$Interface extends _Struct { - constructor(data: A$Data): void; - A: _NomsList<_NomsList<_Blob>>; // readonly - setA(value: _NomsList<_NomsList<_Blob>>): A$Interface; -} - -export const A: Class = _createStructClass(A$type, A$typeDef); - -export function newListOfListOfBlob(values: Array<_NomsList<_Blob>>): Promise<_NomsList<_NomsList<_Blob>>> { - return _newList(values, _makeListType(_makeCompoundType(_Kind.List, _blobType))); -} - -export function newListOfBlob(values: Array<_Blob>): Promise<_NomsList<_Blob>> { - return _newList(values, _makeListType(_blobType)); -} diff --git a/nomdl/codegen/test/gen/sha1_f2ea794.js b/nomdl/codegen/test/gen/sha1_f2ea794.js deleted file mode 100644 index b783708437..0000000000 --- a/nomdl/codegen/test/gen/sha1_f2ea794.js +++ /dev/null @@ -1,7 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import * as _sha1_9c6e87c from './sha1_89fa11c.js'; - - diff --git a/nomdl/codegen/test/gen/struct.noms.js b/nomdl/codegen/test/gen/struct.noms.js deleted file mode 100644 index a21f6f7817..0000000000 --- a/nomdl/codegen/test/gen/struct.noms.js +++ /dev/null @@ -1,56 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - boolType as _boolType, - createStructClass as _createStructClass, - makeListType as _makeListType, - makeStructType as _makeStructType, - makeType as _makeType, - newList as _newList, - registerPackage as _registerPackage, - stringType as _stringType, -} from '@attic/noms'; -import type { - NomsList as _NomsList, - Struct as _Struct, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('Struct', - [ - new _Field('s', _stringType, false), - new _Field('b', _boolType, false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const Struct$type = _makeType(_pkg.ref, 0); -const Struct$typeDef = _pkg.types[0]; - - -type Struct$Data = { - s: string; - b: boolean; -}; - -interface Struct$Interface extends _Struct { - constructor(data: Struct$Data): void; - s: string; // readonly - setS(value: string): Struct$Interface; - b: boolean; // readonly - setB(value: boolean): Struct$Interface; -} - -export const Struct: Class = _createStructClass(Struct$type, Struct$typeDef); - -export function newListOfStruct(values: Array): Promise<_NomsList> { - return _newList(values, _makeListType(_makeType(_pkg.ref, 0))); -} diff --git a/nomdl/codegen/test/gen/struct_optional.noms.js b/nomdl/codegen/test/gen/struct_optional.noms.js deleted file mode 100644 index 1019a9f2f7..0000000000 --- a/nomdl/codegen/test/gen/struct_optional.noms.js +++ /dev/null @@ -1,49 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - boolType as _boolType, - createStructClass as _createStructClass, - makeStructType as _makeStructType, - makeType as _makeType, - registerPackage as _registerPackage, - stringType as _stringType, -} from '@attic/noms'; -import type { - Struct as _Struct, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('OptionalStruct', - [ - new _Field('s', _stringType, true), - new _Field('b', _boolType, true), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const OptionalStruct$type = _makeType(_pkg.ref, 0); -const OptionalStruct$typeDef = _pkg.types[0]; - - -type OptionalStruct$Data = { - s?: string; - b?: boolean; -}; - -interface OptionalStruct$Interface extends _Struct { - constructor(data: OptionalStruct$Data): void; - s: ?string; // readonly - setS(value: ?string): OptionalStruct$Interface; - b: ?boolean; // readonly - setB(value: ?boolean): OptionalStruct$Interface; -} - -export const OptionalStruct: Class = _createStructClass(OptionalStruct$type, OptionalStruct$typeDef); diff --git a/nomdl/codegen/test/gen/struct_primitives.noms.js b/nomdl/codegen/test/gen/struct_primitives.noms.js deleted file mode 100644 index 9112520d1b..0000000000 --- a/nomdl/codegen/test/gen/struct_primitives.noms.js +++ /dev/null @@ -1,67 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - blobType as _blobType, - boolType as _boolType, - createStructClass as _createStructClass, - makeStructType as _makeStructType, - makeType as _makeType, - numberType as _numberType, - registerPackage as _registerPackage, - stringType as _stringType, - valueType as _valueType, -} from '@attic/noms'; -import type { - Blob as _Blob, - Struct as _Struct, - Value as _Value, - number as _number, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('StructPrimitives', - [ - new _Field('number', _numberType, false), - new _Field('bool', _boolType, false), - new _Field('string', _stringType, false), - new _Field('blob', _blobType, false), - new _Field('value', _valueType, false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const StructPrimitives$type = _makeType(_pkg.ref, 0); -const StructPrimitives$typeDef = _pkg.types[0]; - - -type StructPrimitives$Data = { - number: _number; - bool: boolean; - string: string; - blob: _Blob; - value: _Value; -}; - -interface StructPrimitives$Interface extends _Struct { - constructor(data: StructPrimitives$Data): void; - number: _number; // readonly - setNumber(value: _number): StructPrimitives$Interface; - bool: boolean; // readonly - setBool(value: boolean): StructPrimitives$Interface; - string: string; // readonly - setString(value: string): StructPrimitives$Interface; - blob: _Blob; // readonly - setBlob(value: _Blob): StructPrimitives$Interface; - value: _Value; // readonly - setValue(value: _Value): StructPrimitives$Interface; -} - -export const StructPrimitives: Class = _createStructClass(StructPrimitives$type, StructPrimitives$typeDef); diff --git a/nomdl/codegen/test/gen/struct_recursive.noms.js b/nomdl/codegen/test/gen/struct_recursive.noms.js deleted file mode 100644 index 6c34804f38..0000000000 --- a/nomdl/codegen/test/gen/struct_recursive.noms.js +++ /dev/null @@ -1,53 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - Field as _Field, - Kind as _Kind, - Package as _Package, - createStructClass as _createStructClass, - emptyRef as _emptyRef, - makeCompoundType as _makeCompoundType, - makeListType as _makeListType, - makeStructType as _makeStructType, - makeType as _makeType, - newList as _newList, - registerPackage as _registerPackage, -} from '@attic/noms'; -import type { - NomsList as _NomsList, - Struct as _Struct, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('Tree', - [ - new _Field('children', _makeCompoundType(_Kind.List, _makeType(_emptyRef, 0)), false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const Tree$type = _makeType(_pkg.ref, 0); -const Tree$typeDef = _pkg.types[0]; - - -type Tree$Data = { - children: _NomsList; -}; - -interface Tree$Interface extends _Struct { - constructor(data: Tree$Data): void; - children: _NomsList; // readonly - setChildren(value: _NomsList): Tree$Interface; -} - -export const Tree: Class = _createStructClass(Tree$type, Tree$typeDef); - -export function newListOfTree(values: Array): Promise<_NomsList> { - return _newList(values, _makeListType(_makeType(_pkg.ref, 0))); -} diff --git a/nomdl/codegen/test/gen/struct_with_dup_list.noms.js b/nomdl/codegen/test/gen/struct_with_dup_list.noms.js deleted file mode 100644 index 5fd1ed9d39..0000000000 --- a/nomdl/codegen/test/gen/struct_with_dup_list.noms.js +++ /dev/null @@ -1,48 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Kind as _Kind, - Package as _Package, - createStructClass as _createStructClass, - makeCompoundType as _makeCompoundType, - makeStructType as _makeStructType, - makeType as _makeType, - numberType as _numberType, - registerPackage as _registerPackage, -} from '@attic/noms'; -import type { - NomsList as _NomsList, - Struct as _Struct, - number as _number, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('StructWithDupList', - [ - new _Field('l', _makeCompoundType(_Kind.List, _numberType), false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const StructWithDupList$type = _makeType(_pkg.ref, 0); -const StructWithDupList$typeDef = _pkg.types[0]; - - -type StructWithDupList$Data = { - l: _NomsList<_number>; -}; - -interface StructWithDupList$Interface extends _Struct { - constructor(data: StructWithDupList$Data): void; - l: _NomsList<_number>; // readonly - setL(value: _NomsList<_number>): StructWithDupList$Interface; -} - -export const StructWithDupList: Class = _createStructClass(StructWithDupList$type, StructWithDupList$typeDef); diff --git a/nomdl/codegen/test/gen/struct_with_imports.noms.js b/nomdl/codegen/test/gen/struct_with_imports.noms.js deleted file mode 100644 index e992ab5cca..0000000000 --- a/nomdl/codegen/test/gen/struct_with_imports.noms.js +++ /dev/null @@ -1,54 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - Ref as _Ref, - createStructClass as _createStructClass, - makeListType as _makeListType, - makeStructType as _makeStructType, - makeType as _makeType, - newList as _newList, - registerPackage as _registerPackage, -} from '@attic/noms'; -import type { - NomsList as _NomsList, - Struct as _Struct, -} from '@attic/noms'; -import * as dep from './sha1_6574913.js'; - - -const _pkg = new _Package([ - _makeStructType('ImportUser', - [ - new _Field('importedStruct', _makeType(_Ref.parse('sha1-65749135e74064eca6e7a34f04c95ac0768fa788'), 0), false), - ], - [ - - ] - ), -], [ - _Ref.parse('sha1-65749135e74064eca6e7a34f04c95ac0768fa788'), -]); -_registerPackage(_pkg); -const ImportUser$type = _makeType(_pkg.ref, 0); -const ImportUser$typeDef = _pkg.types[0]; - - -type ImportUser$Data = { - importedStruct: dep.D; -}; - -interface ImportUser$Interface extends _Struct { - constructor(data: ImportUser$Data): void; - importedStruct: dep.D; // readonly - setImportedStruct(value: dep.D): ImportUser$Interface; -} - -export const ImportUser: Class = _createStructClass(ImportUser$type, ImportUser$typeDef); - -export function newListOfD(values: Array): Promise<_NomsList> { - return _newList(values, _makeListType(_makeType(_Ref.parse('sha1-65749135e74064eca6e7a34f04c95ac0768fa788'), 0))); -} diff --git a/nomdl/codegen/test/gen/struct_with_list.noms.js b/nomdl/codegen/test/gen/struct_with_list.noms.js deleted file mode 100644 index 723e18ed94..0000000000 --- a/nomdl/codegen/test/gen/struct_with_list.noms.js +++ /dev/null @@ -1,68 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Kind as _Kind, - Package as _Package, - boolType as _boolType, - createStructClass as _createStructClass, - makeCompoundType as _makeCompoundType, - makeListType as _makeListType, - makeStructType as _makeStructType, - makeType as _makeType, - newList as _newList, - numberType as _numberType, - registerPackage as _registerPackage, - stringType as _stringType, -} from '@attic/noms'; -import type { - NomsList as _NomsList, - Struct as _Struct, - number as _number, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('StructWithList', - [ - new _Field('l', _makeCompoundType(_Kind.List, _number), false), - new _Field('b', _boolType, false), - new _Field('s', _stringType, false), - new _Field('i', _number, false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const StructWithList$type = _makeType(_pkg.ref, 0); -const StructWithList$typeDef = _pkg.types[0]; - - -type StructWithList$Data = { - l: _NomsList<_number>; - b: boolean; - s: string; - i: _number; -}; - -interface StructWithList$Interface extends _Struct { - constructor(data: StructWithList$Data): void; - l: _NomsList<_number>; // readonly - setL(value: _NomsList<_number>): StructWithList$Interface; - b: boolean; // readonly - setB(value: boolean): StructWithList$Interface; - s: string; // readonly - setS(value: string): StructWithList$Interface; - i: _int64; // readonly - setI(value: _number): StructWithList$Interface; -} - -export const StructWithList: Class = _createStructClass(StructWithList$type, StructWithList$typeDef); - -export function newListOfNumber(values: Array<_number>): Promise<_NomsList<_number>> { - return _newList(values, _makeListType(_numberType)); -} diff --git a/nomdl/codegen/test/gen/struct_with_union_field.noms.js b/nomdl/codegen/test/gen/struct_with_union_field.noms.js deleted file mode 100644 index 5340d6e0c1..0000000000 --- a/nomdl/codegen/test/gen/struct_with_union_field.noms.js +++ /dev/null @@ -1,73 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Kind as _Kind, - Package as _Package, - blobType as _blobType, - createStructClass as _createStructClass, - makeCompoundType as _makeCompoundType, - makeSetType as _makeSetType, - makeStructType as _makeStructType, - makeType as _makeType, - newSet as _newSet, - numberType as _numberType, - registerPackage as _registerPackage, - stringType as _stringType, - valueType as _valueType, -} from '@attic/noms'; -import type { - Blob as _Blob, - NomsSet as _NomsSet, - Struct as _Struct, - Value as _Value, - number as _number, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('StructWithUnionField', - [ - new _Field('a', _numberType, false), - ], - [ - new _Field('b', _numberType, false), - new _Field('c', _stringType, false), - new _Field('d', _blobType, false), - new _Field('e', _valueType, false), - new _Field('f', _makeCompoundType(_Kind.Set, _numberType), false), - ] - ), -], [ -]); -_registerPackage(_pkg); -const StructWithUnionField$type = _makeType(_pkg.ref, 0); -const StructWithUnionField$typeDef = _pkg.types[0]; - - -type StructWithUnionField$Data = { - a: _number; -}; - -interface StructWithUnionField$Interface extends _Struct { - constructor(data: StructWithUnionField$Data): void; - a: _number; // readonly - setA(value: _number): StructWithUnionField$Interface; - b: ?_number; // readonly - setB(value: _number): StructWithUnionField$Interface; - c: ?string; // readonly - setC(value: string): StructWithUnionField$Interface; - d: ?_Blob; // readonly - setD(value: _Blob): StructWithUnionField$Interface; - e: ?_Value; // readonly - setE(value: _Value): StructWithUnionField$Interface; - f: ?_NomsSet<_number>; // readonly - setF(value: _NomsSet<_number>): StructWithUnionField$Interface; -} - -export const StructWithUnionField: Class = _createStructClass(StructWithUnionField$type, StructWithUnionField$typeDef); - -export function newSetOfNumber(values: Array<_number>): Promise<_NomsSet<_number>> { - return _newSet(values, _makeSetType(_numberType)); -} diff --git a/nomdl/codegen/test/gen/struct_with_unions.noms.js b/nomdl/codegen/test/gen/struct_with_unions.noms.js deleted file mode 100644 index 8c8e80208e..0000000000 --- a/nomdl/codegen/test/gen/struct_with_unions.noms.js +++ /dev/null @@ -1,99 +0,0 @@ -// This file was generated by nomdl/codegen. 
-// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - createStructClass as _createStructClass, - emptyRef as _emptyRef, - makeStructType as _makeStructType, - makeType as _makeType, - numberType as _numberType, - registerPackage as _registerPackage, - stringType as _stringType, -} from '@attic/noms'; -import type { - Struct as _Struct, - number as _number, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('StructWithUnions', - [ - new _Field('a', _makeType(_emptyRef, 1), false), - new _Field('d', _makeType(_emptyRef, 2), false), - ], - [ - - ] - ), - _makeStructType('', - [ - - ], - [ - new _Field('b', _numberType, false), - new _Field('c', _stringType, false), - ] - ), - _makeStructType('', - [ - - ], - [ - new _Field('e', _numberType, false), - new _Field('f', _stringType, false), - ] - ), -], [ -]); -_registerPackage(_pkg); -const StructWithUnions$type = _makeType(_pkg.ref, 0); -const StructWithUnions$typeDef = _pkg.types[0]; -const __unionOfBOfNumberAndCOfString$type = _makeType(_pkg.ref, 1); -const __unionOfBOfNumberAndCOfString$typeDef = _pkg.types[1]; -const __unionOfEOfNumberAndFOfString$type = _makeType(_pkg.ref, 2); -const __unionOfEOfNumberAndFOfString$typeDef = _pkg.types[2]; - - -type StructWithUnions$Data = { - a: __unionOfBOfNumberAndCOfString; - d: __unionOfEOfNumberAndFOfString; -}; - -interface StructWithUnions$Interface extends _Struct { - constructor(data: StructWithUnions$Data): void; - a: __unionOfBOfNumberAndCOfString; // readonly - setA(value: __unionOfBOfNumberAndCOfString): StructWithUnions$Interface; - d: __unionOfEOfNumberAndFOfString; // readonly - setD(value: __unionOfEOfNumberAndFOfString): StructWithUnions$Interface; -} - -export const StructWithUnions: Class = _createStructClass(StructWithUnions$type, StructWithUnions$typeDef); - -type __unionOfBOfNumberAndCOfString$Data = { -}; - -interface __unionOfBOfNumberAndCOfString$Interface extends _Struct { - constructor(data: __unionOfBOfNumberAndCOfString$Data): void; - b: ?_number; // readonly - setB(value: _number): __unionOfBOfNumberAndCOfString$Interface; - c: ?string; // readonly - setC(value: string): __unionOfBOfNumberAndCOfString$Interface; -} - -export const __unionOfBOfNumberAndCOfString: Class<__unionOfBOfNumberAndCOfString$Interface> = _createStructClass(__unionOfBOfNumberAndCOfString$type, __unionOfBOfNumberAndCOfString$typeDef); - -type __unionOfEOfNumberAndFOfString$Data = { -}; - -interface __unionOfEOfNumberAndFOfString$Interface extends _Struct { - constructor(data: __unionOfEOfNumberAndFOfString$Data): void; - e: ?_number; // readonly - setE(value: _number): __unionOfEOfNumberAndFOfString$Interface; - f: ?string; // readonly - setF(value: string): __unionOfEOfNumberAndFOfString$Interface; -} - -export const __unionOfEOfNumberAndFOfString: Class<__unionOfEOfNumberAndFOfString$Interface> = _createStructClass(__unionOfEOfNumberAndFOfString$type, __unionOfEOfNumberAndFOfString$typeDef); diff --git a/nomdl/codegen/test/list-number-test.js b/nomdl/codegen/test/list-number-test.js deleted file mode 100644 index 986d351318..0000000000 --- a/nomdl/codegen/test/list-number-test.js +++ /dev/null @@ -1,14 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; -import {newListOfNumber} from './gen/list_number.noms.js'; -import {makeListType, numberType} from '@attic/noms'; - -suite('list_number.noms', () => { - test('constructor', async () => { - const l = await newListOfNumber([0, 1, 2, 3]); - assert.equal(l.length, 4); 
- assert.isTrue(l.type.equals(makeListType(numberType))); - }); -}); diff --git a/nomdl/codegen/test/list_number.noms b/nomdl/codegen/test/list_number.noms deleted file mode 100644 index 3377e28708..0000000000 --- a/nomdl/codegen/test/list_number.noms +++ /dev/null @@ -1 +0,0 @@ -using List diff --git a/nomdl/codegen/test/map-test.js b/nomdl/codegen/test/map-test.js deleted file mode 100644 index 7eac224cca..0000000000 --- a/nomdl/codegen/test/map-test.js +++ /dev/null @@ -1,13 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; -import {newMapOfBoolToString} from './gen/map.noms.js'; -import {makeMapType, boolType, stringType} from '@attic/noms'; - -suite('map.noms', () => { - test('constructor', async () => { - const s = await newMapOfBoolToString([true, 'yes', false, 'no']); - assert.isTrue(s.type.equals(makeMapType(boolType, stringType))); - }); -}); diff --git a/nomdl/codegen/test/map.noms b/nomdl/codegen/test/map.noms deleted file mode 100644 index e08605230b..0000000000 --- a/nomdl/codegen/test/map.noms +++ /dev/null @@ -1,2 +0,0 @@ -using Map -using Map diff --git a/nomdl/codegen/test/package.json b/nomdl/codegen/test/package.json deleted file mode 100644 index 19f32cf639..0000000000 --- a/nomdl/codegen/test/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "nomdl-codegen-test", - "version": "0.0.1", - "description": "Tests for generated js code", - "main": "index.js", - "devDependencies": { - "@attic/eslintrc": "^1.0.0", - "@attic/noms": "^13.0.0", - "@attic/webpack-config": "^2.1.0", - "babel-cli": "6.6.5", - "babel-core": "6.7.2", - "babel-generator": "6.7.2", - "babel-plugin-syntax-async-functions": "6.5.0", - "babel-plugin-syntax-flow": "6.5.0", - "babel-plugin-transform-async-to-generator": "6.7.0", - "babel-plugin-transform-class-properties": "6.6.0", - "babel-plugin-transform-es2015-destructuring": "6.6.5", - "babel-plugin-transform-es2015-modules-commonjs": "6.7.0", - "babel-plugin-transform-es2015-parameters": "6.7.0", - "babel-plugin-transform-runtime": "^6.6.0", - "babel-preset-es2015": "6.6.0", - "babel-preset-react": "6.5.0", - "chai": "3.5.0", - "chokidar": "1.4.3", - "commander": "2.9.0", - "flow-bin": "0.23.0", - "fs-extra": "0.26.7", - "mocha": "2.4.5" - }, - "scripts": { - "pretest": "eslint . 
&& flow .", - "test": "mocha --ui tdd --reporter dot --compilers js:babel-core/register ./*-test.js" - } -} diff --git a/nomdl/codegen/test/ref-test.js b/nomdl/codegen/test/ref-test.js deleted file mode 100644 index 02250ffb94..0000000000 --- a/nomdl/codegen/test/ref-test.js +++ /dev/null @@ -1,21 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; - -import {newSet, makeSetType, numberType, DataStore, MemoryStore} from '@attic/noms'; -import type {NomsSet} from '@attic/noms'; -import {StructWithRef} from './gen/ref.noms.js'; - -suite('ref.noms', () => { - test('constructor', async () => { - const ds = new DataStore(new MemoryStore()); - const set: NomsSet = await newSet([0, 1, 2, 3], makeSetType(numberType)); - const r = ds.writeValue(set); - const struct = new StructWithRef({r}); - - assert.isTrue(struct.r.equals(r)); - const set2 = await ds.readValue(r.targetRef); - assert.isTrue(set.equals(set2)); - }); -}); diff --git a/nomdl/codegen/test/ref.noms b/nomdl/codegen/test/ref.noms deleted file mode 100644 index 59da29ae91..0000000000 --- a/nomdl/codegen/test/ref.noms +++ /dev/null @@ -1,6 +0,0 @@ -using Ref> -using List> - -struct StructWithRef { - r: Ref> -} diff --git a/nomdl/codegen/test/rungen.go b/nomdl/codegen/test/rungen.go deleted file mode 100644 index 80a22d5849..0000000000 --- a/nomdl/codegen/test/rungen.go +++ /dev/null @@ -1,9 +0,0 @@ -package test - -//go:generate rm -rf /tmp/depGenTest - -//go:generate go run ../codegen.go -ldb=/tmp/depGenTest -package-ds=testDeps -in=../testDeps/leafDep/leafDep.noms -out-dir=../testDeps/leafDep - -//go:generate go run ../codegen.go -out-dir=gen -ldb=/tmp/depGenTest -package-ds=testDeps - -//go:generate rm -rf /tmp/depGenTest diff --git a/nomdl/codegen/test/set-test.js b/nomdl/codegen/test/set-test.js deleted file mode 100644 index 9816244945..0000000000 --- a/nomdl/codegen/test/set-test.js +++ /dev/null @@ -1,13 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; -import {newSetOfBool} from './gen/set.noms.js'; -import {makeSetType, boolType} from '@attic/noms'; - -suite('set.noms', () => { - test('constructor', async () => { - const s = await newSetOfBool([true]); - assert.isTrue(s.type.equals(makeSetType(boolType))); - }); -}); diff --git a/nomdl/codegen/test/set.noms b/nomdl/codegen/test/set.noms deleted file mode 100644 index 1b9d757620..0000000000 --- a/nomdl/codegen/test/set.noms +++ /dev/null @@ -1 +0,0 @@ -using Set diff --git a/nomdl/codegen/test/struct-optional-test.js b/nomdl/codegen/test/struct-optional-test.js deleted file mode 100644 index 6dd31b5bd9..0000000000 --- a/nomdl/codegen/test/struct-optional-test.js +++ /dev/null @@ -1,25 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; - -import {OptionalStruct} from './gen/struct_optional.noms.js'; - -suite('struct_optional.noms', () => { - test('constructor', async () => { - const os = new OptionalStruct({}); - assert.isUndefined(os.s); - assert.isUndefined(os.b); - - const os2 = os.setS('hi'); - assert.equal(os2.s, 'hi'); - assert.isUndefined(os2.b); - - const os3 = os2.setB(true); - assert.equal(os3.s, 'hi'); - assert.equal(os3.b, true); - - const os4 = os2.setB(undefined).setS(undefined); - assert.isTrue(os4.equals(os)); - }); -}); diff --git a/nomdl/codegen/test/struct-primitives-test.js b/nomdl/codegen/test/struct-primitives-test.js deleted file mode 100644 index 071974bfae..0000000000 --- a/nomdl/codegen/test/struct-primitives-test.js +++ /dev/null @@ -1,40 +0,0 @@ -// 
@flow - -import {assert} from 'chai'; //eslint-disable-line -import {suite, test} from 'mocha'; - -import {newBlob} from '@attic/noms'; -import {StructPrimitives} from './gen/struct_primitives.noms.js'; - -suite('struct-primitives.noms', () => { - test('constructor', async () => { - const s: StructPrimitives = new StructPrimitives({ //eslint-disable-line - number: 9, - bool: true, - string: 'hi', - blob: await newBlob(new Uint8Array([0, 1, 2, 3])), - value: 123, - }); - - let s2; - assert.equal(s.number, 9); - s2 = s.setNumber(99); - assert.equal(s2.number, 99); - - assert.equal(s.bool, true); - s2 = s.setBool(false); - assert.equal(s2.bool, false); - - assert.equal(s.string, 'hi'); - s2 = s.setString('bye'); - assert.equal(s2.string, 'bye'); - - assert.isTrue(s.blob.equals(await newBlob(new Uint8Array([0, 1, 2, 3])))); - s2 = s.setBlob(await newBlob(new Uint8Array([4, 5, 6, 7]))); - assert.isTrue(s2.blob.equals(await newBlob(new Uint8Array([4, 5, 6, 7])))); - - assert.equal(s.value, 123); - s2 = s.setValue('x'); - assert.equal(s2.value, 'x'); - }); -}); diff --git a/nomdl/codegen/test/struct-recursive-test.js b/nomdl/codegen/test/struct-recursive-test.js deleted file mode 100644 index 632ea7e292..0000000000 --- a/nomdl/codegen/test/struct-recursive-test.js +++ /dev/null @@ -1,24 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; -import {Tree, newListOfTree} from './gen/struct_recursive.noms.js'; -import {newList, makeListType} from '@attic/noms'; - -suite('struct_recursive.noms', () => { - test('constructor', async () => { - const t: Tree = new Tree({children: await newListOfTree([ - new Tree({children: await newListOfTree([])}), - new Tree({children: await newListOfTree([])}), - ])}); - assert.equal(t.children.length, 2); - - const listOfTreeType = makeListType(t.type); - const t2: Tree = new Tree({children: await newList([ - new Tree({children: await newList([], listOfTreeType)}), - new Tree({children: await newList([], listOfTreeType)}), - ], listOfTreeType)}); - - assert.isTrue(t.equals(t2)); - }); -}); diff --git a/nomdl/codegen/test/struct-test.js b/nomdl/codegen/test/struct-test.js deleted file mode 100644 index a403778141..0000000000 --- a/nomdl/codegen/test/struct-test.js +++ /dev/null @@ -1,20 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; - -import {Kind} from '@attic/noms'; -import {Struct} from './gen/struct.noms.js'; - -suite('struct.noms', () => { - test('constructor', () => { - const s: Struct = new Struct({s: 'hi', b: true}); - assert.equal(s.s, 'hi'); - assert.equal(s.b, true); - }); - - test('type', () => { - const s: Struct = new Struct({s: 'hi', b: true}); - assert.equal(s.type.kind, Kind.Unresolved); - }); -}); diff --git a/nomdl/codegen/test/struct-with-union-field-test.js b/nomdl/codegen/test/struct-with-union-field-test.js deleted file mode 100644 index 8b4bf00f03..0000000000 --- a/nomdl/codegen/test/struct-with-union-field-test.js +++ /dev/null @@ -1,30 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; - -import {StructWithUnionField} from './gen/struct_with_union_field.noms.js'; - -suite('struct_optional.noms', () => { - test('constructor', async () => { - const swuf = new StructWithUnionField({a: 1, b: 2}); - assert.equal(swuf.a, 1); - assert.equal(swuf.b, 2); - assert.isUndefined(swuf.c); - assert.isUndefined(swuf.d); - assert.isUndefined(swuf.e); - assert.isUndefined(swuf.f); - - const swuf2 = swuf.setC('hi'); - assert.equal(swuf2.a, 1); - 
assert.isUndefined(swuf2.b); - assert.equal(swuf2.c, 'hi'); - assert.isUndefined(swuf2.d); - assert.isUndefined(swuf2.e); - assert.isUndefined(swuf2.f); - - assert.throws(() => { - swuf.setC(undefined); - }); - }); -}); diff --git a/nomdl/codegen/test/struct-with-unions-test.js b/nomdl/codegen/test/struct-with-unions-test.js deleted file mode 100644 index 28df49747b..0000000000 --- a/nomdl/codegen/test/struct-with-unions-test.js +++ /dev/null @@ -1,29 +0,0 @@ -// @flow - -import {assert} from 'chai'; -import {suite, test} from 'mocha'; - -import { - StructWithUnions, - __unionOfBOfNumberAndCOfString, - __unionOfEOfNumberAndFOfString, - } from './gen/struct_with_unions.noms.js'; - -suite('struct_optional.noms', () => { - test('constructor', async () => { - // TODO: This needs to be cleaner. - const swu = new StructWithUnions({ - a: new __unionOfBOfNumberAndCOfString({b: 1}), - d: new __unionOfEOfNumberAndFOfString({f:'hi'}), - }); - assert.equal(swu.a.b, 1); - assert.equal(swu.d.f, 'hi'); - - const swu2 = swu.setA(swu.a.setC('bye')); - const swu3 = new StructWithUnions({ - a: new __unionOfBOfNumberAndCOfString({c: 'bye'}), - d: new __unionOfEOfNumberAndFOfString({f:'hi'}), - }); - assert.isTrue(swu2.equals(swu3)); - }); -}); diff --git a/nomdl/codegen/test/struct.noms b/nomdl/codegen/test/struct.noms deleted file mode 100644 index 152a88d064..0000000000 --- a/nomdl/codegen/test/struct.noms +++ /dev/null @@ -1,6 +0,0 @@ -struct Struct { - s: String - b: Bool -} - -using List diff --git a/nomdl/codegen/test/struct_optional.noms b/nomdl/codegen/test/struct_optional.noms deleted file mode 100644 index 8149187522..0000000000 --- a/nomdl/codegen/test/struct_optional.noms +++ /dev/null @@ -1,4 +0,0 @@ -struct OptionalStruct { - s: optional String - b: optional Bool -} diff --git a/nomdl/codegen/test/struct_primitives.noms b/nomdl/codegen/test/struct_primitives.noms deleted file mode 100644 index 194f3782fb..0000000000 --- a/nomdl/codegen/test/struct_primitives.noms +++ /dev/null @@ -1,7 +0,0 @@ -struct StructPrimitives { - number: Number - bool: Bool - string: String - blob: Blob - value: Value -} diff --git a/nomdl/codegen/test/struct_recursive.noms b/nomdl/codegen/test/struct_recursive.noms deleted file mode 100644 index 85ff5e4c9a..0000000000 --- a/nomdl/codegen/test/struct_recursive.noms +++ /dev/null @@ -1,3 +0,0 @@ -struct Tree { - children: List -} diff --git a/nomdl/codegen/test/struct_with_dup_list.noms b/nomdl/codegen/test/struct_with_dup_list.noms deleted file mode 100644 index cf0975fc04..0000000000 --- a/nomdl/codegen/test/struct_with_dup_list.noms +++ /dev/null @@ -1,3 +0,0 @@ -struct StructWithDupList { - l: List -} diff --git a/nomdl/codegen/test/struct_with_imports.noms b/nomdl/codegen/test/struct_with_imports.noms deleted file mode 100644 index a114533b92..0000000000 --- a/nomdl/codegen/test/struct_with_imports.noms +++ /dev/null @@ -1,7 +0,0 @@ -alias dep = import "../testDeps/dep.noms" - -struct ImportUser { - importedStruct :dep.D -} - -using List diff --git a/nomdl/codegen/test/struct_with_list.noms b/nomdl/codegen/test/struct_with_list.noms deleted file mode 100644 index 8cd3db7ec9..0000000000 --- a/nomdl/codegen/test/struct_with_list.noms +++ /dev/null @@ -1,6 +0,0 @@ -struct StructWithList { - l: List - b: Bool - s: String - i: Number -} diff --git a/nomdl/codegen/test/struct_with_union_field.noms b/nomdl/codegen/test/struct_with_union_field.noms deleted file mode 100644 index 591638e621..0000000000 --- a/nomdl/codegen/test/struct_with_union_field.noms +++ /dev/null @@ 
-1,10 +0,0 @@ -struct StructWithUnionField { - a: Number - union { - b: Number - c: String - d: Blob - e: Value - f: Set - } -} diff --git a/nomdl/codegen/test/struct_with_unions.noms b/nomdl/codegen/test/struct_with_unions.noms deleted file mode 100644 index 4bd09db56f..0000000000 --- a/nomdl/codegen/test/struct_with_unions.noms +++ /dev/null @@ -1,10 +0,0 @@ -struct StructWithUnions { - a: union { - b: Number - c: String - } - d: union { - e: Number - f: String - } -} diff --git a/nomdl/codegen/testDeps/dep.noms b/nomdl/codegen/testDeps/dep.noms deleted file mode 100644 index d6b96a1fc8..0000000000 --- a/nomdl/codegen/testDeps/dep.noms +++ /dev/null @@ -1,9 +0,0 @@ -alias leaf = import "sha1-068bb32c733bd940a0d758715bf05082f4c12fcb" - -struct D { - structField: leaf.S -} - -struct DUser { - Dfield: D -} diff --git a/nomdl/codegen/testDeps/leafDep/leafDep.noms b/nomdl/codegen/testDeps/leafDep/leafDep.noms deleted file mode 100644 index 031b10e53e..0000000000 --- a/nomdl/codegen/testDeps/leafDep/leafDep.noms +++ /dev/null @@ -1,4 +0,0 @@ -struct S { - s: String - b: Bool -} diff --git a/nomdl/codegen/testDeps/leafDep/leafDep.noms.js b/nomdl/codegen/testDeps/leafDep/leafDep.noms.js deleted file mode 100644 index 44746785ba..0000000000 --- a/nomdl/codegen/testDeps/leafDep/leafDep.noms.js +++ /dev/null @@ -1,49 +0,0 @@ -// This file was generated by nomdl/codegen. -// @flow -/* eslint-disable */ - -import { - Field as _Field, - Package as _Package, - boolType as _boolType, - createStructClass as _createStructClass, - makeStructType as _makeStructType, - makeType as _makeType, - registerPackage as _registerPackage, - stringType as _stringType, -} from '@attic/noms'; -import type { - Struct as _Struct, -} from '@attic/noms'; - -const _pkg = new _Package([ - _makeStructType('S', - [ - new _Field('s', _stringType, false), - new _Field('b', _boolType, false), - ], - [ - - ] - ), -], [ -]); -_registerPackage(_pkg); -const S$type = _makeType(_pkg.ref, 0); -const S$typeDef = _pkg.types[0]; - - -type S$Data = { - s: string; - b: boolean; -}; - -interface S$Interface extends _Struct { - constructor(data: S$Data): void; - s: string; // readonly - setS(value: string): S$Interface; - b: boolean; // readonly - setB(value: boolean): S$Interface; -} - -export const S: Class = _createStructClass(S$type, S$typeDef); diff --git a/nomdl/pkg/grammar.peg b/nomdl/pkg/grammar.peg index a7575b8bc9..b1c445640e 100644 --- a/nomdl/pkg/grammar.peg +++ b/nomdl/pkg/grammar.peg @@ -14,7 +14,6 @@ type namespaceIdent struct { Package <- _ dd:Definition+ _ EOF { aliases := map[string]string{} - usings := []*types.Type{} seenTypes := map[string]bool{} orderedTypes := []*types.Type{} for _, d := range dd.([]interface{}) { @@ -30,41 +29,26 @@ Package <- _ dd:Definition+ _ EOF { switch d.Kind() { default: return nil, fmt.Errorf("%v can't be defined at the top-level", d) - case types.ListKind, types.MapKind, types.RefKind, types.SetKind: - for _, u := range usings { - if u.Equals(d) { - return nil, fmt.Errorf("%v is a duplicate using declaration", d) - } - } - usings = append(usings, d) case types.StructKind: - ds := expandStruct(d, len(orderedTypes)) - for _, d := range ds { - if d.Name() != "" { - if seenTypes[d.Name()] { - return nil, fmt.Errorf("Redefinition of " + d.Name()) - } - seenTypes[d.Name()] = true + if d.Name() != "" { + if seenTypes[d.Name()] { + return nil, fmt.Errorf("Redefinition of " + d.Name()) } - orderedTypes = append(orderedTypes, d) + seenTypes[d.Name()] = true } + orderedTypes = append(orderedTypes, d) } } 
} - return intermediate{"", aliases, usings, orderedTypes}, nil + return intermediate{"", aliases, orderedTypes}, nil } -Definition <- Struct / Using / Alias +Definition <- Struct / Alias Alias <- `alias` _ i:Ident _ `=` _ `import` _ q:QuotedString _ { return alias{i.(string), q.(string)}, nil } -Using <- `using` _ ct:CompoundType _ { - return ct, nil -} - - Struct <- `struct` _ i:Ident _ `{` _ l:StructEntry+ _ `}` _ { ll := l.([]interface{}) var u []types.Field @@ -123,7 +107,7 @@ Type <- t:(PrimitiveType / CompoundType / Union / NamespaceIdent) { case []types.Field: return types.MakeStructType("", nil, t), nil case namespaceIdent: - return types.MakeUnresolvedType(t.Namespace, t.ID), nil + return makeUnresolvedType(t.Namespace, t.ID), nil default: return nil, fmt.Errorf("%v is %T, not something that satisfies Type", t, t) } diff --git a/nomdl/pkg/grammar.peg.go b/nomdl/pkg/grammar.peg.go index f1cae220a5..a48d79c222 100644 --- a/nomdl/pkg/grammar.peg.go +++ b/nomdl/pkg/grammar.peg.go @@ -64,20 +64,16 @@ var g = &grammar{ }, { name: "Definition", - pos: position{line: 57, col: 1, offset: 1323}, + pos: position{line: 46, col: 1, offset: 966}, expr: &choiceExpr{ - pos: position{line: 57, col: 15, offset: 1337}, + pos: position{line: 46, col: 15, offset: 980}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 57, col: 15, offset: 1337}, + pos: position{line: 46, col: 15, offset: 980}, name: "Struct", }, &ruleRefExpr{ - pos: position{line: 57, col: 24, offset: 1346}, - name: "Using", - }, - &ruleRefExpr{ - pos: position{line: 57, col: 32, offset: 1354}, + pos: position{line: 46, col: 24, offset: 989}, name: "Alias", }, }, @@ -85,96 +81,62 @@ var g = &grammar{ }, { name: "Alias", - pos: position{line: 59, col: 1, offset: 1361}, + pos: position{line: 48, col: 1, offset: 996}, expr: &actionExpr{ - pos: position{line: 59, col: 10, offset: 1370}, + pos: position{line: 48, col: 10, offset: 1005}, run: (*parser).callonAlias1, expr: &seqExpr{ - pos: position{line: 59, col: 10, offset: 1370}, + pos: position{line: 48, col: 10, offset: 1005}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 59, col: 10, offset: 1370}, + pos: position{line: 48, col: 10, offset: 1005}, val: "alias", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 59, col: 18, offset: 1378}, + pos: position{line: 48, col: 18, offset: 1013}, name: "_", }, &labeledExpr{ - pos: position{line: 59, col: 20, offset: 1380}, + pos: position{line: 48, col: 20, offset: 1015}, label: "i", expr: &ruleRefExpr{ - pos: position{line: 59, col: 22, offset: 1382}, + pos: position{line: 48, col: 22, offset: 1017}, name: "Ident", }, }, &ruleRefExpr{ - pos: position{line: 59, col: 28, offset: 1388}, + pos: position{line: 48, col: 28, offset: 1023}, name: "_", }, &litMatcher{ - pos: position{line: 59, col: 30, offset: 1390}, + pos: position{line: 48, col: 30, offset: 1025}, val: "=", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 59, col: 34, offset: 1394}, + pos: position{line: 48, col: 34, offset: 1029}, name: "_", }, &litMatcher{ - pos: position{line: 59, col: 36, offset: 1396}, + pos: position{line: 48, col: 36, offset: 1031}, val: "import", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 59, col: 45, offset: 1405}, + pos: position{line: 48, col: 45, offset: 1040}, name: "_", }, &labeledExpr{ - pos: position{line: 59, col: 47, offset: 1407}, + pos: position{line: 48, col: 47, offset: 1042}, label: "q", expr: &ruleRefExpr{ - pos: position{line: 59, col: 49, offset: 1409}, + pos: position{line: 48, col: 
49, offset: 1044}, name: "QuotedString", }, }, &ruleRefExpr{ - pos: position{line: 59, col: 62, offset: 1422}, - name: "_", - }, - }, - }, - }, - }, - { - name: "Using", - pos: position{line: 63, col: 1, offset: 1472}, - expr: &actionExpr{ - pos: position{line: 63, col: 10, offset: 1481}, - run: (*parser).callonUsing1, - expr: &seqExpr{ - pos: position{line: 63, col: 10, offset: 1481}, - exprs: []interface{}{ - &litMatcher{ - pos: position{line: 63, col: 10, offset: 1481}, - val: "using", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 63, col: 18, offset: 1489}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 63, col: 20, offset: 1491}, - label: "ct", - expr: &ruleRefExpr{ - pos: position{line: 63, col: 23, offset: 1494}, - name: "CompoundType", - }, - }, - &ruleRefExpr{ - pos: position{line: 63, col: 36, offset: 1507}, + pos: position{line: 48, col: 62, offset: 1057}, name: "_", }, }, @@ -183,65 +145,65 @@ var g = &grammar{ }, { name: "Struct", - pos: position{line: 68, col: 1, offset: 1531}, + pos: position{line: 52, col: 1, offset: 1107}, expr: &actionExpr{ - pos: position{line: 68, col: 11, offset: 1541}, + pos: position{line: 52, col: 11, offset: 1117}, run: (*parser).callonStruct1, expr: &seqExpr{ - pos: position{line: 68, col: 11, offset: 1541}, + pos: position{line: 52, col: 11, offset: 1117}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 68, col: 11, offset: 1541}, + pos: position{line: 52, col: 11, offset: 1117}, val: "struct", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 68, col: 20, offset: 1550}, + pos: position{line: 52, col: 20, offset: 1126}, name: "_", }, &labeledExpr{ - pos: position{line: 68, col: 22, offset: 1552}, + pos: position{line: 52, col: 22, offset: 1128}, label: "i", expr: &ruleRefExpr{ - pos: position{line: 68, col: 24, offset: 1554}, + pos: position{line: 52, col: 24, offset: 1130}, name: "Ident", }, }, &ruleRefExpr{ - pos: position{line: 68, col: 30, offset: 1560}, + pos: position{line: 52, col: 30, offset: 1136}, name: "_", }, &litMatcher{ - pos: position{line: 68, col: 32, offset: 1562}, + pos: position{line: 52, col: 32, offset: 1138}, val: "{", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 68, col: 36, offset: 1566}, + pos: position{line: 52, col: 36, offset: 1142}, name: "_", }, &labeledExpr{ - pos: position{line: 68, col: 38, offset: 1568}, + pos: position{line: 52, col: 38, offset: 1144}, label: "l", expr: &oneOrMoreExpr{ - pos: position{line: 68, col: 40, offset: 1570}, + pos: position{line: 52, col: 40, offset: 1146}, expr: &ruleRefExpr{ - pos: position{line: 68, col: 40, offset: 1570}, + pos: position{line: 52, col: 40, offset: 1146}, name: "StructEntry", }, }, }, &ruleRefExpr{ - pos: position{line: 68, col: 53, offset: 1583}, + pos: position{line: 52, col: 53, offset: 1159}, name: "_", }, &litMatcher{ - pos: position{line: 68, col: 55, offset: 1585}, + pos: position{line: 52, col: 55, offset: 1161}, val: "}", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 68, col: 59, offset: 1589}, + pos: position{line: 52, col: 59, offset: 1165}, name: "_", }, }, @@ -250,16 +212,16 @@ var g = &grammar{ }, { name: "StructEntry", - pos: position{line: 93, col: 1, offset: 2269}, + pos: position{line: 77, col: 1, offset: 1845}, expr: &choiceExpr{ - pos: position{line: 93, col: 16, offset: 2284}, + pos: position{line: 77, col: 16, offset: 1860}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 93, col: 16, offset: 2284}, + pos: position{line: 77, col: 16, offset: 1860}, name: 
"Union", }, &ruleRefExpr{ - pos: position{line: 93, col: 24, offset: 2292}, + pos: position{line: 77, col: 24, offset: 1868}, name: "Field", }, }, @@ -267,53 +229,53 @@ var g = &grammar{ }, { name: "Union", - pos: position{line: 96, col: 1, offset: 2300}, + pos: position{line: 80, col: 1, offset: 1876}, expr: &actionExpr{ - pos: position{line: 96, col: 10, offset: 2309}, + pos: position{line: 80, col: 10, offset: 1885}, run: (*parser).callonUnion1, expr: &seqExpr{ - pos: position{line: 96, col: 10, offset: 2309}, + pos: position{line: 80, col: 10, offset: 1885}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 96, col: 10, offset: 2309}, + pos: position{line: 80, col: 10, offset: 1885}, val: "union", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 96, col: 18, offset: 2317}, + pos: position{line: 80, col: 18, offset: 1893}, name: "_", }, &litMatcher{ - pos: position{line: 96, col: 20, offset: 2319}, + pos: position{line: 80, col: 20, offset: 1895}, val: "{", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 96, col: 24, offset: 2323}, + pos: position{line: 80, col: 24, offset: 1899}, name: "_", }, &labeledExpr{ - pos: position{line: 96, col: 26, offset: 2325}, + pos: position{line: 80, col: 26, offset: 1901}, label: "u", expr: &oneOrMoreExpr{ - pos: position{line: 96, col: 28, offset: 2327}, + pos: position{line: 80, col: 28, offset: 1903}, expr: &ruleRefExpr{ - pos: position{line: 96, col: 28, offset: 2327}, + pos: position{line: 80, col: 28, offset: 1903}, name: "UnionField", }, }, }, &ruleRefExpr{ - pos: position{line: 96, col: 40, offset: 2339}, + pos: position{line: 80, col: 40, offset: 1915}, name: "_", }, &litMatcher{ - pos: position{line: 96, col: 42, offset: 2341}, + pos: position{line: 80, col: 42, offset: 1917}, val: "}", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 96, col: 46, offset: 2345}, + pos: position{line: 80, col: 46, offset: 1921}, name: "_", }, }, @@ -322,49 +284,49 @@ var g = &grammar{ }, { name: "Field", - pos: position{line: 111, col: 1, offset: 2679}, + pos: position{line: 95, col: 1, offset: 2255}, expr: &actionExpr{ - pos: position{line: 111, col: 10, offset: 2688}, + pos: position{line: 95, col: 10, offset: 2264}, run: (*parser).callonField1, expr: &seqExpr{ - pos: position{line: 111, col: 10, offset: 2688}, + pos: position{line: 95, col: 10, offset: 2264}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 111, col: 10, offset: 2688}, + pos: position{line: 95, col: 10, offset: 2264}, label: "i", expr: &ruleRefExpr{ - pos: position{line: 111, col: 12, offset: 2690}, + pos: position{line: 95, col: 12, offset: 2266}, name: "Ident", }, }, &ruleRefExpr{ - pos: position{line: 111, col: 18, offset: 2696}, + pos: position{line: 95, col: 18, offset: 2272}, name: "_", }, &litMatcher{ - pos: position{line: 111, col: 20, offset: 2698}, + pos: position{line: 95, col: 20, offset: 2274}, val: ":", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 111, col: 24, offset: 2702}, + pos: position{line: 95, col: 24, offset: 2278}, name: "_", }, &labeledExpr{ - pos: position{line: 111, col: 26, offset: 2704}, + pos: position{line: 95, col: 26, offset: 2280}, label: "o", expr: &zeroOrOneExpr{ - pos: position{line: 111, col: 28, offset: 2706}, + pos: position{line: 95, col: 28, offset: 2282}, expr: &seqExpr{ - pos: position{line: 111, col: 29, offset: 2707}, + pos: position{line: 95, col: 29, offset: 2283}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 111, col: 29, offset: 2707}, + pos: position{line: 95, col: 29, 
offset: 2283}, val: "optional", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 111, col: 40, offset: 2718}, + pos: position{line: 95, col: 40, offset: 2294}, name: "_", }, }, @@ -372,15 +334,15 @@ var g = &grammar{ }, }, &labeledExpr{ - pos: position{line: 111, col: 44, offset: 2722}, + pos: position{line: 95, col: 44, offset: 2298}, label: "t", expr: &ruleRefExpr{ - pos: position{line: 111, col: 46, offset: 2724}, + pos: position{line: 95, col: 46, offset: 2300}, name: "Type", }, }, &ruleRefExpr{ - pos: position{line: 111, col: 51, offset: 2729}, + pos: position{line: 95, col: 51, offset: 2305}, name: "_", }, }, @@ -389,44 +351,44 @@ var g = &grammar{ }, { name: "UnionField", - pos: position{line: 115, col: 1, offset: 2800}, + pos: position{line: 99, col: 1, offset: 2376}, expr: &actionExpr{ - pos: position{line: 115, col: 15, offset: 2814}, + pos: position{line: 99, col: 15, offset: 2390}, run: (*parser).callonUnionField1, expr: &seqExpr{ - pos: position{line: 115, col: 15, offset: 2814}, + pos: position{line: 99, col: 15, offset: 2390}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 115, col: 15, offset: 2814}, + pos: position{line: 99, col: 15, offset: 2390}, label: "i", expr: &ruleRefExpr{ - pos: position{line: 115, col: 17, offset: 2816}, + pos: position{line: 99, col: 17, offset: 2392}, name: "Ident", }, }, &ruleRefExpr{ - pos: position{line: 115, col: 23, offset: 2822}, + pos: position{line: 99, col: 23, offset: 2398}, name: "_", }, &litMatcher{ - pos: position{line: 115, col: 25, offset: 2824}, + pos: position{line: 99, col: 25, offset: 2400}, val: ":", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 115, col: 29, offset: 2828}, + pos: position{line: 99, col: 29, offset: 2404}, name: "_", }, &labeledExpr{ - pos: position{line: 115, col: 31, offset: 2830}, + pos: position{line: 99, col: 31, offset: 2406}, label: "t", expr: &ruleRefExpr{ - pos: position{line: 115, col: 33, offset: 2832}, + pos: position{line: 99, col: 33, offset: 2408}, name: "Type", }, }, &ruleRefExpr{ - pos: position{line: 115, col: 38, offset: 2837}, + pos: position{line: 99, col: 38, offset: 2413}, name: "_", }, }, @@ -435,30 +397,30 @@ var g = &grammar{ }, { name: "Type", - pos: position{line: 119, col: 1, offset: 2905}, + pos: position{line: 103, col: 1, offset: 2481}, expr: &actionExpr{ - pos: position{line: 119, col: 9, offset: 2913}, + pos: position{line: 103, col: 9, offset: 2489}, run: (*parser).callonType1, expr: &labeledExpr{ - pos: position{line: 119, col: 9, offset: 2913}, + pos: position{line: 103, col: 9, offset: 2489}, label: "t", expr: &choiceExpr{ - pos: position{line: 119, col: 12, offset: 2916}, + pos: position{line: 103, col: 12, offset: 2492}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 119, col: 12, offset: 2916}, + pos: position{line: 103, col: 12, offset: 2492}, name: "PrimitiveType", }, &ruleRefExpr{ - pos: position{line: 119, col: 28, offset: 2932}, + pos: position{line: 103, col: 28, offset: 2508}, name: "CompoundType", }, &ruleRefExpr{ - pos: position{line: 119, col: 43, offset: 2947}, + pos: position{line: 103, col: 43, offset: 2523}, name: "Union", }, &ruleRefExpr{ - pos: position{line: 119, col: 51, offset: 2955}, + pos: position{line: 103, col: 51, offset: 2531}, name: "NamespaceIdent", }, }, @@ -468,218 +430,218 @@ var g = &grammar{ }, { name: "CompoundType", - pos: position{line: 132, col: 1, offset: 3274}, + pos: position{line: 116, col: 1, offset: 2844}, expr: &choiceExpr{ - pos: position{line: 132, col: 17, offset: 3290}, + 
pos: position{line: 116, col: 17, offset: 2860}, alternatives: []interface{}{ &actionExpr{ - pos: position{line: 132, col: 17, offset: 3290}, + pos: position{line: 116, col: 17, offset: 2860}, run: (*parser).callonCompoundType2, expr: &seqExpr{ - pos: position{line: 132, col: 17, offset: 3290}, + pos: position{line: 116, col: 17, offset: 2860}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 132, col: 17, offset: 3290}, + pos: position{line: 116, col: 17, offset: 2860}, val: "List", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 132, col: 24, offset: 3297}, + pos: position{line: 116, col: 24, offset: 2867}, name: "_", }, &litMatcher{ - pos: position{line: 132, col: 26, offset: 3299}, + pos: position{line: 116, col: 26, offset: 2869}, val: "<", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 132, col: 30, offset: 3303}, + pos: position{line: 116, col: 30, offset: 2873}, name: "_", }, &labeledExpr{ - pos: position{line: 132, col: 32, offset: 3305}, + pos: position{line: 116, col: 32, offset: 2875}, label: "t", expr: &ruleRefExpr{ - pos: position{line: 132, col: 34, offset: 3307}, + pos: position{line: 116, col: 34, offset: 2877}, name: "Type", }, }, &ruleRefExpr{ - pos: position{line: 132, col: 39, offset: 3312}, + pos: position{line: 116, col: 39, offset: 2882}, name: "_", }, &litMatcher{ - pos: position{line: 132, col: 41, offset: 3314}, + pos: position{line: 116, col: 41, offset: 2884}, val: ">", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 132, col: 45, offset: 3318}, + pos: position{line: 116, col: 45, offset: 2888}, name: "_", }, }, }, }, &actionExpr{ - pos: position{line: 134, col: 5, offset: 3395}, + pos: position{line: 118, col: 5, offset: 2945}, run: (*parser).callonCompoundType13, expr: &seqExpr{ - pos: position{line: 134, col: 5, offset: 3395}, + pos: position{line: 118, col: 5, offset: 2945}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 134, col: 5, offset: 3395}, + pos: position{line: 118, col: 5, offset: 2945}, val: "Map", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 134, col: 11, offset: 3401}, + pos: position{line: 118, col: 11, offset: 2951}, name: "_", }, &litMatcher{ - pos: position{line: 134, col: 13, offset: 3403}, + pos: position{line: 118, col: 13, offset: 2953}, val: "<", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 134, col: 17, offset: 3407}, + pos: position{line: 118, col: 17, offset: 2957}, name: "_", }, &labeledExpr{ - pos: position{line: 134, col: 19, offset: 3409}, + pos: position{line: 118, col: 19, offset: 2959}, label: "k", expr: &ruleRefExpr{ - pos: position{line: 134, col: 21, offset: 3411}, + pos: position{line: 118, col: 21, offset: 2961}, name: "Type", }, }, &ruleRefExpr{ - pos: position{line: 134, col: 26, offset: 3416}, + pos: position{line: 118, col: 26, offset: 2966}, name: "_", }, &litMatcher{ - pos: position{line: 134, col: 28, offset: 3418}, + pos: position{line: 118, col: 28, offset: 2968}, val: ",", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 134, col: 32, offset: 3422}, + pos: position{line: 118, col: 32, offset: 2972}, name: "_", }, &labeledExpr{ - pos: position{line: 134, col: 34, offset: 3424}, + pos: position{line: 118, col: 34, offset: 2974}, label: "v", expr: &ruleRefExpr{ - pos: position{line: 134, col: 36, offset: 3426}, + pos: position{line: 118, col: 36, offset: 2976}, name: "Type", }, }, &ruleRefExpr{ - pos: position{line: 134, col: 41, offset: 3431}, + pos: position{line: 118, col: 41, offset: 2981}, name: "_", }, &litMatcher{ - pos: 
position{line: 134, col: 43, offset: 3433}, + pos: position{line: 118, col: 43, offset: 2983}, val: ">", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 134, col: 47, offset: 3437}, + pos: position{line: 118, col: 47, offset: 2987}, name: "_", }, }, }, }, &actionExpr{ - pos: position{line: 136, col: 5, offset: 3530}, + pos: position{line: 120, col: 5, offset: 3060}, run: (*parser).callonCompoundType29, expr: &seqExpr{ - pos: position{line: 136, col: 5, offset: 3530}, + pos: position{line: 120, col: 5, offset: 3060}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 136, col: 5, offset: 3530}, + pos: position{line: 120, col: 5, offset: 3060}, val: "Set", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 136, col: 11, offset: 3536}, + pos: position{line: 120, col: 11, offset: 3066}, name: "_", }, &litMatcher{ - pos: position{line: 136, col: 13, offset: 3538}, + pos: position{line: 120, col: 13, offset: 3068}, val: "<", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 136, col: 17, offset: 3542}, + pos: position{line: 120, col: 17, offset: 3072}, name: "_", }, &labeledExpr{ - pos: position{line: 136, col: 19, offset: 3544}, + pos: position{line: 120, col: 19, offset: 3074}, label: "t", expr: &ruleRefExpr{ - pos: position{line: 136, col: 21, offset: 3546}, + pos: position{line: 120, col: 21, offset: 3076}, name: "Type", }, }, &ruleRefExpr{ - pos: position{line: 136, col: 26, offset: 3551}, + pos: position{line: 120, col: 26, offset: 3081}, name: "_", }, &litMatcher{ - pos: position{line: 136, col: 28, offset: 3553}, + pos: position{line: 120, col: 28, offset: 3083}, val: ">", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 136, col: 32, offset: 3557}, + pos: position{line: 120, col: 32, offset: 3087}, name: "_", }, }, }, }, &actionExpr{ - pos: position{line: 138, col: 5, offset: 3633}, + pos: position{line: 122, col: 5, offset: 3143}, run: (*parser).callonCompoundType40, expr: &seqExpr{ - pos: position{line: 138, col: 5, offset: 3633}, + pos: position{line: 122, col: 5, offset: 3143}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 138, col: 5, offset: 3633}, + pos: position{line: 122, col: 5, offset: 3143}, val: "Ref", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 138, col: 11, offset: 3639}, + pos: position{line: 122, col: 11, offset: 3149}, name: "_", }, &litMatcher{ - pos: position{line: 138, col: 13, offset: 3641}, + pos: position{line: 122, col: 13, offset: 3151}, val: "<", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 138, col: 17, offset: 3645}, + pos: position{line: 122, col: 17, offset: 3155}, name: "_", }, &labeledExpr{ - pos: position{line: 138, col: 19, offset: 3647}, + pos: position{line: 122, col: 19, offset: 3157}, label: "t", expr: &ruleRefExpr{ - pos: position{line: 138, col: 21, offset: 3649}, + pos: position{line: 122, col: 21, offset: 3159}, name: "Type", }, }, &ruleRefExpr{ - pos: position{line: 138, col: 26, offset: 3654}, + pos: position{line: 122, col: 26, offset: 3164}, name: "_", }, &litMatcher{ - pos: position{line: 138, col: 28, offset: 3656}, + pos: position{line: 122, col: 28, offset: 3166}, val: ">", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 138, col: 32, offset: 3660}, + pos: position{line: 122, col: 32, offset: 3170}, name: "_", }, }, @@ -690,43 +652,43 @@ var g = &grammar{ }, { name: "PrimitiveType", - pos: position{line: 142, col: 1, offset: 3735}, + pos: position{line: 126, col: 1, offset: 3225}, expr: &actionExpr{ - pos: position{line: 142, col: 18, offset: 3752}, 
+ pos: position{line: 126, col: 18, offset: 3242}, run: (*parser).callonPrimitiveType1, expr: &labeledExpr{ - pos: position{line: 142, col: 18, offset: 3752}, + pos: position{line: 126, col: 18, offset: 3242}, label: "p", expr: &choiceExpr{ - pos: position{line: 142, col: 21, offset: 3755}, + pos: position{line: 126, col: 21, offset: 3245}, alternatives: []interface{}{ &litMatcher{ - pos: position{line: 142, col: 103, offset: 3837}, + pos: position{line: 126, col: 21, offset: 3245}, val: "Number", ignoreCase: false, }, &litMatcher{ - pos: position{line: 142, col: 127, offset: 3861}, + pos: position{line: 126, col: 32, offset: 3256}, val: "Bool", ignoreCase: false, }, &litMatcher{ - pos: position{line: 142, col: 136, offset: 3870}, + pos: position{line: 126, col: 41, offset: 3265}, val: "String", ignoreCase: false, }, &litMatcher{ - pos: position{line: 142, col: 147, offset: 3881}, + pos: position{line: 126, col: 52, offset: 3276}, val: "Blob", ignoreCase: false, }, &litMatcher{ - pos: position{line: 142, col: 156, offset: 3890}, + pos: position{line: 126, col: 61, offset: 3285}, val: "Value", ignoreCase: false, }, &litMatcher{ - pos: position{line: 142, col: 166, offset: 3900}, + pos: position{line: 126, col: 71, offset: 3295}, val: "Type", ignoreCase: false, }, @@ -737,28 +699,28 @@ var g = &grammar{ }, { name: "QuotedString", - pos: position{line: 146, col: 1, offset: 3979}, + pos: position{line: 130, col: 1, offset: 3374}, expr: &actionExpr{ - pos: position{line: 146, col: 17, offset: 3995}, + pos: position{line: 130, col: 17, offset: 3390}, run: (*parser).callonQuotedString1, expr: &seqExpr{ - pos: position{line: 146, col: 17, offset: 3995}, + pos: position{line: 130, col: 17, offset: 3390}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 146, col: 17, offset: 3995}, + pos: position{line: 130, col: 17, offset: 3390}, val: "\"", ignoreCase: false, }, &labeledExpr{ - pos: position{line: 146, col: 21, offset: 3999}, + pos: position{line: 130, col: 21, offset: 3394}, label: "n", expr: &ruleRefExpr{ - pos: position{line: 146, col: 23, offset: 4001}, + pos: position{line: 130, col: 23, offset: 3396}, name: "String", }, }, &litMatcher{ - pos: position{line: 146, col: 30, offset: 4008}, + pos: position{line: 130, col: 30, offset: 3403}, val: "\"", ignoreCase: false, }, @@ -768,42 +730,42 @@ var g = &grammar{ }, { name: "String", - pos: position{line: 150, col: 1, offset: 4041}, + pos: position{line: 134, col: 1, offset: 3436}, expr: &actionExpr{ - pos: position{line: 150, col: 11, offset: 4051}, + pos: position{line: 134, col: 11, offset: 3446}, run: (*parser).callonString1, expr: &choiceExpr{ - pos: position{line: 150, col: 12, offset: 4052}, + pos: position{line: 134, col: 12, offset: 3447}, alternatives: []interface{}{ &seqExpr{ - pos: position{line: 150, col: 12, offset: 4052}, + pos: position{line: 134, col: 12, offset: 3447}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 150, col: 12, offset: 4052}, + pos: position{line: 134, col: 12, offset: 3447}, name: "StringPiece", }, &litMatcher{ - pos: position{line: 150, col: 24, offset: 4064}, + pos: position{line: 134, col: 24, offset: 3459}, val: "\\\"", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 150, col: 29, offset: 4069}, + pos: position{line: 134, col: 29, offset: 3464}, name: "StringPiece", }, &litMatcher{ - pos: position{line: 150, col: 41, offset: 4081}, + pos: position{line: 134, col: 41, offset: 3476}, val: "\\\"", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 150, col: 46, offset: 
4086}, + pos: position{line: 134, col: 46, offset: 3481}, name: "StringPiece", }, }, }, &ruleRefExpr{ - pos: position{line: 150, col: 60, offset: 4100}, + pos: position{line: 134, col: 60, offset: 3495}, name: "StringPiece", }, }, @@ -812,24 +774,24 @@ var g = &grammar{ }, { name: "StringPiece", - pos: position{line: 154, col: 1, offset: 4146}, + pos: position{line: 138, col: 1, offset: 3541}, expr: &zeroOrMoreExpr{ - pos: position{line: 154, col: 16, offset: 4161}, + pos: position{line: 138, col: 16, offset: 3556}, expr: &choiceExpr{ - pos: position{line: 154, col: 17, offset: 4162}, + pos: position{line: 138, col: 17, offset: 3557}, alternatives: []interface{}{ &seqExpr{ - pos: position{line: 154, col: 17, offset: 4162}, + pos: position{line: 138, col: 17, offset: 3557}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 154, col: 17, offset: 4162}, + pos: position{line: 138, col: 17, offset: 3557}, val: "\\", ignoreCase: false, }, ¬Expr{ - pos: position{line: 154, col: 21, offset: 4166}, + pos: position{line: 138, col: 21, offset: 3561}, expr: &litMatcher{ - pos: position{line: 154, col: 22, offset: 4167}, + pos: position{line: 138, col: 22, offset: 3562}, val: "\"", ignoreCase: false, }, @@ -837,7 +799,7 @@ var g = &grammar{ }, }, &charClassMatcher{ - pos: position{line: 154, col: 28, offset: 4173}, + pos: position{line: 138, col: 28, offset: 3568}, val: "[^\"\\\\]", chars: []rune{'"', '\\'}, ignoreCase: false, @@ -849,27 +811,27 @@ var g = &grammar{ }, { name: "NamespaceIdent", - pos: position{line: 156, col: 1, offset: 4183}, + pos: position{line: 140, col: 1, offset: 3578}, expr: &actionExpr{ - pos: position{line: 156, col: 19, offset: 4201}, + pos: position{line: 140, col: 19, offset: 3596}, run: (*parser).callonNamespaceIdent1, expr: &seqExpr{ - pos: position{line: 156, col: 19, offset: 4201}, + pos: position{line: 140, col: 19, offset: 3596}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 156, col: 19, offset: 4201}, + pos: position{line: 140, col: 19, offset: 3596}, label: "n", expr: &zeroOrMoreExpr{ - pos: position{line: 156, col: 21, offset: 4203}, + pos: position{line: 140, col: 21, offset: 3598}, expr: &seqExpr{ - pos: position{line: 156, col: 22, offset: 4204}, + pos: position{line: 140, col: 22, offset: 3599}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 156, col: 22, offset: 4204}, + pos: position{line: 140, col: 22, offset: 3599}, name: "Ident", }, &litMatcher{ - pos: position{line: 156, col: 28, offset: 4210}, + pos: position{line: 140, col: 28, offset: 3605}, val: ".", ignoreCase: false, }, @@ -878,10 +840,10 @@ var g = &grammar{ }, }, &labeledExpr{ - pos: position{line: 156, col: 34, offset: 4216}, + pos: position{line: 140, col: 34, offset: 3611}, label: "id", expr: &ruleRefExpr{ - pos: position{line: 156, col: 37, offset: 4219}, + pos: position{line: 140, col: 37, offset: 3614}, name: "Ident", }, }, @@ -891,15 +853,15 @@ var g = &grammar{ }, { name: "Ident", - pos: position{line: 165, col: 1, offset: 4417}, + pos: position{line: 149, col: 1, offset: 3812}, expr: &actionExpr{ - pos: position{line: 165, col: 10, offset: 4426}, + pos: position{line: 149, col: 10, offset: 3821}, run: (*parser).callonIdent1, expr: &seqExpr{ - pos: position{line: 165, col: 10, offset: 4426}, + pos: position{line: 149, col: 10, offset: 3821}, exprs: []interface{}{ &charClassMatcher{ - pos: position{line: 165, col: 10, offset: 4426}, + pos: position{line: 149, col: 10, offset: 3821}, val: "[\\pL_]", chars: []rune{'_'}, classes: 
[]*unicode.RangeTable{rangeTable("L")}, @@ -907,9 +869,9 @@ var g = &grammar{ inverted: false, }, &zeroOrMoreExpr{ - pos: position{line: 165, col: 17, offset: 4433}, + pos: position{line: 149, col: 17, offset: 3828}, expr: &charClassMatcher{ - pos: position{line: 165, col: 17, offset: 4433}, + pos: position{line: 149, col: 17, offset: 3828}, val: "[\\pL\\pN_]", chars: []rune{'_'}, classes: []*unicode.RangeTable{rangeTable("L"), rangeTable("N")}, @@ -924,28 +886,28 @@ var g = &grammar{ { name: "_", displayName: "\"optional whitespace\"", - pos: position{line: 169, col: 1, offset: 4477}, + pos: position{line: 153, col: 1, offset: 3872}, expr: &actionExpr{ - pos: position{line: 169, col: 28, offset: 4504}, + pos: position{line: 153, col: 28, offset: 3899}, run: (*parser).callon_1, expr: &seqExpr{ - pos: position{line: 169, col: 28, offset: 4504}, + pos: position{line: 153, col: 28, offset: 3899}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 169, col: 28, offset: 4504}, + pos: position{line: 153, col: 28, offset: 3899}, name: "WS", }, &zeroOrMoreExpr{ - pos: position{line: 169, col: 31, offset: 4507}, + pos: position{line: 153, col: 31, offset: 3902}, expr: &seqExpr{ - pos: position{line: 169, col: 32, offset: 4508}, + pos: position{line: 153, col: 32, offset: 3903}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 169, col: 32, offset: 4508}, + pos: position{line: 153, col: 32, offset: 3903}, name: "Comment", }, &ruleRefExpr{ - pos: position{line: 169, col: 40, offset: 4516}, + pos: position{line: 153, col: 40, offset: 3911}, name: "WS", }, }, @@ -957,11 +919,11 @@ var g = &grammar{ }, { name: "WS", - pos: position{line: 173, col: 1, offset: 4543}, + pos: position{line: 157, col: 1, offset: 3938}, expr: &zeroOrMoreExpr{ - pos: position{line: 173, col: 7, offset: 4549}, + pos: position{line: 157, col: 7, offset: 3944}, expr: &charClassMatcher{ - pos: position{line: 173, col: 7, offset: 4549}, + pos: position{line: 157, col: 7, offset: 3944}, val: "[\\r\\n\\t\\pZ]", chars: []rune{'\r', '\n', '\t'}, classes: []*unicode.RangeTable{rangeTable("Z")}, @@ -972,22 +934,22 @@ var g = &grammar{ }, { name: "Comment", - pos: position{line: 175, col: 1, offset: 4563}, + pos: position{line: 159, col: 1, offset: 3958}, expr: &choiceExpr{ - pos: position{line: 175, col: 12, offset: 4574}, + pos: position{line: 159, col: 12, offset: 3969}, alternatives: []interface{}{ &seqExpr{ - pos: position{line: 175, col: 12, offset: 4574}, + pos: position{line: 159, col: 12, offset: 3969}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 175, col: 12, offset: 4574}, + pos: position{line: 159, col: 12, offset: 3969}, val: "//", ignoreCase: false, }, &zeroOrMoreExpr{ - pos: position{line: 175, col: 17, offset: 4579}, + pos: position{line: 159, col: 17, offset: 3974}, expr: &charClassMatcher{ - pos: position{line: 175, col: 17, offset: 4579}, + pos: position{line: 159, col: 17, offset: 3974}, val: "[^\\n]", chars: []rune{'\n'}, ignoreCase: false, @@ -997,7 +959,7 @@ var g = &grammar{ }, }, &ruleRefExpr{ - pos: position{line: 175, col: 26, offset: 4588}, + pos: position{line: 159, col: 26, offset: 3983}, name: "MultilineComment", }, }, @@ -1005,32 +967,32 @@ var g = &grammar{ }, { name: "MultilineComment", - pos: position{line: 177, col: 1, offset: 4606}, + pos: position{line: 161, col: 1, offset: 4001}, expr: &seqExpr{ - pos: position{line: 177, col: 21, offset: 4626}, + pos: position{line: 161, col: 21, offset: 4021}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 177, col: 21, 
offset: 4626}, + pos: position{line: 161, col: 21, offset: 4021}, val: "/*", ignoreCase: false, }, &zeroOrMoreExpr{ - pos: position{line: 177, col: 26, offset: 4631}, + pos: position{line: 161, col: 26, offset: 4026}, expr: &choiceExpr{ - pos: position{line: 177, col: 27, offset: 4632}, + pos: position{line: 161, col: 27, offset: 4027}, alternatives: []interface{}{ &seqExpr{ - pos: position{line: 177, col: 27, offset: 4632}, + pos: position{line: 161, col: 27, offset: 4027}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 177, col: 27, offset: 4632}, + pos: position{line: 161, col: 27, offset: 4027}, val: "*", ignoreCase: false, }, ¬Expr{ - pos: position{line: 177, col: 31, offset: 4636}, + pos: position{line: 161, col: 31, offset: 4031}, expr: &litMatcher{ - pos: position{line: 177, col: 32, offset: 4637}, + pos: position{line: 161, col: 32, offset: 4032}, val: "/", ignoreCase: false, }, @@ -1038,7 +1000,7 @@ var g = &grammar{ }, }, &charClassMatcher{ - pos: position{line: 177, col: 38, offset: 4643}, + pos: position{line: 161, col: 38, offset: 4038}, val: "[^*]", chars: []rune{'*'}, ignoreCase: false, @@ -1048,7 +1010,7 @@ var g = &grammar{ }, }, &litMatcher{ - pos: position{line: 177, col: 45, offset: 4650}, + pos: position{line: 161, col: 45, offset: 4045}, val: "*/", ignoreCase: false, }, @@ -1057,18 +1019,18 @@ var g = &grammar{ }, { name: "EOF", - pos: position{line: 179, col: 1, offset: 4656}, + pos: position{line: 163, col: 1, offset: 4051}, expr: &seqExpr{ - pos: position{line: 179, col: 8, offset: 4663}, + pos: position{line: 163, col: 8, offset: 4058}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 179, col: 8, offset: 4663}, + pos: position{line: 163, col: 8, offset: 4058}, name: "_", }, ¬Expr{ - pos: position{line: 179, col: 10, offset: 4665}, + pos: position{line: 163, col: 10, offset: 4060}, expr: &anyMatcher{ - line: 179, col: 11, offset: 4666, + line: 163, col: 11, offset: 4061, }, }, }, @@ -1079,7 +1041,6 @@ var g = &grammar{ func (c *current) onPackage1(dd interface{}) (interface{}, error) { aliases := map[string]string{} - usings := []*types.Type{} seenTypes := map[string]bool{} orderedTypes := []*types.Type{} for _, d := range dd.([]interface{}) { @@ -1095,28 +1056,18 @@ func (c *current) onPackage1(dd interface{}) (interface{}, error) { switch d.Kind() { default: return nil, fmt.Errorf("%v can't be defined at the top-level", d) - case types.ListKind, types.MapKind, types.RefKind, types.SetKind: - for _, u := range usings { - if u.Equals(d) { - return nil, fmt.Errorf("%v is a duplicate using declaration", d) - } - } - usings = append(usings, d) case types.StructKind: - ds := expandStruct(d, len(orderedTypes)) - for _, d := range ds { - if d.Name() != "" { - if seenTypes[d.Name()] { - return nil, fmt.Errorf("Redefinition of " + d.Name()) - } - seenTypes[d.Name()] = true + if d.Name() != "" { + if seenTypes[d.Name()] { + return nil, fmt.Errorf("Redefinition of " + d.Name()) } - orderedTypes = append(orderedTypes, d) + seenTypes[d.Name()] = true } + orderedTypes = append(orderedTypes, d) } } } - return intermediate{"", aliases, usings, orderedTypes}, nil + return intermediate{"", aliases, orderedTypes}, nil } func (p *parser) callonPackage1() (interface{}, error) { @@ -1135,16 +1086,6 @@ func (p *parser) callonAlias1() (interface{}, error) { return p.cur.onAlias1(stack["i"], stack["q"]) } -func (c *current) onUsing1(ct interface{}) (interface{}, error) { - return ct, nil -} - -func (p *parser) callonUsing1() (interface{}, error) { - stack := 
p.vstack[len(p.vstack)-1] - _ = stack - return p.cur.onUsing1(stack["ct"]) -} - func (c *current) onStruct1(i, l interface{}) (interface{}, error) { ll := l.([]interface{}) var u []types.Field @@ -1224,7 +1165,7 @@ func (c *current) onType1(t interface{}) (interface{}, error) { case []types.Field: return types.MakeStructType("", nil, t), nil case namespaceIdent: - return types.MakeUnresolvedType(t.Namespace, t.ID), nil + return makeUnresolvedType(t.Namespace, t.ID), nil default: return nil, fmt.Errorf("%v is %T, not something that satisfies Type", t, t) } diff --git a/nomdl/pkg/imports.go b/nomdl/pkg/imports.go index 85bb6f0d46..ababc12fce 100644 --- a/nomdl/pkg/imports.go +++ b/nomdl/pkg/imports.go @@ -5,54 +5,24 @@ import ( "path/filepath" "github.com/attic-labs/noms/d" - "github.com/attic-labs/noms/ref" "github.com/attic-labs/noms/types" ) -// getDeps reads the types.Package objects referred to by depRefs from vr and returns a map of ref: PackageDef. -func getDeps(deps []ref.Ref, vr types.ValueReader) map[ref.Ref]types.Package { - depsMap := map[ref.Ref]types.Package{} - for _, depRef := range deps { - v := vr.ReadValue(depRef) - d.Chk.NotNil(v, "Importing package by ref %s failed.", depRef.String()) - depsMap[depRef] = v.(types.Package) - } - return depsMap -} - -func resolveImports(aliases map[string]string, includePath string, vrw types.ValueReadWriter) map[string]ref.Ref { +func resolveImports(aliases map[string]string, includePath string) (imports map[string][]*types.Type) { canonicalize := func(path string) string { if filepath.IsAbs(path) { return path } return filepath.Join(includePath, path) } - imports := map[string]ref.Ref{} for alias, target := range aliases { - var r ref.Ref - if d.Try(func() { r = ref.Parse(target) }) != nil { - canonical := canonicalize(target) - inFile, err := os.Open(canonical) - d.Chk.NoError(err) - defer inFile.Close() - parsedDep := ParseNomDL(alias, inFile, filepath.Dir(canonical), vrw) - imports[alias] = vrw.WriteValue(parsedDep.Package).TargetRef() - } else { - imports[alias] = r - } + canonical := canonicalize(target) + inFile, err := os.Open(canonical) + d.Chk.NoError(err) + defer inFile.Close() + ts := ParseNomDL(alias, inFile, filepath.Dir(canonical)) + imports[alias] = ts } - return imports -} - -func importsToDeps(imports map[string]ref.Ref) []ref.Ref { - depsSet := make(map[ref.Ref]bool, len(imports)) - deps := make([]ref.Ref, 0, len(imports)) - for _, target := range imports { - if !depsSet[target] { - deps = append(deps, target) - } - depsSet[target] = true - } - return deps + return } diff --git a/nomdl/pkg/imports_test.go b/nomdl/pkg/imports_test.go deleted file mode 100644 index ed21cf5b13..0000000000 --- a/nomdl/pkg/imports_test.go +++ /dev/null @@ -1,215 +0,0 @@ -package pkg - -import ( - "fmt" - "io/ioutil" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/attic-labs/noms/chunks" - "github.com/attic-labs/noms/datas" - "github.com/attic-labs/noms/ref" - "github.com/attic-labs/noms/types" - "github.com/stretchr/testify/suite" -) - -func TestImportSuite(t *testing.T) { - suite.Run(t, &ImportTestSuite{}) -} - -type ImportTestSuite struct { - suite.Suite - vrw types.ValueReadWriter - imported types.Package - importRef ref.Ref - nested types.Package - nestedRef ref.Ref -} - -func (suite *ImportTestSuite) SetupTest() { - suite.vrw = datas.NewDataStore(chunks.NewMemoryStore()) - - ns := types.MakeStructType("NestedDepStruct", []types.Field{}, []types.Field{ - types.Field{"b", types.BoolType, false}, - types.Field{"i", 
types.NumberType, false}, - }) - suite.nested = types.NewPackage([]*types.Type{ns}, []ref.Ref{}) - suite.nestedRef = suite.vrw.WriteValue(suite.nested).TargetRef() - - fs := types.MakeStructType("ForeignStruct", []types.Field{ - types.Field{"b", types.MakeType(ref.Ref{}, 1), false}, - types.Field{"n", types.MakeType(suite.nestedRef, 0), false}, - }, - []types.Field{}) - suite.imported = types.NewPackage([]*types.Type{fs}, []ref.Ref{suite.nestedRef}) - suite.importRef = suite.vrw.WriteValue(suite.imported).TargetRef() -} - -func (suite *ImportTestSuite) TestGetDeps() { - deps := getDeps([]ref.Ref{suite.importRef}, suite.vrw) - suite.Len(deps, 1) - imported, ok := deps[suite.importRef] - suite.True(ok, "%s is a dep; should have been found.", suite.importRef.String()) - - deps = getDeps(imported.Dependencies(), suite.vrw) - suite.Len(deps, 1) - imported, ok = deps[suite.nestedRef] - suite.True(ok, "%s is a dep; should have been found.", suite.nestedRef.String()) -} - -func (suite *ImportTestSuite) TestUnknownImportedType() { - deps := getDeps([]ref.Ref{suite.importRef}, suite.vrw) - suite.Panics(func() { - resolveNamespace(types.MakeUnresolvedType("Other", "NotThere"), map[string]ref.Ref{"Other": suite.importRef}, deps) - }) -} - -func (suite *ImportTestSuite) TestDetectFreeVariable() { - ls := types.MakeStructType("Local", []types.Field{ - types.Field{"b", types.BoolType, false}, - types.Field{"n", types.MakeUnresolvedType("", "OtherLocal"), false}, - }, - []types.Field{}) - suite.Panics(func() { - inter := intermediate{Types: []*types.Type{ls}} - resolveLocalOrdinals(&inter) - }) -} - -func (suite *ImportTestSuite) TestImports() { - find := func(n string, typ *types.Type) types.Field { - suite.Equal(types.StructKind, typ.Kind()) - for _, f := range typ.Desc.(types.StructDesc).Fields { - if f.Name == n { - return f - } - } - suite.Fail("Could not find field", "%s not present", n) - return types.Field{} - } - findChoice := func(n string, typ *types.Type) types.Field { - suite.Equal(types.StructKind, typ.Kind()) - for _, f := range typ.Desc.(types.StructDesc).Union { - if f.Name == n { - return f - } - } - suite.Fail("Could not find choice", "%s not present", n) - return types.Field{} - } - refFromNomsFile := func(path string) ref.Ref { - ds := datas.NewDataStore(chunks.NewMemoryStore()) - inFile, err := os.Open(path) - suite.NoError(err) - defer inFile.Close() - parsedDep := ParseNomDL("", inFile, filepath.Dir(path), ds) - return ds.WriteValue(parsedDep.Package).TargetRef() - } - - dir, err := ioutil.TempDir("", "") - suite.NoError(err) - defer os.RemoveAll(dir) - - byPathNomDL := filepath.Join(dir, "filedep.noms") - err = ioutil.WriteFile(byPathNomDL, []byte("struct FromFile{i:Number}"), 0600) - suite.NoError(err) - - r := strings.NewReader(fmt.Sprintf(` - alias Other = import "%s" - alias ByPath = import "%s" - - using List - struct Local1 { - a: Other.ForeignStruct - b: Number - c: Local2 - } - struct Local2 { - a: ByPath.FromFile - } - struct Union { - union { - a: Other.ForeignStruct - b: Local2 - } - } - struct WithUnion { - a: Other.ForeignStruct - b: union { - s: Local1 - } - }`, suite.importRef, filepath.Base(byPathNomDL))) - p := ParseNomDL("testing", r, dir, suite.vrw) - - named := p.Types()[0] - suite.Equal("Local1", named.Name()) - field := find("a", named) - suite.EqualValues(suite.importRef, field.T.PackageRef()) - field = find("c", named) - suite.EqualValues(p.Ref(), field.T.PackageRef()) - - named = p.Types()[1] - suite.Equal("Local2", named.Name()) - field = find("a", named) 
- suite.EqualValues(refFromNomsFile(byPathNomDL), field.T.PackageRef()) - - named = p.Types()[2] - suite.Equal("Union", named.Name()) - field = findChoice("a", named) - suite.EqualValues(suite.importRef, field.T.PackageRef()) - field = findChoice("b", named) - suite.EqualValues(p.Ref(), field.T.PackageRef()) - - named = p.Types()[3] - suite.Equal("WithUnion", named.Name()) - field = find("a", named) - suite.EqualValues(suite.importRef, field.T.PackageRef()) - namedUnion := find("b", named).T - suite.True(namedUnion.IsUnresolved()) - namedUnion = p.Types()[namedUnion.Ordinal()] - field = findChoice("s", namedUnion) - suite.EqualValues(p.Ref(), field.T.PackageRef()) - - usings := p.UsingDeclarations - suite.Len(usings, 1) - suite.EqualValues(types.ListKind, usings[0].Kind()) -} - -func (suite *ImportTestSuite) TestImportWithLocalRef() { - dir, err := ioutil.TempDir("", "") - suite.NoError(err) - defer os.RemoveAll(dir) - - byPathNomDL := filepath.Join(dir, "filedep.noms") - err = ioutil.WriteFile(byPathNomDL, []byte("struct FromFile{i:Number}"), 0600) - suite.NoError(err) - - r1 := strings.NewReader(` - struct A { - B: B - } - struct B { - X: Number - }`) - pkg1 := ParseNomDL("test1", r1, dir, suite.vrw) - pkgRef1 := suite.vrw.WriteValue(pkg1.Package).TargetRef() - - r2 := strings.NewReader(fmt.Sprintf(` - alias Other = import "%s" - struct C { - C: Map - } - `, pkgRef1)) - pkg2 := ParseNomDL("test2", r2, dir, suite.vrw) - - ts := pkg2.Types() - suite.Len(ts, 1) - suite.EqualValues(types.StructKind, ts[0].Kind()) - mapType := ts[0].Desc.(types.StructDesc).Fields[0].T - suite.EqualValues(types.MapKind, mapType.Kind()) - otherAType := mapType.Desc.(types.CompoundDesc).ElemTypes[1] - suite.EqualValues(types.UnresolvedKind, otherAType.Kind()) - suite.EqualValues(pkgRef1, otherAType.PackageRef()) -} diff --git a/nomdl/pkg/parse.go b/nomdl/pkg/parse.go index 4b7609ef7c..d04465b455 100644 --- a/nomdl/pkg/parse.go +++ b/nomdl/pkg/parse.go @@ -4,64 +4,63 @@ import ( "io" "github.com/attic-labs/noms/d" - "github.com/attic-labs/noms/ref" "github.com/attic-labs/noms/types" ) // Parsed represents a parsed Noms type package, which has some additional metadata beyond that which is present in a types.Package. -// UsingDeclarations is kind of a hack to indicate specializations of Noms containers that need to be generated. These should all be one of ListKind, SetKind, MapKind or RefKind, and Desc should be a CompoundDesc instance. type Parsed struct { - types.Package - Name string - UsingDeclarations []*types.Type - AliasNames map[ref.Ref]string + Filename string + Types []*types.Type + AliasNames map[string]string } // ParseNomDL reads a Noms package specification from r and returns a Package. Errors will be annotated with packageName and thrown. -func ParseNomDL(packageName string, r io.Reader, includePath string, vrw types.ValueReadWriter) Parsed { - i := runParser(packageName, r) - i.Name = packageName - imports := resolveImports(i.Aliases, includePath, vrw) - deps := importsToDeps(imports) +func ParseNomDL(filename string, r io.Reader, includePath string) []*types.Type { + i := runParser(filename, r) + i.Filename = filename + // name -> Parsed + imports := resolveImports(i.Aliases, includePath) - resolveLocalOrdinals(&i) - resolveNamespaces(&i, imports, getDeps(deps, vrw)) + // Replace all variable references with the actual type. 
+ resolveReferences(&i, imports) - // Transpose imports - aliasNames := map[ref.Ref]string{} - for k, v := range imports { - aliasNames[v] = k - } - - pkg := types.NewPackage(i.Types, deps) - - usingDeclarations := make([]*types.Type, len(i.UsingDeclarations)) - for idx, t := range i.UsingDeclarations { - usingDeclarations[idx] = types.FixupType(t, &pkg) - } - - return Parsed{ - pkg, - i.Name, - usingDeclarations, - aliasNames, - } + return i.Types } type intermediate struct { - Name string - Aliases map[string]string - UsingDeclarations []*types.Type - Types []*types.Type + Filename string + // Aliases maps from Name to Target, where target is the non resolved filename. + Aliases map[string]string + Types []*types.Type } -func runParser(logname string, r io.Reader) intermediate { - got, err := ParseReader(logname, r) +func runParser(filename string, r io.Reader) intermediate { + got, err := ParseReader(filename, r) d.Exp.NoError(err) return got.(intermediate) } -func resolveLocalOrdinals(p *intermediate) { +func indexOf(t *types.Type, ts []*types.Type) int16 { + for i, tt := range ts { + if tt.Name() == t.Name() { + return int16(i) + } + } + return -1 +} + +func findType(n string, ts []*types.Type) *types.Type { + for _, t := range ts { + if n == t.Name() { + return t + } + } + d.Exp.Fail("Undefined reference %s", n) + return nil +} + +// resolveReferences replaces references with the actual Type +func resolveReferences(i *intermediate, aliases map[string][]*types.Type) { var rec func(t *types.Type) *types.Type resolveFields := func(fields []types.Field) { for idx, f := range fields { @@ -70,17 +69,15 @@ func resolveLocalOrdinals(p *intermediate) { } } rec = func(t *types.Type) *types.Type { - if t.IsUnresolved() { - if t.Namespace() == "" && !t.HasOrdinal() { - ordinal := indexOf(t, p.Types) - d.Chk.True(ordinal >= 0 && int(ordinal) < len(p.Types), "Invalid reference: %s", t.Name()) - return types.MakeType(ref.Ref{}, int16(ordinal)) - } - - return t - } - switch t.Kind() { + case UnresolvedKind: + desc := t.Desc.(UnresolvedDesc) + if desc.Namespace == "" { + return findType(desc.Name, i.Types) + } + ts, ok := aliases[desc.Namespace] + d.Exp.True(ok, "No such namespace: %s", desc.Namespace) + return findType(desc.Name, ts) case types.ListKind: return types.MakeListType(rec(t.Desc.(types.CompoundDesc).ElemTypes[0])) case types.SetKind: @@ -97,128 +94,7 @@ func resolveLocalOrdinals(p *intermediate) { return t } - for i, t := range p.Types { - p.Types[i] = rec(t) - } - for i, t := range p.UsingDeclarations { - p.UsingDeclarations[i] = rec(t) + for idx, t := range i.Types { + i.Types[idx] = rec(t) } } - -func indexOf(t *types.Type, ts []*types.Type) int16 { - for i, tt := range ts { - if tt.Name() == t.Name() && tt.Namespace() == "" { - return int16(i) - } - } - return -1 -} - -func resolveNamespaces(p *intermediate, aliases map[string]ref.Ref, deps map[ref.Ref]types.Package) { - var rec func(t *types.Type) *types.Type - resolveFields := func(fields []types.Field) { - for idx, f := range fields { - if f.T.IsUnresolved() { - if p.checkLocal(f.T) { - continue - } - f.T = resolveNamespace(f.T, aliases, deps) - } else { - f.T = rec(f.T) - } - - d.Chk.True(!f.T.IsUnresolved() || f.T.HasOrdinal()) - fields[idx] = f - } - } - rec = func(t *types.Type) *types.Type { - if t.IsUnresolved() { - if p.checkLocal(t) { - return t - } - t = resolveNamespace(t, aliases, deps) - } - switch t.Kind() { - case types.UnresolvedKind: - d.Chk.True(t.HasPackageRef(), "should resolve again") - case types.ListKind: - 
return types.MakeListType(rec(t.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.SetKind: - return types.MakeSetType(rec(t.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.RefKind: - return types.MakeRefType(rec(t.Desc.(types.CompoundDesc).ElemTypes[0])) - case types.MapKind: - elemTypes := t.Desc.(types.CompoundDesc).ElemTypes - return types.MakeMapType(rec(elemTypes[0]), rec(elemTypes[1])) - case types.StructKind: - resolveFields(t.Desc.(types.StructDesc).Fields) - resolveFields(t.Desc.(types.StructDesc).Union) - } - - if t.IsUnresolved() { - return rec(t) - } - - return t - } - - for i, t := range p.Types { - p.Types[i] = rec(t) - } - for i, t := range p.UsingDeclarations { - p.UsingDeclarations[i] = rec(t) - } -} - -func (i *intermediate) checkLocal(t *types.Type) bool { - if t.Namespace() == "" { - d.Chk.True(t.HasOrdinal(), "Invalid local reference") - return true - } - return false -} - -func resolveNamespace(t *types.Type, aliases map[string]ref.Ref, deps map[ref.Ref]types.Package) *types.Type { - pkgRef, ok := aliases[t.Namespace()] - d.Exp.True(ok, "Could not find import aliased to %s", t.Namespace()) - d.Chk.NotEqual("", t.Name()) - ordinal := deps[pkgRef].GetOrdinal(t.Name()) - d.Exp.NotEqual(int64(-1), ordinal, "Could not find type %s in package %s (aliased to %s).", t.Name(), pkgRef.String(), t.Namespace()) - d.Chk.False(pkgRef.IsEmpty()) - return types.MakeType(pkgRef, int16(ordinal)) -} - -// expandStruct takes a struct definition and expands the internal structs created for unions. -func expandStruct(t *types.Type, ordinal int) []*types.Type { - d.Chk.Equal(types.StructKind, t.Kind()) - ts := []*types.Type{t} - ordinal++ - - doFields := func(fields []types.Field) []types.Field { - rv := make([]types.Field, len(fields)) - for i, f := range fields { - if f.T.Kind() == types.StructKind { - newType := expandStruct(f.T, ordinal) - ts = append(ts, newType...) 
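The core of the parse.go change: rather than assigning package ordinals and fixing them up afterwards (the deleted resolveLocalOrdinals, resolveNamespaces, and expandStruct above), the parser now records a name-only UnresolvedDesc placeholder and resolveReferences walks each type, swapping the placeholder for the actual *types.Type found either among the file's own definitions or in the imported file's type list. Below is a simplified, self-contained model of that substitution; the `typ`, `kind`, and `resolve` names are stand-ins invented for illustration and are not the noms API.

```go
package main

import "fmt"

// A stripped-down stand-in for *types.Type: a named struct with fields, a
// compound (here just List) wrapping an element type, or an unresolved
// reference that only carries the name it refers to.
type kind int

const (
	structKind kind = iota
	listKind
	unresolvedKind
)

type typ struct {
	kind   kind
	name   string // struct name, or the referenced name when unresolved
	fields map[string]*typ
	elem   *typ
}

// resolve mirrors the shape of resolveReferences: replace every unresolved
// reference with the already-parsed definition of the same name, recursing
// through compound types. The returned pointer is a back reference to the
// real definition, which is how cycles between structs are broken.
func resolve(t *typ, defs map[string]*typ) *typ {
	switch t.kind {
	case unresolvedKind:
		d, ok := defs[t.name]
		if !ok {
			panic(fmt.Sprintf("undefined reference %s", t.name))
		}
		return d
	case listKind:
		t.elem = resolve(t.elem, defs)
	case structKind:
		for n, f := range t.fields {
			t.fields[n] = resolve(f, defs)
		}
	}
	return t
}

func main() {
	// Models: struct A { b: B }   struct B { x: List<A> }
	a := &typ{kind: structKind, name: "A", fields: map[string]*typ{
		"b": &typ{kind: unresolvedKind, name: "B"},
	}}
	b := &typ{kind: structKind, name: "B", fields: map[string]*typ{
		"x": &typ{kind: listKind, elem: &typ{kind: unresolvedKind, name: "A"}},
	}}
	defs := map[string]*typ{"A": a, "B": b}
	for _, t := range defs {
		resolve(t, defs)
	}
	// Both placeholders now point at the real definitions: true true.
	fmt.Println(a.fields["b"] == b, b.fields["x"].elem == a)
}
```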
- rv[i] = types.Field{Name: f.Name, T: types.MakeType(ref.Ref{}, int16(ordinal)), Optional: f.Optional} - ordinal += len(newType) - } else { - rv[i] = f - } - } - return rv - } - - desc := t.Desc.(types.StructDesc) - fields := doFields(desc.Fields) - - var choices []types.Field - if desc.Union != nil { - choices = doFields(desc.Union) - } - - if len(ts) != 1 { - ts[0] = types.MakeStructType(t.Name(), fields, choices) - } - return ts -} diff --git a/nomdl/pkg/parse_test.go b/nomdl/pkg/parse_test.go index 7315dbdc9d..17dd8ce2b8 100644 --- a/nomdl/pkg/parse_test.go +++ b/nomdl/pkg/parse_test.go @@ -6,7 +6,6 @@ import ( "testing" "github.com/attic-labs/noms/d" - "github.com/attic-labs/noms/ref" "github.com/attic-labs/noms/types" "github.com/stretchr/testify/suite" ) @@ -45,30 +44,6 @@ func (suite *ParserTestSuite) TestAlias() { suite.Equal(path, pkg.Aliases["Noms"]) } -func (suite *ParserTestSuite) TestUsing() { - usingDecls := ` -using Map -using List -` - pkg := runParser("", strings.NewReader(usingDecls)) - suite.Len(pkg.UsingDeclarations, 2) - - suite.Equal(types.MapKind, pkg.UsingDeclarations[0].Desc.Kind()) - suite.True(types.StringType.Equals(pkg.UsingDeclarations[0].Desc.(types.CompoundDesc).ElemTypes[0])) - suite.True(types.MakeUnresolvedType("", "Simple").Equals(pkg.UsingDeclarations[0].Desc.(types.CompoundDesc).ElemTypes[1])) - - suite.Equal(types.ListKind, pkg.UsingDeclarations[1].Desc.Kind()) - elemTypes := pkg.UsingDeclarations[1].Desc.(types.CompoundDesc).ElemTypes - suite.Len(elemTypes, 1) - suite.True(types.MakeUnresolvedType("Noms", "Commit").Equals(elemTypes[0])) -} - -func (suite *ParserTestSuite) TestBadUsing() { - suite.Panics(func() { runParser("", strings.NewReader("using Blob")) }, "Can't 'use' a primitive.") - suite.Panics(func() { runParser("", strings.NewReader("using Noms.Commit")) }, "Can't 'use' a type from another package.") - suite.Panics(func() { runParser("", strings.NewReader("using f@") } type structTestCase struct { Name string - Union testChoices Fields []testField } -func makeStructTestCase(n string, u testChoices, fields ...testField) structTestCase { - return structTestCase{n, u, fields} +func makeStructTestCase(n string, fields ...testField) structTestCase { + return structTestCase{Name: n, Fields: fields} } -func (s structTestCase) toText() string { - return fmt.Sprintf(structTmpl, s.Name, s.fieldsToString(), s.unionToString()) -} - -func (s structTestCase) fieldsToString() (out string) { +func (s structTestCase) String() string { + fieldsSource := "" for _, f := range s.Fields { - out += f.Name + ": " - if f.Optional { - out += "optional " - } - out += f.D.Describe() + "\n" + fieldsSource += f.String() + "\n" } - return -} - -func (s structTestCase) unionToString() string { - if s.Union == nil { - return "" - } - return describeUnion(s.Union) -} - -func describeUnion(fields []types.Field) string { - out := " union {" - for _, f := range fields { - out += f.Name + ": " - out += f.T.Describe() + "\n" - } - return out + "}" + return fmt.Sprintf("struct %s { %s }", s.Name, fieldsSource) } type testField struct { - Name string - D describable - Optional bool + types.Field + S string } -func (t testField) toField() types.Field { - return types.Field{t.Name, t.D.(*types.Type), t.Optional} +func newTestField(name string, t *types.Type, optional bool, s string) testField { + return testField{Field: types.Field{Name: name, T: t, Optional: optional}, S: s} } -type describable interface { - Describe() string -} - -func (suite *ParsedResultTestSuite) 
findTypeByName(n string, ts []*types.Type) *types.Type { - for _, t := range ts { - if n == t.Name() { - return t - } +func (t testField) String() string { + s := t.Name + ": " + if t.Optional { + s += "optional " } - suite.Fail("Failed to find type by name") - panic("Unreachable") -} - -func (suite *ParsedResultTestSuite) checkStruct(pkg intermediate, s structTestCase) { - typ := suite.findTypeByName(s.Name, pkg.Types) - typFields := typ.Desc.(types.StructDesc).Fields - typUnion := typ.Desc.(types.StructDesc).Union - - suite.Equal(s.Name, typ.Name()) - suite.Len(typFields, len(s.Fields)) - for i, f := range s.Fields { - // Named unions are syntactic sugar for a struct Field that points to an anonymous struct containing an anonymous union. - // So, if the field in the test input was a union... - if _, ok := f.D.(testChoices); ok { - // ...make sure the names are the same... - suite.Equal(f.Name, typFields[i].Name) - suite.Equal(f.Optional, typFields[i].Optional) - // and the Type points to somewhere else. - suite.True(typFields[i].T.IsUnresolved()) - suite.True(typFields[i].T.Ordinal() > 0) - suite.Equal(ref.Ref{}, typFields[i].T.PackageRef()) - } else { - suite.EqualValues(s.Fields[i].toField(), typFields[i]) - } + if t.S != "" { + return s + t.S } - if s.Union != nil && suite.NotEmpty(typUnion) { - suite.Len(typUnion, len(s.Union)) - for i := range s.Union { - suite.EqualValues(s.Union[i], typUnion[i]) - } - } else { - suite.EqualValues(s.Union, typUnion, "If s.Union is nil, so should typUnion be.") - } - + return s + t.T.Describe() } func (suite *ParsedResultTestSuite) parseAndCheckStructs(structs ...structTestCase) { - pkgDef := "" - for _, s := range structs { - pkgDef += s.toText() + "\n" + source := "" + expectedTypes := make([]*types.Type, len(structs)) + for i, s := range structs { + source += s.String() + "\n" + fields := make([]types.Field, len(s.Fields)) + for i, f := range s.Fields { + fields[i] = f.Field + } + expectedTypes[i] = types.MakeStructType(s.Name, fields, nil) } + suite.assertTypes(source, expectedTypes...) 
+} + +func (suite *ParsedResultTestSuite) assertTypes(source string, ts ...*types.Type) { err := d.Try(func() { - pkg := runParser("", strings.NewReader(pkgDef)) - for _, s := range structs { - suite.checkStruct(pkg, s) + i := runParser("", strings.NewReader(source)) + for idx, t := range i.Types { + suite.True(t.Equals(ts[idx])) } }) - suite.NoError(err, pkgDef) + suite.NoError(err, source) } func (suite *ParsedResultTestSuite) TestPrimitiveField() { - suite.parseAndCheckStructs(makeStructTestCase("Simple", nil, suite.primField)) + suite.parseAndCheckStructs(makeStructTestCase("Simple", suite.prim)) } func (suite *ParsedResultTestSuite) TestPrimitiveOptionalField() { - suite.parseAndCheckStructs(makeStructTestCase("SimpleOptional", nil, suite.primOptionalField)) -} - -func (suite *ParsedResultTestSuite) TestAnonUnion() { - suite.parseAndCheckStructs(makeStructTestCase("WithAnon", suite.union, suite.primField)) -} - -func (suite *ParsedResultTestSuite) TestAnonUnionFirst() { - anonUnionFirst := makeStructTestCase("WithAnonFirst", suite.union, suite.primField) - - pkgDef := fmt.Sprintf(structTmpl, anonUnionFirst.Name, anonUnionFirst.unionToString(), anonUnionFirst.fieldsToString()) - err := d.Try(func() { - pkg := runParser("", strings.NewReader(pkgDef)) - suite.checkStruct(pkg, anonUnionFirst) - }) - suite.NoError(err, pkgDef) + suite.parseAndCheckStructs(makeStructTestCase("SimpleOptional", suite.primOptional)) } func (suite *ParsedResultTestSuite) TestCommentNextToName() { - withComment := makeStructTestCase("WithComment", suite.union, suite.primField) - - pkgDef := fmt.Sprintf(structTmpl, "/* Oy! */"+withComment.Name, withComment.unionToString(), withComment.fieldsToString()) - err := d.Try(func() { - pkg := runParser("", strings.NewReader(pkgDef)) - suite.checkStruct(pkg, withComment) - }) - suite.NoError(err, pkgDef) + n := "WithComment" + s := fmt.Sprintf("struct %s { /* Oy! 
*/%s }", n, suite.primOptional) + suite.assertTypes(s, types.MakeStructType(n, []types.Field{suite.primOptional.Field}, nil)) } func (suite *ParsedResultTestSuite) TestCommentAmongFields() { - withComment := makeStructTestCase("WithComment", suite.union, suite.primField) - - pkgDef := fmt.Sprintf(structTmpl, withComment.Name, withComment.fieldsToString()+"\n// Nope\n", withComment.unionToString()) - err := d.Try(func() { - pkg := runParser("", strings.NewReader(pkgDef)) - suite.checkStruct(pkg, withComment) - }) - suite.NoError(err, pkgDef) + n := "WithComment" + s := fmt.Sprintf("struct %s { %s \n// Nope\n%s }", n, suite.prim, suite.primOptional) + suite.assertTypes(s, types.MakeStructType(n, []types.Field{suite.prim.Field, suite.primOptional.Field}, nil)) } func (suite *ParsedResultTestSuite) TestCompoundField() { - suite.parseAndCheckStructs(makeStructTestCase("Compound", suite.union, suite.compoundField)) + suite.parseAndCheckStructs(makeStructTestCase("Compound", suite.compound)) } func (suite *ParsedResultTestSuite) TestCompoundOfCompoundField() { - suite.parseAndCheckStructs(makeStructTestCase("CofC", suite.union, suite.compoundOfCompoundField)) + suite.parseAndCheckStructs(makeStructTestCase("CofC", suite.compoundOfCompound)) } func (suite *ParsedResultTestSuite) TestNamedTypeField() { - suite.parseAndCheckStructs(makeStructTestCase("Named", suite.union, suite.namedTypeField)) + suite.parseAndCheckStructs(makeStructTestCase("Named", suite.namedType)) } func (suite *ParsedResultTestSuite) TestNamespacedTypeField() { - suite.parseAndCheckStructs(makeStructTestCase("Namespaced", suite.union, suite.namespacedTypeField)) + suite.parseAndCheckStructs(makeStructTestCase("Namespaced", suite.namespacedType)) } func (suite *ParsedResultTestSuite) TestMapOfNamedTypeField() { - suite.parseAndCheckStructs(makeStructTestCase("MapStruct", suite.union, suite.mapOfNamedTypeField)) + suite.parseAndCheckStructs(makeStructTestCase("MapStruct", suite.mapOfNamedType)) } func (suite *ParsedResultTestSuite) TestMultipleFields() { - suite.parseAndCheckStructs(makeStructTestCase("Multi", suite.union, - suite.primField, - suite.primOptionalField, - suite.namedTypeField, - suite.namespacedTypeField, - suite.compoundField, - suite.compoundOfCompoundField, - testField{"namedUnion", suite.union, false}, - )) -} - -func (suite *ParsedResultTestSuite) TestNamedAndAnonUnion() { - suite.parseAndCheckStructs(makeStructTestCase("NamedAndAnon", suite.union, - testField{"namedUnion", suite.union, false}, - )) -} - -func (suite *ParsedResultTestSuite) TestNamedUnionOnly() { - suite.parseAndCheckStructs(makeStructTestCase("NamedUnionOnly", nil, - testField{"namedUnion", suite.union, false}, - )) -} - -func (suite *ParsedResultTestSuite) TestTwoNamedAndAnonUnion() { - suite.parseAndCheckStructs(makeStructTestCase("TwoNamedAndAnon", suite.union, - testField{"namedUnion1", suite.union, false}, - testField{"namedUnion2", suite.union, false}, + suite.parseAndCheckStructs(makeStructTestCase("Multi", + suite.prim, + suite.primOptional, + suite.namedType, + suite.namespacedType, + suite.compound, + suite.compoundOfCompound, )) } func (suite *ParsedResultTestSuite) TestMultipleStructs() { defns := []structTestCase{ - makeStructTestCase("Simple", nil, suite.primField), - makeStructTestCase("Optional", nil, suite.primOptionalField), - makeStructTestCase("Compound", nil, suite.compoundField), - makeStructTestCase("CompoundWithUnion", suite.union, suite.compoundField), - makeStructTestCase("TwoNamedAndAnon", suite.union, - 
testField{"namedUnion1", suite.union, false}, - testField{"namedUnion2", suite.union, false}, - ), - makeStructTestCase("Multi", suite.union, - suite.primField, - suite.primOptionalField, - suite.namespacedTypeField, - suite.compoundField, - testField{"namedUnion", suite.union, false}, + makeStructTestCase("Simple", suite.prim), + makeStructTestCase("Optional", suite.primOptional), + makeStructTestCase("Compound", suite.compound), + makeStructTestCase("Multi", + suite.prim, + suite.primOptional, + suite.namespacedType, + suite.compound, ), } suite.parseAndCheckStructs(defns...) } - -func (suite *ParsedResultTestSuite) TestExpandStruct() { - code := ` - struct T { - x: Number - u: union { - s: String - b: Bool - } - } - ` - pkg := runParser("", strings.NewReader(code)) - suite.Len(pkg.Types, 2) - - { - code := ` - struct T { - a: union { - b: String - c: Bool - } - d: union { - e: String - f: Bool - } - } - ` - pkg := runParser("", strings.NewReader(code)) - suite.Len(pkg.Types, 3) - } -} diff --git a/nomdl/pkg/unresolved_desc.go b/nomdl/pkg/unresolved_desc.go new file mode 100644 index 0000000000..13af8ccd6d --- /dev/null +++ b/nomdl/pkg/unresolved_desc.go @@ -0,0 +1,27 @@ +package pkg + +import "github.com/attic-labs/noms/types" + +const UnresolvedKind = 100 + +// UnresolvedDesc represents a named reference to a type. +type UnresolvedDesc struct { + Namespace string + Name string +} + +func (desc UnresolvedDesc) Kind() types.NomsKind { + return UnresolvedKind +} + +func (desc UnresolvedDesc) Equals(other types.TypeDesc) bool { + if other.Kind() != UnresolvedKind { + return false + } + d2 := other.(UnresolvedDesc) + return d2.Namespace == desc.Namespace && d2.Name == desc.Namespace +} + +func makeUnresolvedType(namespace, name string) *types.Type { + return &types.Type{Desc: UnresolvedDesc{namespace, name}} +} diff --git a/types/compound_list_test.go b/types/compound_list_test.go index 5a8d48e1c3..af0f84851e 100644 --- a/types/compound_list_test.go +++ b/types/compound_list_test.go @@ -5,7 +5,6 @@ import ( "sync" "testing" - "github.com/attic-labs/noms/ref" "github.com/stretchr/testify/assert" ) @@ -665,7 +664,7 @@ func TestCompoundListFirstNNumbers(t *testing.T) { nums := firstNNumbers(5000) s := NewTypedList(listType, nums...) - assert.Equal(s.Ref().String(), "sha1-df0a58e5fb11b2bc0adbab07c2f39c6b3e02b42b") + assert.Equal("sha1-df0a58e5fb11b2bc0adbab07c2f39c6b3e02b42b", s.Ref().String()) } func TestCompoundListRefOfStructFirstNNumbers(t *testing.T) { @@ -675,19 +674,17 @@ func TestCompoundListRefOfStructFirstNNumbers(t *testing.T) { assert := assert.New(t) vs := NewTestValueStore() - structTypeDef := MakeStructType("num", []Field{ + structType := MakeStructType("num", []Field{ Field{"n", NumberType, false}, }, []Field{}) - pkg := NewPackage([]*Type{structTypeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - structType := MakeType(pkgRef, 0) + refOfTypeStructType := MakeRefType(structType) listType := MakeListType(refOfTypeStructType) firstNNumbers := func(n int) []Value { nums := []Value{} for i := 0; i < n; i++ { - r := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Number(i)})) + r := vs.WriteValue(NewStruct(structType, structData{"n": Number(i)})) nums = append(nums, r) } @@ -696,7 +693,7 @@ func TestCompoundListRefOfStructFirstNNumbers(t *testing.T) { nums := firstNNumbers(5000) s := NewTypedList(listType, nums...) 
- assert.Equal(s.Ref().String(), "sha1-f2e6c3aae6e8ac4c3776830e2d8141fc527c55c5") + assert.Equal("sha1-f2db6a2f8026ee6e12bb584cd38c813604774a69", s.Ref().String()) } func TestCompoundListModifyAfterRead(t *testing.T) { diff --git a/types/compound_map_test.go b/types/compound_map_test.go index 5f307df009..21cc9d64d3 100644 --- a/types/compound_map_test.go +++ b/types/compound_map_test.go @@ -5,7 +5,6 @@ import ( "sort" "testing" - "github.com/attic-labs/noms/ref" "github.com/stretchr/testify/assert" ) @@ -390,7 +389,7 @@ func TestCompoundMapFirstNNumbers(t *testing.T) { } m := NewTypedMap(mapType, kvs...) - assert.Equal(m.Ref().String(), "sha1-60f2d39d24da082cb8e022f866c60202152b2562") + assert.Equal("sha1-60f2d39d24da082cb8e022f866c60202152b2562", m.Ref().String()) } func TestCompoundMapRefOfStructFirstNNumbers(t *testing.T) { @@ -400,12 +399,9 @@ func TestCompoundMapRefOfStructFirstNNumbers(t *testing.T) { assert := assert.New(t) vs := NewTestValueStore() - structTypeDef := MakeStructType("num", []Field{ + structType := MakeStructType("num", []Field{ Field{"n", NumberType, false}, }, []Field{}) - pkg := NewPackage([]*Type{structTypeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - structType := MakeType(pkgRef, 0) refOfTypeStructType := MakeRefType(structType) mapType := MakeMapType(refOfTypeStructType, refOfTypeStructType) @@ -413,15 +409,15 @@ func TestCompoundMapRefOfStructFirstNNumbers(t *testing.T) { kvs := []Value{} n := 5000 for i := 0; i < n; i++ { - k := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Number(i)})) - v := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Number(i + 1)})) + k := vs.WriteValue(NewStruct(structType, structData{"n": Number(i)})) + v := vs.WriteValue(NewStruct(structType, structData{"n": Number(i + 1)})) assert.NotNil(k) assert.NotNil(v) kvs = append(kvs, k, v) } m := NewTypedMap(mapType, kvs...) - assert.Equal("sha1-3ab6131151c76cc3fdc9b639b37770d8d7dcdf5d", m.Ref().String()) + assert.Equal("sha1-d7c5ef579ec638a288cb23b48f601a0b1e277fe3", m.Ref().String()) } func TestCompoundMapModifyAfterRead(t *testing.T) { diff --git a/types/compound_set_test.go b/types/compound_set_test.go index 24bcd145c7..761943d2e2 100644 --- a/types/compound_set_test.go +++ b/types/compound_set_test.go @@ -5,7 +5,6 @@ import ( "sort" "testing" - "github.com/attic-labs/noms/ref" "github.com/stretchr/testify/assert" ) @@ -370,7 +369,7 @@ func TestCompoundSetFirstNNumbers(t *testing.T) { nums := firstNNumbers(5000) s := newTypedSet(setType, nums...) 
- assert.Equal(s.Ref().String(), "sha1-5b4cd51d88b3d99e6dafdb1cafb8cec90d5aecdf") + assert.Equal("sha1-5b4cd51d88b3d99e6dafdb1cafb8cec90d5aecdf", s.Ref().String()) } func TestCompoundSetRefOfStructFirstNNumbers(t *testing.T) { @@ -380,12 +379,9 @@ func TestCompoundSetRefOfStructFirstNNumbers(t *testing.T) { assert := assert.New(t) vs := NewTestValueStore() - structTypeDef := MakeStructType("num", []Field{ + structType := MakeStructType("num", []Field{ Field{"n", NumberType, false}, }, []Field{}) - pkg := NewPackage([]*Type{structTypeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - structType := MakeType(pkgRef, 0) refOfTypeStructType := MakeRefType(structType) setType := MakeSetType(refOfTypeStructType) @@ -393,7 +389,7 @@ func TestCompoundSetRefOfStructFirstNNumbers(t *testing.T) { firstNNumbers := func(n int) []Value { nums := []Value{} for i := 0; i < n; i++ { - r := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Number(i)})) + r := vs.WriteValue(NewStruct(structType, structData{"n": Number(i)})) nums = append(nums, r) } @@ -402,7 +398,7 @@ func TestCompoundSetRefOfStructFirstNNumbers(t *testing.T) { nums := firstNNumbers(5000) s := NewTypedSet(setType, nums...) - assert.Equal("sha1-4c2b0e159ae443ec99299b6ea266d9a408f7987d", s.Ref().String()) + assert.Equal("sha1-b06811c4abafef5e2198c04a81d3a300a709fd02", s.Ref().String()) } func TestCompoundSetModifyAfterRead(t *testing.T) { diff --git a/types/decode_noms_value.go b/types/decode_noms_value.go index 041517a6ca..333aa36dbb 100644 --- a/types/decode_noms_value.go +++ b/types/decode_noms_value.go @@ -62,6 +62,10 @@ func (r *jsonArrayReader) readUint() uint64 { return v } +func (r *jsonArrayReader) readUint8() uint8 { + return uint8(r.read().(float64)) +} + func (r *jsonArrayReader) readArray() []interface{} { return r.read().([]interface{}) } @@ -75,29 +79,30 @@ func (r *jsonArrayReader) readRef() ref.Ref { return ref.Parse(s) } -func (r *jsonArrayReader) readTypeAsTag() *Type { +func (r *jsonArrayReader) readTypeAsTag(backRefs []*Type) *Type { kind := r.readKind() switch kind { case ListKind: - elemType := r.readTypeAsTag() + elemType := r.readTypeAsTag(backRefs) return MakeListType(elemType) case SetKind: - elemType := r.readTypeAsTag() + elemType := r.readTypeAsTag(backRefs) return MakeSetType(elemType) case RefKind: - elemType := r.readTypeAsTag() + elemType := r.readTypeAsTag(backRefs) return MakeRefType(elemType) case MapKind: - keyType := r.readTypeAsTag() - valueType := r.readTypeAsTag() + keyType := r.readTypeAsTag(backRefs) + valueType := r.readTypeAsTag(backRefs) return MakeMapType(keyType, valueType) case TypeKind: return TypeType - case UnresolvedKind: - pkgRef := r.readRef() - ordinal := int16(r.readInt()) - d.Chk.NotEqual(int16(-1), ordinal) - return MakeType(pkgRef, ordinal) + case StructKind: + return r.readStructType(backRefs) + case BackRefKind: + i := r.readUint8() + d.Chk.True(i < uint8(len(backRefs))) + return backRefs[len(backRefs)-1-int(i)] } if IsPrimitiveKind(kind) { @@ -107,7 +112,7 @@ func (r *jsonArrayReader) readTypeAsTag() *Type { panic("unreachable") } -func (r *jsonArrayReader) readBlob(t *Type) Value { +func (r *jsonArrayReader) readBlob() Value { s := r.readString() decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(s)) b, err := ioutil.ReadAll(decoder) @@ -115,39 +120,39 @@ func (r *jsonArrayReader) readBlob(t *Type) Value { return newBlobLeaf(b) } -func (r *jsonArrayReader) readList(t *Type, pkg *Package) Value { +func (r *jsonArrayReader) readList(t *Type) 
Value { desc := t.Desc.(CompoundDesc) data := []Value{} elemType := desc.ElemTypes[0] for !r.atEnd() { - v := r.readValueWithoutTag(elemType, pkg) + v := r.readValueWithoutTag(elemType) data = append(data, v) } return newListLeaf(t, data...) } -func (r *jsonArrayReader) readSet(t *Type, pkg *Package) Value { +func (r *jsonArrayReader) readSet(t *Type) Value { desc := t.Desc.(CompoundDesc) data := setData{} elemType := desc.ElemTypes[0] for !r.atEnd() { - v := r.readValueWithoutTag(elemType, pkg) + v := r.readValueWithoutTag(elemType) data = append(data, v) } return newSetLeaf(t, data...) } -func (r *jsonArrayReader) readMap(t *Type, pkg *Package) Value { +func (r *jsonArrayReader) readMap(t *Type) Value { desc := t.Desc.(CompoundDesc) data := mapData{} keyType := desc.ElemTypes[0] valueType := desc.ElemTypes[1] for !r.atEnd() { - k := r.readValueWithoutTag(keyType, pkg) - v := r.readValueWithoutTag(valueType, pkg) + k := r.readValueWithoutTag(keyType) + v := r.readValueWithoutTag(valueType) data = append(data, mapEntry{k, v}) } @@ -169,7 +174,7 @@ func indexTypeForMetaSequence(t *Type) *Type { } } -func (r *jsonArrayReader) maybeReadMetaSequence(t *Type, pkg *Package) (Value, bool) { +func (r *jsonArrayReader) maybeReadMetaSequence(t *Type) (Value, bool) { if !r.read().(bool) { return nil, false } @@ -179,7 +184,7 @@ func (r *jsonArrayReader) maybeReadMetaSequence(t *Type, pkg *Package) (Value, b indexType := indexTypeForMetaSequence(t) for !r2.atEnd() { ref := NewTypedRef(MakeRefType(t), r2.readRef()) - v := r2.readValueWithoutTag(indexType, pkg) + v := r2.readValueWithoutTag(indexType) numLeaves := uint64(r2.readUint()) data = append(data, newMetaTuple(v, nil, ref, numLeaves)) } @@ -187,40 +192,24 @@ func (r *jsonArrayReader) maybeReadMetaSequence(t *Type, pkg *Package) (Value, b return newMetaSequenceFromData(data, t, r.vr), true } -func (r *jsonArrayReader) readPackage(t *Type, pkg *Package) Value { - r2 := newJSONArrayReader(r.readArray(), r.vr) - types := []*Type{} - for !r2.atEnd() { - types = append(types, r2.readTypeAsValue(pkg)) - } - - r3 := newJSONArrayReader(r.readArray(), r.vr) - deps := []ref.Ref{} - for !r3.atEnd() { - deps = append(deps, r3.readRef()) - } - - return NewPackage(types, deps) -} - func (r *jsonArrayReader) readRefValue(t *Type) Value { ref := r.readRef() return NewTypedRef(t, ref) } func (r *jsonArrayReader) readTopLevelValue() Value { - t := r.readTypeAsTag() - return r.readValueWithoutTag(t, nil) + t := r.readTypeAsTag(nil) + return r.readValueWithoutTag(t) } -func (r *jsonArrayReader) readValueWithoutTag(t *Type, pkg *Package) Value { +func (r *jsonArrayReader) readValueWithoutTag(t *Type) Value { switch t.Kind() { case BlobKind: - if ms, ok := r.maybeReadMetaSequence(t, pkg); ok { + if ms, ok := r.maybeReadMetaSequence(t); ok { return ms } - return r.readBlob(t) + return r.readBlob() case BoolKind: return Bool(r.read().(bool)) case NumberKind: @@ -229,137 +218,114 @@ func (r *jsonArrayReader) readValueWithoutTag(t *Type, pkg *Package) Value { return NewString(r.readString()) case ValueKind: // The value is always tagged - t := r.readTypeAsTag() - return r.readValueWithoutTag(t, pkg) + t := r.readTypeAsTag(nil) + return r.readValueWithoutTag(t) case ListKind: - if ms, ok := r.maybeReadMetaSequence(t, pkg); ok { + if ms, ok := r.maybeReadMetaSequence(t); ok { return ms } r2 := newJSONArrayReader(r.readArray(), r.vr) - return r2.readList(t, pkg) + return r2.readList(t) case MapKind: - if ms, ok := r.maybeReadMetaSequence(t, pkg); ok { + if ms, ok := 
r.maybeReadMetaSequence(t); ok { return ms } r2 := newJSONArrayReader(r.readArray(), r.vr) - return r2.readMap(t, pkg) - case PackageKind: - return r.readPackage(t, pkg) + return r2.readMap(t) case RefKind: return r.readRefValue(t) case SetKind: - if ms, ok := r.maybeReadMetaSequence(t, pkg); ok { + if ms, ok := r.maybeReadMetaSequence(t); ok { return ms } r2 := newJSONArrayReader(r.readArray(), r.vr) - return r2.readSet(t, pkg) + return r2.readSet(t) case StructKind: - panic("not allowed") + return r.readStruct(t) case TypeKind: - return r.readTypeKindToValue(t, pkg) - case UnresolvedKind: - return r.readUnresolvedKindToValue(t, pkg) + return r.readTypeKindToValue(t) + case BackRefKind: + panic("BackRefKind should have been replaced") } + panic("not reachable") } -func (r *jsonArrayReader) readTypeKindToValue(t *Type, pkg *Package) Value { +func (r *jsonArrayReader) readTypeKindToValue(t *Type) Value { d.Chk.IsType(PrimitiveDesc(0), t.Desc) - return r.readTypeAsValue(pkg) + return r.readTypeAsValue(nil) } -func (r *jsonArrayReader) readUnresolvedKindToValue(t *Type, pkg *Package) Value { - // When we have a struct referencing another struct in the same package the package ref is empty. In that case we use the package that is passed into this function. - d.Chk.True(t.IsUnresolved()) - pkgRef := t.PackageRef() - ordinal := t.Ordinal() - if !pkgRef.IsEmpty() { - pkg2 := LookupPackage(pkgRef) - if pkg2 != nil { - pkg = pkg2 - } else { - pkg = ReadPackage(pkgRef, r.vr) - } - } - - d.Chk.NotNil(pkg, "Woah, got a nil pkg. pkgRef: %s, ordinal: %d\n", pkgRef, ordinal) - - typeDef := pkg.types[ordinal] - - d.Chk.Equal(StructKind, typeDef.Kind()) - return r.readStruct(typeDef, t, pkg) -} - -func (r *jsonArrayReader) readTypeAsValue(pkg *Package) *Type { +func (r *jsonArrayReader) readTypeAsValue(backRefs []*Type) *Type { k := r.readKind() switch k { case ListKind, MapKind, RefKind, SetKind: r2 := newJSONArrayReader(r.readArray(), r.vr) elemTypes := []*Type{} for !r2.atEnd() { - t := r2.readTypeAsValue(pkg) + t := r2.readTypeAsValue(backRefs) elemTypes = append(elemTypes, t) } return makeCompoundType(k, elemTypes...) 
case StructKind: - name := r.readString() - - fields := []Field{} - choices := []Field{} - - fieldReader := newJSONArrayReader(r.readArray(), r.vr) - for !fieldReader.atEnd() { - fieldName := fieldReader.readString() - fieldType := fieldReader.readTypeAsValue(pkg) - optional := fieldReader.readBool() - fields = append(fields, Field{Name: fieldName, T: fieldType, Optional: optional}) - } - choiceReader := newJSONArrayReader(r.readArray(), r.vr) - for !choiceReader.atEnd() { - fieldName := choiceReader.readString() - fieldType := choiceReader.readTypeAsValue(pkg) - optional := choiceReader.readBool() - choices = append(choices, Field{Name: fieldName, T: fieldType, Optional: optional}) - } - return MakeStructType(name, fields, choices) - case UnresolvedKind: - pkgRef := r.readRef() - ordinal := int16(r.readInt()) - if ordinal == -1 { - namespace := r.readString() - name := r.readString() - d.Chk.True(pkgRef.IsEmpty(), "Unresolved Type may not have a package ref") - return MakeUnresolvedType(namespace, name) - } - return MakeType(pkgRef, ordinal) + return r.readStructType(backRefs) } d.Chk.True(IsPrimitiveKind(k)) return MakePrimitiveType(k) } -func (r *jsonArrayReader) readStruct(typeDef, typ *Type, pkg *Package) Value { - // We've read `[StructKind, sha1, name` at this point +func (r *jsonArrayReader) readStruct(t *Type) Value { + // We've read `[StructKind, name, fields, unions` at this point values := []Value{} - desc := typeDef.Desc.(StructDesc) + desc := t.Desc.(StructDesc) for _, f := range desc.Fields { if f.Optional { b := r.read().(bool) values = append(values, Bool(b)) if b { - values = append(values, r.readValueWithoutTag(f.T, pkg)) + values = append(values, r.readValueWithoutTag(f.T)) } } else { - values = append(values, r.readValueWithoutTag(f.T, pkg)) + values = append(values, r.readValueWithoutTag(f.T)) } } if len(desc.Union) > 0 { unionIndex := uint64(r.readUint()) - values = append(values, Number(unionIndex), r.readValueWithoutTag(desc.Union[unionIndex].T, pkg)) + values = append(values, Number(unionIndex), r.readValueWithoutTag(desc.Union[unionIndex].T)) } - return structBuilder(values, typ, typeDef) + return structBuilder(values, t) +} + +func (r *jsonArrayReader) readStructType(backRefs []*Type) *Type { + name := r.readString() + + fields := []Field{} + choices := []Field{} + st := MakeStructType(name, fields, choices) + backRefs = append(backRefs, st) + desc := st.Desc.(StructDesc) + + fieldReader := newJSONArrayReader(r.readArray(), r.vr) + for !fieldReader.atEnd() { + fieldName := fieldReader.readString() + fieldType := fieldReader.readTypeAsTag(backRefs) + optional := fieldReader.readBool() + fields = append(fields, Field{Name: fieldName, T: fieldType, Optional: optional}) + } + choiceReader := newJSONArrayReader(r.readArray(), r.vr) + for !choiceReader.atEnd() { + fieldName := choiceReader.readString() + fieldType := choiceReader.readTypeAsTag(backRefs) + optional := choiceReader.readBool() + choices = append(choices, Field{Name: fieldName, T: fieldType, Optional: optional}) + } + desc.Fields = fields + desc.Union = choices + st.Desc = desc + return st } diff --git a/types/decode_noms_value_test.go b/types/decode_noms_value_test.go index 3f174cffa6..91e4aaccc7 100644 --- a/types/decode_noms_value_test.go +++ b/types/decode_noms_value_test.go @@ -40,18 +40,13 @@ func TestReadTypeAsTag(t *testing.T) { test := func(expected *Type, s string, vs ...interface{}) { a := parseJSON(s, vs...) 
r := newJSONArrayReader(a, cs) - tr := r.readTypeAsTag() + tr := r.readTypeAsTag(nil) assert.True(t, expected.Equals(tr)) } test(BoolType, "[%d, true]", BoolKind) test(TypeType, "[%d, %d]", TypeKind, BoolKind) test(MakeListType(BoolType), "[%d, %d, true, false]", ListKind, BoolKind) - - pkgRef := ref.Parse("sha1-a9993e364706816aba3e25717850c26c9cd0d89d") - test(MakeType(pkgRef, 42), `[%d, "%s", "42"]`, UnresolvedKind, pkgRef.String()) - - test(TypeType, `[%d, %d, "%s", "12"]`, TypeKind, TypeKind, pkgRef.String()) } func TestReadPrimitives(t *testing.T) { @@ -239,13 +234,12 @@ func TestReadStruct(t *testing.T) { Field{"s", StringType, false}, Field{"b", BoolType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typ}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - a := parseJSON(`[%d, "%s", "0", "42", "hi", true]`, UnresolvedKind, pkgRef.String()) + a := parseJSON(`[%d, "A1", ["x", %d, false, "s", %d, false, "b", %d, false], [], "42", "hi", true]`, StructKind, NumberKind, StringKind, BoolKind) r := newJSONArrayReader(a, cs) v := r.readTopLevelValue().(Struct) + assert.True(v.Type().Equals(typ)) assert.True(v.Get("x").Equals(Number(42))) assert.True(v.Get("s").Equals(NewString("hi"))) assert.True(v.Get("b").Equals(Bool(true))) @@ -261,15 +255,14 @@ func TestReadStructUnion(t *testing.T) { Field{"b", BoolType, false}, Field{"s", StringType, false}, }) - pkg := NewPackage([]*Type{typ}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - a := parseJSON(`[%d, "%s", "0", "42", "1", "hi"]`, UnresolvedKind, pkgRef.String()) + a := parseJSON(`[%d, "A2", ["x", %d, false], ["b", %d, false, "s", %d, false], "42", "1", "hi"]`, StructKind, NumberKind, BoolKind, StringKind) r := newJSONArrayReader(a, cs) v := r.readTopLevelValue().(Struct) + assert.True(v.Type().Equals(typ)) assert.True(v.Get("x").Equals(Number(42))) - assert.Equal(uint64(Number(1)), uint64(v.UnionIndex())) + assert.Equal(uint32(1), v.UnionIndex()) assert.True(v.UnionValue().Equals(NewString("hi"))) x, ok := v.MaybeGet("x") @@ -291,13 +284,12 @@ func TestReadStructOptional(t *testing.T) { Field{"s", StringType, true}, Field{"b", BoolType, true}, }, []Field{}) - pkg := NewPackage([]*Type{typ}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - a := parseJSON(`[%d, "%s", "0", "42", false, true, false]`, UnresolvedKind, pkgRef.String()) + a := parseJSON(`[%d, "A3", ["x", %d, false, "s", %d, true, "b", %d, true], [], "42", false, true, false]`, StructKind, NumberKind, StringKind, BoolKind) r := newJSONArrayReader(a, cs) v := r.readTopLevelValue().(Struct) + assert.True(v.Type().Equals(typ)) assert.True(v.Get("x").Equals(Number(42))) _, ok := v.MaybeGet("s") assert.False(ok) @@ -322,14 +314,13 @@ func TestReadStructWithList(t *testing.T) { Field{"l", MakeListType(NumberType), false}, Field{"s", StringType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typ}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - a := parseJSON(`[%d, "%s", "0", true, false, ["0", "1", "2"], "hi"]`, UnresolvedKind, pkgRef.String()) + a := parseJSON(`[%d, "A4", ["b", %d, false, "l", %d, %d, false, "s", %d, false], [], true, false, ["0", "1", "2"], "hi"]`, StructKind, BoolKind, ListKind, NumberKind, StringKind) r := newJSONArrayReader(a, cs) l32Tr := MakeListType(NumberType) v := r.readTopLevelValue().(Struct) + assert.True(v.Type().Equals(typ)) assert.True(v.Get("b").Equals(Bool(true))) l := NewTypedList(l32Tr, Number(0), Number(1), Number(2)) assert.True(v.Get("l").Equals(l)) @@ -351,13 +342,12 @@ func TestReadStructWithValue(t *testing.T) { Field{"v", ValueType, 
false}, Field{"s", StringType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typ}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - a := parseJSON(`[%d, "%s", "0", true, %d, "42", "hi"]`, UnresolvedKind, pkgRef.String(), NumberKind) + a := parseJSON(`[%d, "A5", ["b", %d, false, "v", %d, false, "s", %d, false], [], true, %d, "42", "hi"]`, StructKind, BoolKind, ValueKind, StringKind, NumberKind) r := newJSONArrayReader(a, cs) v := r.readTopLevelValue().(Struct) + assert.True(v.Type().Equals(typ)) assert.True(v.Get("b").Equals(Bool(true))) assert.True(v.Get("v").Equals(Number(42))) assert.True(v.Get("s").Equals(NewString("hi"))) @@ -378,13 +368,12 @@ func TestReadValueStruct(t *testing.T) { Field{"s", StringType, false}, Field{"b", BoolType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typ}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - a := parseJSON(`[%d, %d, "%s", "0", "42", "hi", true]`, ValueKind, UnresolvedKind, pkgRef.String()) + a := parseJSON(`[%d, %d, "A1", ["x", %d, false, "s", %d, false, "b", %d, false], [], "42", "hi", true]`, ValueKind, StructKind, NumberKind, StringKind, BoolKind) r := newJSONArrayReader(a, cs) v := r.readTopLevelValue().(Struct) + assert.True(v.Type().Equals(typ)) assert.True(v.Get("x").Equals(Number(42))) assert.True(v.Get("s").Equals(NewString("hi"))) assert.True(v.Get("b").Equals(Bool(true))) @@ -425,17 +414,50 @@ func TestReadStructWithBlob(t *testing.T) { typ := MakeStructType("A5", []Field{ Field{"b", BlobType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typ}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - a := parseJSON(`[%d, "%s", "0", false, "AAE="]`, UnresolvedKind, pkgRef.String()) + a := parseJSON(`[%d, "A5", ["b", %d, false], [], false, "AAE="]`, StructKind, BlobKind) r := newJSONArrayReader(a, cs) v := r.readTopLevelValue().(Struct) - + assert.True(v.Type().Equals(typ)) blob := NewBlob(bytes.NewBuffer([]byte{0x00, 0x01})) assert.True(v.Get("b").Equals(blob)) } +func TestReadRecursiveStruct(t *testing.T) { + assert := assert.New(t) + cs := NewTestValueStore() + + // struct A { + // b: struct B { + // a: List + // b: List + // } + // } + + at := MakeStructType("A", []Field{ + Field{"b", nil, false}, + }, []Field{}) + bt := MakeStructType("B", []Field{ + Field{"a", MakeListType(at), false}, + Field{"b", nil, false}, + }, []Field{}) + at.Desc.(StructDesc).Fields[0].T = bt + bt.Desc.(StructDesc).Fields[1].T = MakeListType(bt) + + a := parseJSON(`[%d, "A", + ["b", %d, "B", [ + "a", %d, %d, 1, false, + "b", %d, %d, 0, false + ], [], false], [], + false, [], false, []]`, StructKind, StructKind, ListKind, BackRefKind, ListKind, BackRefKind) + + r := newJSONArrayReader(a, cs) + + v := r.readTopLevelValue().(Struct) + assert.True(v.Type().Equals(at)) + assert.True(v.Get("b").Type().Equals(bt)) +} + func TestReadTypeValue(t *testing.T) { assert := assert.New(t) cs := NewTestValueStore() @@ -465,47 +487,4 @@ func TestReadTypeValue(t *testing.T) { Field{"v", ValueType, false}, }), `[%d, %d, "S", [], ["x", %d, false, "v", %d, false]]`, TypeKind, StructKind, NumberKind, ValueKind) - - pkgRef := ref.Parse("sha1-0123456789abcdef0123456789abcdef01234567") - test(MakeType(pkgRef, 123), `[%d, %d, "%s", "123"]`, TypeKind, UnresolvedKind, pkgRef.String()) - - test(MakeStructType("S", []Field{ - Field{"e", MakeType(pkgRef, 123), false}, - Field{"x", NumberType, false}, - }, []Field{}), - `[%d, %d, "S", ["e", %d, "%s", "123", false, "x", %d, false], []]`, TypeKind, StructKind, UnresolvedKind, pkgRef.String(), NumberKind) - - 
test(MakeUnresolvedType("ns", "n"), `[%d, %d, "%s", "-1", "ns", "n"]`, TypeKind, UnresolvedKind, ref.Ref{}.String()) -} - -func TestReadPackage2(t *testing.T) { - cs := NewTestValueStore() - - rr := ref.Parse("sha1-a9993e364706816aba3e25717850c26c9cd0d89d") - setTref := MakeSetType(NumberType) - pkg := NewPackage([]*Type{setTref}, []ref.Ref{rr}) - - a := []interface{}{float64(PackageKind), []interface{}{float64(SetKind), []interface{}{float64(NumberKind)}}, []interface{}{rr.String()}} - r := newJSONArrayReader(a, cs) - v := r.readTopLevelValue().(Package) - assert.True(t, pkg.Equals(v)) -} - -func TestReadPackageThroughChunkSource(t *testing.T) { - assert := assert.New(t) - cs := NewTestValueStore() - - pkg := NewPackage([]*Type{ - MakeStructType("S", []Field{ - Field{"X", NumberType, false}, - }, []Field{}), - }, []ref.Ref{}) - // Don't register - pkgRef := cs.WriteValue(pkg).TargetRef() - - a := parseJSON(`[%d, "%s", "0", "42"]`, UnresolvedKind, pkgRef.String()) - r := newJSONArrayReader(a, cs) - v := r.readTopLevelValue().(Struct) - - assert.True(v.Get("X").Equals(Number(42))) } diff --git a/types/encode_human_readable.go b/types/encode_human_readable.go index cbb0e32a55..57fb5686dd 100644 --- a/types/encode_human_readable.go +++ b/types/encode_human_readable.go @@ -60,7 +60,8 @@ func (w *hrsWriter) Write(v Value) { case BlobKind: w.maybeWriteIndentation() blob := v.(Blob) - encoder := base64.NewEncoder(base64.RawStdEncoding, w.w) + // TODO: Use RawStdEncoding + encoder := base64.NewEncoder(base64.StdEncoding, w.w) _, err := io.Copy(encoder, blob.Reader()) d.Chk.NoError(err) encoder.Close() @@ -117,62 +118,52 @@ func (w *hrsWriter) Write(v Value) { w.write("}") case TypeKind: - w.writeTypeAsValue(v.(*Type)) + w.writeType(v.(*Type), nil) - case UnresolvedKind: - w.writeUnresolved(v, true) + case StructKind: + w.writeStruct(v.(Struct), true) - case PackageKind: - panic("not implemented") - - case ValueKind, StructKind: + default: + case ValueKind, BackRefKind: panic("unreachable") } } -func (w *hrsWriter) writeUnresolved(v Value, printStructName bool) { +func (w *hrsWriter) writeStruct(v Struct, printStructName bool) { t := v.Type() - pkg := LookupPackage(t.PackageRef()) - typeDef := pkg.Types()[t.Ordinal()] - switch typeDef.Kind() { - case StructKind: - v := v.(Struct) - desc := typeDef.Desc.(StructDesc) - if printStructName { - w.write(typeDef.Name()) - w.write(" ") - } - w.write("{") - w.indent() - writeField := func(f Field, v Value, i int) { - if i == 0 { - w.newLine() - } - w.write(f.Name) - w.write(": ") - w.Write(v) - w.write(",") + desc := t.Desc.(StructDesc) + if printStructName { + w.write(t.Name()) + w.write(" ") + } + w.write("{") + w.indent() + + writeField := func(f Field, v Value, i int) { + if i == 0 { w.newLine() } - - for i, f := range desc.Fields { - if fv, present := v.MaybeGet(f.Name); present { - writeField(f, fv, i) - } - } - if len(desc.Union) > 0 { - f := desc.Union[v.UnionIndex()] - fv := v.UnionValue() - writeField(f, fv, 0) - } - - w.outdent() - w.write("}") - - default: - panic("unreachable") + w.write(f.Name) + w.write(": ") + w.Write(v) + w.write(",") + w.newLine() } + + for i, f := range desc.Fields { + if fv, present := v.MaybeGet(f.Name); present { + writeField(f, fv, i) + } + } + if len(desc.Union) > 0 { + f := desc.Union[v.UnionIndex()] + fv := v.UnionValue() + writeField(f, fv, 0) + } + + w.outdent() + w.write("}") } func (w *hrsWriter) WriteTagged(v Value) { @@ -180,98 +171,100 @@ func (w *hrsWriter) WriteTagged(v Value) { switch t.Kind() { case 
BoolKind, StringKind: w.Write(v) - case NumberKind, BlobKind, ListKind, MapKind, RefKind, SetKind, TypeKind: - w.writeTypeAsValue(t) + case NumberKind, BlobKind, ListKind, MapKind, RefKind, SetKind, TypeKind, BackRefKind: + // TODO: Numbers have unique syntax now... + w.writeType(t, nil) w.write("(") w.Write(v) w.write(")") - - case UnresolvedKind: - w.writeTypeAsValue(t) + case StructKind: + w.writeType(t, nil) w.write("(") - w.writeUnresolved(v, false) + w.writeStruct(v.(Struct), false) w.write(")") - case PackageKind: - panic("not implemented") - case ValueKind, StructKind: + case ValueKind: default: panic("unreachable") } } -func (w *hrsWriter) writeTypeAsValue(t *Type) { +func (w *hrsWriter) writeType(t *Type, backRefs []*Type) { switch t.Kind() { - case BlobKind, BoolKind, NumberKind, StringKind, TypeKind, ValueKind, PackageKind: + case BlobKind, BoolKind, NumberKind, StringKind, TypeKind, ValueKind: w.write(KindToString[t.Kind()]) case ListKind, RefKind, SetKind: w.write(KindToString[t.Kind()]) w.write("<") - w.writeTypeAsValue(t.Desc.(CompoundDesc).ElemTypes[0]) + w.writeType(t.Desc.(CompoundDesc).ElemTypes[0], backRefs) w.write(">") case MapKind: w.write(KindToString[t.Kind()]) w.write("<") - w.writeTypeAsValue(t.Desc.(CompoundDesc).ElemTypes[0]) + w.writeType(t.Desc.(CompoundDesc).ElemTypes[0], backRefs) w.write(", ") - w.writeTypeAsValue(t.Desc.(CompoundDesc).ElemTypes[1]) + w.writeType(t.Desc.(CompoundDesc).ElemTypes[1], backRefs) w.write(">") case StructKind: - w.write("struct ") - w.write(t.Name()) - w.write(" {") - w.indent() - desc := t.Desc.(StructDesc) - writeField := func(f Field, i int) { - if i == 0 { - w.newLine() - } - w.write(f.Name) - w.write(": ") - if f.Optional { - w.write("optional ") - } - w.writeTypeAsValue(f.T) - w.newLine() - } - for i, f := range desc.Fields { - writeField(f, i) - } - if len(desc.Union) > 0 { - w.write("union {") - w.indent() - for i, f := range desc.Union { - writeField(f, i) - } - w.outdent() - w.write("}") - w.newLine() - } - w.outdent() - w.write("}") - case UnresolvedKind: - w.writeUnresolvedTypeRef(t, true) - } -} - -func (w *hrsWriter) writeUnresolvedTypeRef(t *Type, printStructName bool) { - if !t.HasPackageRef() { - if t.Namespace() != "" { - w.write(t.Namespace()) - w.write(".") - } - w.write(t.Name()) - return - } - pkg := LookupPackage(t.PackageRef()) - typeDef := pkg.Types()[t.Ordinal()] - switch typeDef.Kind() { - case StructKind: - w.write("Struct") + w.writeStructType(t, backRefs) + case BackRefKind: + w.writeBackRef(uint8(t.Desc.(BackRefDesc))) default: panic("unreachable") } - fmt.Fprintf(w.w, "<%s, %s, %d>", typeDef.Name(), t.PackageRef(), t.Ordinal()) +} + +func (w *hrsWriter) writeStructType(t *Type, backRefs []*Type) { + idx := indexOfType(t, backRefs) + if idx != -1 { + w.writeBackRef(uint8(len(backRefs) - 1 - idx)) + return + } + backRefs = append(backRefs, t) + + w.write("struct ") + w.write(t.Name()) + w.write(" {") + w.indent() + desc := t.Desc.(StructDesc) + i := 0 + writeField := func(f Field) { + if i == 0 { + w.newLine() + } + w.write(f.Name) + w.write(": ") + if f.Optional { + w.write("optional ") + } + w.writeType(f.T, backRefs) + w.newLine() + i++ + } + for _, f := range desc.Fields { + writeField(f) + } + if len(desc.Union) > 0 { + if i == 0 { + w.newLine() + i++ + } + w.write("union {") + w.indent() + i = 0 + for _, f := range desc.Union { + writeField(f) + } + w.outdent() + w.write("}") + w.newLine() + } + w.outdent() + w.write("}") +} + +func (w *hrsWriter) writeBackRef(i uint8) { + fmt.Fprintf(w.w, 
"BackRef<%d>", i) } func WriteHRS(v Value) string { @@ -280,10 +273,3 @@ func WriteHRS(v Value) string { w.Write(v) return buf.String() } - -func WriteTaggedHRS(v Value) string { - var buf bytes.Buffer - w := &hrsWriter{w: &buf} - w.WriteTagged(v) - return buf.String() -} diff --git a/types/encode_human_readable_test.go b/types/encode_human_readable_test.go index 236db0258d..9b18c50305 100644 --- a/types/encode_human_readable_test.go +++ b/types/encode_human_readable_test.go @@ -4,7 +4,6 @@ import ( "bytes" "testing" - "github.com/attic-labs/noms/ref" "github.com/stretchr/testify/assert" ) @@ -125,71 +124,56 @@ func TestWriteHumanReadableNested(t *testing.T) { } func TestWriteHumanReadableStruct(t *testing.T) { - pkg := NewPackage([]*Type{ - MakeStructType("S1", []Field{ - Field{Name: "x", T: NumberType, Optional: false}, - Field{Name: "y", T: NumberType, Optional: true}, - }, []Field{}), - }, []ref.Ref{}) - typeDef := pkg.Types()[0] - RegisterPackage(&pkg) - typ := MakeType(pkg.Ref(), 0) + typ := MakeStructType("S1", []Field{ + Field{Name: "x", T: NumberType, Optional: false}, + Field{Name: "y", T: NumberType, Optional: true}, + }, []Field{}) - str := NewStruct(typ, typeDef, map[string]Value{ + str := NewStruct(typ, map[string]Value{ "x": Number(1), }) assertWriteHRSEqual(t, "S1 {\n x: 1,\n}", str) - assertWriteTaggedHRSEqual(t, "Struct({\n x: 1,\n})", str) + assertWriteTaggedHRSEqual(t, "struct S1 {\n x: Number\n y: optional Number\n}({\n x: 1,\n})", str) - str2 := NewStruct(typ, typeDef, map[string]Value{ + str2 := NewStruct(typ, map[string]Value{ "x": Number(2), "y": Number(3), }) assertWriteHRSEqual(t, "S1 {\n x: 2,\n y: 3,\n}", str2) - assertWriteTaggedHRSEqual(t, "Struct({\n x: 2,\n y: 3,\n})", str2) + assertWriteTaggedHRSEqual(t, "struct S1 {\n x: Number\n y: optional Number\n}({\n x: 2,\n y: 3,\n})", str2) } func TestWriteHumanReadableStructWithUnion(t *testing.T) { - pkg := NewPackage([]*Type{ - MakeStructType("S2", []Field{}, []Field{ - Field{Name: "x", T: NumberType, Optional: false}, - Field{Name: "y", T: NumberType, Optional: false}, - }), - }, []ref.Ref{}) - typeDef := pkg.Types()[0] - RegisterPackage(&pkg) - typ := MakeType(pkg.Ref(), 0) + typ := MakeStructType("S2", []Field{}, []Field{ + Field{Name: "x", T: NumberType, Optional: false}, + Field{Name: "y", T: NumberType, Optional: false}, + }) - str := NewStruct(typ, typeDef, map[string]Value{ + str := NewStruct(typ, map[string]Value{ "x": Number(1), }) assertWriteHRSEqual(t, "S2 {\n x: 1,\n}", str) - assertWriteTaggedHRSEqual(t, "Struct({\n x: 1,\n})", str) + assertWriteTaggedHRSEqual(t, "struct S2 {\n union {\n x: Number\n y: Number\n }\n}({\n x: 1,\n})", str) - str2 := NewStruct(typ, typeDef, map[string]Value{ + str2 := NewStruct(typ, map[string]Value{ "y": Number(2), }) assertWriteHRSEqual(t, "S2 {\n y: 2,\n}", str2) - assertWriteTaggedHRSEqual(t, "Struct({\n y: 2,\n})", str2) + assertWriteTaggedHRSEqual(t, "struct S2 {\n union {\n x: Number\n y: Number\n }\n}({\n y: 2,\n})", str2) } func TestWriteHumanReadableListOfStruct(t *testing.T) { - pkg := NewPackage([]*Type{ - MakeStructType("S3", []Field{}, []Field{ - Field{Name: "x", T: NumberType, Optional: false}, - }), - }, []ref.Ref{}) - typeDef := pkg.Types()[0] - RegisterPackage(&pkg) - typ := MakeType(pkg.Ref(), 0) + typ := MakeStructType("S3", []Field{ + Field{Name: "x", T: NumberType, Optional: false}, + }, []Field{}) - str1 := NewStruct(typ, typeDef, map[string]Value{ + str1 := NewStruct(typ, map[string]Value{ "x": Number(1), }) - str2 := NewStruct(typ, typeDef, 
map[string]Value{ + str2 := NewStruct(typ, map[string]Value{ "x": Number(2), }) - str3 := NewStruct(typ, typeDef, map[string]Value{ + str3 := NewStruct(typ, map[string]Value{ "x": Number(3), }) lt := MakeListType(typ) @@ -205,7 +189,9 @@ func TestWriteHumanReadableListOfStruct(t *testing.T) { x: 3, }, ]`, l) - assertWriteTaggedHRSEqual(t, `List>([ + assertWriteTaggedHRSEqual(t, `List([ S3 { x: 1, }, @@ -223,12 +209,12 @@ func TestWriteHumanReadableBlob(t *testing.T) { assertWriteTaggedHRSEqual(t, "Blob()", NewEmptyBlob()) b1 := NewBlob(bytes.NewBuffer([]byte{0x01})) - assertWriteHRSEqual(t, "AQ", b1) - assertWriteTaggedHRSEqual(t, "Blob(AQ)", b1) + assertWriteHRSEqual(t, "AQ==", b1) + assertWriteTaggedHRSEqual(t, "Blob(AQ==)", b1) b2 := NewBlob(bytes.NewBuffer([]byte{0x01, 0x02})) - assertWriteHRSEqual(t, "AQI", b2) - assertWriteTaggedHRSEqual(t, "Blob(AQI)", b2) + assertWriteHRSEqual(t, "AQI=", b2) + assertWriteTaggedHRSEqual(t, "Blob(AQI=)", b2) b3 := NewBlob(bytes.NewBuffer([]byte{0x01, 0x02, 0x03})) assertWriteHRSEqual(t, "AQID", b3) @@ -239,8 +225,8 @@ func TestWriteHumanReadableBlob(t *testing.T) { bs[i] = byte(i) } b4 := NewBlob(bytes.NewBuffer(bs)) - assertWriteHRSEqual(t, "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w", b4) - assertWriteTaggedHRSEqual(t, "Blob(AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w)", b4) + assertWriteHRSEqual(t, "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==", b4) + assertWriteTaggedHRSEqual(t, "Blob(AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==)", b4) } func TestWriteHumanReadableListOfBlob(t *testing.T) { @@ -248,54 +234,20 @@ func TestWriteHumanReadableListOfBlob(t *testing.T) { b1 := NewBlob(bytes.NewBuffer([]byte{0x01})) b2 := NewBlob(bytes.NewBuffer([]byte{0x02})) l := NewTypedList(lt, b1, NewEmptyBlob(), b2) - assertWriteHRSEqual(t, "[\n AQ,\n ,\n Ag,\n]", l) - assertWriteTaggedHRSEqual(t, "List([\n AQ,\n ,\n Ag,\n])", l) + assertWriteHRSEqual(t, "[\n AQ==,\n ,\n Ag==,\n]", l) + assertWriteTaggedHRSEqual(t, "List([\n AQ==,\n ,\n Ag==,\n])", l) } func TestWriteHumanReadableType(t *testing.T) { assertWriteHRSEqual(t, "Bool", BoolType) assertWriteHRSEqual(t, "Blob", BlobType) assertWriteHRSEqual(t, "String", StringType) - assertWriteHRSEqual(t, "Number", NumberType) assertWriteHRSEqual(t, "List", MakeListType(NumberType)) assertWriteHRSEqual(t, "Set", MakeSetType(NumberType)) 
assertWriteHRSEqual(t, "Ref", MakeRefType(NumberType)) assertWriteHRSEqual(t, "Map", MakeMapType(NumberType, StringType)) - - pkg := NewPackage([]*Type{ - MakeStructType("Str", []Field{ - Field{Name: "c", T: MakeType(ref.Ref{}, 0), Optional: false}, - Field{Name: "o", T: StringType, Optional: true}, - }, []Field{ - Field{Name: "x", T: MakeType(ref.Ref{}, 0), Optional: false}, - Field{Name: "y", T: BoolType, Optional: false}, - }), - }, []ref.Ref{}) - RegisterPackage(&pkg) - st := MakeType(pkg.Ref(), 0) - - assertWriteHRSEqual(t, "Struct", st) - assertWriteTaggedHRSEqual(t, "Type(Struct)", st) - - sTypeDef := pkg.Types()[0] - assertWriteHRSEqual(t, `struct Str { - c: Struct - o: optional String - union { - x: Struct - y: Bool - } -}`, sTypeDef) - assertWriteTaggedHRSEqual(t, `Type(struct Str { - c: Struct - o: optional String - union { - x: Struct - y: Bool - } -})`, sTypeDef) } func TestWriteHumanReadableTaggedPrimitiveValues(t *testing.T) { @@ -339,3 +291,63 @@ func TestWriteHumanReadableTaggedType(t *testing.T) { assertWriteTaggedHRSEqual(t, "Type(Map)", MakeMapType(NumberType, StringType)) } + +func TestRecursiveStruct(t *testing.T) { + // struct A { + // b: A + // c: List + // d: struct D { + // e: D + // f: A + // } + // } + + a := MakeStructType("A", []Field{ + Field{Name: "b", T: nil, Optional: false}, + Field{Name: "c", T: nil, Optional: false}, + Field{Name: "d", T: nil, Optional: false}, + }, []Field{}) + d := MakeStructType("D", []Field{ + Field{Name: "e", T: nil, Optional: false}, + Field{Name: "f", T: a, Optional: false}, + }, []Field{}) + a.Desc.(StructDesc).Fields[0].T = a + a.Desc.(StructDesc).Fields[1].T = MakeListType(a) + a.Desc.(StructDesc).Fields[2].T = d + d.Desc.(StructDesc).Fields[0].T = d + d.Desc.(StructDesc).Fields[1].T = a + + assertWriteHRSEqual(t, `struct A { + b: BackRef<0> + c: List> + d: struct D { + e: BackRef<0> + f: BackRef<1> + } +}`, a) + assertWriteTaggedHRSEqual(t, `Type(struct A { + b: BackRef<0> + c: List> + d: struct D { + e: BackRef<0> + f: BackRef<1> + } +})`, a) + + assertWriteHRSEqual(t, `struct D { + e: BackRef<0> + f: struct A { + b: BackRef<0> + c: List> + d: BackRef<1> + } +}`, d) + assertWriteTaggedHRSEqual(t, `Type(struct D { + e: BackRef<0> + f: struct A { + b: BackRef<0> + c: List> + d: BackRef<1> + } +})`, d) +} diff --git a/types/encode_noms_value.go b/types/encode_noms_value.go index 128cd3c022..132ca4936f 100644 --- a/types/encode_noms_value.go +++ b/types/encode_noms_value.go @@ -54,6 +54,10 @@ func (w *jsonArrayWriter) writeUint(v uint64) { w.write(strconv.FormatUint(v, 10)) } +func (w *jsonArrayWriter) writeUint8(v uint8) { + w.write(v) +} + func (w *jsonArrayWriter) toArray() []interface{} { return w.a } @@ -62,36 +66,28 @@ func (w *jsonArrayWriter) writeRef(r ref.Ref) { w.write(r.String()) } -func (w *jsonArrayWriter) writeTypeAsTag(t *Type) { +func (w *jsonArrayWriter) writeTypeAsTag(t *Type, backRefs []*Type) { k := t.Kind() - w.write(k) switch k { case StructKind: - panic("unreachable") + w.writeStructType(t, backRefs) case ListKind, MapKind, RefKind, SetKind: + w.write(k) for _, elemType := range t.Desc.(CompoundDesc).ElemTypes { - w.writeTypeAsTag(elemType) - } - case UnresolvedKind: - pkgRef := t.PackageRef() - d.Chk.NotEqual(ref.Ref{}, pkgRef) - w.writeRef(pkgRef) - w.writeInt(int64(t.Ordinal())) - - pkg := LookupPackage(pkgRef) - if pkg != nil && w.vw != nil { - w.vw.WriteValue(*pkg) + w.writeTypeAsTag(elemType, backRefs) } + default: + w.write(k) } } func (w *jsonArrayWriter) writeTopLevelValue(v Value) { tr := 
v.Type() - w.writeTypeAsTag(tr) - w.writeValue(v, tr, nil) + w.writeTypeAsTag(tr, nil) + w.writeValue(v, tr) } -func (w *jsonArrayWriter) maybeWriteMetaSequence(v Value, tr *Type, pkg *Package) bool { +func (w *jsonArrayWriter) maybeWriteMetaSequence(v Value, tr *Type) bool { ms, ok := v.(metaSequence) if !ok { w.write(false) // not a meta sequence @@ -107,17 +103,17 @@ func (w *jsonArrayWriter) maybeWriteMetaSequence(v Value, tr *Type, pkg *Package w.vw.WriteValue(tuple.child) } w2.writeRef(tuple.ChildRef().TargetRef()) - w2.writeValue(tuple.value, indexType, pkg) + w2.writeValue(tuple.value, indexType) w2.writeUint(tuple.numLeaves) } w.write(w2.toArray()) return true } -func (w *jsonArrayWriter) writeValue(v Value, tr *Type, pkg *Package) { +func (w *jsonArrayWriter) writeValue(v Value, tr *Type) { switch tr.Kind() { case BlobKind: - if w.maybeWriteMetaSequence(v, tr, pkg) { + if w.maybeWriteMetaSequence(v, tr) { return } w.writeBlob(v.(Blob)) @@ -126,142 +122,116 @@ func (w *jsonArrayWriter) writeValue(v Value, tr *Type, pkg *Package) { case NumberKind: w.writeFloat(float64(v.(Number))) case ListKind: - if w.maybeWriteMetaSequence(v, tr, pkg) { + if w.maybeWriteMetaSequence(v, tr) { return } w2 := newJSONArrayWriter(w.vw) elemType := tr.Desc.(CompoundDesc).ElemTypes[0] v.(List).IterAll(func(v Value, i uint64) { - w2.writeValue(v, elemType, pkg) + w2.writeValue(v, elemType) }) w.write(w2.toArray()) case MapKind: - if w.maybeWriteMetaSequence(v, tr, pkg) { + if w.maybeWriteMetaSequence(v, tr) { return } w2 := newJSONArrayWriter(w.vw) elemTypes := tr.Desc.(CompoundDesc).ElemTypes v.(Map).IterAll(func(k, v Value) { - w2.writeValue(k, elemTypes[0], pkg) - w2.writeValue(v, elemTypes[1], pkg) + w2.writeValue(k, elemTypes[0]) + w2.writeValue(v, elemTypes[1]) }) w.write(w2.toArray()) - case PackageKind: - ptr := TypeType - w2 := newJSONArrayWriter(w.vw) - p := v.(Package) - for _, t := range p.types { - w2.writeValue(t, ptr, &p) - } - w.write(w2.toArray()) - w3 := newJSONArrayWriter(w.vw) - for _, r := range v.(Package).dependencies { - w3.writeRef(r) - } - w.write(w3.toArray()) case RefKind: w.writeRef(v.(Ref).TargetRef()) case SetKind: - if w.maybeWriteMetaSequence(v, tr, pkg) { + if w.maybeWriteMetaSequence(v, tr) { return } w2 := newJSONArrayWriter(w.vw) elemType := tr.Desc.(CompoundDesc).ElemTypes[0] v.(Set).IterAll(func(v Value) { - w2.writeValue(v, elemType, pkg) + w2.writeValue(v, elemType) }) w.write(w2.toArray()) case StringKind: w.write(v.(String).String()) case TypeKind: - w.writeTypeKindValue(v, tr, pkg) - case UnresolvedKind: - if tr.HasPackageRef() { - pkg = LookupPackage(tr.PackageRef()) - } - w.writeUnresolvedKindValue(v, tr, pkg) + vt := v.(*Type) + w.writeTypeAsValue(vt, nil) + case StructKind: + w.writeStruct(v, tr) case ValueKind: - w.writeTypeAsTag(v.Type()) - w.writeValue(v, v.Type(), pkg) + vt := v.Type() + w.writeTypeAsTag(vt, nil) + w.writeValue(v, v.Type()) + case BackRefKind: + w.writeUint8(uint8(v.(*Type).Desc.(BackRefDesc))) default: d.Chk.Fail("Unknown NomsKind") } } -func (w *jsonArrayWriter) writeTypeAsValue(t *Type, pkg *Package) { +func (w *jsonArrayWriter) writeTypeAsValue(t *Type, backRefs []*Type) { k := t.Kind() - w.write(k) switch k { case ListKind, MapKind, RefKind, SetKind: + w.write(k) w2 := newJSONArrayWriter(w.vw) for _, elemType := range t.Desc.(CompoundDesc).ElemTypes { - w2.writeTypeAsValue(elemType, pkg) + w2.writeTypeAsValue(elemType, backRefs) } w.write(w2.toArray()) case StructKind: - w.write(t.Name()) - fieldWriter := newJSONArrayWriter(w.vw) 
- for _, field := range t.Desc.(StructDesc).Fields { - fieldWriter.write(field.Name) - fieldWriter.writeTypeAsValue(field.T, pkg) - fieldWriter.write(field.Optional) - } - w.write(fieldWriter.toArray()) - choiceWriter := newJSONArrayWriter(w.vw) - for _, choice := range t.Desc.(StructDesc).Union { - choiceWriter.write(choice.Name) - choiceWriter.writeTypeAsValue(choice.T, pkg) - choiceWriter.write(choice.Optional) - } - w.write(choiceWriter.toArray()) - case UnresolvedKind: - pkgRef := t.PackageRef() - isCurrentPackage := pkg != nil && pkg.ref != nil && *pkg.ref == pkgRef - if isCurrentPackage { - w.writeRef(ref.Ref{}) - } else { - w.writeRef(pkgRef) - } - // Don't use Ordinal() here since we might need to serialize a Type that hasn't gotten a valid ordinal yet. - ordinal := t.Desc.(UnresolvedDesc).ordinal - w.writeInt(int64(ordinal)) - if ordinal == -1 { - w.write(t.Namespace()) - w.write(t.Name()) - } - - if !isCurrentPackage { - pkg := LookupPackage(pkgRef) - if pkg != nil && w.vw != nil { - w.vw.WriteValue(*pkg) - } - } - + w.writeStructType(t, backRefs) default: + w.write(k) d.Chk.True(IsPrimitiveKind(k), "Kind: %v Desc: %s\n", t.Kind(), t.Describe()) } } -// writeTypeKindValue writes either a struct or a Type value -func (w *jsonArrayWriter) writeTypeKindValue(v Value, tr *Type, pkg *Package) { - _, ok := v.(*Type) - d.Chk.True(ok) - w.writeTypeAsValue(v.(*Type), pkg) +func indexOfType(t *Type, ts []*Type) int { + for i, tt := range ts { + if t == tt { + return i + } + } + return -1 } -// writeUnresolvedKindValue writes a struct. -func (w *jsonArrayWriter) writeUnresolvedKindValue(v Value, tr *Type, pkg *Package) { - d.Chk.NotNil(pkg) - typeDef := pkg.types[tr.Ordinal()] - switch typeDef.Kind() { - default: - d.Chk.Fail("An Unresolved Type can only reference a StructKind.", "Actually referenced: %+v", typeDef) - case StructKind: - w.writeStruct(v, tr, typeDef, pkg) +func (w *jsonArrayWriter) writeBackRef(i int) { + w.write(BackRefKind) + w.write(uint8(i)) +} + +func (w *jsonArrayWriter) writeStructType(t *Type, backRefs []*Type) { + // The runtime representation of struct types can contain cycles. These cycles are broken when encoding and decoding using special "back ref" placeholders. 
+ i := indexOfType(t, backRefs) + if i != -1 { + w.writeBackRef(len(backRefs) - i - 1) + return } + backRefs = append(backRefs, t) + + w.write(StructKind) + w.write(t.Name()) + fieldWriter := newJSONArrayWriter(w.vw) + for _, field := range t.Desc.(StructDesc).Fields { + fieldWriter.write(field.Name) + fieldWriter.writeTypeAsTag(field.T, backRefs) + fieldWriter.write(field.Optional) + } + w.write(fieldWriter.toArray()) + choiceWriter := newJSONArrayWriter(w.vw) + for _, choice := range t.Desc.(StructDesc).Union { + choiceWriter.write(choice.Name) + choiceWriter.writeTypeAsTag(choice.T, backRefs) + choiceWriter.write(choice.Optional) + } + w.write(choiceWriter.toArray()) } func (w *jsonArrayWriter) writeBlob(b Blob) { @@ -274,10 +244,10 @@ func (w *jsonArrayWriter) writeBlob(b Blob) { w.write(buf.String()) } -func (w *jsonArrayWriter) writeStruct(v Value, typ, typeDef *Type, pkg *Package) { +func (w *jsonArrayWriter) writeStruct(v Value, t *Type) { i := 0 - values := structReader(v.(Struct), typ, typeDef) - desc := typeDef.Desc.(StructDesc) + values := structReader(v.(Struct), t) + desc := t.Desc.(StructDesc) for _, f := range desc.Fields { if f.Optional { @@ -285,11 +255,11 @@ func (w *jsonArrayWriter) writeStruct(v Value, typ, typeDef *Type, pkg *Package) i++ w.write(ok) if ok { - w.writeValue(values[i], f.T, pkg) + w.writeValue(values[i], f.T) i++ } } else { - w.writeValue(values[i], f.T, pkg) + w.writeValue(values[i], f.T) i++ } } @@ -297,7 +267,7 @@ func (w *jsonArrayWriter) writeStruct(v Value, typ, typeDef *Type, pkg *Package) unionIndex := uint64(values[i].(Number)) i++ w.writeUint(unionIndex) - w.writeValue(values[i], desc.Union[unionIndex].T, pkg) + w.writeValue(values[i], desc.Union[unionIndex].T) i++ } } diff --git a/types/encode_noms_value_test.go b/types/encode_noms_value_test.go index 116629cddb..53eec11f05 100644 --- a/types/encode_noms_value_test.go +++ b/types/encode_noms_value_test.go @@ -135,138 +135,126 @@ func TestWriteCompoundBlob(t *testing.T) { func TestWriteEmptyStruct(t *testing.T) { assert := assert.New(t) - typeDef := MakeStructType("S", []Field{}, []Field{}) - pkg := NewPackage([]*Type{typeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - typ := MakeType(pkgRef, 0) - v := NewStruct(typ, typeDef, nil) + typ := MakeStructType("S", []Field{}, []Field{}) + v := NewStruct(typ, nil) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0"}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{}, []interface{}{}}, w.toArray()) } func TestWriteStruct(t *testing.T) { assert := assert.New(t) - typeDef := MakeStructType("S", []Field{ + typ := MakeStructType("S", []Field{ Field{"x", NumberType, false}, Field{"b", BoolType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - typ := MakeType(pkgRef, 0) - v := NewStruct(typ, typeDef, structData{"x": Number(42), "b": Bool(true)}) + v := NewStruct(typ, structData{"x": Number(42), "b": Bool(true)}) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", "42", true}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"x", NumberKind, false, "b", BoolKind, false}, []interface{}{}, "42", true}, w.toArray()) } func TestWriteStructOptionalField(t *testing.T) { assert := assert.New(t) - typeDef := MakeStructType("S", []Field{ + typ := MakeStructType("S", 
[]Field{ Field{"x", NumberType, true}, Field{"b", BoolType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - typ := MakeType(pkgRef, 0) - v := NewStruct(typ, typeDef, structData{"x": Number(42), "b": Bool(true)}) + + v := NewStruct(typ, structData{"x": Number(42), "b": Bool(true)}) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", true, "42", true}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"x", NumberKind, true, "b", BoolKind, false}, []interface{}{}, true, "42", true}, w.toArray()) - v = NewStruct(typ, typeDef, structData{"b": Bool(true)}) + v = NewStruct(typ, structData{"b": Bool(true)}) w = newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", false, true}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"x", NumberKind, true, "b", BoolKind, false}, []interface{}{}, false, true}, w.toArray()) } func TestWriteStructWithUnion(t *testing.T) { assert := assert.New(t) - typeDef := MakeStructType("S", []Field{ + typ := MakeStructType("S", []Field{ Field{"x", NumberType, false}, }, []Field{ Field{"b", BoolType, false}, Field{"s", StringType, false}, }) - pkg := NewPackage([]*Type{typeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - typ := MakeType(pkgRef, 0) - v := NewStruct(typ, typeDef, structData{"x": Number(42), "s": NewString("hi")}) + v := NewStruct(typ, structData{"x": Number(42), "s": NewString("hi")}) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", "42", "1", "hi"}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"x", NumberKind, false}, []interface{}{"b", BoolKind, false, "s", StringKind, false}, "42", "1", "hi"}, w.toArray()) - v = NewStruct(typ, typeDef, structData{"x": Number(42), "b": Bool(true)}) + v = NewStruct(typ, structData{"x": Number(42), "b": Bool(true)}) w = newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", "42", "0", true}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"x", NumberKind, false}, []interface{}{"b", BoolKind, false, "s", StringKind, false}, "42", "0", true}, w.toArray()) } func TestWriteStructWithList(t *testing.T) { assert := assert.New(t) - typeDef := MakeStructType("S", []Field{ + typ := MakeStructType("S", []Field{ Field{"l", MakeListType(StringType), false}, }, []Field{}) - pkg := NewPackage([]*Type{typeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - typ := MakeType(pkgRef, 0) - v := NewStruct(typ, typeDef, structData{"l": NewList(NewString("a"), NewString("b"))}) + v := NewStruct(typ, structData{"l": NewList(NewString("a"), NewString("b"))}) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", false, []interface{}{"a", "b"}}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"l", ListKind, StringKind, false}, []interface{}{}, false, []interface{}{"a", "b"}}, w.toArray()) - v = NewStruct(typ, typeDef, structData{"l": NewList()}) + v = NewStruct(typ, structData{"l": NewList()}) w = newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - 
assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", false, []interface{}{}}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"l", ListKind, StringKind, false}, []interface{}{}, false, []interface{}{}}, w.toArray()) } func TestWriteStructWithStruct(t *testing.T) { assert := assert.New(t) - s2TypeDef := MakeStructType("S2", []Field{ + // struct S2 { + // x: Number + // } + // struct S { + // s: S2 + // } + + s2Type := MakeStructType("S2", []Field{ Field{"x", NumberType, false}, }, []Field{}) - sTypeDef := MakeStructType("S", []Field{ - Field{"s", MakeType(ref.Ref{}, 0), false}, + sType := MakeStructType("S", []Field{ + Field{"s", MakeStructType("S2", []Field{ + Field{"x", NumberType, false}, + }, []Field{}), false}, }, []Field{}) - pkg := NewPackage([]*Type{s2TypeDef, sTypeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - s2Type := MakeType(pkgRef, 0) - sType := MakeType(pkgRef, 1) - v := NewStruct(sType, sTypeDef, structData{"s": NewStruct(s2Type, s2TypeDef, structData{"x": Number(42)})}) + v := NewStruct(sType, structData{"s": NewStruct(s2Type, structData{"x": Number(42)})}) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "1", "42"}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"s", StructKind, "S2", []interface{}{"x", NumberKind, false}, []interface{}{}, false}, []interface{}{}, "42"}, w.toArray()) } func TestWriteStructWithBlob(t *testing.T) { assert := assert.New(t) - typeDef := MakeStructType("S", []Field{ + typ := MakeStructType("S", []Field{ Field{"b", BlobType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) - typ := MakeType(pkgRef, 0) b := NewBlob(bytes.NewBuffer([]byte{0x00, 0x01})) - v := NewStruct(typ, typeDef, structData{"b": b}) + v := NewStruct(typ, structData{"b": b}) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{UnresolvedKind, pkgRef.String(), "0", false, "AAE="}, w.toArray()) + assert.EqualValues([]interface{}{StructKind, "S", []interface{}{"b", BlobKind, false}, []interface{}{}, false, "AAE="}, w.toArray()) } func TestWriteCompoundList(t *testing.T) { @@ -327,35 +315,30 @@ func TestWriteListOfValue(t *testing.T) { func TestWriteListOfValueWithStruct(t *testing.T) { assert := assert.New(t) - typeDef := MakeStructType("S", []Field{ + structType := MakeStructType("S", []Field{ Field{"x", NumberType, false}, }, []Field{}) - pkg := NewPackage([]*Type{typeDef}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) listType := MakeListType(ValueType) - structType := MakeType(pkgRef, 0) - v := NewTypedList(listType, NewStruct(structType, typeDef, structData{"x": Number(42)})) + v := NewTypedList(listType, NewStruct(structType, structData{"x": Number(42)})) w := newJSONArrayWriter(NewTestValueStore()) w.writeTopLevelValue(v) - assert.EqualValues([]interface{}{ListKind, ValueKind, false, []interface{}{UnresolvedKind, pkgRef.String(), "0", "42"}}, w.toArray()) + assert.EqualValues([]interface{}{ListKind, ValueKind, false, []interface{}{StructKind, "S", []interface{}{"x", NumberKind, false}, []interface{}{}, "42"}}, w.toArray()) } func TestWriteListOfValueWithType(t *testing.T) { assert := assert.New(t) - pkg := NewPackage([]*Type{ - MakeStructType("S", []Field{ - Field{"x", NumberType, false}, - }, []Field{})}, []ref.Ref{}) - pkgRef := RegisterPackage(&pkg) + structType := MakeStructType("S", 
+		Field{"x", NumberType, false},
+	}, []Field{})
 	typ := MakeListType(ValueType)
 	v := NewTypedList(typ,
 		Bool(true),
 		NumberType,
 		TypeType,
-		MakeType(pkgRef, 0),
+		structType,
 	)
 	w := newJSONArrayWriter(NewTestValueStore())
@@ -364,7 +347,7 @@ func TestWriteListOfValueWithType(t *testing.T) {
 		BoolKind, true,
 		TypeKind, NumberKind,
 		TypeKind, TypeKind,
-		TypeKind, UnresolvedKind, pkgRef.String(), "0",
+		TypeKind, StructKind, "S", []interface{}{"x", NumberKind, false}, []interface{}{},
 	}}, w.toArray())
 }
@@ -406,19 +389,6 @@ func TestWriteTypeValue(t *testing.T) {
 			Field{"x", NumberType, false},
 			Field{"v", ValueType, false},
 		}))
-
-	pkgRef := ref.Parse("sha1-0123456789abcdef0123456789abcdef01234567")
-	test([]interface{}{TypeKind, UnresolvedKind, pkgRef.String(), "123"},
-		MakeType(pkgRef, 123))
-
-	test([]interface{}{TypeKind, StructKind, "S", []interface{}{"e", UnresolvedKind, pkgRef.String(), "123", false, "x", NumberKind, false}, []interface{}{}},
-		MakeStructType("S", []Field{
-			Field{"e", MakeType(pkgRef, 123), false},
-			Field{"x", NumberType, false},
-		}, []Field{}))
-
-	test([]interface{}{TypeKind, UnresolvedKind, ref.Ref{}.String(), "-1", "ns", "n"},
-		MakeUnresolvedType("ns", "n"))
 }
 
 func TestWriteListOfTypes(t *testing.T) {
@@ -432,14 +402,33 @@ func TestWriteListOfTypes(t *testing.T) {
 	assert.EqualValues([]interface{}{ListKind, TypeKind, false, []interface{}{BoolKind, StringKind}}, w.toArray())
 }
 
-func TestWritePackage(t *testing.T) {
+func TestWriteRecursiveStruct(t *testing.T) {
 	assert := assert.New(t)
-	setTref := MakeSetType(NumberType)
-	r := ref.Parse("sha1-0123456789abcdef0123456789abcdef01234567")
-	v := Package{[]*Type{setTref}, []ref.Ref{r}, &ref.Ref{}}
+	// struct A6 {
+	//   v: Number
+	//   cs: List
+	// }
+
+	structType := MakeStructType("A6", []Field{
+		Field{"v", NumberType, false},
+		Field{"cs", nil, false},
+	}, []Field{})
+	listType := MakeListType(structType)
+	// Mutate...
+	structType.Desc.(StructDesc).Fields[1].T = listType
+
+	NewTypedList(listType)
+
+	v := NewStruct(structType, structData{
+		"v": Number(42),
+		"cs": NewTypedList(listType, NewStruct(structType, structData{
+			"v": Number(555),
+			"cs": NewTypedList(listType),
+		})),
+	})
 	w := newJSONArrayWriter(NewTestValueStore())
 	w.writeTopLevelValue(v)
-	assert.EqualValues([]interface{}{PackageKind, []interface{}{SetKind, []interface{}{NumberKind}}, []interface{}{r.String()}}, w.toArray())
+	assert.EqualValues([]interface{}{StructKind, "A6", []interface{}{"v", NumberKind, false, "cs", ListKind, BackRefKind, uint8(0), false}, []interface{}{}, "42", false, []interface{}{"555", false, []interface{}{}}}, w.toArray())
 }
diff --git a/types/equals_test.go b/types/equals_test.go
index 8ef4343090..ca41406538 100644
--- a/types/equals_test.go
+++ b/types/equals_test.go
@@ -10,9 +10,6 @@ import (
 func TestValueEquals(t *testing.T) {
 	assert := assert.New(t)
-	r1 := Number(1).Ref()
-	r2 := Number(2).Ref()
-
 	values := []func() Value{
 		func() Value { return nil },
 		func() Value { return Bool(false) },
@@ -64,12 +61,6 @@ func TestValueEquals(t *testing.T) {
 		func() Value { return MakeMapType(NumberType, ValueType) },
-		func() Value { return MakeType(r1, 0) },
-		func() Value { return MakeType(r1, 1) },
-		func() Value { return MakeType(r2, 0) },
-		func() Value { return MakeUnresolvedType("ns", "a") },
-		func() Value { return MakeUnresolvedType("ns", "b") },
-		func() Value { return MakeUnresolvedType("ns2", "a") },
 	}
 	for i, f1 := range values {
diff --git a/types/fixup_type.go b/types/fixup_type.go
deleted file mode 100644
index 9c86ce84dd..0000000000
--- a/types/fixup_type.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package types
-
-// FixupType goes trough the object graph of tr and updates the PackageRef to pkg if the the old PackageRef was an empty ref.
-func FixupType(tr *Type, pkg *Package) *Type {
-	switch desc := tr.Desc.(type) {
-	case PrimitiveDesc:
-		return tr
-
-	case CompoundDesc:
-		elemTypes := make([]*Type, len(desc.ElemTypes))
-		for i, elemType := range desc.ElemTypes {
-			elemTypes[i] = FixupType(elemType, pkg)
-		}
-		return makeCompoundType(tr.Kind(), elemTypes...)
-
-	case UnresolvedDesc:
-		if tr.HasPackageRef() {
-			return tr
-		}
-		return MakeType(pkg.Ref(), tr.Ordinal())
-
-	case StructDesc:
-		fixField := func(f Field) Field {
-			newT := FixupType(f.T, pkg)
-			return Field{Name: f.Name, T: newT, Optional: f.Optional}
-		}
-		fixFields := func(fields []Field) []Field {
-			newFields := make([]Field, len(fields))
-			for i, f := range fields {
-				newFields[i] = fixField(f)
-			}
-			return newFields
-		}
-		return MakeStructType(tr.Name(), fixFields(desc.Fields), fixFields(desc.Union))
-	}
-
-	panic("unreachable")
-}
diff --git a/types/noms_kind.go b/types/noms_kind.go
index 3b3541f489..db29b5084f 100644
--- a/types/noms_kind.go
+++ b/types/noms_kind.go
@@ -16,14 +16,13 @@ const (
 	SetKind
 	StructKind
 	TypeKind
-	UnresolvedKind
-	PackageKind
+	BackRefKind
 )
 
 // IsPrimitiveKind returns true if k represents a Noms primitive type, which excludes collections (List, Map, Set), Refs, Structs, Symbolic and Unresolved types.
 func IsPrimitiveKind(k NomsKind) bool {
 	switch k {
-	case BoolKind, NumberKind, StringKind, BlobKind, ValueKind, TypeKind, PackageKind:
+	case BoolKind, NumberKind, StringKind, BlobKind, ValueKind, TypeKind, BackRefKind:
 		return true
 	default:
 		return false
diff --git a/types/package.go b/types/package.go
deleted file mode 100644
index 365124eaa6..0000000000
--- a/types/package.go
+++ /dev/null
@@ -1,113 +0,0 @@
-package types
-
-import (
-	"sort"
-
-	"github.com/attic-labs/noms/d"
-	"github.com/attic-labs/noms/ref"
-)
-
-var (
-	packages map[ref.Ref]*Package = map[ref.Ref]*Package{}
-)
-
-// LookupPackage looks for a Package by ref.Ref in the global cache of Noms type packages.
-func LookupPackage(r ref.Ref) *Package {
-	return packages[r]
-}
-
-// RegisterPackage puts p into the global cache of Noms type packages.
-func RegisterPackage(p *Package) (r ref.Ref) {
-	d.Chk.NotNil(p)
-	r = p.Ref()
-	packages[r] = p
-	return
-}
-
-func ReadPackage(r ref.Ref, vr ValueReader) *Package {
-	p := vr.ReadValue(r).(Package)
-	RegisterPackage(&p)
-	return &p
-}
-
-type Package struct {
-	types []*Type
-	dependencies ref.RefSlice
-	ref *ref.Ref
-}
-
-func NewPackage(types []*Type, dependencies ref.RefSlice) Package {
-	p := Package{types: types}
-	// The order |Package.dependencies| must be stable for the Package to have a stable ref.
-	// See https://github.com/attic-labs/noms/issues/814 for stable ordering of |Package.types|.
-	p.dependencies = append(p.dependencies, dependencies...)
-	sort.Sort(p.dependencies)
-	r := getRef(p)
-	p.ref = &r
-
-	newTypes := make([]*Type, len(types))
-	for i, t := range types {
-		newTypes[i] = FixupType(t, &p)
-	}
-	p.types = newTypes
-
-	return p
-}
-
-func (p Package) Equals(other Value) bool {
-	return other != nil && typeForPackage.Equals(other.Type()) && p.Ref() == other.Ref()
-}
-
-func (p Package) Ref() ref.Ref {
-	return EnsureRef(p.ref, p)
-}
-
-func (p Package) Chunks() (chunks []Ref) {
-	for _, t := range p.types {
-		chunks = append(chunks, t.Chunks()...)
-	}
-	for _, d := range p.dependencies {
-		chunks = append(chunks, NewTypedRef(MakeRefType(typeForPackage), d))
-	}
-	return
-}
-
-func (p Package) ChildValues() (res []Value) {
-	for _, t := range p.types {
-		res = append(res, t)
-	}
-	for _, d := range p.dependencies {
-		res = append(res, NewTypedRef(p.Type(), d))
-	}
-	return
-}
-
-var typeForPackage = PackageType
-var typeForRefOfPackage = MakeRefType(PackageType)
-
-func (p Package) Type() *Type {
-	return typeForPackage
-}
-
-func (p Package) GetOrdinal(n string) (ordinal int64) {
-	for i, t := range p.types {
-		if t.Name() == n && t.Namespace() == "" {
-			return int64(i)
-		}
-	}
-	return -1
-}
-
-func (p Package) Dependencies() (dependencies []ref.Ref) {
-	dependencies = append(dependencies, p.dependencies...)
-	return
-}
-
-func (p Package) Types() (types []*Type) {
-	types = append(types, p.types...)
-	return
-}
-
-func NewSetOfRefOfPackage() Set {
-	return NewTypedSet(MakeSetType(typeForRefOfPackage))
-}
diff --git a/types/package_test.go b/types/package_test.go
deleted file mode 100644
index 0cb8ee45c7..0000000000
--- a/types/package_test.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package types
-
-import (
-	"testing"
-
-	"github.com/attic-labs/noms/ref"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestType(t *testing.T) {
-	assert := assert.New(t)
-
-	st := NewPackage([]*Type{}, []ref.Ref{})
-	typ := st.Type()
-	assert.Equal(PackageKind, typ.Kind())
-}
diff --git a/types/primitives.go b/types/primitives.go
index 464b879563..67ccb573f2 100644
--- a/types/primitives.go
+++ b/types/primitives.go
@@ -26,10 +26,8 @@ func (v Bool) ToPrimitive() interface{} {
 	return bool(v)
 }
 
-var typeForBool = BoolType
-
 func (v Bool) Type() *Type {
-	return typeForBool
+	return BoolType
 }
 
 type Number float64
@@ -54,10 +52,8 @@ func (v Number) ToPrimitive() interface{} {
 	return float64(v)
 }
 
-var typeForNumber = NumberType
-
 func (v Number) Type() *Type {
-	return typeForNumber
+	return NumberType
 }
 
 func (v Number) Less(other OrderedValue) bool {
diff --git a/types/struct.go b/types/struct.go
index cddca68651..2a700101bd 100644
--- a/types/struct.go
+++ b/types/struct.go
@@ -10,26 +10,22 @@ type structData map[string]Value
 type Struct struct {
 	data structData
 	t *Type
-	typeDef *Type
 	unionIndex uint32
 	unionValue Value
 	ref *ref.Ref
 }
 
-func newStructFromData(data structData, unionIndex uint32, unionValue Value, typ, typeDef *Type) Struct {
-	d.Chk.Equal(typ.Kind(), UnresolvedKind)
-	d.Chk.True(typ.HasPackageRef())
-	d.Chk.True(typ.HasOrdinal())
-	d.Chk.Equal(typeDef.Kind(), StructKind)
-	return Struct{data, typ, typeDef, unionIndex, unionValue, &ref.Ref{}}
+func newStructFromData(data structData, unionIndex uint32, unionValue Value, t *Type) Struct {
+	d.Chk.Equal(t.Kind(), StructKind)
+	return Struct{data, t, unionIndex, unionValue, &ref.Ref{}}
 }
 
-func NewStruct(typ, typeDef *Type, data structData) Struct {
+func NewStruct(t *Type, data structData) Struct {
 	newData := make(structData)
 	unionIndex := uint32(0)
 	var unionValue Value
-	desc := typeDef.Desc.(StructDesc)
+	desc := t.Desc.(StructDesc)
 	for _, f := range desc.Fields {
 		if v, ok := data[f.Name]; ok {
 			newData[f.Name] = v
@@ -45,7 +41,7 @@ func NewStruct(t *Type, data structData) Struct {
 			break
 		}
 	}
-	return newStructFromData(newData, unionIndex, unionValue, typ, typeDef)
+	return newStructFromData(newData, unionIndex, unionValue, t)
 }
 
 func (s Struct) Equals(other Value) bool {
@@ -93,7 +89,7 @@ func (s Struct) Type() *Type {
 }
 
 func (s Struct) desc() StructDesc {
-	return s.typeDef.Desc.(StructDesc)
+	return s.t.Desc.(StructDesc)
 }
 
 func (s Struct) hasUnion() bool {
@@ -145,7 +141,7 @@ func (s Struct) Set(n string, v Value) Struct {
 		unionValue = v
 	}
-	return newStructFromData(data, unionIndex, unionValue, s.t, s.typeDef)
+	return newStructFromData(data, unionIndex, unionValue, s.t)
 }
 
 func (s Struct) UnionIndex() uint32 {
@@ -170,9 +166,9 @@ func (s Struct) findField(n string) (Field, int32, bool) {
 	return Field{}, -1, false
 }
 
-func structBuilder(values []Value, typ, typeDef *Type) Value {
+func structBuilder(values []Value, t *Type) Value {
 	i := 0
-	desc := typeDef.Desc.(StructDesc)
+	desc := t.Desc.(StructDesc)
 	data := structData{}
 	unionIndex := uint32(0)
 	var unionValue Value
@@ -197,13 +193,14 @@ func structBuilder(values []Value, t *Type) Value {
 		i++
 	}
-	return newStructFromData(data, unionIndex, unionValue, typ, typeDef)
+	return newStructFromData(data, unionIndex, unionValue, t)
 }
 
-func structReader(s Struct, typ, typeDef *Type) []Value {
+func structReader(s Struct, t *Type) []Value {
+	d.Chk.Equal(t.Kind(), StructKind)
 	values := []Value{}
-	desc := typeDef.Desc.(StructDesc)
+	desc := t.Desc.(StructDesc)
 	for _, f := range desc.Fields {
 		v, ok := s.data[f.Name]
 		if f.Optional {
diff --git a/types/struct_test.go b/types/struct_test.go
index e1c918fcdc..70a60c64eb 100644
--- a/types/struct_test.go
+++ b/types/struct_test.go
@@ -3,25 +3,21 @@ package types
 import (
 	"testing"
 
-	"github.com/attic-labs/noms/ref"
 	"github.com/stretchr/testify/assert"
 )
 
 func TestGenericStructEquals(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S1", []Field{
+	typ := MakeStructType("S1", []Field{
 		Field{"x", BoolType, false},
 		Field{"o", StringType, true},
 	}, []Field{})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
 	data1 := structData{"x": Bool(true)}
-	s1 := newStructFromData(data1, 0, nil, typ, typeDef)
+	s1 := newStructFromData(data1, 0, nil, typ)
 	data2 := structData{"x": Bool(true), "extra": NewString("is ignored")}
-	s2 := newStructFromData(data2, 0, nil, typ, typeDef)
+	s2 := newStructFromData(data2, 0, nil, typ)
 	assert.True(s1.Equals(s2))
 	assert.True(s2.Equals(s1))
@@ -30,86 +26,69 @@ func TestGenericStructChunks(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S1", []Field{
+	typ := MakeStructType("S1", []Field{
 		Field{"r", MakeRefType(BoolType), false},
 	}, []Field{})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
 	b := Bool(true)
 	data1 := structData{"r": NewRef(b.Ref())}
-	s1 := newStructFromData(data1, 0, nil, typ, typeDef)
+	s1 := newStructFromData(data1, 0, nil, typ)
-	assert.Len(s1.Chunks(), 2)
-	assert.Equal(pkgRef, s1.Chunks()[0].TargetRef())
-	assert.Equal(b.Ref(), s1.Chunks()[1].TargetRef())
+	assert.Len(s1.Chunks(), 1)
+	assert.Equal(b.Ref(), s1.Chunks()[0].TargetRef())
 }
 
 func TestGenericStructChunksOptional(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S1", []Field{
+	typ := MakeStructType("S1", []Field{
 		Field{"r", MakeRefType(BoolType), true},
 	}, []Field{})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
 	b := Bool(true)
 	data1 := structData{}
-	s1 := newStructFromData(data1, 0, nil, typ, typeDef)
+	s1 := newStructFromData(data1, 0, nil, typ)
-	assert.Len(s1.Chunks(), 1)
-	assert.Equal(pkgRef, s1.Chunks()[0].TargetRef())
+	assert.Len(s1.Chunks(), 0)
 	data2 := structData{"r": NewRef(b.Ref())}
-	s2 := newStructFromData(data2, 0, nil, typ, typeDef)
+	s2 := newStructFromData(data2, 0, nil, typ)
-	assert.Len(s2.Chunks(), 2)
-	assert.Equal(pkgRef, s2.Chunks()[0].TargetRef())
-	assert.Equal(b.Ref(), s2.Chunks()[1].TargetRef())
+	assert.Len(s2.Chunks(), 1)
+	assert.Equal(b.Ref(), s2.Chunks()[0].TargetRef())
 }
 
 func TestGenericStructChunksUnion(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S1", []Field{}, []Field{
+	typ := MakeStructType("S1", []Field{}, []Field{
 		Field{"r", MakeRefType(BoolType), false},
 		Field{"s", StringType, false},
 	})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
 	b := Bool(true)
-	s1 := NewStruct(typ, typeDef, structData{"s": NewString("hi")})
+	s1 := NewStruct(typ, structData{"s": NewString("hi")})
-	assert.Len(s1.Chunks(), 1)
-	assert.Equal(pkgRef, s1.Chunks()[0].TargetRef())
+	assert.Len(s1.Chunks(), 0)
-	s2 := NewStruct(typ, typeDef, structData{"r": NewRef(b.Ref())})
+	s2 := NewStruct(typ, structData{"r": NewRef(b.Ref())})
-	assert.Len(s2.Chunks(), 2)
-	assert.Equal(pkgRef, s2.Chunks()[0].TargetRef())
-	assert.Equal(b.Ref(), s2.Chunks()[1].TargetRef())
+	assert.Len(s2.Chunks(), 1)
+	assert.Equal(b.Ref(), s2.Chunks()[0].TargetRef())
 }
 
 func TestGenericStructNew(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S2", []Field{
+	typ := MakeStructType("S2", []Field{
 		Field{"b", BoolType, false},
 		Field{"o", StringType, true},
 	}, []Field{})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
-	s := NewStruct(typ, typeDef, map[string]Value{"b": Bool(true)})
+	s := NewStruct(typ, map[string]Value{"b": Bool(true)})
 	assert.True(s.Get("b").Equals(Bool(true)))
 	_, ok := s.MaybeGet("o")
 	assert.False(ok)
@@ -117,28 +96,25 @@ func TestGenericStructNew(t *testing.T) {
 	_, ok = s.MaybeGet("x")
 	assert.False(ok)
-	s2 := NewStruct(typ, typeDef, map[string]Value{"b": Bool(false), "o": NewString("hi")})
+	s2 := NewStruct(typ, map[string]Value{"b": Bool(false), "o": NewString("hi")})
 	assert.True(s2.Get("b").Equals(Bool(false)))
 	o, ok := s2.MaybeGet("o")
 	assert.True(ok)
 	assert.True(NewString("hi").Equals(o))
-	assert.Panics(func() { NewStruct(typ, typeDef, nil) })
-	assert.Panics(func() { NewStruct(typ, typeDef, map[string]Value{"o": NewString("hi")}) })
+	assert.Panics(func() { NewStruct(typ, nil) })
+	assert.Panics(func() { NewStruct(typ, map[string]Value{"o": NewString("hi")}) })
 }
 
 func TestGenericStructNewUnion(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S3", []Field{}, []Field{
+	typ := MakeStructType("S3", []Field{}, []Field{
 		Field{"b", BoolType, false},
 		Field{"o", StringType, false},
 	})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
-	s := NewStruct(typ, typeDef, map[string]Value{"b": Bool(true)})
+	s := NewStruct(typ, map[string]Value{"b": Bool(true)})
 	assert.True(s.Get("b").Equals(Bool(true)))
 	_, ok := s.MaybeGet("o")
 	assert.False(ok)
@@ -147,15 +123,12 @@ func TestGenericStructSet(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S3", []Field{
+	typ := MakeStructType("S3", []Field{
 		Field{"b", BoolType, false},
 		Field{"o", StringType, true},
 	}, []Field{})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
-	s := NewStruct(typ, typeDef, map[string]Value{"b": Bool(true)})
+	s := NewStruct(typ, map[string]Value{"b": Bool(true)})
 	s2 := s.Set("b", Bool(false))
 	assert.Panics(func() { s.Set("b", Number(1)) })
@@ -168,15 +141,12 @@ func TestGenericStructSetUnion(t *testing.T) {
 	assert := assert.New(t)
-	typeDef := MakeStructType("S3", []Field{}, []Field{
+	typ := MakeStructType("S3", []Field{}, []Field{
 		Field{"b", BoolType, false},
 		Field{"s", StringType, false},
 	})
-	pkg := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	pkgRef := RegisterPackage(&pkg)
-	typ := MakeType(pkgRef, 0)
-	s := NewStruct(typ, typeDef, map[string]Value{"b": Bool(true)})
+	s := NewStruct(typ, map[string]Value{"b": Bool(true)})
 	assert.Equal(uint32(0), s.UnionIndex())
 	assert.True(Bool(true).Equals(s.UnionValue()))
 	s2 := s.Set("s", NewString("hi"))
diff --git a/types/type.go b/types/type.go
index cbaa38fef8..9744d51c32 100644
--- a/types/type.go
+++ b/types/type.go
@@ -6,8 +6,6 @@ import (
 )
 
 // Type defines and describes Noms types, both custom and built-in.
-// StructKind types, and possibly others if we do type aliases, will have a Name(). Named types are
-// 'exported' in that they can be addressed from other type packages.
 // Desc provides more details of the type. It may contain only a types.NomsKind, in the case of
 // primitives, or it may contain additional information -- e.g. element Types for compound type
 // specializations, field descriptions for structs, etc. Either way, checking Kind() allows code
@@ -15,40 +13,10 @@
 // If Kind() refers to a primitive, then Desc has no more info.
 // If Kind() refers to List, Map, Set or Ref, then Desc is a list of Types describing the element type(s).
 // If Kind() refers to Struct, then Desc contains a []Field and Choices.
-// If Kind() refers to an UnresolvedKind, then Desc contains a PackageRef, which is the Ref of the
-// package where the type definition is defined. The ordinal, if not -1, is the index into the
-// Types list of the package. If the Name is set then the ordinal needs to be found.
 type Type struct {
-	name name
 	Desc TypeDesc
-
-	ref *ref.Ref
-}
-
-type name struct {
-	namespace, name string
-}
-
-func (n name) compose() (out string) {
-	d.Chk.True(n.namespace == "" || (n.namespace != "" && n.name != ""), "If a Type's namespace is set, so must name be.")
-	if n.namespace != "" {
-		out = n.namespace + "."
-	}
-	if n.name != "" {
-		out += n.name
-	}
-	return
-}
-
-// IsUnresolved returns true if t doesn't contain description information. The caller should look the type up by Ordinal in the Types of the appropriate Package.
-func (t *Type) IsUnresolved() bool {
-	_, ok := t.Desc.(UnresolvedDesc)
-	return ok
-}
-
-func (t *Type) HasPackageRef() bool {
-	return t.IsUnresolved() && !t.PackageRef().IsEmpty()
+	ref *ref.Ref
 }
 
 // Describe generate text that should parse into the struct being described.
@@ -69,27 +37,10 @@ func (t *Type) IsOrdered() bool {
 	}
 }
 
-func (t *Type) PackageRef() ref.Ref {
-	desc, ok := t.Desc.(UnresolvedDesc)
-	d.Chk.True(ok, "PackageRef only works on unresolved types")
-	return desc.pkgRef
-}
-
-func (t *Type) Ordinal() int16 {
-	d.Chk.True(t.HasOrdinal(), "Ordinal has not been set")
-	return t.Desc.(UnresolvedDesc).ordinal
-}
-
-func (t *Type) HasOrdinal() bool {
-	return t.IsUnresolved() && t.Desc.(UnresolvedDesc).ordinal >= 0
-}
-
 func (t *Type) Name() string {
-	return t.name.name
-}
-
-func (t *Type) Namespace() string {
-	return t.name.namespace
+	// TODO: Remove from Type
+	d.Chk.IsType(StructDesc{}, t.Desc, "Name only works on Struct types")
+	return t.Desc.(StructDesc).Name
 }
 
 func (t *Type) Ref() ref.Ref {
@@ -101,44 +52,26 @@ func (t *Type) Equals(other Value) (res bool) {
 }
 
 func (t *Type) Chunks() (chunks []Ref) {
-	if t.IsUnresolved() {
-		if t.HasPackageRef() {
-			chunks = append(chunks, NewTypedRef(MakeRefType(typeForPackage), t.PackageRef()))
-		}
-		return
-	}
-	if desc, ok := t.Desc.(CompoundDesc); ok {
-		for _, t := range desc.ElemTypes {
-			chunks = append(chunks, t.Chunks()...)
-		}
-	}
 	return
 }
 
 func (t *Type) ChildValues() (res []Value) {
-	if t.HasPackageRef() {
-		res = append(res, NewTypedRef(typeForRefOfPackage, t.PackageRef()))
-	}
-	if !t.IsUnresolved() {
-		switch desc := t.Desc.(type) {
-		case CompoundDesc:
-			for _, t := range desc.ElemTypes {
-				res = append(res, t)
-			}
-		case StructDesc:
-			for _, t := range desc.Fields {
-				res = append(res, t.T)
-			}
-			for _, t := range desc.Union {
-				res = append(res, t.T)
-			}
-		case UnresolvedDesc:
-			// Nothing, this is handled by the HasPackageRef() check above
-		case PrimitiveDesc:
-			// Nothing, these have no child values
-		default:
-			d.Chk.Fail("Unexpected type desc implementation: %#v", t)
+	switch desc := t.Desc.(type) {
+	case CompoundDesc:
+		for _, t := range desc.ElemTypes {
+			res = append(res, t)
 		}
+	case StructDesc:
+		for _, t := range desc.Fields {
+			res = append(res, t.T)
+		}
+		for _, t := range desc.Union {
+			res = append(res, t.T)
+		}
+	case PrimitiveDesc:
+		// Nothing, these have no child values
+	default:
+		d.Chk.Fail("Unexpected type desc implementation: %#v", t)
 	}
 	return
 }
@@ -163,15 +96,13 @@ func MakePrimitiveType(k NomsKind) *Type {
 		return ValueType
 	case TypeKind:
 		return TypeType
-	case PackageKind:
-		return PackageType
 	}
 	d.Chk.Fail("invalid NomsKind: %d", k)
 	return nil
 }
 
 func makePrimitiveType(k NomsKind) *Type {
-	return buildType("", PrimitiveDesc(k))
+	return buildType(PrimitiveDesc(k))
 }
 
 func MakePrimitiveTypeByString(p string) *Type {
@@ -188,8 +119,6 @@ func MakePrimitiveTypeByString(p string) *Type {
 		return ValueType
 	case "Type":
 		return TypeType
-	case "Package":
-		return PackageType
 	}
 	d.Chk.Fail("invalid type string: %s", p)
 	return nil
@@ -203,55 +132,41 @@ func makeCompoundType(kind NomsKind, elemTypes ...*Type) *Type {
 		d.Chk.Equal(MapKind, kind)
 		d.Chk.Len(elemTypes, 2, "MapKind requires 2 element types.")
 	}
-	return buildType("", CompoundDesc{kind, elemTypes})
+	return buildType(CompoundDesc{kind, elemTypes})
 }
 
 func MakeStructType(name string, fields []Field, choices []Field) *Type {
-	return buildType(name, StructDesc{fields, choices})
-}
-
-func MakeType(pkgRef ref.Ref, ordinal int16) *Type {
-	d.Chk.True(ordinal >= 0)
-	return &Type{Desc: UnresolvedDesc{pkgRef, ordinal}, ref: &ref.Ref{}}
-}
-
-func MakeUnresolvedType(namespace, n string) *Type {
-	return &Type{name: name{namespace, n}, Desc: UnresolvedDesc{ordinal: -1}, ref: &ref.Ref{}}
+	return buildType(StructDesc{name, fields, choices})
 }
 
 func MakeListType(elemType *Type) *Type {
-	return buildType("", CompoundDesc{ListKind, []*Type{elemType}})
+	return buildType(CompoundDesc{ListKind, []*Type{elemType}})
 }
 
 func MakeSetType(elemType *Type) *Type {
-	return buildType("", CompoundDesc{SetKind, []*Type{elemType}})
+	return buildType(CompoundDesc{SetKind, []*Type{elemType}})
 }
 
 func MakeMapType(keyType, valType *Type) *Type {
-	return buildType("", CompoundDesc{MapKind, []*Type{keyType, valType}})
+	return buildType(CompoundDesc{MapKind, []*Type{keyType, valType}})
 }
 
 func MakeRefType(elemType *Type) *Type {
-	return buildType("", CompoundDesc{RefKind, []*Type{elemType}})
+	return buildType(CompoundDesc{RefKind, []*Type{elemType}})
 }
 
-func buildType(n string, desc TypeDesc) *Type {
-	if IsPrimitiveKind(desc.Kind()) {
-		return &Type{name: name{name: n}, Desc: desc, ref: &ref.Ref{}}
-	}
-	switch desc.Kind() {
-	case ListKind, RefKind, SetKind, MapKind, StructKind, UnresolvedKind:
-		return &Type{name: name{name: n}, Desc: desc, ref: &ref.Ref{}}
-	default:
-		d.Exp.Fail("Unrecognized Kind:", "%v", desc.Kind())
-		panic("unreachable")
-	}
+func MakeBackRef(n uint8) *Type {
+	return buildType(BackRefDesc(n))
+}
+
+func buildType(desc TypeDesc) *Type {
+	return &Type{Desc: desc, ref: &ref.Ref{}}
 }
 
 var NumberType = makePrimitiveType(NumberKind)
 var BoolType = makePrimitiveType(BoolKind)
 var StringType = makePrimitiveType(StringKind)
 var BlobType = makePrimitiveType(BlobKind)
-var PackageType = makePrimitiveType(PackageKind)
 var TypeType = makePrimitiveType(TypeKind)
 var ValueType = makePrimitiveType(ValueKind)
+var BackRefType = makePrimitiveType(BackRefKind)
diff --git a/types/type_desc.go b/types/type_desc.go
index b9269da4fe..99b99b17b3 100644
--- a/types/type_desc.go
+++ b/types/type_desc.go
@@ -1,6 +1,6 @@
 package types
 
-import "github.com/attic-labs/noms/ref"
+import "fmt"
 
 // TypeDesc describes a type of the kind returned by Kind(), e.g. Map, Number, or a custom type.
 type TypeDesc interface {
@@ -32,28 +32,12 @@ var KindToString = map[NomsKind]string{
 	ListKind: "List",
 	MapKind: "Map",
 	NumberKind: "Number",
-	PackageKind: "Package",
 	RefKind: "Ref",
 	SetKind: "Set",
 	StringKind: "String",
 	TypeKind: "Type",
 	ValueKind: "Value",
-}
-
-type UnresolvedDesc struct {
-	pkgRef ref.Ref
-	ordinal int16
-}
-
-func (u UnresolvedDesc) Kind() NomsKind {
-	return UnresolvedKind
-}
-
-func (u UnresolvedDesc) Equals(other TypeDesc) bool {
-	if other, ok := other.(UnresolvedDesc); ok {
-		return u.pkgRef == other.pkgRef && u.ordinal == other.ordinal
-	}
-	return false
+	BackRefKind: "BackRef",
 }
 
 // CompoundDesc describes a List, Map, Set or Ref type.
@@ -82,6 +66,7 @@ func (c CompoundDesc) Equals(other TypeDesc) bool {
 // StructDesc describes a custom Noms Struct.
 // Structs can contain at most one anonymous union, so Union may be nil.
 type StructDesc struct {
+	Name string
 	Fields []Field
 	Union []Field
 }
@@ -118,3 +103,18 @@ type Field struct {
 func (f Field) Equals(other Field) bool {
 	return f.Name == other.Name && f.Optional == other.Optional && f.T.Equals(other.T)
 }
+
+// BackRefDesc is used to symbolize back references in recursive struct types
+type BackRefDesc uint8
+
+func (b BackRefDesc) Kind() NomsKind {
+	return BackRefKind
+}
+
+func (b BackRefDesc) Equals(other TypeDesc) bool {
+	return b.Kind() == other.Kind() && other.(BackRefDesc) == b
+}
+
+func (b BackRefDesc) Describe() string {
+	return fmt.Sprintf("%s(%d)", KindToString[b.Kind()], b)
+}
diff --git a/types/type_test.go b/types/type_test.go
index 0a0e16ec09..6fac31d737 100644
--- a/types/type_test.go
+++ b/types/type_test.go
@@ -3,7 +3,6 @@ package types
 import (
 	"testing"
 
-	"github.com/attic-labs/noms/ref"
 	"github.com/stretchr/testify/assert"
 )
 
@@ -11,48 +10,32 @@ func TestTypes(t *testing.T) {
 	assert := assert.New(t)
 	vs := NewTestValueStore()
 
-	boolType := BoolType
-	numberType := NumberType
-	stringType := StringType
-	mapType := MakeMapType(stringType, numberType)
-	setType := MakeSetType(stringType)
+	mapType := MakeMapType(StringType, NumberType)
+	setType := MakeSetType(StringType)
 	mahType := MakeStructType("MahStruct", []Field{
-		Field{"Field1", stringType, false},
-		Field{"Field2", boolType, true},
+		Field{"Field1", StringType, false},
+		Field{"Field2", BoolType, true},
 	}, []Field{})
 	otherType := MakeStructType("MahOtherStruct", []Field{}, []Field{
 		Field{"StructField", mahType, false},
-		Field{"StringField", stringType, false},
+		Field{"StringField", StringType, false},
 	})
-	pkgRef := vs.WriteValue(NewPackage([]*Type{}, ref.RefSlice{})).TargetRef()
-	trType := MakeType(pkgRef, 42)
+	recType := MakeStructType("RecursiveStruct", []Field{
+		Field{Name: "self", T: nil},
+	}, []Field{})
+	recType.Desc.(StructDesc).Fields[0].T = recType
 	mRef := vs.WriteValue(mapType).TargetRef()
 	setRef := vs.WriteValue(setType).TargetRef()
 	otherRef := vs.WriteValue(otherType).TargetRef()
 	mahRef := vs.WriteValue(mahType).TargetRef()
-	trRef := vs.WriteValue(trType).TargetRef()
+	recRef := vs.WriteValue(recType).TargetRef()
 	assert.True(otherType.Equals(vs.ReadValue(otherRef)))
 	assert.True(mapType.Equals(vs.ReadValue(mRef)))
 	assert.True(setType.Equals(vs.ReadValue(setRef)))
 	assert.True(mahType.Equals(vs.ReadValue(mahRef)))
-	assert.True(trType.Equals(vs.ReadValue(trRef)))
-}
-
-func TestTypeWithPkgRef(t *testing.T) {
-	assert := assert.New(t)
-	vs := NewTestValueStore()
-
-	pkg := NewPackage([]*Type{NumberType}, []ref.Ref{})
-
-	pkgRef := RegisterPackage(&pkg)
-	unresolvedType := MakeType(pkgRef, 42)
-	unresolvedRef := vs.WriteValue(unresolvedType).TargetRef()
-
-	v := vs.ReadValue(unresolvedRef)
-	assert.EqualValues(pkgRef, v.Chunks()[0].TargetRef())
-	assert.NotNil(vs.ReadValue(pkgRef))
+	assert.True(recType.Equals(vs.ReadValue(recRef)))
 }
 
 func TestTypeType(t *testing.T) {
@@ -61,30 +44,27 @@ func TestTypeRefDescribe(t *testing.T) {
 	assert := assert.New(t)
-	boolType := BoolType
-	numberType := NumberType
-	stringType := StringType
-	mapType := MakeMapType(stringType, numberType)
-	setType := MakeSetType(stringType)
+	mapType := MakeMapType(StringType, NumberType)
+	setType := MakeSetType(StringType)
-	assert.Equal("Bool", boolType.Describe())
-	assert.Equal("Number", numberType.Describe())
-	assert.Equal("String", stringType.Describe())
+	assert.Equal("Bool", BoolType.Describe())
+	assert.Equal("Number", NumberType.Describe())
+	assert.Equal("String", StringType.Describe())
 	assert.Equal("Map", mapType.Describe())
 	assert.Equal("Set", setType.Describe())
 	mahType := MakeStructType("MahStruct", []Field{
-		Field{"Field1", stringType, false},
-		Field{"Field2", boolType, true},
+		Field{"Field1", StringType, false},
+		Field{"Field2", BoolType, true},
 	}, []Field{})
 	assert.Equal("struct MahStruct {\n Field1: String\n Field2: optional Bool\n}", mahType.Describe())
 	otherType := MakeStructType("MahOtherStruct", []Field{
-		Field{"Field1", stringType, false},
-		Field{"Field2", boolType, true},
+		Field{"Field1", StringType, false},
+		Field{"Field2", BoolType, true},
 	}, []Field{
-		Field{"NumberField", numberType, false},
-		Field{"StringField", stringType, false},
+		Field{"NumberField", NumberType, false},
+		Field{"StringField", StringType, false},
 	})
 	assert.Equal("struct MahOtherStruct {\n Field1: String\n Field2: optional Bool\n union {\n NumberField: Number\n StringField: String\n }\n}", otherType.Describe())
diff --git a/types/value_store.go b/types/value_store.go
index da1eb51251..8cacd786ca 100644
--- a/types/value_store.go
+++ b/types/value_store.go
@@ -91,12 +91,6 @@ func (lvs *ValueStore) cacheChunks(v Value, r ref.Ref) {
 		hash := reachable.TargetRef()
 		if cur := lvs.check(hash); cur == nil || cur.Hint().IsEmpty() || cur.Hint() == hash {
 			lvs.set(hash, hintedChunk{getTargetType(reachable), r})
-			// Code-genned Packages are side-loaded when reading Values for performance reasons. This means that they won't pass through the ReadValue() codepath above, which means that they won't have their Chunks added to the cache. So, if reachable is a RefOfPackage, go look the package up in the PackageRegistry and recursively add its Chunks to the cache.
-			if (reachable.Type().Equals(typeForRefOfPackage)) {
-				if p := LookupPackage(hash); p != nil {
-					lvs.cacheChunks(p, hash)
-				}
-			}
 		}
 	}
 }
diff --git a/types/value_store_test.go b/types/value_store_test.go
index cf7400f521..4e5bcaacf1 100644
--- a/types/value_store_test.go
+++ b/types/value_store_test.go
@@ -49,10 +49,6 @@ func TestWriteValue(t *testing.T) {
 	testEncode(string([]byte{'b', ' ', 0x00, 0x01, 0x02}), b)
 	testEncode(fmt.Sprintf("t [%d,\"hi\"]", StringKind), NewString("hi"))
-
-	testEncode(fmt.Sprintf("t [%d,[],[]]", PackageKind), Package{types: []*Type{}, dependencies: []ref.Ref{}, ref: &ref.Ref{}})
-	ref1 := testEncode(fmt.Sprintf("t [%d,[%d],[]]", PackageKind, BoolKind), Package{types: []*Type{BoolType}, dependencies: []ref.Ref{}, ref: &ref.Ref{}})
-	testEncode(fmt.Sprintf("t [%d,[],[\"%s\"]]", PackageKind, ref1), Package{types: []*Type{}, dependencies: []ref.Ref{ref1}, ref: &ref.Ref{}})
 }
 
 func TestWriteBlobLeaf(t *testing.T) {
@@ -76,40 +72,6 @@ func TestWriteBlobLeaf(t *testing.T) {
 	assert.Equal("sha1-135fe1453330547994b2ce8a1b238adfbd7df87e", r2.String())
 }
 
-func TestWritePackageWhenValueIsWritten(t *testing.T) {
-	assert := assert.New(t)
-	vs := NewTestValueStore()
-
-	typeDef := MakeStructType("S", []Field{
-		Field{"X", NumberType, false},
-	}, []Field{})
-	pkg1 := NewPackage([]*Type{typeDef}, []ref.Ref{})
-	// Don't write package
-	pkgRef1 := RegisterPackage(&pkg1)
-	typ := MakeType(pkgRef1, 0)
-
-	s := NewStruct(typ, typeDef, structData{"X": Number(42)})
-	vs.WriteValue(s)
-
-	pkg2 := vs.ReadValue(pkgRef1)
-	assert.True(pkg1.Equals(pkg2))
-}
-
-func TestWritePackageDepWhenPackageIsWritten(t *testing.T) {
-	assert := assert.New(t)
-	vs := NewTestValueStore()
-
-	pkg1 := NewPackage([]*Type{}, []ref.Ref{})
-	// Don't write package
-	pkgRef1 := RegisterPackage(&pkg1)
-
-	pkg2 := NewPackage([]*Type{}, []ref.Ref{pkgRef1})
-	vs.WriteValue(pkg2)
-
-	pkg3 := vs.ReadValue(pkgRef1)
-	assert.True(pkg1.Equals(pkg3))
-}
-
 func TestCheckChunksInCache(t *testing.T) {
 	assert := assert.New(t)
 	cs := chunks.NewTestStore()
diff --git a/types/write_value.go b/types/write_value.go
index ecbec6cb6e..06e5342672 100644
--- a/types/write_value.go
+++ b/types/write_value.go
@@ -23,21 +23,6 @@ func toEncodeable(v Value, vw ValueWriter) interface{} {
 	switch v := v.(type) {
 	case blobLeaf:
 		return v.Reader()
-	case Package:
-		processPackageChildren(v, vw)
 	}
 	return encNomsValue(v, vw)
 }
-
-func processPackageChildren(p Package, vw ValueWriter) {
-	if vw == nil {
-		return
-	}
-
-	for _, r := range p.dependencies {
-		p := LookupPackage(r)
-		if p != nil && vw != nil {
-			vw.WriteValue(*p)
-		}
-	}
-}