Merge pull request #1199 from rafael-atticlabs/writeValueReturnsRefBase

(Go) WriteValue returns RefBase, (JS) writeValue returns RefValue
This commit is contained in:
Rafael Weinstein
2016-04-11 17:16:06 -07:00
63 changed files with 207 additions and 226 deletions

View File

@@ -513,5 +513,5 @@ func (r RefOfIncident) TargetValue(vr types.ValueReader) Incident {
}
func (r RefOfIncident) SetTargetValue(val Incident, vw types.ValueWriter) RefOfIncident {
return NewRefOfIncident(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfIncident)
}

View File

@@ -582,7 +582,7 @@ func (r RefOfValue) TargetValue(vr types.ValueReader) types.Value {
}
func (r RefOfValue) SetTargetValue(val types.Value, vw types.ValueWriter) RefOfValue {
return NewRefOfValue(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfValue)
}
// ListOfNode
@@ -1193,5 +1193,5 @@ func (r RefOfSQuadTree) TargetValue(vr types.ValueReader) SQuadTree {
}
func (r RefOfSQuadTree) SetTargetValue(val SQuadTree, vw types.ValueWriter) RefOfSQuadTree {
return NewRefOfSQuadTree(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfSQuadTree)
}

View File

@@ -115,11 +115,11 @@ func importCompanies(ds dataset.Dataset, fileName string) ref.Ref {
rounds := roundsByPermalink[permalink]
roundRefs := SetOfRefOfRoundDef{}
for _, r := range rounds {
ref := ds.Store().WriteValue(r)
ref := ds.Store().WriteValue(r).TargetRef()
roundRefs[ref] = true
}
company = company.SetRounds(roundRefs.New())
ref := ds.Store().WriteValue(company)
ref := ds.Store().WriteValue(company).TargetRef()
companyRefsDef[company.Permalink()] = ref
}
}
@@ -130,7 +130,7 @@ func importCompanies(ds dataset.Dataset, fileName string) ref.Ref {
// fmt.Printf("\rImported %d companies with %d rounds\n", companyRefs.Len(), numRounds)
// Write the list of companyRefs
return ds.Store().WriteValue(companyRefs)
return ds.Store().WriteValue(companyRefs).TargetRef()
}
func getExistingCompaniesRef(ds dataset.Dataset, h hash.Hash) ref.Ref {

View File

@@ -213,7 +213,7 @@ func (r RefOfMapOfStringToRefOfCompany) TargetValue(vr types.ValueReader) MapOfS
}
func (r RefOfMapOfStringToRefOfCompany) SetTargetValue(val MapOfStringToRefOfCompany, vw types.ValueWriter) RefOfMapOfStringToRefOfCompany {
return NewRefOfMapOfStringToRefOfCompany(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfMapOfStringToRefOfCompany)
}
// MapOfStringToRefOfCompany
@@ -407,5 +407,5 @@ func (r RefOfCompany) TargetValue(vr types.ValueReader) Company {
}
func (r RefOfCompany) SetTargetValue(val Company, vw types.ValueWriter) RefOfCompany {
return NewRefOfCompany(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfCompany)
}

View File

@@ -958,5 +958,5 @@ func (r RefOfRound) TargetValue(vr types.ValueReader) Round {
}
func (r RefOfRound) SetTargetValue(val Round, vw types.ValueWriter) RefOfRound {
return NewRefOfRound(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfRound)
}

View File

@@ -120,7 +120,7 @@ func main() {
for e := range c {
key := e.key
ref := e.ref
keyRef := ds.WriteValue(key)
keyRef := ds.WriteValue(key).TargetRef()
setDef := mapOfSets[keyRef]
if setDef == nil {
setDef = SetOfRefOfRoundDef{}

View File

@@ -662,5 +662,5 @@ func (r RefOfKey) TargetValue(vr types.ValueReader) Key {
}
func (r RefOfKey) SetTargetValue(val Key, vw types.ValueWriter) RefOfKey {
return NewRefOfKey(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfKey)
}

View File

@@ -213,7 +213,7 @@ func (r RefOfMapOfStringToRefOfCompany) TargetValue(vr types.ValueReader) MapOfS
}
func (r RefOfMapOfStringToRefOfCompany) SetTargetValue(val MapOfStringToRefOfCompany, vw types.ValueWriter) RefOfMapOfStringToRefOfCompany {
return NewRefOfMapOfStringToRefOfCompany(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfMapOfStringToRefOfCompany)
}
// MapOfStringToRefOfCompany
@@ -407,5 +407,5 @@ func (r RefOfCompany) TargetValue(vr types.ValueReader) Company {
}
func (r RefOfCompany) SetTargetValue(val Company, vw types.ValueWriter) RefOfCompany {
return NewRefOfCompany(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfCompany)
}

View File

@@ -958,5 +958,5 @@ func (r RefOfRound) TargetValue(vr types.ValueReader) Round {
}
func (r RefOfRound) SetTargetValue(val Round, vw types.ValueWriter) RefOfRound {
return NewRefOfRound(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfRound)
}

View File

@@ -60,8 +60,8 @@ func main() {
printStats(user)
userRef := ds.Store().WriteValue(user)
fmt.Printf("userRef: %s\n", userRef)
_, err := ds.Commit(NewRefOfUser(userRef))
fmt.Printf("userRef: %s\n", userRef.TargetRef())
_, err := ds.Commit(userRef)
d.Exp.NoError(err)
}
@@ -119,7 +119,7 @@ func getPhotos() SetOfRefOfRemotePhoto {
float32(entry.Images[0].Width),
float32(entry.Images[0].Height)))
photos = photos.Insert(NewRefOfRemotePhoto(ds.Store().WriteValue(photo)))
photos = photos.Insert(ds.Store().WriteValue(photo).(RefOfRemotePhoto))
numFetched++
// Be defensive and use Min(1.0) here - the user might have more than 1000 albums, or they

View File

@@ -211,7 +211,7 @@ func (r RefOfUser) TargetValue(vr types.ValueReader) User {
}
func (r RefOfUser) SetTargetValue(val User, vw types.ValueWriter) RefOfUser {
return NewRefOfUser(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfUser)
}
// SetOfRefOfRemotePhoto
@@ -417,5 +417,5 @@ func (r RefOfRemotePhoto) TargetValue(vr types.ValueReader) RemotePhoto {
}
func (r RefOfRemotePhoto) SetTargetValue(val RemotePhoto, vw types.ValueWriter) RefOfRemotePhoto {
return NewRefOfRemotePhoto(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfRemotePhoto)
}

View File

@@ -212,7 +212,7 @@ func getAlbum(api flickrAPI, id string, gotPhoto chan struct{}) idAndRefOfAlbum
SetTitle(response.Photoset.Title.Content).
SetPhotos(photos)
// TODO: Write albums in batches.
ref := NewRefOfAlbum(ds.Store().WriteValue(album))
ref := ds.Store().WriteValue(album).(RefOfAlbum)
return idAndRefOfAlbum{id, ref}
}
@@ -350,7 +350,7 @@ func getAlbumPhotos(api flickrAPI, id string, gotPhoto chan struct{}) SetOfRefOf
}
// TODO: Write photos in batches.
photos = photos.Insert(NewRefOfRemotePhoto(store.WriteValue(photo)))
photos = photos.Insert(store.WriteValue(photo).(RefOfRemotePhoto))
gotPhoto <- struct{}{}
}
@@ -422,7 +422,7 @@ func awaitOAuthResponse(l net.Listener, tempCred *oauth.Credentials) (tokenCred
func commitUser() {
var err error
r := NewRefOfUser(ds.Store().WriteValue(user))
r := ds.Store().WriteValue(user).(RefOfUser)
*ds, err = ds.Commit(r)
d.Exp.NoError(err)
}

View File

@@ -344,7 +344,7 @@ func (r RefOfUser) TargetValue(vr types.ValueReader) User {
}
func (r RefOfUser) SetTargetValue(val User, vw types.ValueWriter) RefOfUser {
return NewRefOfUser(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfUser)
}
// MapOfStringToRefOfAlbum
@@ -683,7 +683,7 @@ func (r RefOfAlbum) TargetValue(vr types.ValueReader) Album {
}
func (r RefOfAlbum) SetTargetValue(val Album, vw types.ValueWriter) RefOfAlbum {
return NewRefOfAlbum(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfAlbum)
}
// RefOfRemotePhoto
@@ -744,5 +744,5 @@ func (r RefOfRemotePhoto) TargetValue(vr types.ValueReader) RemotePhoto {
}
func (r RefOfRemotePhoto) SetTargetValue(val RemotePhoto, vw types.ValueWriter) RefOfRemotePhoto {
return NewRefOfRemotePhoto(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfRemotePhoto)
}

View File

@@ -66,7 +66,7 @@ func main() {
userRef := ds.Store().WriteValue(user)
fmt.Printf("userRef: %s\n", userRef)
_, err := ds.Commit(NewRefOfUser(userRef))
_, err := ds.Commit(userRef)
d.Exp.NoError(err)
}
@@ -128,7 +128,7 @@ func getUser() User {
for {
album := <-ch
// TODO: batch write albums.
r := ds.Store().WriteValue(album)
r := ds.Store().WriteValue(album).TargetRef()
albums = append(albums, r)
wg.Done()
}
@@ -212,7 +212,7 @@ func getRemotePhotos(albumId string, numPhotos int, shapes shapeMap, progress ch
mu.Lock()
// TODO: batch write photos.
remotePhotos[ds.Store().WriteValue(p)] = true
remotePhotos[ds.Store().WriteValue(p).TargetRef()] = true
mu.Unlock()
progress <- struct{}{}
}

View File

@@ -344,7 +344,7 @@ func (r RefOfUser) TargetValue(vr types.ValueReader) User {
}
func (r RefOfUser) SetTargetValue(val User, vw types.ValueWriter) RefOfUser {
return NewRefOfUser(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfUser)
}
// ListOfRefOfAlbum
@@ -692,7 +692,7 @@ func (r RefOfAlbum) TargetValue(vr types.ValueReader) Album {
}
func (r RefOfAlbum) SetTargetValue(val Album, vw types.ValueWriter) RefOfAlbum {
return NewRefOfAlbum(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfAlbum)
}
// RefOfRemotePhoto
@@ -753,5 +753,5 @@ func (r RefOfRemotePhoto) TargetValue(vr types.ValueReader) RemotePhoto {
}
func (r RefOfRemotePhoto) SetTargetValue(val RemotePhoto, vw types.ValueWriter) RefOfRemotePhoto {
return NewRefOfRemotePhoto(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfRemotePhoto)
}

View File

@@ -137,7 +137,7 @@ func getIndex(input ListOfRefOfMapOfStringToValue, vrw types.ValueReadWriter) Ma
namedPitchCounts := MapOfStringToRefOfListOfPitchDef{}
for id, p := range pitchCounts {
if name, ok := pitchers[id]; d.Chk.True(ok, "Unknown pitcher: %s", id) {
namedPitchCounts[name] = vrw.WriteValue(p.New())
namedPitchCounts[name] = vrw.WriteValue(p.New()).TargetRef()
}
}
return namedPitchCounts.New()

View File

@@ -463,7 +463,7 @@ func (r RefOfMapOfStringToValue) TargetValue(vr types.ValueReader) MapOfStringTo
}
func (r RefOfMapOfStringToValue) SetTargetValue(val MapOfStringToValue, vw types.ValueWriter) RefOfMapOfStringToValue {
return NewRefOfMapOfStringToValue(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfMapOfStringToValue)
}
// MapOfStringToValue
@@ -799,5 +799,5 @@ func (r RefOfListOfPitch) TargetValue(vr types.ValueReader) ListOfPitch {
}
func (r RefOfListOfPitch) SetTargetValue(val ListOfPitch, vw types.ValueWriter) RefOfListOfPitch {
return NewRefOfListOfPitch(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfListOfPitch)
}

View File

@@ -38,7 +38,7 @@ type incidentWithIndex struct {
}
type refIndex struct {
ref types.Ref
ref types.RefBase
index int
}
@@ -153,8 +153,7 @@ func main() {
fmt.Printf("Converting refs list to noms list: %.2f secs\n", time.Now().Sub(start).Seconds())
}
ref := ds.Store().WriteValue(incidentRefs)
_, err = ds.Commit(types.NewRef(ref))
_, err = ds.Commit(ds.Store().WriteValue(incidentRefs))
d.Exp.NoError(err)
if !*quietFlag {
@@ -174,8 +173,7 @@ func getNomsWriter(vw types.ValueWriter) (iChan chan incidentWithIndex, rChan ch
go func() {
for incidentRecord := range iChan {
v := incidentRecord.incident.New()
r := vw.WriteValue(v)
rChan <- refIndex{types.NewRef(r), incidentRecord.index}
rChan <- refIndex{vw.WriteValue(v), incidentRecord.index}
}
wg.Done()
}()

View File

@@ -207,7 +207,7 @@ func (r RefOfMapOfStringToValue) TargetValue(vr types.ValueReader) MapOfStringTo
}
func (r RefOfMapOfStringToValue) SetTargetValue(val MapOfStringToValue, vw types.ValueWriter) RefOfMapOfStringToValue {
return NewRefOfMapOfStringToValue(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfMapOfStringToValue)
}
// MapOfStringToValue

View File

@@ -98,7 +98,7 @@ func main() {
r := ref.Ref{}
if !*noIO {
r = ds.Store().WriteValue(nomsObj)
r = ds.Store().WriteValue(nomsObj).TargetRef()
}
refsChan <- refIndex{r, f.index}

View File

@@ -84,13 +84,14 @@ func (ds *dataStoreCommon) ReadValue(r ref.Ref) types.Value {
}
// WriteValue takes a Value, schedules it to be written to ds, and returns v.Ref(). v is not guaranteed to be actually written until after a successful Commit().
func (ds *dataStoreCommon) WriteValue(v types.Value) (r ref.Ref) {
func (ds *dataStoreCommon) WriteValue(v types.Value) (r types.RefBase) {
if v == nil {
return
}
r = v.Ref()
if entry := ds.checkCache(r); entry != nil && entry.Present() {
targetRef := v.Ref()
r = types.PrivateRefFromType(targetRef, types.MakeRefType(v.Type()))
if entry := ds.checkCache(targetRef); entry != nil && entry.Present() {
return
}
@@ -110,7 +111,7 @@ func (ds *dataStoreCommon) WriteValue(v types.Value) (r ref.Ref) {
d.Chk.True(entry.Type().Equals(targetType), "Value to write contains ref %s, which points to a value of a different type: %+v != %+v", reachable.TargetRef(), entry.Type(), targetType)
}
ds.cs.Put(chunk) // TODO: DataStore should manage batching and backgrounding Puts.
ds.setCache(r, presentChunk(v.Type()))
ds.setCache(targetRef, presentChunk(v.Type()))
return
}
@@ -159,7 +160,7 @@ func (ds *dataStoreCommon) doCommit(datasetID string, commit Commit) error {
currentRootRef, currentDatasets := ds.getRootAndDatasets()
// TODO: This Commit will be orphaned if the tryUpdateRoot() below fails
commitRef := NewRefOfCommit(ds.WriteValue(commit))
commitRef := ds.WriteValue(commit).(RefOfCommit)
// First commit in store is always fast-forward.
if !currentRootRef.IsEmpty() {
@@ -201,7 +202,7 @@ func (ds *dataStoreCommon) getRootAndDatasets() (currentRootRef ref.Ref, current
func (ds *dataStoreCommon) tryUpdateRoot(currentDatasets MapOfStringToRefOfCommit, currentRootRef ref.Ref) (err error) {
// TODO: This Commit will be orphaned if the UpdateRoot below fails
newRootRef := ds.WriteValue(currentDatasets)
newRootRef := ds.WriteValue(currentDatasets).TargetRef()
// If the root has been updated by another process in the short window since we read it, this call will fail. See issue #404
if !ds.cs.UpdateRoot(newRootRef, currentRootRef) {
err = ErrOptimisticLockFailed

View File

@@ -17,7 +17,7 @@ func TestReadWriteCache(t *testing.T) {
var v types.Value = types.Bool(true)
assert.NotEqual(ref.Ref{}, ds.WriteValue(v))
assert.Equal(1, cs.Writes)
r := ds.WriteValue(v)
r := ds.WriteValue(v).TargetRef()
assert.Equal(1, cs.Writes)
v = ds.ReadValue(r)

View File

@@ -468,5 +468,5 @@ func (r RefOfCommit) TargetValue(vr types.ValueReader) Commit {
}
func (r RefOfCommit) SetTargetValue(val Commit, vw types.ValueWriter) RefOfCommit {
return NewRefOfCommit(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfCommit)
}

View File

@@ -15,27 +15,24 @@ func createTestDataset(name string) Dataset {
func TestValidateRef(t *testing.T) {
ds := createTestDataset("test")
r := ds.Store().WriteValue(types.Bool(true))
r := ds.Store().WriteValue(types.Bool(true)).TargetRef()
assert.Panics(t, func() { ds.validateRefAsCommit(r) })
}
func NewList(ds Dataset, vs ...types.Value) types.Ref {
func NewList(ds Dataset, vs ...types.Value) types.RefBase {
v := types.NewList(vs...)
r := ds.Store().WriteValue(v)
return types.NewRef(r)
return ds.Store().WriteValue(v)
}
func NewMap(ds Dataset, vs ...types.Value) types.Ref {
func NewMap(ds Dataset, vs ...types.Value) types.RefBase {
v := types.NewMap(vs...)
r := ds.Store().WriteValue(v)
return types.NewRef(r)
return ds.Store().WriteValue(v)
}
func NewSet(ds Dataset, vs ...types.Value) types.Ref {
func NewSet(ds Dataset, vs ...types.Value) types.RefBase {
v := types.NewSet(vs...)
r := ds.Store().WriteValue(v)
return types.NewRef(r)
return ds.Store().WriteValue(v)
}
func pullTest(t *testing.T, topdown bool) {

View File

@@ -1,6 +1,6 @@
{
"name": "@attic/noms",
"version": "8.1.0",
"version": "9.0.0",
"main": "dist/commonjs/noms.js",
"jsnext:main": "dist/es6/noms.js",
"dependencies": {

View File

@@ -52,7 +52,7 @@ suite('Blob', () => {
const ds = new DataStore(ms);
const b1 = await newBlob(randomArray(15));
const r1 = await ds.writeValue(b1);
const r1 = await ds.writeValue(b1).targetRef;
const b2 = await ds.readValue(r1);
assert.isTrue(b1.equals(b2));
});

View File

@@ -3,7 +3,6 @@
import {suite, test} from 'mocha';
import MemoryStore from './memory-store.js';
import Ref from './ref.js';
import RefValue from './ref-value.js';
import {assert} from 'chai';
import {default as DataStore, getDatasTypes, newCommit} from './data-store.js';
import {invariant, notNull} from './assert.js';
@@ -144,9 +143,9 @@ suite('DataStore', () => {
const commit = await newCommit('foo', []);
const commitRef = new RefValue(ds.writeValue(commit), types.refOfCommitType);
const commitRef = ds.writeValue(commit);
const datasets = await newMap(['foo', commitRef], types.commitMapType);
const rootRef = ds.writeValue(datasets);
const rootRef = ds.writeValue(datasets).targetRef;
assert.isTrue(await ms.updateRoot(rootRef, new Ref()));
ds = new DataStore(ms); // refresh the datasets
@@ -161,14 +160,14 @@ suite('DataStore', () => {
test('writeValue optional type', async () => {
const ds = new DataStore(new MemoryStore());
const r1 = ds.writeValue('hello');
const r2 = ds.writeValue(false);
const r1 = ds.writeValue('hello').targetRef;
const r2 = ds.writeValue(false).targetRef;
assert.throws(() => {
ds.writeValue(1);
});
const r3 = ds.writeValue(2, uint8Type);
const r3 = ds.writeValue(2, uint8Type).targetRef;
const v1 = await ds.readValue(r1);
assert.equal('hello', v1);
@@ -179,7 +178,7 @@ suite('DataStore', () => {
const mt = makeCompoundType(Kind.Map, uint8Type, stringType);
const m = await newMap([3, 'b', 4, 'c'], mt);
const r4 = ds.writeValue(m);
const r4 = ds.writeValue(m).targetRef;
const v4 = await ds.readValue(r4);
assert.isTrue(m.equals(v4));
});
@@ -188,11 +187,11 @@ suite('DataStore', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms, 1e6);
const r1 = ds.writeValue('hello');
const r1 = ds.writeValue('hello').targetRef;
(ms: any).get = (ms: any).put = () => { assert.fail('unreachable'); };
const v1 = await ds.readValue(r1);
assert.equal(v1, 'hello');
const r2 = ds.writeValue('hello');
const r2 = ds.writeValue('hello').targetRef;
assert.isTrue(r1.equals(r2));
});
@@ -200,8 +199,8 @@ suite('DataStore', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms, 15);
const r1 = ds.writeValue('hello');
const r2 = ds.writeValue('world');
const r1 = ds.writeValue('hello').targetRef;
const r2 = ds.writeValue('world').targetRef;
(ms: any).get = () => { throw new Error(); };
const v2 = await ds.readValue(r2);
assert.equal(v2, 'world');

View File

@@ -11,6 +11,7 @@ import type {valueOrPrimitive} from './value.js';
import {
Field,
makeCompoundType,
makeRefType,
makeStructType,
makeType,
Type,
@@ -137,7 +138,7 @@ export default class DataStore {
return p;
}
writeValue(v: any, t: ?Type = undefined): Ref {
writeValue<T: valueOrPrimitive>(v: T, t: ?Type = undefined): RefValue<T> {
if (!t) {
switch (typeof v) {
case 'string':
@@ -157,13 +158,14 @@ export default class DataStore {
const chunk = encodeNomsValue(v, t, this);
invariant(!chunk.isEmpty());
const {ref} = chunk;
const refValue = new RefValue(ref, makeRefType(t));
const entry = this._valueCache.entry(ref);
if (entry && entry.present) {
return ref;
return refValue;
}
this._cs.put(chunk);
this._valueCache.add(ref, chunk.data.length, Promise.resolve(v));
return ref;
return refValue;
}
async commit(datasetId: string, commit: Commit): Promise<DataStore> {
@@ -171,8 +173,7 @@ export default class DataStore {
const datasetsP = this._datasetsFromRootRef(currentRootRefP);
let currentDatasets = await (datasetsP:Promise<NomsMap>);
const currentRootRef = await currentRootRefP;
const types = getDatasTypes();
const commitRef = new RefValue(this.writeValue(commit), types.refOfCommitType);
const commitRef = this.writeValue(commit);
if (!currentRootRef.isEmpty()) {
const currentHeadRef = await currentDatasets.get(datasetId);
@@ -187,7 +188,7 @@ export default class DataStore {
}
currentDatasets = await currentDatasets.set(datasetId, commitRef);
const newRootRef = this.writeValue(currentDatasets);
const newRootRef = this.writeValue(currentDatasets).targetRef;
if (await this._cs.updateRoot(newRootRef, currentRootRef)) {
return new DataStore(this._cs);
}

View File

@@ -165,11 +165,10 @@ suite('Decode', () => {
test('read compound list', async () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const ltr = makeCompoundType(Kind.List, int32Type);
const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0, 1])));
const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [2, 3])));
const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [4, 5])));
const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0, 1]))).targetRef;
const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [2, 3]))).targetRef;
const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [4, 5]))).targetRef;
const tuples = [
new MetaTuple(r1, 2),
new MetaTuple(r2, 4),
@@ -563,8 +562,8 @@ suite('Decode', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const r1 = ds.writeValue(await newBlob(stringToUint8Array('hi')));
const r2 = ds.writeValue(await newBlob(stringToUint8Array('world')));
const r1 = ds.writeValue(await newBlob(stringToUint8Array('hi'))).targetRef;
const r2 = ds.writeValue(await newBlob(stringToUint8Array('world'))).targetRef;
const a = [Kind.Blob, true, [r1.ref.toString(), '2', r2.ref.toString(), '5']];
const r = new JsonArrayReader(a, ds);

View File

@@ -354,11 +354,10 @@ suite('Encode', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const w = new JsonArrayWriter(ds);
const ltr = makeCompoundType(Kind.List, int32Type);
const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0, 1])));
const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [2, 3])));
const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [4, 5])));
const r1 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [0, 1]))).targetRef;
const r2 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [2, 3]))).targetRef;
const r3 = ds.writeValue(new NomsList(ltr, new ListLeafSequence(ds, ltr, [4, 5]))).targetRef;
const tuples = [
new MetaTuple(r1, 2),
new MetaTuple(r2, 4),

View File

@@ -165,7 +165,7 @@ suite('BuildList', () => {
const nums = firstNNumbers(testListSize);
const tr = makeCompoundType(Kind.List, int64Type);
const s = await newList(nums, tr);
const r = ds.writeValue(s);
const r = ds.writeValue(s).targetRef;
const s2 = await ds.readValue(r);
const outNums = await s2.toJS();
assert.deepEqual(nums, outNums);
@@ -249,11 +249,9 @@ suite('ListLeafSequence', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, elemType);
const st = stringType;
const refOfSt = makeCompoundType(Kind.Ref, st);
const r1 = new RefValue(ds.writeValue('x'), refOfSt);
const r2 = new RefValue(ds.writeValue('a'), refOfSt);
const r3 = new RefValue(ds.writeValue('b'), refOfSt);
const r1 = ds.writeValue('x');
const r2 = ds.writeValue('a');
const r3 = ds.writeValue('b');
const l = new NomsList(tr, new ListLeafSequence(ds, tr, ['z', r1, r2, r3]));
assert.strictEqual(3, l.chunks.length);
assert.isTrue(r1.equals(l.chunks[0]));
@@ -276,20 +274,20 @@ suite('CompoundList', () => {
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.List, stringType);
const l1 = new NomsList(tr, new ListLeafSequence(ds, tr, ['a', 'b']));
const r1 = ds.writeValue(l1);
const r1 = ds.writeValue(l1).targetRef;
const l2 = new NomsList(tr, new ListLeafSequence(ds, tr, ['e', 'f']));
const r2 = ds.writeValue(l2);
const r2 = ds.writeValue(l2).targetRef;
const l3 = new NomsList(tr, new ListLeafSequence(ds, tr, ['h', 'i']));
const r3 = ds.writeValue(l3);
const r3 = ds.writeValue(l3).targetRef;
const l4 = new NomsList(tr, new ListLeafSequence(ds, tr, ['m', 'n']));
const r4 = ds.writeValue(l4);
const r4 = ds.writeValue(l4).targetRef;
const m1 = new NomsList(tr, new IndexedMetaSequence(ds, tr, [new MetaTuple(r1, 2),
new MetaTuple(r2, 2)]));
const rm1 = ds.writeValue(m1);
const rm1 = ds.writeValue(m1).targetRef;
const m2 = new NomsList(tr, new IndexedMetaSequence(ds, tr, [new MetaTuple(r3, 2),
new MetaTuple(r4, 2)]));
const rm2 = ds.writeValue(m2);
const rm2 = ds.writeValue(m2).targetRef;
const l = new NomsList(tr, new IndexedMetaSequence(ds, tr, [new MetaTuple(rm1, 4),
new MetaTuple(rm2, 4)]));

View File

@@ -140,7 +140,7 @@ suite('BuildMap', () => {
int64Type);
const m = await newMap(kvs, tr);
const r = ds.writeValue(m);
const r = ds.writeValue(m).targetRef;
const m2 = await ds.readValue(r);
const outKvs = [];
await m2.forEach((v, k) => outKvs.push(k, v));
@@ -256,12 +256,10 @@ suite('MapLeaf', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Map, keyType, valueType);
const st = stringType;
const refOfSt = makeCompoundType(Kind.Ref, st);
const r1 = new RefValue(ds.writeValue('x'), refOfSt);
const r2 = new RefValue(ds.writeValue(true), refOfSt);
const r3 = new RefValue(ds.writeValue('b'), refOfSt);
const r4 = new RefValue(ds.writeValue(false), refOfSt);
const r1 = ds.writeValue('x');
const r2 = ds.writeValue(true);
const r3 = ds.writeValue('b');
const r4 = ds.writeValue(false);
const m = new NomsMap(tr,
new MapLeafSequence(ds, tr, [{key: r1, value: r2}, {key: r3, value: r4}]));
assert.strictEqual(4, m.chunks.length);
@@ -286,23 +284,23 @@ suite('CompoundMap', () => {
boolType);
const l1 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'a', value: false},
{key:'b', value:false}]));
const r1 = ds.writeValue(l1);
const r1 = ds.writeValue(l1).targetRef;
const l2 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'e', value: true},
{key:'f', value:true}]));
const r2 = ds.writeValue(l2);
const r2 = ds.writeValue(l2).targetRef;
const l3 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'h', value: false},
{key:'i', value:true}]));
const r3 = ds.writeValue(l3);
const r3 = ds.writeValue(l3).targetRef;
const l4 = new NomsMap(tr, new MapLeafSequence(ds, tr, [{key: 'm', value: true},
{key:'n', value:false}]));
const r4 = ds.writeValue(l4);
const r4 = ds.writeValue(l4).targetRef;
const m1 = new NomsMap(tr, new OrderedMetaSequence(ds, tr, [new MetaTuple(r1, 'b'),
new MetaTuple(r2, 'f')]));
const rm1 = ds.writeValue(m1);
const rm1 = ds.writeValue(m1).targetRef;
const m2 = new NomsMap(tr, new OrderedMetaSequence(ds, tr, [new MetaTuple(r3, 'i'),
new MetaTuple(r4, 'n')]));
const rm2 = ds.writeValue(m2);
const rm2 = ds.writeValue(m2).targetRef;
const c = new NomsMap(tr, new OrderedMetaSequence(ds, tr, [new MetaTuple(rm1, 'f'),
new MetaTuple(rm2, 'n')]));

View File

@@ -3,12 +3,12 @@
import type DataStore from './data-store.js';
import type Ref from './ref.js';
import type {Type} from './type.js';
import type {Value} from './value.js'; // eslint-disable-line no-unused-vars
import type {Value, valueOrPrimitive} from './value.js'; // eslint-disable-line no-unused-vars
import {invariant} from './assert.js';
import {refOfValueType} from './type.js';
import {ValueBase} from './value.js';
export default class RefValue<T: Value> extends ValueBase {
export default class RefValue<T: valueOrPrimitive> extends ValueBase {
_type: Type;
targetRef: Ref;

View File

@@ -110,7 +110,7 @@ suite('BuildSet', () => {
const nums = firstNNumbers(testSetSize);
const tr = makeCompoundType(Kind.Set, int64Type);
const s = await newSet(nums, tr);
const r = ds.writeValue(s);
const r = ds.writeValue(s).targetRef;
const s2 = await ds.readValue(r);
const outNums = [];
await s2.forEach(k => outNums.push(k));
@@ -200,11 +200,9 @@ suite('SetLeaf', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const tr = makeCompoundType(Kind.Set, elemType);
const st = stringType;
const refOfSt = makeCompoundType(Kind.Ref, st);
const r1 = new RefValue(ds.writeValue('x'), refOfSt);
const r2 = new RefValue(ds.writeValue('a'), refOfSt);
const r3 = new RefValue(ds.writeValue('b'), refOfSt);
const r1 = ds.writeValue('x');
const r2 = ds.writeValue('a');
const r3 = ds.writeValue('b');
const l = new NomsSet(tr, new SetLeafSequence(ds, tr, ['z', r1, r2, r3]));
assert.strictEqual(3, l.chunks.length);
assert.isTrue(r1.equals(l.chunks[0]));
@@ -229,7 +227,7 @@ suite('CompoundSet', () => {
let tuples = [];
for (let i = 0; i < values.length; i += 2) {
const l = new NomsSet(tr, new SetLeafSequence(ds, tr, [values[i], values[i + 1]]));
const r = ds.writeValue(l);
const r = ds.writeValue(l).targetRef;
tuples.push(new MetaTuple(r, values[i + 1]));
}
@@ -238,7 +236,7 @@ suite('CompoundSet', () => {
const next = [];
for (let i = 0; i < tuples.length; i += 2) {
last = new NomsSet(tr, new OrderedMetaSequence(ds, tr, [tuples[i], tuples[i + 1]]));
const r = ds.writeValue(last);
const r = ds.writeValue(last).targetRef;
next.push(new MetaTuple(r, tuples[i + 1].value));
}

View File

@@ -1,7 +1,6 @@
// @flow
import MemoryStore from './memory-store.js';
import RefValue from './ref-value.js';
import {newStruct, StructMirror, createStructClass} from './struct.js';
import {assert} from 'chai';
import {
@@ -54,7 +53,7 @@ suite('Struct', () => {
const type = makeType(pkgRef, 0);
const b = true;
const r = new RefValue(ds.writeValue(b), refOfBoolType);
const r = ds.writeValue(b);
const s1 = newStruct(type, typeDef, {r: r});
assert.strictEqual(2, s1.chunks.length);
assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef));
@@ -82,7 +81,7 @@ suite('Struct', () => {
assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef));
const b = true;
const r = new RefValue(ds.writeValue(b), refOfBoolType);
const r = ds.writeValue(b);
const s2 = newStruct(type, typeDef, {r: r});
assert.strictEqual(2, s2.chunks.length);
assert.isTrue(pkgRef.equals(s2.chunks[0].targetRef));
@@ -110,7 +109,7 @@ suite('Struct', () => {
assert.isTrue(pkgRef.equals(s1.chunks[0].targetRef));
const b = true;
const r = new RefValue(ds.writeValue(b), refOfBoolType);
const r = ds.writeValue(b);
const s2 = newStruct(type, typeDef, {r: r});
assert.strictEqual(2, s2.chunks.length);
assert.isTrue(pkgRef.equals(s2.chunks[0].targetRef));

View File

@@ -38,11 +38,11 @@ suite('Type', () => {
const pkgRef = Ref.parse('sha1-0123456789abcdef0123456789abcdef01234567');
const trType = makeType(pkgRef, 42);
const otherRef = ds.writeValue(otherType);
const mapRef = ds.writeValue(mapType);
const setRef = ds.writeValue(setType);
const mahRef = ds.writeValue(mahType);
const trRef = ds.writeValue(trType);
const otherRef = ds.writeValue(otherType).targetRef;
const mapRef = ds.writeValue(mapType).targetRef;
const setRef = ds.writeValue(setType).targetRef;
const mahRef = ds.writeValue(mahType).targetRef;
const trRef = ds.writeValue(trType).targetRef;
assert.isTrue(otherType.equals(await ds.readValue(otherRef)));
assert.isTrue(mapType.equals(await ds.readValue(mapRef)));
@@ -90,7 +90,7 @@ suite('Type', () => {
const pkgRef = pkg.ref;
const unresolvedType = makeType(pkgRef, 42);
const unresolvedRef = ds.writeValue(unresolvedType);
const unresolvedRef = ds.writeValue(unresolvedType).targetRef;
const v = await ds.readValue(unresolvedRef);
assert.isNotNull(v);
@@ -107,7 +107,7 @@ suite('Type', () => {
const ms = new MemoryStore();
const ds = new DataStore(ms);
const v = makeType(new Ref(), -1);
const r = ds.writeValue(v);
const r = ds.writeValue(v).targetRef;
const v2 = await ds.readValue(r);
assert.isTrue(v.equals(v2));
});

View File

@@ -195,10 +195,10 @@ func buildSetOfRefOfPackage(pkg pkg.Parsed, deps depsMap, ds dataset.Dataset) ty
for _, dep := range deps {
// Writing the deps into ds should be redundant at this point, but do it to be sure.
// TODO: consider moving all dataset work over into nomdl/pkg BUG 409
s = s.Insert(types.NewRefOfPackage(ds.Store().WriteValue(dep)))
s = s.Insert(ds.Store().WriteValue(dep).(types.RefOfPackage))
}
r := ds.Store().WriteValue(pkg.Package)
return s.Insert(types.NewRefOfPackage(r))
r := ds.Store().WriteValue(pkg.Package).(types.RefOfPackage)
return s.Insert(r)
}
func getOutFileName(in string) string {

View File

@@ -203,12 +203,12 @@ func TestGenerateDeps(t *testing.T) {
defer os.RemoveAll(dir)
leaf1 := types.NewPackage([]types.Type{types.MakeEnumType("e1", "a", "b")}, []ref.Ref{})
leaf1Ref := ds.WriteValue(leaf1)
leaf1Ref := ds.WriteValue(leaf1).TargetRef()
leaf2 := types.NewPackage([]types.Type{types.MakePrimitiveType(types.BoolKind)}, []ref.Ref{})
leaf2Ref := ds.WriteValue(leaf2)
leaf2Ref := ds.WriteValue(leaf2).TargetRef()
depender := types.NewPackage([]types.Type{}, []ref.Ref{leaf1Ref})
dependerRef := ds.WriteValue(depender)
dependerRef := ds.WriteValue(depender).TargetRef()
top := types.NewPackage([]types.Type{}, []ref.Ref{leaf2Ref, dependerRef})
types.RegisterPackage(&top)
@@ -279,7 +279,7 @@ func TestCanUseDefFromImport(t *testing.T) {
X: Int64
}`)
pkg1 := pkg.ParseNomDL("test1", r1, dir, ds)
pkgRef1 := ds.WriteValue(pkg1.Package)
pkgRef1 := ds.WriteValue(pkg1.Package).TargetRef()
r2 := strings.NewReader(fmt.Sprintf(`
alias Other = import "%s"

View File

@@ -58,5 +58,5 @@ func (r {{.Name}}) TargetValue(vr {{$typesPackage}}ValueReader) {{userType .Elem
}
func (r {{.Name}}) SetTargetValue(val {{userType .ElemType}}, vw {{$typesPackage}}ValueWriter) {{.Name}} {
return New{{.Name}}(vw.WriteValue({{userToValue "val" .ElemType}}))
return vw.WriteValue({{userToValue "val" .ElemType}}).({{.Name}})
}

View File

@@ -41,7 +41,7 @@ func TestEnumValue(t *testing.T) {
func TestEnumIsValue(t *testing.T) {
ds := datas.NewDataStore(chunks.NewMemoryStore())
var v types.Value = gen.NewEnumStruct()
ref := ds.WriteValue(v)
ref := ds.WriteValue(v).TargetRef()
v2 := ds.ReadValue(ref)
assert.True(t, v.Equals(v2))
}

View File

@@ -169,7 +169,7 @@ func (r RefOfListOfString) TargetValue(vr types.ValueReader) ListOfString {
}
func (r RefOfListOfString) SetTargetValue(val ListOfString, vw types.ValueWriter) RefOfListOfString {
return NewRefOfListOfString(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfListOfString)
}
// ListOfRefOfFloat32
@@ -372,7 +372,7 @@ func (r RefOfSetOfFloat32) TargetValue(vr types.ValueReader) SetOfFloat32 {
}
func (r RefOfSetOfFloat32) SetTargetValue(val SetOfFloat32, vw types.ValueWriter) RefOfSetOfFloat32 {
return NewRefOfSetOfFloat32(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfSetOfFloat32)
}
// ListOfString
@@ -575,7 +575,7 @@ func (r RefOfFloat32) TargetValue(vr types.ValueReader) float32 {
}
func (r RefOfFloat32) SetTargetValue(val float32, vw types.ValueWriter) RefOfFloat32 {
return NewRefOfFloat32(vw.WriteValue(types.Float32(val)))
return vw.WriteValue(types.Float32(val)).(RefOfFloat32)
}
// SetOfFloat32

View File

@@ -42,7 +42,7 @@ func TestStructIsValue(t *testing.T) {
I: 42,
}.New()
ref := ds.WriteValue(v)
ref := ds.WriteValue(v).TargetRef()
v2 := ds.ReadValue(ref)
assert.True(v.Equals(v2))

View File

@@ -37,7 +37,7 @@ func resolveImports(aliases map[string]string, includePath string, vrw types.Val
d.Chk.NoError(err)
defer inFile.Close()
parsedDep := ParseNomDL(alias, inFile, filepath.Dir(canonical), vrw)
imports[alias] = vrw.WriteValue(parsedDep.Package)
imports[alias] = vrw.WriteValue(parsedDep.Package).TargetRef()
} else {
imports[alias] = r
}

View File

@@ -36,7 +36,7 @@ func (suite *ImportTestSuite) SetupTest() {
types.Field{"i", types.MakePrimitiveType(types.Int8Kind), false},
})
suite.nested = types.NewPackage([]types.Type{ns}, []ref.Ref{})
suite.nestedRef = suite.vrw.WriteValue(suite.nested)
suite.nestedRef = suite.vrw.WriteValue(suite.nested).TargetRef()
fs := types.MakeStructType("ForeignStruct", []types.Field{
types.Field{"b", types.MakeType(ref.Ref{}, 1), false},
@@ -45,7 +45,7 @@ func (suite *ImportTestSuite) SetupTest() {
types.Choices{})
fe := types.MakeEnumType("ForeignEnum", "uno", "dos")
suite.imported = types.NewPackage([]types.Type{fs, fe}, []ref.Ref{suite.nestedRef})
suite.importRef = suite.vrw.WriteValue(suite.imported)
suite.importRef = suite.vrw.WriteValue(suite.imported).TargetRef()
}
func (suite *ImportTestSuite) TestGetDeps() {
@@ -119,7 +119,7 @@ func (suite *ImportTestSuite) TestImports() {
suite.NoError(err)
defer inFile.Close()
parsedDep := ParseNomDL("", inFile, filepath.Dir(path), ds)
return ds.WriteValue(parsedDep.Package)
return ds.WriteValue(parsedDep.Package).TargetRef()
}
dir, err := ioutil.TempDir("", "")
@@ -218,7 +218,7 @@ func (suite *ImportTestSuite) TestImportWithLocalRef() {
X: Int64
}`)
pkg1 := ParseNomDL("test1", r1, dir, suite.vrw)
pkgRef1 := suite.vrw.WriteValue(pkg1.Package)
pkgRef1 := suite.vrw.WriteValue(pkg1.Package).TargetRef()
r2 := strings.NewReader(fmt.Sprintf(`
alias Other = import "%s"

View File

@@ -58,5 +58,5 @@ func (r RefOfBlob) TargetValue(vr ValueReader) Blob {
}
func (r RefOfBlob) SetTargetValue(val Blob, vw ValueWriter) RefOfBlob {
return NewRefOfBlob(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfBlob)
}

View File

@@ -90,7 +90,7 @@ func TestCompoundBlobReader(t *testing.T) {
test(cb)
vs := NewTestValueStore()
test(vs.ReadValue(vs.WriteValue(cb)).(compoundBlob))
test(vs.ReadValue(vs.WriteValue(cb).TargetRef()).(compoundBlob))
}
type testBlob struct {

View File

@@ -265,7 +265,7 @@ func makeListLeafChunkFn(t Type, sink ValueWriter) makeChunkFn {
list := valueFromType(newListLeaf(t, values...), t)
if sink != nil {
r := newRef(sink.WriteValue(list), MakeRefType(list.Type()))
r := sink.WriteValue(list)
return newMetaTuple(Uint64(len(values)), nil, r), list
}
return newMetaTuple(Uint64(len(values)), list, Ref{}), list

View File

@@ -114,7 +114,7 @@ func TestCompoundListGet(t *testing.T) {
tr := MakeCompoundType(ListKind, MakePrimitiveType(Int64Kind))
cl := NewTypedList(tr, simpleList...).(compoundList)
testGet(cl)
testGet(vs.ReadValue(vs.WriteValue(cl)).(compoundList))
testGet(vs.ReadValue(vs.WriteValue(cl).TargetRef()).(compoundList))
}
func TestCompoundListIter(t *testing.T) {
@@ -643,8 +643,7 @@ func TestCompoundListRefOfStructFirstNNumbers(t *testing.T) {
nums := []Value{}
for i := 0; i < n; i++ {
r := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Int64(i)}))
tr := newRef(r, refOfTypeStructType)
nums = append(nums, tr)
nums = append(nums, r)
}
return nums
@@ -661,7 +660,7 @@ func TestCompoundListModifyAfterRead(t *testing.T) {
list := getTestSimpleList().toCompoundList()
// Drop chunk values.
list = vs.ReadValue(vs.WriteValue(list)).(compoundList)
list = vs.ReadValue(vs.WriteValue(list).TargetRef()).(compoundList)
// Modify/query. Once upon a time this would crash.
llen := list.Len()
z := list.Get(0)

View File

@@ -123,7 +123,7 @@ func TestCompoundMapHas(t *testing.T) {
doTest := func(tm testMap) {
vs := NewTestValueStore()
m := tm.toCompoundMap()
m2 := vs.ReadValue(vs.WriteValue(m)).(compoundMap)
m2 := vs.ReadValue(vs.WriteValue(m).TargetRef()).(compoundMap)
for _, entry := range tm.entries {
k, v := entry.key, entry.value
assert.True(m.Has(k))
@@ -380,10 +380,8 @@ func TestCompoundMapRefOfStructFirstNNumbers(t *testing.T) {
kvs := []Value{}
n := 5000
for i := 0; i < n; i++ {
rk := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Int64(i)}))
k := newRef(rk, refOfTypeStructType)
rv := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Int64(i + 1)}))
v := newRef(rv, refOfTypeStructType)
k := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Int64(i)}))
v := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Int64(i + 1)}))
kvs = append(kvs, k, v)
}
@@ -396,7 +394,7 @@ func TestCompoundMapModifyAfterRead(t *testing.T) {
vs := NewTestValueStore()
m := getTestNativeOrderMap(2).toCompoundMap()
// Drop chunk values.
m = vs.ReadValue(vs.WriteValue(m)).(compoundMap)
m = vs.ReadValue(vs.WriteValue(m).TargetRef()).(compoundMap)
// Modify/query. Once upon a time this would crash.
fst, fstval := m.First()
m = m.Remove(fst).(compoundMap)

View File

@@ -100,7 +100,7 @@ func TestCompoundSetChunks(t *testing.T) {
doTest := func(ts testSet) {
vs := NewTestValueStore()
set := ts.toCompoundSet()
set2chunks := vs.ReadValue(vs.WriteValue(set)).(compoundSet).Chunks()
set2chunks := vs.ReadValue(vs.WriteValue(set).TargetRef()).(compoundSet).Chunks()
for i, r := range set.Chunks() {
assert.True(r.Type().Equals(set2chunks[i].Type()), "%s != %s", r.Type().Describe(), set2chunks[i].Type().Describe())
}
@@ -118,7 +118,7 @@ func TestCompoundSetHas(t *testing.T) {
doTest := func(ts testSet) {
vs := NewTestValueStore()
set := ts.toCompoundSet()
set2 := vs.ReadValue(vs.WriteValue(set)).(compoundSet)
set2 := vs.ReadValue(vs.WriteValue(set).TargetRef()).(compoundSet)
for _, v := range ts.values {
assert.True(set.Has(v))
assert.True(set2.Has(v))
@@ -378,8 +378,7 @@ func TestCompoundSetRefOfStructFirstNNumbers(t *testing.T) {
nums := []Value{}
for i := 0; i < n; i++ {
r := vs.WriteValue(NewStruct(structType, structTypeDef, structData{"n": Int64(i)}))
tr := newRef(r, refOfTypeStructType)
nums = append(nums, tr)
nums = append(nums, r)
}
return nums
@@ -395,7 +394,7 @@ func TestCompoundSetModifyAfterRead(t *testing.T) {
vs := NewTestValueStore()
set := getTestNativeOrderSet(2).toCompoundSet()
// Drop chunk values.
set = vs.ReadValue(vs.WriteValue(set)).(compoundSet)
set = vs.ReadValue(vs.WriteValue(set).TargetRef()).(compoundSet)
// Modify/query. Once upon a time this would crash.
fst := set.First()
set = set.Remove(fst).(compoundSet)

View File

@@ -590,7 +590,7 @@ func TestReadPackageThroughChunkSource(t *testing.T) {
}, Choices{}),
}, []ref.Ref{})
// Don't register
pkgRef := cs.WriteValue(pkg)
pkgRef := cs.WriteValue(pkg).TargetRef()
a := parseJson(`[%d, "%s", "0", "42"]`, UnresolvedKind, pkgRef.String())
r := newJsonArrayReader(a, cs)

View File

@@ -23,7 +23,7 @@ func TestGenericEnumWriteRead(t *testing.T) {
assert.False(vA.Equals(vB))
rA := vs.WriteValue(vA)
rA := vs.WriteValue(vA).TargetRef()
vA2 := vs.ReadValue(rA)
assert.True(vA.Equals(vA2))

View File

@@ -43,7 +43,7 @@ func TestIncrementalLoadList(t *testing.T) {
vs := newValueStore(cs)
expected := NewList(testVals...)
ref := vs.WriteValue(expected)
ref := vs.WriteValue(expected).TargetRef()
actualVar := vs.ReadValue(ref)
actual := actualVar.(List)
@@ -71,7 +71,7 @@ func SkipTestIncrementalLoadSet(t *testing.T) {
vs := newValueStore(cs)
expected := NewSet(testVals...)
ref := vs.WriteValue(expected)
ref := vs.WriteValue(expected).TargetRef()
actualVar := vs.ReadValue(ref)
actual := actualVar.(Set)
@@ -91,7 +91,7 @@ func SkipTestIncrementalLoadMap(t *testing.T) {
vs := newValueStore(cs)
expected := NewMap(testVals...)
ref := vs.WriteValue(expected)
ref := vs.WriteValue(expected).TargetRef()
actualVar := vs.ReadValue(ref)
actual := actualVar.(Map)
@@ -112,10 +112,10 @@ func SkipTestIncrementalAddRef(t *testing.T) {
vs := newValueStore(cs)
expectedItem := Uint32(42)
ref := vs.WriteValue(expectedItem)
ref := vs.WriteValue(expectedItem).TargetRef()
expected := NewList(NewRef(ref))
ref = vs.WriteValue(expected)
ref = vs.WriteValue(expected).TargetRef()
actualVar := vs.ReadValue(ref)
assert.Equal(1, cs.Reads)

View File

@@ -21,7 +21,7 @@ func newIndexedMetaSequenceChunkFn(t Type, source ValueReader, sink ValueWriter)
meta := newMetaSequenceFromData(tuples, t, source)
if sink != nil {
r := newRef(sink.WriteValue(meta), MakeRefType(meta.Type()))
r := sink.WriteValue(meta)
return newMetaTuple(Uint64(tuples.uint64ValuesSum()), nil, r), meta
}
return newMetaTuple(Uint64(tuples.uint64ValuesSum()), meta, Ref{}), meta

View File

@@ -13,42 +13,41 @@ func TestMeta(t *testing.T) {
vs := NewTestValueStore()
flatList := []Value{Uint32(0), Uint32(1), Uint32(2), Uint32(3), Uint32(4), Uint32(5), Uint32(6), Uint32(7)}
typeForRefOfListOfValue := MakeRefType(MakeCompoundType(ListKind, MakePrimitiveType(ValueKind)))
l0 := NewList(flatList[0])
lr0 := newRef(vs.WriteValue(l0), typeForRefOfListOfValue)
l0 := NewList(flatList[0])
lr0 := vs.WriteValue(l0)
l1 := NewList(flatList[1])
lr1 := newRef(vs.WriteValue(l1), typeForRefOfListOfValue)
lr1 := vs.WriteValue(l1)
l2 := NewList(flatList[2])
lr2 := newRef(vs.WriteValue(l2), typeForRefOfListOfValue)
lr2 := vs.WriteValue(l2)
l3 := NewList(flatList[3])
lr3 := newRef(vs.WriteValue(l3), typeForRefOfListOfValue)
lr3 := vs.WriteValue(l3)
l4 := NewList(flatList[4])
lr4 := newRef(vs.WriteValue(l4), typeForRefOfListOfValue)
lr4 := vs.WriteValue(l4)
l5 := NewList(flatList[5])
lr5 := newRef(vs.WriteValue(l5), typeForRefOfListOfValue)
lr5 := vs.WriteValue(l5)
l6 := NewList(flatList[6])
lr6 := newRef(vs.WriteValue(l6), typeForRefOfListOfValue)
lr6 := vs.WriteValue(l6)
l7 := NewList(flatList[7])
lr7 := newRef(vs.WriteValue(l7), typeForRefOfListOfValue)
lr7 := vs.WriteValue(l7)
mtr := l0.Type()
m0 := compoundList{metaSequenceObject{metaSequenceData{{l0, lr0, Uint64(1)}, {l1, lr1, Uint64(2)}}, mtr}, 0, &ref.Ref{}, vs}
lm0 := newRef(vs.WriteValue(m0), typeForRefOfListOfValue)
lm0 := vs.WriteValue(m0)
m1 := compoundList{metaSequenceObject{metaSequenceData{{l2, lr2, Uint64(1)}, {l3, lr3, Uint64(2)}}, mtr}, 0, &ref.Ref{}, vs}
lm1 := newRef(vs.WriteValue(m1), typeForRefOfListOfValue)
lm1 := vs.WriteValue(m1)
m2 := compoundList{metaSequenceObject{metaSequenceData{{l4, lr4, Uint64(1)}, {l5, lr5, Uint64(2)}}, mtr}, 0, &ref.Ref{}, vs}
lm2 := newRef(vs.WriteValue(m2), typeForRefOfListOfValue)
lm2 := vs.WriteValue(m2)
m3 := compoundList{metaSequenceObject{metaSequenceData{{l6, lr6, Uint64(1)}, {l7, lr7, Uint64(2)}}, mtr}, 0, &ref.Ref{}, vs}
lm3 := newRef(vs.WriteValue(m3), typeForRefOfListOfValue)
lm3 := vs.WriteValue(m3)
m00 := compoundList{metaSequenceObject{metaSequenceData{{m0, lm0, Uint64(2)}, {m1, lm1, Uint64(4)}}, mtr}, 0, &ref.Ref{}, vs}
lm00 := newRef(vs.WriteValue(m00), typeForRefOfListOfValue)
lm00 := vs.WriteValue(m00)
m01 := compoundList{metaSequenceObject{metaSequenceData{{m2, lm2, Uint64(2)}, {m3, lm3, Uint64(4)}}, mtr}, 0, &ref.Ref{}, vs}
lm01 := newRef(vs.WriteValue(m01), typeForRefOfListOfValue)
lm01 := vs.WriteValue(m01)
rootList := compoundList{metaSequenceObject{metaSequenceData{{m00, lm00, Uint64(4)}, {m01, lm01, Uint64(8)}}, mtr}, 0, &ref.Ref{}, vs}
rootRef := vs.WriteValue(rootList)
rootRef := vs.WriteValue(rootList).TargetRef()
rootList = vs.ReadValue(rootRef).(compoundList)

View File

@@ -127,6 +127,11 @@ func RegisterRef(t Type, bf refBuilderFunc) {
refFuncMap[t.Ref()] = bf
}
// TODO: This *isn't* public API: Remove. https://github.com/attic-labs/noms/issues/1209.
func PrivateRefFromType(target ref.Ref, t Type) RefBase {
return refFromType(target, t)
}
func refFromType(target ref.Ref, t Type) RefBase {
if f, ok := refFuncMap[t.Ref()]; ok {
return f(target)

View File

@@ -207,5 +207,5 @@ func (r RefOfPackage) TargetValue(vr ValueReader) Package {
}
func (r RefOfPackage) SetTargetValue(val Package, vw ValueWriter) RefOfPackage {
return NewRefOfPackage(vw.WriteValue(val))
return vw.WriteValue(val).(RefOfPackage)
}

View File

@@ -57,5 +57,5 @@ func (r Ref) TargetValue(vr ValueReader) Value {
func (r Ref) SetTargetValue(val Value, vw ValueWriter) Ref {
assertType(r.t.Desc.(CompoundDesc).ElemTypes[0], val)
return newRef(vw.WriteValue(val), r.t)
return newRef(vw.WriteValue(val).TargetRef(), r.t)
}

View File

@@ -27,11 +27,11 @@ func TestTypes(t *testing.T) {
pkgRef := ref.Parse("sha1-0123456789abcdef0123456789abcdef01234567")
trType := MakeType(pkgRef, 42)
mRef := vs.WriteValue(mapType)
setRef := vs.WriteValue(setType)
otherRef := vs.WriteValue(otherType)
mahRef := vs.WriteValue(mahType)
trRef := vs.WriteValue(trType)
mRef := vs.WriteValue(mapType).TargetRef()
setRef := vs.WriteValue(setType).TargetRef()
otherRef := vs.WriteValue(otherType).TargetRef()
mahRef := vs.WriteValue(mahType).TargetRef()
trRef := vs.WriteValue(trType).TargetRef()
assert.True(otherType.Equals(vs.ReadValue(otherRef)))
assert.True(mapType.Equals(vs.ReadValue(mRef)))
@@ -48,7 +48,7 @@ func TestTypeWithPkgRef(t *testing.T) {
pkgRef := RegisterPackage(&pkg)
unresolvedType := MakeType(pkgRef, 42)
unresolvedRef := vs.WriteValue(unresolvedType)
unresolvedRef := vs.WriteValue(unresolvedType).TargetRef()
v := vs.ReadValue(unresolvedRef)
assert.EqualValues(pkgRef, v.Chunks()[0].TargetRef())

View File

@@ -24,8 +24,9 @@ func (vrw *ValueStore) ReadValue(r ref.Ref) Value {
return DecodeChunk(vrw.cs.Get(r), vrw)
}
func (vrw *ValueStore) WriteValue(v Value) ref.Ref {
func (vrw *ValueStore) WriteValue(v Value) RefBase {
chunk := EncodeValue(v, vrw)
vrw.cs.Put(chunk)
return chunk.Ref()
targetRef := chunk.Ref()
return refFromType(targetRef, MakeRefType(v.Type()))
}

View File

@@ -1,13 +1,10 @@
package types
import (
"github.com/attic-labs/noms/chunks"
"github.com/attic-labs/noms/ref"
)
import "github.com/attic-labs/noms/chunks"
// ValueWriter is an interface that knows how to write Noms Values, e.g. datas/DataStore. Required to avoid import cycle between this package and the package that implements Value writing.
type ValueWriter interface {
WriteValue(v Value) ref.Ref
WriteValue(v Value) RefBase
}
type primitive interface {

View File

@@ -15,7 +15,7 @@ func TestWriteValue(t *testing.T) {
testEncode := func(expected string, v Value) ref.Ref {
vs := NewTestValueStore()
r := vs.WriteValue(v)
r := vs.WriteValue(v).TargetRef()
// Assuming that MemoryStore works correctly, we don't need to check the actual serialization, only the hash. Neat.
assert.EqualValues(sha1.Sum([]byte(expected)), r.Digest(), "Incorrect ref serializing %+v. Got: %#x", v, r.Digest())
@@ -42,7 +42,7 @@ func TestWriteBlobLeaf(t *testing.T) {
b1 := NewBlob(buf)
bl1, ok := b1.(blobLeaf)
assert.True(ok)
r1 := vs.WriteValue(bl1)
r1 := vs.WriteValue(bl1).TargetRef()
// echo -n 'b ' | sha1sum
assert.Equal("sha1-e1bc846440ec2fb557a5a271e785cd4c648883fa", r1.String())
@@ -50,7 +50,7 @@ func TestWriteBlobLeaf(t *testing.T) {
b2 := NewBlob(buf)
bl2, ok := b2.(blobLeaf)
assert.True(ok)
r2 := vs.WriteValue(bl2)
r2 := vs.WriteValue(bl2).TargetRef()
// echo -n 'b Hello, World!' | sha1sum
assert.Equal("sha1-135fe1453330547994b2ce8a1b238adfbd7df87e", r2.String())
}

View File

@@ -4,7 +4,6 @@ import (
"testing"
"github.com/attic-labs/noms/chunks"
"github.com/attic-labs/noms/ref"
"github.com/attic-labs/noms/types"
"github.com/stretchr/testify/suite"
)
@@ -23,15 +22,15 @@ func (suite *WalkAllTestSuite) SetupTest() {
suite.vs = types.NewTestValueStore()
}
func (suite *WalkAllTestSuite) walkWorker(r ref.Ref, expected int) {
func (suite *WalkAllTestSuite) walkWorker(r types.RefBase, expected int) {
actual := 0
AllP(types.NewRef(r), suite.vs, func(c types.Value) {
AllP(r, suite.vs, func(c types.Value) {
actual++
}, 1)
suite.Equal(expected, actual)
}
func (suite *WalkAllTestSuite) storeAndRef(v types.Value) ref.Ref {
func (suite *WalkAllTestSuite) storeAndRef(v types.Value) types.RefBase {
return suite.vs.WriteValue(v)
}
@@ -49,22 +48,19 @@ func (suite *WalkAllTestSuite) TestWalkComposites() {
suite.walkWorker(suite.storeAndRef(types.NewMap(types.Int32(8), types.Bool(true), types.Int32(0), types.Bool(false))), 6)
}
func (suite *WalkAllTestSuite) NewList(cs chunks.ChunkStore, vs ...types.Value) types.Ref {
func (suite *WalkAllTestSuite) NewList(cs chunks.ChunkStore, vs ...types.Value) types.RefBase {
v := types.NewList(vs...)
r := suite.vs.WriteValue(v)
return types.NewRef(r)
return suite.vs.WriteValue(v)
}
func (suite *WalkAllTestSuite) NewMap(cs chunks.ChunkStore, vs ...types.Value) types.Ref {
func (suite *WalkAllTestSuite) NewMap(cs chunks.ChunkStore, vs ...types.Value) types.RefBase {
v := types.NewMap(vs...)
r := suite.vs.WriteValue(v)
return types.NewRef(r)
return suite.vs.WriteValue(v)
}
func (suite *WalkAllTestSuite) NewSet(cs chunks.ChunkStore, vs ...types.Value) types.Ref {
func (suite *WalkAllTestSuite) NewSet(cs chunks.ChunkStore, vs ...types.Value) types.RefBase {
v := types.NewSet(vs...)
r := suite.vs.WriteValue(v)
return types.NewRef(r)
return suite.vs.WriteValue(v)
}
func (suite *WalkAllTestSuite) TestWalkNestedComposites() {