Rename NewTypedRefFromValue to NewRef and old NewRef to constructRef (#1427)

This is because:
* All type.Ref values are now typed, so the `Typed` in the name was a tautology.
* The only way to construct a type.Ref is from a Value, so the `FromValue`
  suffix was also a tautology (after a small amount of work to remove the
  remaining callers of the old NewRef).
This commit is contained in:
Ben Kalman
2016-05-11 18:06:17 -07:00
parent a3b18b3f62
commit 7ebc0f2feb
24 changed files with 88 additions and 86 deletions

View File

@@ -53,7 +53,7 @@ func main() {
}
var err error
sinkDataset, err = sinkDataset.Pull(sourceStore, types.NewTypedRefFromValue(sourceObj), int(*p))
sinkDataset, err = sinkDataset.Pull(sourceStore, types.NewRef(sourceObj), int(*p))
util.MaybeWriteMemProfile()
d.Exp.NoError(err)

View File

@@ -78,13 +78,13 @@ func (suite *DatabaseSuite) TestReadWriteCachePersists() {
suite.NoError(err)
suite.Equal(1, suite.cs.Writes-writesOnCommit)
newCommit := NewCommit().Set(ValueField, r).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(commit)))
newCommit := NewCommit().Set(ValueField, r).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(commit)))
suite.ds, err = suite.ds.Commit("foo", newCommit)
suite.NoError(err)
}
func (suite *DatabaseSuite) TestWriteRefToNonexistentValue() {
suite.Panics(func() { suite.ds.WriteValue(types.NewTypedRefFromValue(types.Bool(true))) })
suite.Panics(func() { suite.ds.WriteValue(types.NewRef(types.Bool(true))) })
}
func (suite *DatabaseSuite) TestTolerateUngettableRefs() {
@@ -118,7 +118,7 @@ func (suite *DatabaseSuite) TestDatabaseCommit() {
// |a| <- |b|
b := types.NewString("b")
bCommit := NewCommit().Set(ValueField, b).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(aCommit)))
bCommit := NewCommit().Set(ValueField, b).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(aCommit)))
suite.ds, err = suite.ds.Commit(datasetID, bCommit)
suite.NoError(err)
suite.True(suite.ds.Head(datasetID).Get(ValueField).Equals(b))
@@ -128,14 +128,14 @@ func (suite *DatabaseSuite) TestDatabaseCommit() {
// \----|c|
// Should be disallowed.
c := types.NewString("c")
cCommit := NewCommit().Set(ValueField, c).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(aCommit)))
cCommit := NewCommit().Set(ValueField, c).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(aCommit)))
suite.ds, err = suite.ds.Commit(datasetID, cCommit)
suite.Error(err)
suite.True(suite.ds.Head(datasetID).Get(ValueField).Equals(b))
// |a| <- |b| <- |d|
d := types.NewString("d")
dCommit := NewCommit().Set(ValueField, d).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(bCommit)))
dCommit := NewCommit().Set(ValueField, d).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(bCommit)))
suite.ds, err = suite.ds.Commit(datasetID, dCommit)
suite.NoError(err)
suite.True(suite.ds.Head(datasetID).Get(ValueField).Equals(d))
@@ -204,7 +204,7 @@ func (suite *DatabaseSuite) TestDatabaseDeleteConcurrent() {
// |a| <- |b|
b := types.NewString("b")
bCommit := NewCommit().Set(ValueField, b).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(aCommit)))
bCommit := NewCommit().Set(ValueField, b).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(aCommit)))
ds2, err := suite.ds.Commit(datasetID, bCommit)
suite.NoError(err)
suite.True(suite.ds.Head(datasetID).Get(ValueField).Equals(a))
@@ -233,7 +233,7 @@ func (suite *DatabaseSuite) TestDatabaseConcurrency() {
aCommit := NewCommit().Set(ValueField, a)
suite.ds, err = suite.ds.Commit(datasetID, aCommit)
b := types.NewString("b")
bCommit := NewCommit().Set(ValueField, b).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(aCommit)))
bCommit := NewCommit().Set(ValueField, b).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(aCommit)))
suite.ds, err = suite.ds.Commit(datasetID, bCommit)
suite.NoError(err)
suite.True(suite.ds.Head(datasetID).Get(ValueField).Equals(b))
@@ -244,7 +244,7 @@ func (suite *DatabaseSuite) TestDatabaseConcurrency() {
// Change 1:
// |a| <- |b| <- |c|
c := types.NewString("c")
cCommit := NewCommit().Set(ValueField, c).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(bCommit)))
cCommit := NewCommit().Set(ValueField, c).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(bCommit)))
suite.ds, err = suite.ds.Commit(datasetID, cCommit)
suite.NoError(err)
suite.True(suite.ds.Head(datasetID).Get(ValueField).Equals(c))
@@ -253,7 +253,7 @@ func (suite *DatabaseSuite) TestDatabaseConcurrency() {
// |a| <- |b| <- |e|
// Should be disallowed, Database returned by Commit() should have |c| as Head.
e := types.NewString("e")
eCommit := NewCommit().Set(ValueField, e).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewTypedRefFromValue(bCommit)))
eCommit := NewCommit().Set(ValueField, e).Set(ParentsField, NewSetOfRefOfCommit().Insert(types.NewRef(bCommit)))
ds2, err = ds2.Commit(datasetID, eCommit)
suite.Error(err)
suite.True(ds2.Head(datasetID).Get(ValueField).Equals(c))

View File

@@ -90,14 +90,14 @@ func (suite *HTTPBatchStoreSuite) TestPutChunk() {
}
func (suite *HTTPBatchStoreSuite) TestPutChunksInOrder() {
chnx := []chunks.Chunk{
types.EncodeValue(types.NewString("abc"), nil),
types.EncodeValue(types.NewString("def"), nil),
vals := []types.Value{
types.NewString("abc"),
types.NewString("def"),
}
l := types.NewList()
for _, c := range chnx {
suite.store.SchedulePut(c, types.Hints{})
l = l.Append(newStringRef(c.Ref(), 1))
for _, val := range vals {
suite.store.SchedulePut(types.EncodeValue(val, nil), types.Hints{})
l = l.Append(types.NewRef(val))
}
suite.store.SchedulePut(types.EncodeValue(l, nil), types.Hints{})
suite.store.Flush()
@@ -106,12 +106,16 @@ func (suite *HTTPBatchStoreSuite) TestPutChunksInOrder() {
}
func (suite *HTTPBatchStoreSuite) TestPutChunkWithHints() {
vals := []types.Value{
types.NewString("abc"),
types.NewString("def"),
}
chnx := []chunks.Chunk{
types.EncodeValue(types.NewString("abc"), nil),
types.EncodeValue(types.NewString("def"), nil),
types.EncodeValue(vals[0], nil),
types.EncodeValue(vals[1], nil),
}
suite.NoError(suite.cs.PutMany(chnx))
l := types.NewList(newStringRef(chnx[0].Ref(), 1), newStringRef(chnx[1].Ref(), 1))
l := types.NewList(types.NewRef(vals[0]), types.NewRef(vals[1]))
suite.store.SchedulePut(types.EncodeValue(l, nil), types.Hints{
chnx[0].Ref(): struct{}{},
@@ -153,14 +157,14 @@ func (suite *HTTPBatchStoreSuite) TestPutChunksBackpressure() {
defer bs.Close()
defer bpcs.Close()
chnx := []chunks.Chunk{
types.EncodeValue(types.NewString("abc"), nil),
types.EncodeValue(types.NewString("def"), nil),
vals := []types.Value{
types.NewString("abc"),
types.NewString("def"),
}
l := types.NewList()
for _, c := range chnx {
bs.SchedulePut(c, types.Hints{})
l = l.Append(newStringRef(c.Ref(), 1))
for _, v := range vals {
bs.SchedulePut(types.EncodeValue(v, nil), types.Hints{})
l = l.Append(types.NewRef(v))
}
bs.SchedulePut(types.EncodeValue(l, nil), types.Hints{})
bs.Flush()
@@ -186,7 +190,3 @@ func (suite *HTTPBatchStoreSuite) TestGet() {
got := suite.store.Get(c.Ref())
suite.Equal(c.Ref(), got.Ref())
}
func newStringRef(r ref.Ref, height uint64) types.Ref {
return types.NewTypedRef(types.MakeRefType(types.StringType), r, height)
}

View File

@@ -47,14 +47,14 @@ func (suite *NotABatchSinkSuite) TearDownTest() {
}
func (suite *NotABatchSinkSuite) TestPutChunks() {
chnx := []chunks.Chunk{
types.EncodeValue(types.NewString("abc"), nil),
types.EncodeValue(types.NewString("def"), nil),
vals := []types.Value{
types.NewString("abc"),
types.NewString("def"),
}
l := types.NewList()
for _, c := range chnx {
suite.store.SchedulePut(c, types.Hints{})
l = l.Append(newStringRef(c.Ref(), 1))
for _, v := range vals {
suite.store.SchedulePut(types.EncodeValue(v, nil), types.Hints{})
l = l.Append(types.NewRef(v))
}
suite.store.SchedulePut(types.EncodeValue(l, nil), types.Hints{})
suite.store.Flush()

View File

@@ -33,7 +33,7 @@ func TestHandleWriteValue(t *testing.T) {
hint := l.Ref()
newItem := types.NewEmptyBlob()
itemChunk := types.EncodeValue(newItem, nil)
l2 := l.Insert(1, types.NewTypedRefFromValue(newItem))
l2 := l.Insert(1, types.NewRef(newItem))
listChunk := types.EncodeValue(l2, nil)
body := &bytes.Buffer{}
@@ -69,7 +69,7 @@ func TestHandleWriteValueBackpressure(t *testing.T) {
hint := l.Ref()
newItem := types.NewEmptyBlob()
itemChunk := types.EncodeValue(newItem, nil)
l2 := l.Insert(1, types.NewTypedRefFromValue(newItem))
l2 := l.Insert(1, types.NewRef(newItem))
listChunk := types.EncodeValue(l2, nil)
body := &bytes.Buffer{}

View File

@@ -75,8 +75,8 @@ func TestExplicitBranchUsingDatasets(t *testing.T) {
// ds1: |a| <- |b| <--|d|
// \ds2 <- |c| <--/
mergeParents := datas.NewSetOfRefOfCommit().
Insert(types.NewTypedRefFromValue(ds1.Head())).
Insert(types.NewTypedRefFromValue(ds2.Head()))
Insert(types.NewRef(ds1.Head())).
Insert(types.NewRef(ds2.Head()))
d := types.NewString("d")
ds2, err = ds2.CommitWithParents(d, mergeParents)
assert.NoError(err)

View File

@@ -19,7 +19,7 @@ func TestValidateRef(t *testing.T) {
r := ds.Store().WriteValue(b)
assert.Panics(t, func() { ds.validateRefAsCommit(r) })
assert.Panics(t, func() { ds.validateRefAsCommit(types.NewTypedRefFromValue(b)) })
assert.Panics(t, func() { ds.validateRefAsCommit(types.NewRef(b)) })
}
func NewList(ds Dataset, vs ...types.Value) types.Ref {
@@ -69,7 +69,7 @@ func pullTest(t *testing.T, topdown bool) {
source, err = source.Commit(updatedValue)
assert.NoError(err)
sink, err = sink.pull(source.Store(), types.NewTypedRefFromValue(source.Head()), 1, topdown)
sink, err = sink.pull(source.Store(), types.NewRef(source.Head()), 1, topdown)
assert.NoError(err)
assert.True(source.Head().Equals(sink.Head()))
}
@@ -98,7 +98,7 @@ func pullFirstCommit(t *testing.T, topdown bool) {
source, err := source.Commit(sourceInitialValue)
assert.NoError(err)
sink, err = sink.pull(source.Store(), types.NewTypedRefFromValue(source.Head()), 1, topdown)
sink, err = sink.pull(source.Store(), types.NewRef(source.Head()), 1, topdown)
assert.NoError(err)
assert.True(source.Head().Equals(sink.Head()))
}
@@ -125,7 +125,7 @@ func pullDeepRef(t *testing.T, topdown bool) {
source, err := source.Commit(sourceInitialValue)
assert.NoError(err)
sink, err = sink.pull(source.Store(), types.NewTypedRefFromValue(source.Head()), 1, topdown)
sink, err = sink.pull(source.Store(), types.NewRef(source.Head()), 1, topdown)
assert.NoError(err)
assert.True(source.Head().Equals(sink.Head()))
}

View File

@@ -139,7 +139,7 @@ func newBlobLeafChunkFn(vr ValueReader) makeChunkFn {
}
blob := newBlob(newBlobLeafSequence(vr, buff))
return newMetaTuple(Number(len(buff)), blob, NewTypedRefFromValue(blob), uint64(len(buff))), blob
return newMetaTuple(Number(len(buff)), blob, NewRef(blob), uint64(len(buff))), blob
}
}

View File

@@ -81,7 +81,7 @@ func (r *jsonArrayReader) readKind() NomsKind {
func (r *jsonArrayReader) readRef(t *Type) Ref {
ref := ref.Parse(r.readString())
height := r.readUint()
return NewTypedRef(t, ref, height)
return constructRef(t, ref, height)
}
func (r *jsonArrayReader) readType(parentStructTypes []*Type) *Type {

View File

@@ -127,8 +127,8 @@ func TestReadCompoundList(t *testing.T) {
list1 := newList(newListLeafSequence(tr, cs, Number(0)))
list2 := newList(newListLeafSequence(tr, cs, Number(1), Number(2), Number(3)))
l2 := newList(newIndexedMetaSequence([]metaTuple{
newMetaTuple(Number(1), list1, NewTypedRefFromValue(list1), 1),
newMetaTuple(Number(4), list2, NewTypedRefFromValue(list2), 4),
newMetaTuple(Number(1), list1, NewRef(list1), 1),
newMetaTuple(Number(4), list2, NewRef(list2), 4),
}, tr, cs))
a := parseJSON(`[
@@ -151,8 +151,8 @@ func TestReadCompoundSet(t *testing.T) {
set1 := newSet(newSetLeafSequence(tr, cs, Number(0), Number(1)))
set2 := newSet(newSetLeafSequence(tr, cs, Number(2), Number(3), Number(4)))
l2 := newSet(newOrderedMetaSequence([]metaTuple{
newMetaTuple(Number(1), set1, NewTypedRefFromValue(set1), 2),
newMetaTuple(Number(4), set2, NewTypedRefFromValue(set2), 3),
newMetaTuple(Number(1), set1, NewRef(set1), 2),
newMetaTuple(Number(4), set2, NewRef(set2), 3),
}, tr, cs))
a := parseJSON(`[
@@ -216,9 +216,9 @@ func TestReadCompoundBlob(t *testing.T) {
_, ok := m.(Blob)
assert.True(ok)
m2 := newBlob(newIndexedMetaSequence([]metaTuple{
newMetaTuple(Number(20), nil, NewTypedRef(RefOfBlobType, r1, 1), 20),
newMetaTuple(Number(40), nil, NewTypedRef(RefOfBlobType, r2, 1), 40),
newMetaTuple(Number(60), nil, NewTypedRef(RefOfBlobType, r3, 1), 60),
newMetaTuple(Number(20), nil, constructRef(RefOfBlobType, r1, 1), 20),
newMetaTuple(Number(40), nil, constructRef(RefOfBlobType, r2, 1), 40),
newMetaTuple(Number(60), nil, constructRef(RefOfBlobType, r3, 1), 60),
}, BlobType, cs))
assert.True(m.Type().Equals(m2.Type()))
@@ -308,7 +308,7 @@ func TestReadRef(t *testing.T) {
reader := newJSONArrayReader(a, cs)
v := reader.readValue()
tr := MakeRefType(NumberType)
assert.True(NewTypedRef(tr, r, 42).Equals(v))
assert.True(constructRef(tr, r, 42).Equals(v))
}
func TestReadStructWithBlob(t *testing.T) {

View File

@@ -131,9 +131,9 @@ func TestWriteCompoundBlob(t *testing.T) {
r3 := ref.Parse("sha1-0000000000000000000000000000000000000003")
v := newBlob(newIndexedMetaSequence([]metaTuple{
newMetaTuple(Number(20), nil, NewTypedRef(RefOfBlobType, r1, 11), 20),
newMetaTuple(Number(40), nil, NewTypedRef(RefOfBlobType, r2, 22), 40),
newMetaTuple(Number(60), nil, NewTypedRef(RefOfBlobType, r3, 33), 60),
newMetaTuple(Number(20), nil, constructRef(RefOfBlobType, r1, 11), 20),
newMetaTuple(Number(40), nil, constructRef(RefOfBlobType, r2, 22), 40),
newMetaTuple(Number(60), nil, constructRef(RefOfBlobType, r3, 33), 60),
}, BlobType, NewTestValueStore()))
w := newJSONArrayWriter(NewTestValueStore())
w.writeValue(v)
@@ -245,8 +245,8 @@ func TestWriteCompoundList(t *testing.T) {
list1 := newList(newListLeafSequence(ltr, cs, Number(0)))
list2 := newList(newListLeafSequence(ltr, cs, Number(1), Number(2), Number(3)))
cl := newList(newIndexedMetaSequence([]metaTuple{
newMetaTuple(Number(1), list1, NewTypedRefFromValue(list1), 1),
newMetaTuple(Number(4), list2, NewTypedRefFromValue(list2), 4),
newMetaTuple(Number(1), list1, NewRef(list1), 1),
newMetaTuple(Number(4), list2, NewRef(list2), 4),
}, ltr, cs))
w := newJSONArrayWriter(cs)
@@ -266,8 +266,8 @@ func TestWriteCompoundSet(t *testing.T) {
set1 := newSet(newSetLeafSequence(ltr, cs, Number(0), Number(1)))
set2 := newSet(newSetLeafSequence(ltr, cs, Number(2), Number(3), Number(4)))
cl := newSet(newOrderedMetaSequence([]metaTuple{
newMetaTuple(Number(1), set1, NewTypedRefFromValue(set1), 2),
newMetaTuple(Number(4), set2, NewTypedRefFromValue(set2), 3),
newMetaTuple(Number(1), set1, NewRef(set1), 2),
newMetaTuple(Number(4), set2, NewRef(set2), 3),
}, ltr, cs))
w := newJSONArrayWriter(cs)
@@ -348,7 +348,7 @@ func TestWriteRef(t *testing.T) {
typ := MakeRefType(NumberType)
r := ref.Parse("sha1-0123456789abcdef0123456789abcdef01234567")
v := NewTypedRef(typ, r, 4)
v := constructRef(typ, r, 4)
w := newJSONArrayWriter(NewTestValueStore())
w.writeValue(v)

View File

@@ -33,8 +33,8 @@ func TestValueEquals(t *testing.T) {
b1 := NewBlob(bytes.NewBufferString("hi"))
b2 := NewBlob(bytes.NewBufferString("bye"))
return newBlob(newIndexedMetaSequence([]metaTuple{
newMetaTuple(Number(uint64(2)), b1, NewTypedRefFromValue(b1), 2),
newMetaTuple(Number(uint64(5)), b2, NewTypedRefFromValue(b2), 5),
newMetaTuple(Number(uint64(2)), b1, NewRef(b1), 2),
newMetaTuple(Number(uint64(5)), b2, NewRef(b2), 5),
}, BlobType, nil))
},
func() Value { return NewList() },
@@ -76,7 +76,7 @@ func TestValueEquals(t *testing.T) {
}
v := f1()
if v != nil {
r := NewTypedRefFromValue(v)
r := NewRef(v)
assert.False(r.Equals(v))
assert.False(v.Equals(r))
}

View File

@@ -111,6 +111,6 @@ func newIndexedMetaSequenceChunkFn(t *Type, source ValueReader, sink ValueWriter
r := sink.WriteValue(col)
return newMetaTuple(Number(tuples.uint64ValuesSum()), nil, r, numLeaves), col
}
return newMetaTuple(Number(tuples.uint64ValuesSum()), col, NewTypedRefFromValue(col), numLeaves), col
return newMetaTuple(Number(tuples.uint64ValuesSum()), col, NewRef(col), numLeaves), col
}
}

View File

@@ -205,6 +205,6 @@ func makeListLeafChunkFn(t *Type, vr ValueReader, sink ValueWriter) makeChunkFn
if sink != nil {
return newMetaTuple(Number(len(values)), nil, sink.WriteValue(list), uint64(len(values))), list
}
return newMetaTuple(Number(len(values)), list, NewTypedRefFromValue(list), uint64(len(values))), list
return newMetaTuple(Number(len(values)), list, NewRef(list), uint64(len(values))), list
}
}

View File

@@ -263,10 +263,10 @@ func makeMapLeafChunkFn(t *Type, vr ValueReader) makeChunkFn {
if len(mapData) > 0 {
indexValue = mapData[len(mapData)-1].key
if !isSequenceOrderedByIndexedType(t) {
indexValue = NewTypedRefFromValue(indexValue)
indexValue = NewRef(indexValue)
}
}
return newMetaTuple(indexValue, m, NewTypedRefFromValue(m), uint64(len(items))), m
return newMetaTuple(indexValue, m, NewRef(m), uint64(len(items))), m
}
}

View File

@@ -791,11 +791,11 @@ func TestMapChunks(t *testing.T) {
c1 := l1.Chunks()
assert.Len(c1, 0)
l2 := NewMap(NewTypedRefFromValue(Number(0)), Number(1))
l2 := NewMap(NewRef(Number(0)), Number(1))
c2 := l2.Chunks()
assert.Len(c2, 1)
l3 := NewMap(Number(0), NewTypedRefFromValue(Number(1)))
l3 := NewMap(Number(0), NewRef(Number(1)))
c3 := l3.Chunks()
assert.Len(c3, 1)
}

View File

@@ -128,6 +128,6 @@ func newOrderedMetaSequenceChunkFn(t *Type, vr ValueReader) makeChunkFn {
col = newMap(metaSeq)
}
return newMetaTuple(tuples.last().value, col, NewTypedRefFromValue(col), numLeaves), col
return newMetaTuple(tuples.last().value, col, NewRef(col), numLeaves), col
}
}

View File

@@ -12,13 +12,15 @@ type Ref struct {
ref *ref.Ref
}
func NewTypedRef(t *Type, target ref.Ref, height uint64) Ref {
d.Chk.Equal(RefKind, t.Kind(), "Invalid type. Expected: RefKind, found: %s", t.Describe())
return Ref{target, height, t, &ref.Ref{}}
func NewRef(v Value) Ref {
return Ref{v.Ref(), maxChunkHeight(v) + 1, MakeRefType(v.Type()), &ref.Ref{}}
}
func NewTypedRefFromValue(v Value) Ref {
return NewTypedRef(MakeRefType(v.Type()), v.Ref(), maxChunkHeight(v)+1)
// Constructs a Ref directly from struct properties. This should not be used outside decoding and testing within the types package.
func constructRef(t *Type, target ref.Ref, height uint64) Ref {
d.Chk.Equal(RefKind, t.Kind(), "Invalid type. Expected: RefKind, found: %s", t.Describe())
d.Chk.NotEqual(ValueType, t.Desc.(CompoundDesc).ElemTypes[0])
return Ref{target, height, t, &ref.Ref{}}
}
func maxChunkHeight(v Value) (max uint64) {

View File

@@ -10,7 +10,7 @@ func TestRefInList(t *testing.T) {
assert := assert.New(t)
l := NewList()
r := NewTypedRefFromValue(l)
r := NewRef(l)
l = l.Append(r)
r2 := l.Get(0)
assert.True(r.Equals(r2))
@@ -20,7 +20,7 @@ func TestRefInSet(t *testing.T) {
assert := assert.New(t)
s := NewSet()
r := NewTypedRefFromValue(s)
r := NewRef(s)
s = s.Insert(r)
r2 := s.First()
assert.True(r.Equals(r2))
@@ -30,7 +30,7 @@ func TestRefInMap(t *testing.T) {
assert := assert.New(t)
m := NewMap()
r := NewTypedRefFromValue(m)
r := NewRef(m)
m = m.Set(Number(0), r).Set(r, Number(1))
r2 := m.Get(Number(0))
assert.True(r.Equals(r2))
@@ -43,7 +43,7 @@ func TestRefChunks(t *testing.T) {
assert := assert.New(t)
l := NewList()
r := NewTypedRefFromValue(l)
r := NewRef(l)
assert.Len(r.Chunks(), 1)
assert.Equal(r, r.Chunks()[0])
}

View File

@@ -238,10 +238,10 @@ func makeSetLeafChunkFn(t *Type, vr ValueReader) makeChunkFn {
if len(setData) > 0 {
indexValue = setData[len(setData)-1]
if !isSequenceOrderedByIndexedType(t) {
indexValue = NewTypedRefFromValue(indexValue)
indexValue = NewRef(indexValue)
}
}
return newMetaTuple(indexValue, set, NewTypedRefFromValue(set), uint64(len(items))), set
return newMetaTuple(indexValue, set, NewRef(set), uint64(len(items))), set
}
}

View File

@@ -673,7 +673,7 @@ func TestSetChunks(t *testing.T) {
c1 := l1.Chunks()
assert.Len(c1, 0)
l2 := NewSet(NewTypedRefFromValue(Number(0)))
l2 := NewSet(NewRef(Number(0)))
c2 := l2.Chunks()
assert.Len(c2, 1)
}

View File

@@ -32,7 +32,7 @@ func TestGenericStructChunks(t *testing.T) {
b := Bool(true)
data1 := structData{"r": NewTypedRefFromValue(b)}
data1 := structData{"r": NewRef(b)}
s1 := newStructFromData(data1, typ)
assert.Len(s1.Chunks(), 1)

View File

@@ -67,7 +67,7 @@ func (lvs *ValueStore) WriteValue(v Value) Ref {
c := EncodeValue(v, lvs)
d.Chk.False(c.IsEmpty())
hash := c.Ref()
r := NewTypedRef(MakeRefType(v.Type()), hash, maxChunkHeight(v)+1)
r := constructRef(MakeRefType(v.Type()), hash, maxChunkHeight(v)+1)
if lvs.isPresent(hash) {
return r
}

View File

@@ -77,7 +77,7 @@ func TestCheckChunksInCache(t *testing.T) {
cs.Put(EncodeValue(b, nil))
cvs.set(b.Ref(), hintedChunk{b.Type(), b.Ref()})
bref := NewTypedRefFromValue(b)
bref := NewRef(b)
assert.NotPanics(func() { cvs.checkChunksInCache(bref) })
}