go/store/datas: StoreRoot RefMap -> AddressMap.

Aaron Son
2022-06-02 14:26:32 -07:00
parent debb94172b
commit bad143cbcb
9 changed files with 171 additions and 224 deletions
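At a high level, the StoreRoot table no longer holds an inline RefMap; it now embeds the serialized root node of a prolly AddressMap as a byte vector, and the datas layer hydrates that map against the chunk store. A minimal sketch of the new read path, assembled from the APIs this commit touches (the function name headAddr and the standalone packaging are illustrative, not part of the change):

package example

import (
	"context"

	"github.com/dolthub/dolt/go/gen/fb/serial"
	"github.com/dolthub/dolt/go/store/chunks"
	"github.com/dolthub/dolt/go/store/hash"
	"github.com/dolthub/dolt/go/store/prolly"
	"github.com/dolthub/dolt/go/store/prolly/tree"
)

// headAddr sketches how a dataset head address is now resolved: parse the
// StoreRoot message, rebuild the AddressMap from its embedded root node,
// and look the dataset up by name.
func headAddr(ctx context.Context, storeRootMsg []byte, cs chunks.ChunkStore, dataset string) (hash.Hash, error) {
	sr := serial.GetRootAsStoreRoot(storeRootMsg, 0)
	node := tree.NodeFromBytes(sr.AddressMapBytes())
	am := prolly.NewAddressMap(node, tree.NewNodeStore(cs))
	return am.Get(ctx, dataset)
}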


@@ -47,24 +47,48 @@ func (rcv *StoreRoot) Table() flatbuffers.Table {
return rcv._tab
}
func (rcv *StoreRoot) Refs(obj *RefMap) *RefMap {
func (rcv *StoreRoot) AddressMap(j int) byte {
o := flatbuffers.UOffsetT(rcv._tab.Offset(4))
if o != 0 {
x := rcv._tab.Indirect(o + rcv._tab.Pos)
if obj == nil {
obj = new(RefMap)
}
obj.Init(rcv._tab.Bytes, x)
return obj
a := rcv._tab.Vector(o)
return rcv._tab.GetByte(a + flatbuffers.UOffsetT(j*1))
}
return 0
}
func (rcv *StoreRoot) AddressMapLength() int {
o := flatbuffers.UOffsetT(rcv._tab.Offset(4))
if o != 0 {
return rcv._tab.VectorLen(o)
}
return 0
}
func (rcv *StoreRoot) AddressMapBytes() []byte {
o := flatbuffers.UOffsetT(rcv._tab.Offset(4))
if o != 0 {
return rcv._tab.ByteVector(o + rcv._tab.Pos)
}
return nil
}
func (rcv *StoreRoot) MutateAddressMap(j int, n byte) bool {
o := flatbuffers.UOffsetT(rcv._tab.Offset(4))
if o != 0 {
a := rcv._tab.Vector(o)
return rcv._tab.MutateByte(a+flatbuffers.UOffsetT(j*1), n)
}
return false
}
func StoreRootStart(builder *flatbuffers.Builder) {
builder.StartObject(1)
}
func StoreRootAddRefs(builder *flatbuffers.Builder, refs flatbuffers.UOffsetT) {
builder.PrependUOffsetTSlot(0, flatbuffers.UOffsetT(refs), 0)
func StoreRootAddAddressMap(builder *flatbuffers.Builder, addressMap flatbuffers.UOffsetT) {
builder.PrependUOffsetTSlot(0, flatbuffers.UOffsetT(addressMap), 0)
}
func StoreRootStartAddressMapVector(builder *flatbuffers.Builder, numElems int) flatbuffers.UOffsetT {
return builder.StartVector(1, numElems, 1)
}
func StoreRootEnd(builder *flatbuffers.Builder) flatbuffers.UOffsetT {
return builder.EndObject()


@@ -19,9 +19,7 @@ namespace serial;
// StoreRoot is the top-level chunk in the store,
// containing references to all named Refs.
table StoreRoot {
// map from ref name to ref hash
// with map root node inlined
refs:RefMap (required);
address_map:[ubyte]; // Embedded serialized AddressMap.
}
// KEEP THIS IN SYNC WITH fileidentifiers.go
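Since address_map is a plain [ubyte] vector, a StoreRoot can be assembled with the generated helpers and an ordinary CreateByteVector call. A simplified sketch (the commit's storeroot_flatbuffer, further below, additionally pre-aligns the vector bytes with builder.Prep before copying them in; buildStoreRoot is a hypothetical name):

package example

import (
	flatbuffers "github.com/google/flatbuffers/go"

	"github.com/dolthub/dolt/go/gen/fb/serial"
)

// buildStoreRoot wraps already-serialized AddressMap root-node bytes in a
// StoreRoot message tagged with the StoreRoot file identifier.
func buildStoreRoot(ambytes []byte) []byte {
	builder := flatbuffers.NewBuilder(len(ambytes) + 64)
	voff := builder.CreateByteVector(ambytes)
	serial.StoreRootStart(builder)
	serial.StoreRootAddAddressMap(builder, voff)
	builder.FinishWithFileIdentifier(serial.StoreRootEnd(builder), []byte(serial.StoreRootFileID))
	return builder.FinishedBytes()
}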


@@ -55,8 +55,8 @@ func (s *nomsRootTestSuite) TestBasic() {
goldenHello := "u8g2r4qg97kkqn42lvao77st2mv3bpl0\n"
goldenGoodbye := "70b9adi6amrab3a5t4hcibdob0cq49m0\n"
if types.Format_Default == types.Format_DOLT_DEV {
goldenHello = "bu6q8qir2vfq6lliqrko4jqls0rjga0h\n"
goldenGoodbye = "79a0kfbq40fl8s359e8ssitevt76jsnv\n"
goldenHello = "39s2lgmuevanphocgt4q25j196ghbh7u\n"
goldenGoodbye = "pvp4h0callkbs9lte1fqe9isirpmjtfs\n"
}
ds, _ = datas.CommitValue(context.Background(), db, ds, types.String("hello!"))


@@ -35,7 +35,7 @@ import (
"github.com/dolthub/dolt/go/store/cmd/noms/util"
"github.com/dolthub/dolt/go/store/config"
"github.com/dolthub/dolt/go/store/hash"
"github.com/dolthub/dolt/go/store/prolly"
"github.com/dolthub/dolt/go/store/prolly/shim"
"github.com/dolthub/dolt/go/store/prolly/tree"
"github.com/dolthub/dolt/go/store/types"
"github.com/dolthub/dolt/go/store/util/datetime"
@@ -179,7 +179,7 @@ func outputType(value interface{}) {
func outputEncodedValue(ctx context.Context, w io.Writer, value interface{}) error {
switch value := value.(type) {
case types.TupleRowStorage:
node := prolly.NodeFromValue(value)
node := shim.NodeFromValue(value)
return tree.OutputProllyNode(w, node)
case tree.Node:
return tree.OutputProllyNode(w, value)
@@ -216,7 +216,7 @@ func outputEncodedValue(ctx context.Context, w io.Writer, value interface{}) err
return nil
case serial.ProllyTreeNodeFileID:
node := prolly.NodeFromValue(value)
node := shim.NodeFromValue(value)
return tree.OutputProllyNode(w, node)
default:
return types.WriteEncodedValue(ctx, w, value)


@@ -28,6 +28,8 @@ import (
"github.com/dolthub/dolt/go/store/chunks"
"github.com/dolthub/dolt/go/store/hash"
"github.com/dolthub/dolt/go/store/prolly"
"github.com/dolthub/dolt/go/store/prolly/tree"
"github.com/dolthub/dolt/go/store/types"
)
@@ -87,17 +89,17 @@ func (db *database) loadDatasetsNomsMap(ctx context.Context, rootHash hash.Hash)
return val.(types.Map), nil
}
func (db *database) loadDatasetsRefmap(ctx context.Context, rootHash hash.Hash) (refmap, error) {
func (db *database) loadDatasetsRefmap(ctx context.Context, rootHash hash.Hash) (prolly.AddressMap, error) {
if rootHash == (hash.Hash{}) {
return empty_refmap(), nil
return prolly.NewEmptyAddressMap(tree.NewNodeStore(db.chunkStore())), nil
}
val, err := db.ReadValue(ctx, rootHash)
if err != nil {
return refmap{}, err
return prolly.AddressMap{}, err
}
return parse_storeroot([]byte(val.(types.SerialMessage))), nil
return parse_storeroot([]byte(val.(types.SerialMessage)), db.chunkStore()), nil
}
func getParentsClosure(ctx context.Context, vrw types.ValueReadWriter, parentRefsL types.List) (types.Ref, bool, error) {
@@ -240,23 +242,15 @@ func getParentsClosure(ctx context.Context, vrw types.ValueReadWriter, parentRef
}
type refmapDatasetsMap struct {
rm refmap
am prolly.AddressMap
}
func (m refmapDatasetsMap) Len() uint64 {
return m.rm.len()
return uint64(m.am.Count())
}
func (m refmapDatasetsMap) IterAll(ctx context.Context, cb func(string, hash.Hash) error) error {
addrs := m.rm.RefMap.RefArrayBytes()
for i := 0; i < m.rm.RefMap.NamesLength(); i++ {
name := string(m.rm.RefMap.Names(i))
addr := hash.New(addrs[i*20 : i*20+20])
if err := cb(name, addr); err != nil {
return err
}
}
return nil
return m.am.IterAll(ctx, cb)
}
type nomsDatasetsMap struct {
@@ -332,7 +326,10 @@ func (db *database) datasetFromMap(ctx context.Context, datasetID string, dsmap
return newDataset(db, datasetID, head, headAddr)
} else if rmdsmap, ok := dsmap.(refmapDatasetsMap); ok {
var err error
curr := rmdsmap.rm.lookup(datasetID)
curr, err := rmdsmap.am.Get(ctx, datasetID)
if err != nil {
return Dataset{}, err
}
var head types.Value
if !curr.IsEmpty() {
head, err = db.ReadValue(ctx, curr)
@@ -436,23 +433,31 @@ func (db *database) doSetHead(ctx context.Context, ds Dataset, addr hash.Hash) e
}
return datasets.Edit().Set(key, ref).Map(ctx)
}, func(ctx context.Context, rm refmap) (refmap, error) {
curr := rm.lookup(ds.ID())
}, func(ctx context.Context, am prolly.AddressMap) (prolly.AddressMap, error) {
curr, err := am.Get(ctx, ds.ID())
if err != nil {
return prolly.AddressMap{}, err
}
if curr != (hash.Hash{}) {
currHead, err := db.readHead(ctx, curr)
if err != nil {
return refmap{}, err
return prolly.AddressMap{}, err
}
currType := currHead.TypeName()
if currType != headType {
return refmap{}, fmt.Errorf("cannot change type of head; currently points at %s but new value would point at %s", currType, headType)
return prolly.AddressMap{}, fmt.Errorf("cannot change type of head; currently points at %s but new value would point at %s", currType, headType)
}
}
h, err := newVal.Hash(db.Format())
if err != nil {
return refmap{}, err
return prolly.AddressMap{}, err
}
return rm.set(ds.ID(), h), nil
ae := am.Editor()
err = ae.Update(ctx, ds.ID(), h)
if err != nil {
return prolly.AddressMap{}, err
}
return ae.Flush(ctx)
})
}
@@ -567,21 +572,29 @@ func (db *database) doCommit(ctx context.Context, datasetID string, datasetCurre
}
return datasets.Edit().Set(types.String(datasetID), newCommitValueRef).Map(ctx)
}, func(ctx context.Context, rm refmap) (refmap, error) {
curr := rm.lookup(datasetID)
}, func(ctx context.Context, am prolly.AddressMap) (prolly.AddressMap, error) {
curr, err := am.Get(ctx, datasetID)
if err != nil {
return prolly.AddressMap{}, err
}
if curr != datasetCurrentAddr {
return refmap{}, ErrMergeNeeded
return prolly.AddressMap{}, ErrMergeNeeded
}
h, err := newCommitValue.Hash(db.Format())
if err != nil {
return refmap{}, err
return prolly.AddressMap{}, err
}
if curr != (hash.Hash{}) {
if curr == h {
return refmap{}, ErrAlreadyCommitted
return prolly.AddressMap{}, ErrAlreadyCommitted
}
}
return rm.set(datasetID, h), nil
ae := am.Editor()
err = ae.Update(ctx, datasetID, h)
if err != nil {
return prolly.AddressMap{}, err
}
return ae.Flush(ctx)
})
}
@@ -616,12 +629,20 @@ func (db *database) doTag(ctx context.Context, datasetID string, tagAddr hash.Ha
}
return datasets.Edit().Set(types.String(datasetID), tagRef).Map(ctx)
}, func(ctx context.Context, rm refmap) (refmap, error) {
curr := rm.lookup(datasetID)
if curr != (hash.Hash{}) {
return refmap{}, fmt.Errorf("tag %s already exists and cannot be altered after creation", datasetID)
}, func(ctx context.Context, am prolly.AddressMap) (prolly.AddressMap, error) {
curr, err := am.Get(ctx, datasetID)
if err != nil {
return prolly.AddressMap{}, err
}
return rm.set(datasetID, tagAddr), nil
if curr != (hash.Hash{}) {
return prolly.AddressMap{}, fmt.Errorf("tag %s already exists and cannot be altered after creation", datasetID)
}
ae := am.Editor()
err = ae.Update(ctx, datasetID, tagAddr)
if err != nil {
return prolly.AddressMap{}, err
}
return ae.Flush(ctx)
})
}
@@ -655,12 +676,20 @@ func (db *database) doUpdateWorkingSet(ctx context.Context, datasetID string, ad
}
return datasets.Edit().Set(types.String(datasetID), ref).Map(ctx)
}, func(ctx context.Context, rm refmap) (refmap, error) {
curr := rm.lookup(datasetID)
if curr != currHash {
return refmap{}, ErrOptimisticLockFailed
}, func(ctx context.Context, am prolly.AddressMap) (prolly.AddressMap, error) {
curr, err := am.Get(ctx, datasetID)
if err != nil {
return prolly.AddressMap{}, err
}
return rm.set(datasetID, addr), nil
if curr != currHash {
return prolly.AddressMap{}, ErrOptimisticLockFailed
}
ae := am.Editor()
err = ae.Update(ctx, datasetID, addr)
if err != nil {
return prolly.AddressMap{}, err
}
return ae.Flush(ctx)
})
}
@@ -739,20 +768,31 @@ func (db *database) CommitWithWorkingSet(
Set(types.String(workingSetDS.ID()), wsValRef).
Set(types.String(commitDS.ID()), commitValRef).
Map(ctx)
}, func(ctx context.Context, rm refmap) (refmap, error) {
currWS := rm.lookup(workingSetDS.ID())
}, func(ctx context.Context, am prolly.AddressMap) (prolly.AddressMap, error) {
currWS, err := am.Get(ctx, workingSetDS.ID())
if err != nil {
return prolly.AddressMap{}, err
}
if currWS != prevWsHash {
return refmap{}, ErrOptimisticLockFailed
return prolly.AddressMap{}, ErrOptimisticLockFailed
}
currDS, err := am.Get(ctx, commitDS.ID())
if err != nil {
return prolly.AddressMap{}, err
}
currDS := rm.lookup(commitDS.ID())
if currDS != currDSHash {
return refmap{}, ErrMergeNeeded
return prolly.AddressMap{}, ErrMergeNeeded
}
rm = rm.edit([]RefMapEdit{
{commitDS.ID(), commitValRef.TargetHash()},
{workingSetDS.ID(), wsAddr},
})
return rm, nil
ae := am.Editor()
err = ae.Update(ctx, commitDS.ID(), commitValRef.TargetHash())
if err != nil {
return prolly.AddressMap{}, err
}
err = ae.Update(ctx, workingSetDS.ID(), wsAddr)
if err != nil {
return prolly.AddressMap{}, err
}
return ae.Flush(ctx)
})
if err != nil {
@@ -783,7 +823,7 @@ func (db *database) Delete(ctx context.Context, ds Dataset) (Dataset, error) {
func (db *database) update(ctx context.Context,
edit func(context.Context, types.Map) (types.Map, error),
editFB func(context.Context, refmap) (refmap, error)) error {
editFB func(context.Context, prolly.AddressMap) (prolly.AddressMap, error)) error {
var (
err error
root hash.Hash
@@ -808,7 +848,7 @@ func (db *database) update(ctx context.Context,
return err
}
data := datasets.storeroot_flatbuffer()
data := storeroot_flatbuffer(datasets)
r, err := db.WriteValue(ctx, types.SerialMessage(data))
if err != nil {
return err
@@ -862,15 +902,23 @@ func (db *database) doDelete(ctx context.Context, datasetIDstr string) error {
return types.Map{}, ErrMergeNeeded
}
return datasets.Edit().Remove(datasetID).Map(ctx)
}, func(ctx context.Context, rm refmap) (refmap, error) {
curr := rm.lookup(datasetIDstr)
}, func(ctx context.Context, am prolly.AddressMap) (prolly.AddressMap, error) {
curr, err := am.Get(ctx, datasetIDstr)
if err != nil {
return prolly.AddressMap{}, err
}
if curr != (hash.Hash{}) && firstHash == (hash.Hash{}) {
firstHash = curr
}
if curr != firstHash {
return refmap{}, ErrMergeNeeded
return prolly.AddressMap{}, ErrMergeNeeded
}
return rm.delete(datasetIDstr), nil
ae := am.Editor()
err = ae.Delete(ctx, datasetIDstr)
if err != nil {
return prolly.AddressMap{}, err
}
return ae.Flush(ctx)
})
}
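Each of the rewritten callbacks above follows the same AddressMap edit pattern: Get the current address, validate it, then Editor → Update (or Delete) → Flush to produce the new map. A small helper capturing that pattern, as a sketch only (setAddress is a hypothetical name; the commit keeps the steps inline in each callback):

package example

import (
	"context"

	"github.com/dolthub/dolt/go/store/hash"
	"github.com/dolthub/dolt/go/store/prolly"
)

// setAddress points key at addr in the AddressMap and returns the updated map.
func setAddress(ctx context.Context, am prolly.AddressMap, key string, addr hash.Hash) (prolly.AddressMap, error) {
	ae := am.Editor()
	if err := ae.Update(ctx, key, addr); err != nil {
		return prolly.AddressMap{}, err
	}
	return ae.Flush(ctx)
}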


@@ -21,7 +21,11 @@ import (
flatbuffers "github.com/google/flatbuffers/go"
"github.com/dolthub/dolt/go/gen/fb/serial"
"github.com/dolthub/dolt/go/store/chunks"
"github.com/dolthub/dolt/go/store/hash"
"github.com/dolthub/dolt/go/store/prolly"
"github.com/dolthub/dolt/go/store/prolly/tree"
"github.com/dolthub/dolt/go/store/types"
)
type RefMapEdit struct {
@@ -117,7 +121,6 @@ func RefMapApplyEdits(rm *serial.RefMap, builder *flatbuffers.Builder, edits []R
}
start = stop - hashessz
refarrayoff := builder.CreateByteVector(builder.Bytes[start:stop])
serial.RefMapStart(builder)
serial.RefMapAddNames(builder, namesoff)
serial.RefMapAddRefArray(builder, refarrayoff)
@@ -126,63 +129,26 @@ func RefMapApplyEdits(rm *serial.RefMap, builder *flatbuffers.Builder, edits []R
return serial.RefMapEnd(builder)
}
type refmap struct {
*serial.RefMap
}
func empty_refmap() refmap {
builder := flatbuffers.NewBuilder(24)
serial.RefMapStart(builder)
builder.Finish(serial.RefMapEnd(builder))
return refmap{serial.GetRootAsRefMap(builder.FinishedBytes(), 0)}
}
func (rm refmap) len() uint64 {
return uint64(rm.RefMap.NamesLength())
}
func (rm refmap) edit(edits []RefMapEdit) refmap {
func storeroot_flatbuffer(am prolly.AddressMap) []byte {
builder := flatbuffers.NewBuilder(1024)
builder.Finish(RefMapApplyEdits(rm.RefMap, builder, edits))
return refmap{serial.GetRootAsRefMap(builder.FinishedBytes(), 0)}
}
func (rm refmap) lookup(key string) hash.Hash {
return RefMapLookup(rm.RefMap, key)
}
func (rm refmap) set(key string, addr hash.Hash) refmap {
return rm.edit([]RefMapEdit{{key, addr}})
}
func (rm *refmap) delete(key string) refmap {
return rm.edit([]RefMapEdit{{key, hash.Hash{}}})
}
func (rm refmap) storeroot_flatbuffer() []byte {
builder := flatbuffers.NewBuilder(1024)
refmap := RefMapApplyEdits(rm.RefMap, builder, []RefMapEdit{})
ambytes := []byte(tree.ValueFromNode(am.Node()).(types.TupleRowStorage))
builder.Prep(flatbuffers.SizeUOffsetT, len(ambytes))
stop := int(builder.Head())
start := stop - len(ambytes)
copy(builder.Bytes[start:stop], ambytes)
voff := builder.CreateByteVector(builder.Bytes[start:stop])
serial.StoreRootStart(builder)
serial.StoreRootAddRefs(builder, refmap)
serial.StoreRootAddAddressMap(builder, voff)
builder.FinishWithFileIdentifier(serial.StoreRootEnd(builder), []byte(serial.StoreRootFileID))
return builder.FinishedBytes()
}
func parse_storeroot(bs []byte) refmap {
func parse_storeroot(bs []byte, cs chunks.ChunkStore) prolly.AddressMap {
if !bytes.Equal([]byte(serial.StoreRootFileID), bs[4:8]) {
panic("expected store root file id, got: " + string(bs[4:8]))
}
sr := serial.GetRootAsStoreRoot(bs, 0)
rm := sr.Refs(nil)
if rm == nil {
panic("refmap of storeroot was missing")
}
if rm.TreeLevel() != 0 {
panic("unsupported multi-level refmap")
}
if uint64(rm.NamesLength()) != rm.TreeCount() {
panic("inconsistent refmap at level 0 where names length != tree count")
}
return refmap{rm}
mapbytes := sr.AddressMapBytes()
node := tree.NodeFromBytes(mapbytes)
return prolly.NewAddressMap(node, tree.NewNodeStore(cs))
}
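storeroot_flatbuffer and parse_storeroot are now inverses: one flattens the AddressMap's root node into the address_map byte vector, the other rebuilds the map against a chunk store. A round-trip sketch (it would have to live inside package datas, since both helpers are unexported; cs is any chunks.ChunkStore and am an existing AddressMap):

// roundTripStoreRoot serializes am into a StoreRoot message and parses it back.
func roundTripStoreRoot(am prolly.AddressMap, cs chunks.ChunkStore) prolly.AddressMap {
	bs := storeroot_flatbuffer(am) // embeds am's root node as address_map:[ubyte]
	return parse_storeroot(bs, cs) // re-hydrates the map against the chunk store
}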


@@ -1,81 +0,0 @@
// Copyright 2022 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package datas
import (
"testing"
flatbuffers "github.com/google/flatbuffers/go"
"github.com/stretchr/testify/assert"
"github.com/dolthub/dolt/go/gen/fb/serial"
"github.com/dolthub/dolt/go/store/hash"
)
func TestRefMapLookupEmpty(t *testing.T) {
rm := empty_refmap().RefMap
assert.Equal(t, rm.NamesLength(), 0)
assert.Equal(t, RefMapLookup(rm, ""), hash.Hash{})
assert.Equal(t, RefMapLookup(rm, "doesnotexist"), hash.Hash{})
}
func edit_refmap(rm *serial.RefMap, edits []RefMapEdit) *serial.RefMap {
builder := flatbuffers.NewBuilder(1024)
builder.Finish(RefMapApplyEdits(rm, builder, edits))
return serial.GetRootAsRefMap(builder.FinishedBytes(), 0)
}
func TestRefMapEditInserts(t *testing.T) {
empty := empty_refmap().RefMap
a_hash := hash.Parse("3i50gcjrl9m2pgolrkc22kq46sj4p96o")
with_a := edit_refmap(empty, []RefMapEdit{RefMapEdit{"a", a_hash}})
assert.Equal(t, RefMapLookup(with_a, "a"), a_hash)
assert.Equal(t, RefMapLookup(with_a, "A"), hash.Hash{})
assert.Equal(t, RefMapLookup(empty, "a"), hash.Hash{})
b_hash := hash.Parse("7mm15d7prjlurr8g4u51n7dfg6bemt7p")
with_ab_from_a := edit_refmap(with_a, []RefMapEdit{RefMapEdit{"b", b_hash}})
with_ab_from_empty := edit_refmap(empty, []RefMapEdit{RefMapEdit{"b", b_hash}, RefMapEdit{"a", a_hash}})
assert.Equal(t, with_ab_from_a.Table().Bytes, with_ab_from_empty.Table().Bytes)
assert.Equal(t, RefMapLookup(with_ab_from_a, "a"), a_hash)
assert.Equal(t, RefMapLookup(with_ab_from_a, "b"), b_hash)
assert.Equal(t, RefMapLookup(with_ab_from_a, "c"), hash.Hash{})
assert.Equal(t, RefMapLookup(with_ab_from_a, "A"), hash.Hash{})
}
func TestRefMapEditDeletes(t *testing.T) {
empty := empty_refmap().RefMap
a_hash := hash.Parse("3i50gcjrl9m2pgolrkc22kq46sj4p96o")
b_hash := hash.Parse("7mm15d7prjlurr8g4u51n7dfg6bemt7p")
with_ab := edit_refmap(empty, []RefMapEdit{RefMapEdit{"b", b_hash}, RefMapEdit{"a", a_hash}})
without_a := edit_refmap(with_ab, []RefMapEdit{{Name: "a"}})
assert.Equal(t, RefMapLookup(without_a, "a"), hash.Hash{})
assert.Equal(t, RefMapLookup(without_a, "b"), b_hash)
without_ab := edit_refmap(without_a, []RefMapEdit{{Name: "b"}})
assert.Equal(t, without_ab.NamesLength(), 0)
assert.Equal(t, without_ab.RefArrayLength(), 0)
assert.Equal(t, RefMapLookup(without_ab, "a"), hash.Hash{})
assert.Equal(t, RefMapLookup(without_ab, "b"), hash.Hash{})
assert.Equal(t, empty.Table().Bytes, without_ab.Table().Bytes)
with_b := edit_refmap(empty, []RefMapEdit{RefMapEdit{"b", b_hash}})
assert.Equal(t, without_a.Table().Bytes, with_b.Table().Bytes)
delete_from_empty := edit_refmap(empty, []RefMapEdit{RefMapEdit{Name: "b"}})
assert.Equal(t, delete_from_empty.Table().Bytes, empty.Table().Bytes)
}


@@ -30,9 +30,13 @@ type AddressMap struct {
}
func NewEmptyAddressMap(ns tree.NodeStore) AddressMap {
return NewAddressMap(newEmptyMapNode(ns.Pool()), ns)
}
func NewAddressMap(node tree.Node, ns tree.NodeStore) AddressMap {
return AddressMap{
addresses: orderedTree[stringSlice, address, lexicographic]{
root: newEmptyMapNode(ns.Pool()),
root: node,
ns: ns,
order: lexicographic{},
},
@@ -59,6 +63,10 @@ func (c AddressMap) Height() int {
return c.addresses.height()
}
func (c AddressMap) Node() tree.Node {
return c.addresses.root
}
func (c AddressMap) HashOf() hash.Hash {
return c.addresses.hashOf()
}
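The new NewAddressMap constructor and Node accessor are what let the datas layer round-trip the map through a StoreRoot: build or edit a map, then hand its root node to the serializer. A sketch of that flow (the function name and the dataset name "refs/heads/main" are illustrative only):

package example

import (
	"context"

	"github.com/dolthub/dolt/go/store/hash"
	"github.com/dolthub/dolt/go/store/prolly"
	"github.com/dolthub/dolt/go/store/prolly/tree"
)

// rootNodeAfterUpdate starts from an empty AddressMap, points one name at
// addr, and returns the resulting root node, ready to be embedded in a
// StoreRoot message.
func rootNodeAfterUpdate(ctx context.Context, ns tree.NodeStore, addr hash.Hash) (tree.Node, error) {
	am := prolly.NewEmptyAddressMap(ns)
	ae := am.Editor()
	if err := ae.Update(ctx, "refs/heads/main", addr); err != nil {
		return tree.Node{}, err
	}
	am, err := ae.Flush(ctx)
	if err != nil {
		return tree.Node{}, err
	}
	return am.Node(), nil
}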


@@ -56,15 +56,8 @@ func (sm SerialMessage) HumanReadableString() string {
case serial.StoreRootFileID:
msg := serial.GetRootAsStoreRoot([]byte(sm), 0)
ret := &strings.Builder{}
refs := msg.Refs(nil)
fmt.Fprintf(ret, "{\n")
hashes := refs.RefArrayBytes()
for i := 0; i < refs.NamesLength(); i++ {
name := refs.Names(i)
addr := hash.New(hashes[i*20 : (i+1)*20])
fmt.Fprintf(ret, "\t%s: #%s\n", name, addr.String())
}
fmt.Fprintf(ret, "}")
mapbytes := msg.AddressMapBytes()
fmt.Fprintf(ret, "StoreRoot{%s}", TupleRowStorage(mapbytes).HumanReadableString())
return ret.String()
case serial.TagFileID:
return "Tag"
@@ -175,18 +168,9 @@ func (sm SerialMessage) walkRefs(nbf *NomsBinFormat, cb RefCallback) error {
switch serial.GetFileID([]byte(sm)) {
case serial.StoreRootFileID:
msg := serial.GetRootAsStoreRoot([]byte(sm), 0)
rm := msg.Refs(nil)
refs := rm.RefArrayBytes()
for i := 0; i < rm.NamesLength(); i++ {
off := i * 20
addr := hash.New(refs[off : off+20])
r, err := constructRef(nbf, addr, PrimitiveTypeMap[ValueKind], SerialMessageRefHeight)
if err != nil {
return err
}
if err = cb(r); err != nil {
return err
}
if msg.AddressMapLength() > 0 {
mapbytes := msg.AddressMapBytes()
return TupleRowStorage(mapbytes).walkRefs(nbf, cb)
}
case serial.TagFileID:
msg := serial.GetRootAsTag([]byte(sm), 0)