diff --git a/newset/README.md b/newset/README.md
new file mode 100644
index 0000000000..c2684483e3
--- /dev/null
+++ b/newset/README.md
@@ -0,0 +1,3 @@
+This is a work-in-progress implementation of a multi-tiered chunked set.
+
+It isn't used by noms yet; the plan is to gradually make it efficient, implement all set operations on it, and then replace the noms set implementation.
diff --git a/newset/buz_chunker.go b/newset/buz_chunker.go
new file mode 100644
index 0000000000..e53adfc574
--- /dev/null
+++ b/newset/buz_chunker.go
@@ -0,0 +1,35 @@
+package newset
+
+import (
+	"github.com/attic-labs/noms/Godeps/_workspace/src/github.com/kch42/buzhash"
+	"github.com/attic-labs/noms/ref"
+)
+
+const (
+	buzPattern = uint32(1<<6 - 1) // Average chunk size of 64 elements
+)
+
+type buzChunker struct {
+	h *buzhash.BuzHash
+}
+
+func newBuzChunker() *buzChunker {
+	return &buzChunker{newBuzHash()}
+}
+
+func (c *buzChunker) Add(r ref.Ref) bool {
+	c.h.Write(r.DigestSlice())
+	isBoundary := c.h.Sum32()&buzPattern == buzPattern
+	if isBoundary {
+		c.h = newBuzHash()
+	}
+	return isBoundary
+}
+
+func (c *buzChunker) New() Chunker {
+	return newBuzChunker()
+}
+
+func newBuzHash() *buzhash.BuzHash {
+	return buzhash.NewBuzHash(uint32(8 * ref.NewHash().BlockSize()))
+}
diff --git a/newset/buz_chunker_test.go b/newset/buz_chunker_test.go
new file mode 100644
index 0000000000..d48bfd1e4c
--- /dev/null
+++ b/newset/buz_chunker_test.go
@@ -0,0 +1,35 @@
+package newset
+
+import (
+	"testing"
+
+	"github.com/attic-labs/noms/Godeps/_workspace/src/github.com/stretchr/testify/assert"
+	"github.com/attic-labs/noms/ref"
+	"github.com/attic-labs/noms/types"
+)
+
+func TestNumMatches(t *testing.T) {
+	assert := assert.New(t)
+	chunker := newBuzChunker()
+
+	numMatches := 0
+	for i := 0; i < 1000; i++ {
+		if chunker.Add(getRef(i)) {
+			numMatches++
+		}
+	}
+
+	// 20 was experimentally determined by calling Add 1000 times.
+	assert.Equal(20, numMatches)
+}
+
+func TestThing(t *testing.T) {
+	assert := assert.New(t)
+	// This ref has been experimentally determined to produce a chunk boundary immediately.
+	r := ref.Parse("sha1-00000000000000000000000000000000000f422f")
+	assert.True(newBuzChunker().Add(r))
+}
+
+func getRef(i int) ref.Ref {
+	return types.Int32(i).Ref()
+}
diff --git a/newset/chunked_set.go b/newset/chunked_set.go
new file mode 100644
index 0000000000..2acf66e271
--- /dev/null
+++ b/newset/chunked_set.go
@@ -0,0 +1,75 @@
+package newset
+
+import (
+	"fmt"
+	"sort"
+	"strings"
+
+	"github.com/attic-labs/noms/ref"
+)
+
+type chunkedSet struct {
+	children entrySlice // sorted
+}
+
+type chunkedSetEntry struct {
+	start ref.Ref
+	set   Set
+}
+
+type entrySlice []chunkedSetEntry
+
+func (es entrySlice) Len() int {
+	return len(es)
+}
+
+func (es entrySlice) Less(i, j int) bool {
+	return ref.Less(es[i].start, es[j].start)
+}
+
+func (es entrySlice) Swap(i, j int) {
+	es[i], es[j] = es[j], es[i]
+}
+
+func (set chunkedSet) Len() (length uint64) {
+	for _, entry := range set.children {
+		length += entry.set.Len()
+	}
+	return
+}
+
+func (set chunkedSet) first() ref.Ref {
+	return set.children[0].start
+}
+
+func (set chunkedSet) Has(r ref.Ref) bool {
+	searchIndex := sort.Search(len(set.children), func(i int) bool {
+		return ref.Greater(set.children[i].start, r)
+	})
+	if searchIndex == 0 {
+		return false
+	}
+	searchIndex--
+	return set.children[searchIndex].set.Has(r)
+}
+
+func (set chunkedSet) Ref() ref.Ref {
+	// Eventually, when chunked sets use noms Values, this will need to be derived from the serialization of the chunked set, not simply a hash of the child sets' refs.
+	h := ref.NewHash()
+	for _, entry := range set.children {
+		h.Write(entry.set.Ref().DigestSlice())
+	}
+	return ref.FromHash(h)
+}
+
+func (set chunkedSet) fmt(indent int) string {
+	indentStr := strings.Repeat(" ", indent)
+	if len(set.children) == 0 {
+		return fmt.Sprintf("%s(empty chunked set)", indentStr)
+	}
+	s := fmt.Sprintf("%s(chunked with %d chunks)\n", indentStr, len(set.children))
+	for i, entry := range set.children {
+		s += fmt.Sprintf("%schunk %d (start %s)\n%s\n", indentStr, i, fmtRef(entry.start), entry.set.fmt(indent+4))
+	}
+	return s
+}
diff --git a/newset/chunker.go b/newset/chunker.go
new file mode 100644
index 0000000000..bc38d8c996
--- /dev/null
+++ b/newset/chunker.go
@@ -0,0 +1,10 @@
+package newset
+
+import "github.com/attic-labs/noms/ref"
+
+type Chunker interface {
+	// Add adds a ref to the chunker and reports whether it results in a chunk boundary.
+	Add(r ref.Ref) bool
+	// New returns a fresh instance of this chunker's type. This is really a factory method hanging off an instance, which is a bit icky.
+	New() Chunker
+}
diff --git a/newset/flat_set.go b/newset/flat_set.go
new file mode 100644
index 0000000000..f1799bd7d9
--- /dev/null
+++ b/newset/flat_set.go
@@ -0,0 +1,58 @@
+package newset
+
+import (
+	"fmt"
+	"sort"
+	"strings"
+
+	"github.com/attic-labs/noms/ref"
+)
+
+type flatSet struct {
+	d ref.RefSlice // sorted
+	r *ref.Ref
+}
+
+func (s flatSet) Len() uint64 {
+	return uint64(len(s.d))
+}
+
+func (s flatSet) Has(r ref.Ref) bool {
+	idx := s.searchForIndex(r)
+	return idx != len(s.d) && s.d[idx] == r
+}
+
+func (s flatSet) first() ref.Ref {
+	return s.d[0]
+}
+
+func (s flatSet) Ref() ref.Ref {
+	if s.r == nil {
+		h := ref.NewHash()
+		for _, r := range s.d {
+			h.Write(r.DigestSlice())
+		}
+		r := ref.FromHash(h)
+		s.r = &r
+	}
+	return *s.r
+}
+
+func (s flatSet) fmt(indent int) string {
+	indentStr := strings.Repeat(" ", indent)
+	if len(s.d) == 1 {
+		return fmt.Sprintf("%sflat %s", indentStr, fmtRef(s.d[0]))
+	}
+	return fmt.Sprintf("%sflat{%s...(%d more)...%s}", indentStr, fmtRef(s.d[0]), len(s.d)-2, fmtRef(s.d[len(s.d)-1]))
+}
+
+func (s flatSet) searchForIndex(r ref.Ref) int {
+	return sort.Search(len(s.d), func(i int) bool {
+		return !ref.Less(s.d[i], r)
+	})
+}
+
+func fmtRef(r ref.Ref) string {
+	str := r.String()
+	return str[len(str)-8:]
+}
diff --git a/newset/referrator.go b/newset/referrator.go
new file mode 100644
index 0000000000..954e4b920e
--- /dev/null
+++ b/newset/referrator.go
@@ -0,0 +1,28 @@
+package newset
+
+import (
+	"math/big"
+
+	"github.com/attic-labs/noms/ref"
+)
+
+// referrator generates fake, ascending ref.Refs.
+type referrator struct {
+	count *big.Int
+}
+
+func newReferrator() referrator {
+	return referrator{big.NewInt(int64(0))}
+}
+
+func (r referrator) Next() ref.Ref {
+	digest := ref.Sha1Digest{}
+	bytes := r.count.Bytes()
+	for i := 0; i < len(bytes); i++ {
+		digest[len(digest)-i-1] = bytes[len(bytes)-i-1]
+	}
+
+	result := ref.New(digest)
+	r.count.Add(r.count, big.NewInt(int64(1)))
+	return result
+}
diff --git a/newset/referrator_test.go b/newset/referrator_test.go
new file mode 100644
index 0000000000..293d3b2d19
--- /dev/null
+++ b/newset/referrator_test.go
@@ -0,0 +1,19 @@
+package newset
+
+import (
+	"testing"
+
+	"github.com/attic-labs/noms/Godeps/_workspace/src/github.com/stretchr/testify/assert"
+)
+
+func TestReferrator(t *testing.T) {
+	assert := assert.New(t)
+
+	ator := newReferrator()
+	assert.Equal("sha1-0000000000000000000000000000000000000000", ator.Next().String())
+	assert.Equal("sha1-0000000000000000000000000000000000000001", ator.Next().String())
+	for i := 0; i < 510; i++ {
+		ator.Next()
+	}
+	assert.Equal("sha1-0000000000000000000000000000000000000200", ator.Next().String())
+}
diff --git a/newset/set.go b/newset/set.go
new file mode 100644
index 0000000000..66802f97bc
--- /dev/null
+++ b/newset/set.go
@@ -0,0 +1,13 @@
+package newset
+
+import (
+	"github.com/attic-labs/noms/ref"
+)
+
+type Set interface {
+	first() ref.Ref
+	Len() uint64
+	Has(r ref.Ref) bool
+	Ref() ref.Ref
+	fmt(indent int) string
+}
diff --git a/newset/set_builder.go b/newset/set_builder.go
new file mode 100644
index 0000000000..0d9f6e2dc9
--- /dev/null
+++ b/newset/set_builder.go
@@ -0,0 +1,91 @@
+package newset
+
+import (
+	"github.com/attic-labs/noms/d"
+	"github.com/attic-labs/noms/ref"
+)
+
+// This file is a giant copy-paste, but the architecture of chunking will likely be rewritten in terms of iteration, so deal with it then.
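+//
+// A SetBuilder accumulates refs via AddItem (refs must be added in ascending
+// ref order) and assembles them into a Set on Build: the Chunker splits the
+// stream into flat leaf chunks, which are recursively re-chunked into a tree.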
+type SetBuilder interface {
+	AddItem(r ref.Ref)
+	Build() Set
+}
+
+type leafSetBuilder struct {
+	current flatSet
+	chunks  []flatSet
+	chunker Chunker
+}
+
+func NewSetBuilder() SetBuilder {
+	return NewSetBuilderWithChunker(newBuzChunker())
+}
+
+func NewSetBuilderWithChunker(chunker Chunker) SetBuilder {
+	return &leafSetBuilder{chunker: chunker}
+}
+
+func (builder *leafSetBuilder) AddItem(r ref.Ref) {
+	builder.current.d = append(builder.current.d, r)
+	if builder.chunker.Add(r) {
+		builder.chunks = append(builder.chunks, builder.current)
+		builder.current = flatSet{}
+	}
+}
+
+func (builder *leafSetBuilder) Build() Set {
+	if builder.current.Len() > uint64(0) {
+		builder.chunks = append(builder.chunks, builder.current)
+	}
+
+	if len(builder.chunks) == 1 {
+		d.Chk.NotEqual(uint64(0), builder.chunks[0].Len())
+		return builder.chunks[0]
+	}
+
+	mcb := newMetaChunkBuilder(builder.chunker.New())
+	for _, c := range builder.chunks {
+		mcb.AddItem(c)
+	}
+
+	return mcb.Build()
+}
+
+type chunkedSetBuilder struct {
+	current chunkedSet
+	sets    []chunkedSet
+	chunker Chunker
+}
+
+func newMetaChunkBuilder(chunker Chunker) chunkedSetBuilder {
+	return chunkedSetBuilder{chunker: chunker}
+}
+
+func (mcb *chunkedSetBuilder) AddItem(s Set) {
+	mcb.current.children = append(mcb.current.children, chunkedSetEntry{s.first(), s})
+	if mcb.chunker.Add(s.Ref()) {
+		mcb.sets = append(mcb.sets, mcb.current)
+		mcb.current = chunkedSet{}
+	}
+}
+
+func (mcb *chunkedSetBuilder) Build() chunkedSet {
+	if mcb.current.Len() > 0 {
+		mcb.sets = append(mcb.sets, mcb.current)
+	}
+
+	if len(mcb.sets) == 1 {
+		d.Chk.NotEqual(uint64(0), mcb.sets[0].Len())
+		return mcb.sets[0]
+	}
+
+	b := newMetaChunkBuilder(mcb.chunker.New())
+	for _, s := range mcb.sets {
+		b.AddItem(s)
+	}
+	return b.Build()
+}
diff --git a/newset/set_builder_test.go b/newset/set_builder_test.go
new file mode 100644
index 0000000000..eb10398128
--- /dev/null
+++ b/newset/set_builder_test.go
@@ -0,0 +1,130 @@
+package newset
+
+import (
+	"testing"
+
+	"github.com/attic-labs/noms/Godeps/_workspace/src/github.com/stretchr/testify/assert"
+	"github.com/attic-labs/noms/ref"
+)
+
+// A Chunker that always produces chunks of the same size.
+type identicalSizeChunker struct {
+	chunkSize int
+	cursor    int
+}
+
+func newIdenticalSizeChunker(chunkSize int) *identicalSizeChunker {
+	return &identicalSizeChunker{chunkSize: chunkSize}
+}
+
+func (chunker *identicalSizeChunker) Add(r ref.Ref) bool {
+	if chunker.cursor == chunker.chunkSize-1 {
+		chunker.cursor = 0
+		return true
+	}
+	chunker.cursor++
+	return false
+}
+
+func (chunker *identicalSizeChunker) New() Chunker {
+	return newIdenticalSizeChunker(chunker.chunkSize)
+}
+
+func TestIdenticalSizeChunker(t *testing.T) {
+	assert := assert.New(t)
+	r := ref.Ref{}
+
+	chunker := newIdenticalSizeChunker(1)
+	assert.True(chunker.Add(r))
+	assert.True(chunker.Add(r))
+
+	chunker = newIdenticalSizeChunker(3)
+	assert.False(chunker.Add(r))
+	assert.False(chunker.Add(r))
+	assert.True(chunker.Add(r))
+	assert.False(chunker.Add(r))
+	assert.False(chunker.Add(r))
+	assert.True(chunker.Add(r))
+}
+
+func TestChunkSize3Depth4(t *testing.T) {
+	assert := assert.New(t)
+
+	// Use a chunk size of 3 items and aim for a tree 4 layers deep (3 chunked layers plus the flat leaf layer), which means adding 3^4 = 81 items.
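+	// With 81 items and chunks of 3, the builder produces 27 leaf flatSets, which
+	// chunk into 9 chunkedSets, then 3, then the single root verified below.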
+	chunkSize := 3
+
+	sb := NewSetBuilderWithChunker(newIdenticalSizeChunker(chunkSize))
+	refs, ator := addRefs(&sb, 81)
+	set := sb.Build()
+
+	for _, r := range refs {
+		assert.True(set.Has(r))
+	}
+	assert.False(set.Has(ator.Next()))
+
+	// Test each level by hand. This could be factored more nicely, but clever factoring might itself have bugs; written out like this it's clear we're testing the right thing.
+
+	// The top layer is the first chunked layer.
+	first := set.(chunkedSet)
+	assert.Equal(uint64(81), first.Len())
+	assert.Equal(refs[0], first.first())
+	assert.Equal(chunkSize, len(first.children))
+
+	for i := 0; i < chunkSize; i++ {
+		// Second chunked layer:
+		second := first.children[i].set.(chunkedSet)
+		assert.Equal(uint64(27), second.Len())
+		assert.Equal(chunkSize, len(second.children))
+		assert.Equal(refs[27*i], second.first())
+
+		for j := 0; j < chunkSize; j++ {
+			// Third chunked layer:
+			third := second.children[j].set.(chunkedSet)
+			assert.Equal(uint64(9), third.Len())
+			assert.Equal(chunkSize, len(third.children))
+			assert.Equal(refs[27*i+9*j], third.first())
+
+			for k := 0; k < chunkSize; k++ {
+				// The fourth layer holds the leaf nodes.
+				fourth := third.children[k].set.(flatSet)
+				assert.Equal(uint64(3), fourth.Len())
+				assert.Equal(chunkSize, len(fourth.d))
+				assert.Equal(refs[27*i+9*j+3*k], fourth.first())
+				// Lastly, check the individual values of the leaf nodes.
+				for m := 0; m < chunkSize; m++ {
+					assert.Equal(refs[27*i+9*j+3*k+m], fourth.d[m])
+				}
+			}
+		}
+	}
+}
+
+func TestRealData(t *testing.T) {
+	assert := assert.New(t)
+
+	sb := NewSetBuilder()
+	refs, ator := addRefs(&sb, 5000)
+	set := sb.Build()
+
+	for _, r := range refs {
+		assert.True(set.Has(r))
+	}
+	assert.False(set.Has(ator.Next()))
+
+	expected := "(chunked with 85 chunks)\nchunk 0 (start 00000000)\n    flat{00000000...(10 more)...0000000b}\nchunk 1 (start 0000000c)\n    flat{0000000c...(185 more)...000000c6}\nchunk 2 (start 000000c7)\n    flat{000000c7...(11 more)...000000d3}\nchunk 3 (start 000000d4)\n    flat{000000d4...(34 more)...000000f7}\nchunk 4 (start 000000f8)\n    flat{000000f8...(37 more)...0000011e}\nchunk 5 (start 0000011f)\n    flat{0000011f...(19 more)...00000133}\nchunk 6 (start 00000134)\n    flat{00000134...(15 more)...00000144}\nchunk 7 (start 00000145)\n    flat{00000145...(86 more)...0000019c}\nchunk 8 (start 0000019d)\n    flat{0000019d...(179 more)...00000251}\nchunk 9 (start 00000252)\n    flat{00000252...(63 more)...00000292}\nchunk 10 (start 00000293)\n    flat{00000293...(102 more)...000002fa}\nchunk 11 (start 000002fb)\n    flat{000002fb...(30 more)...0000031a}\nchunk 12 (start 0000031b)\n    flat{0000031b...(5 more)...00000321}\nchunk 13 (start 00000322)\n    flat{00000322...(32 more)...00000343}\nchunk 14 (start 00000344)\n    flat{00000344...(143 more)...000003d4}\nchunk 15 (start 000003d5)\n    flat{000003d5...(16 more)...000003e6}\nchunk 16 (start 000003e7)\n    flat{000003e7...(12 more)...000003f4}\nchunk 17 (start 000003f5)\n    flat{000003f5...(184 more)...000004ae}\nchunk 18 (start 000004af)\n    flat{000004af...(11 more)...000004bb}\nchunk 19 (start 000004bc)\n    flat{000004bc...(9 more)...000004c6}\nchunk 20 (start 000004c7)\n    flat{000004c7...(48 more)...000004f8}\nchunk 21 (start 000004f9)\n    flat{000004f9...(52 more)...0000052e}\nchunk 22 (start 0000052f)\n    flat{0000052f...(20 more)...00000544}\nchunk 23 (start 00000545)\n    flat{00000545...(25 more)...0000055f}\nchunk 24 (start 00000560)\n    flat{00000560...(13 more)...0000056e}\nchunk 25 (start 0000056f)\n    flat{0000056f...(65 more)...000005b1}\nchunk 26 (start 000005b2)\n    flat{000005b2...(34 more)...000005d5}\nchunk 27 (start 000005d6)\n    flat{000005d6...(81 more)...00000628}\nchunk 28 (start 00000629)\n    flat{00000629...(81 more)...0000067b}\nchunk 29 (start 0000067c)\n    flat{0000067c...(34 more)...0000069f}\nchunk 30 (start 000006a0)\n    flat{000006a0...(43 more)...000006cc}\nchunk 31 (start 000006cd)\n    flat{000006cd...(38 more)...000006f4}\nchunk 32 (start 000006f5)\n    flat{000006f5...(15 more)...00000705}\nchunk 33 (start 00000706)\n    flat{00000706...(38 more)...0000072d}\nchunk 34 (start 0000072e)\n    flat{0000072e...(64 more)...0000076f}\nchunk 35 (start 00000770)\n    flat{00000770...(0 more)...00000771}\nchunk 36 (start 00000772)\n    flat{00000772...(50 more)...000007a5}\nchunk 37 (start 000007a6)\n    flat{000007a6...(26 more)...000007c1}\nchunk 38 (start 000007c2)\n    flat{000007c2...(23 more)...000007da}\nchunk 39 (start 000007db)\n    flat{000007db...(198 more)...000008a2}\nchunk 40 (start 000008a3)\n    flat{000008a3...(16 more)...000008b4}\nchunk 41 (start 000008b5)\n    flat{000008b5...(18 more)...000008c8}\nchunk 42 (start 000008c9)\n    flat{000008c9...(61 more)...00000907}\nchunk 43 (start 00000908)\n    flat{00000908...(49 more)...0000093a}\nchunk 44 (start 0000093b)\n    flat{0000093b...(100 more)...000009a0}\nchunk 45 (start 000009a1)\n    flat{000009a1...(24 more)...000009ba}\nchunk 46 (start 000009bb)\n    flat{000009bb...(38 more)...000009e2}\nchunk 47 (start 000009e3)\n    flat{000009e3...(30 more)...00000a02}\nchunk 48 (start 00000a03)\n    flat{00000a03...(139 more)...00000a8f}\nchunk 49 (start 00000a90)\n    flat{00000a90...(4 more)...00000a95}\nchunk 50 (start 00000a96)\n    flat{00000a96...(1 more)...00000a98}\nchunk 51 (start 00000a99)\n    flat{00000a99...(228 more)...00000b7e}\nchunk 52 (start 00000b7f)\n    flat{00000b7f...(12 more)...00000b8c}\nchunk 53 (start 00000b8d)\n    flat{00000b8d...(33 more)...00000baf}\nchunk 54 (start 00000bb0)\n    flat{00000bb0...(75 more)...00000bfc}\nchunk 55 (start 00000bfd)\n    flat{00000bfd...(109 more)...00000c6b}\nchunk 56 (start 00000c6c)\n    flat{00000c6c...(28 more)...00000c89}\nchunk 57 (start 00000c8a)\n    flat{00000c8a...(0 more)...00000c8b}\nchunk 58 (start 00000c8c)\n    flat{00000c8c...(37 more)...00000cb2}\nchunk 59 (start 00000cb3)\n    flat{00000cb3...(219 more)...00000d8f}\nchunk 60 (start 00000d90)\n    flat{00000d90...(11 more)...00000d9c}\nchunk 61 (start 00000d9d)\n    flat{00000d9d...(18 more)...00000db0}\nchunk 62 (start 00000db1)\n    flat{00000db1...(84 more)...00000e06}\nchunk 63 (start 00000e07)\n    flat{00000e07...(14 more)...00000e16}\nchunk 64 (start 00000e17)\n    flat{00000e17...(41 more)...00000e41}\nchunk 65 (start 00000e42)\n    flat{00000e42...(2 more)...00000e45}\nchunk 66 (start 00000e46)\n    flat{00000e46...(23 more)...00000e5e}\nchunk 67 (start 00000e5f)\n    flat{00000e5f...(25 more)...00000e79}\nchunk 68 (start 00000e7a)\n    flat{00000e7a...(22 more)...00000e91}\nchunk 69 (start 00000e92)\n    flat{00000e92...(12 more)...00000e9f}\nchunk 70 (start 00000ea0)\n    flat{00000ea0...(59 more)...00000edc}\nchunk 71 (start 00000edd)\n    flat{00000edd...(90 more)...00000f38}\nchunk 72 (start 00000f39)\n    flat{00000f39...(274 more)...0000104c}\nchunk 73 (start 0000104d)\n    flat{0000104d...(15 more)...0000105d}\nchunk 74 (start 0000105e)\n    flat{0000105e...(52 more)...00001093}\nchunk 75 (start 00001094)\n    flat{00001094...(87 more)...000010ec}\nchunk 76 (start 000010ed)\n    flat{000010ed...(69 more)...00001133}\nchunk 77 (start 00001134)\n    flat{00001134...(371 more)...000012a8}\nchunk 78 (start 000012a9)\n    flat{000012a9...(44 more)...000012d6}\nchunk 79 (start 000012d7)\n    flat{000012d7...(13 more)...000012e5}\nchunk 80 (start 000012e6)\n    flat{000012e6...(10 more)...000012f1}\nchunk 81 (start 000012f2)\n    flat{000012f2...(30 more)...00001311}\nchunk 82 (start 00001312)\n    flat 00001312\nchunk 83 (start 00001313)\n    flat{00001313...(49 more)...00001345}\nchunk 84 (start 00001346)\n    flat{00001346...(64 more)...00001387}\n"
+	assert.Equal(expected, set.fmt(0))
+}
+
+// addRefs adds n refs to a set builder and returns the refs that were added, along with the referrator used to generate them.
+func addRefs(sb *SetBuilder, n int) ([]ref.Ref, referrator) {
+	var refs []ref.Ref
+	ator := newReferrator()
+	for i := 0; i < n; i++ {
+		ref := ator.Next()
+		(*sb).AddItem(ref)
+		refs = append(refs, ref)
+	}
+	return refs, ator
+}
diff --git a/ref/ref.go b/ref/ref.go
index fa7c4c5ca0..4de583b50f 100644
--- a/ref/ref.go
+++ b/ref/ref.go
@@ -34,6 +34,10 @@ func (r Ref) IsEmpty() bool {
 	return r.digest == emptyRef.digest
 }
 
+func (r Ref) DigestSlice() []byte {
+	return r.digest[:]
+}
+
 func (r Ref) String() string {
 	return fmt.Sprintf("sha1-%s", hex.EncodeToString(r.digest[:]))
 }
@@ -92,3 +96,7 @@ func Less(r1, r2 Ref) bool {
 	}
 	return false
 }
+
+func Greater(r1, r2 Ref) bool {
+	return !Less(r1, r2) && r1 != r2
+}
diff --git a/ref/ref_test.go b/ref/ref_test.go
index a1ed233bbd..ce3c985986 100644
--- a/ref/ref_test.go
+++ b/ref/ref_test.go
@@ -62,6 +62,15 @@ func TestDigest(t *testing.T) {
 	assert.NotEqual(t, r.Digest(), d)
 }
 
+func TestDigestSlice(t *testing.T) {
+	r := New(Sha1Digest{})
+	d := r.DigestSlice()
+	assert.Equal(t, r.DigestSlice(), d)
+	// DigestSlice() must not alias the Ref's internal digest; otherwise writes to the slice would mutate the Ref.
+	d[0] = 0x01
+	assert.NotEqual(t, r.DigestSlice(), d)
+}
+
 func TestFromHash(t *testing.T) {
 	h := sha1.New()
 	h.Write([]byte("abc"))
@@ -79,3 +88,37 @@ func TestIsEmpty(t *testing.T) {
 	r3 := Parse("sha1-a9993e364706816aba3e25717850c26c9cd0d89d")
 	assert.False(t, r3.IsEmpty())
 }
+
+func TestLess(t *testing.T) {
+	assert := assert.New(t)
+
+	r1 := Parse("sha1-0000000000000000000000000000000000000001")
+	r2 := Parse("sha1-0000000000000000000000000000000000000002")
+
+	assert.False(Less(r1, r1))
+	assert.True(Less(r1, r2))
+	assert.False(Less(r2, r1))
+	assert.False(Less(r2, r2))
+
+	r0 := Ref{}
+	assert.False(Less(r0, r0))
+	assert.True(Less(r0, r2))
+	assert.False(Less(r2, r0))
+}
+
+func TestGreater(t *testing.T) {
+	assert := assert.New(t)
+
+	r1 := Parse("sha1-0000000000000000000000000000000000000001")
+	r2 := Parse("sha1-0000000000000000000000000000000000000002")
+
+	assert.False(Greater(r1, r1))
+	assert.False(Greater(r1, r2))
+	assert.True(Greater(r2, r1))
+	assert.False(Greater(r2, r2))
+
+	r0 := Ref{}
+	assert.False(Greater(r0, r0))
+	assert.False(Greater(r0, r2))
+	assert.True(Greater(r2, r0))
+}