no functional changes, improving code quality (#2410)
Fix misspellings; gofmt code that was not gofmt'd, taking advantage of gofmt -s as well; remove a couple of unreachable statements reported by golint; reference the Go Report Card results and tests.
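Most of the diff below falls into a few mechanical categories. The largest is gofmt -s, which elides redundant element types from composite literals. A minimal sketch of that rewrite, with an illustrative struct definition (the field names are an assumption, not the actual Noms declaration):

```go
package main

import "fmt"

// Splice mirrors the shape of the literals simplified throughout this commit;
// the field names are illustrative only.
type Splice struct {
	SpAt, SpRemoved, SpAdded, SpFrom uint64
}

func main() {
	// Before gofmt -s: the element type is repeated for every element.
	verbose := []Splice{
		Splice{0, 1, 0, 0},
		Splice{3, 1, 0, 0},
	}

	// After gofmt -s: the element type is elided inside the literal.
	simplified := []Splice{
		{0, 1, 0, 0},
		{3, 1, 0, 0},
	}

	fmt.Println(verbose[0] == simplified[0]) // true: the two forms build identical values
}
```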
@@ -4,6 +4,7 @@
<br><br>
[](http://jenkins.noms.io/job/NomsServer)
[](https://godoc.org/github.com/attic-labs/noms)
[](https://goreportcard.com/report/github.com/attic-labs/noms)
[](http://slack.noms.io)

*Noms* is a decentralized database based on ideas from Git.

@@ -19,8 +19,8 @@ import (
"github.com/attic-labs/noms/go/constants"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/datas"
"github.com/julienschmidt/httprouter"
flag "github.com/juju/gnuflag"
"github.com/julienschmidt/httprouter"
)

const (

@@ -50,7 +50,7 @@ func setupLogFlags() *flag.FlagSet {
logFlagSet.IntVar(&maxLines, "max-lines", 10, "max number of lines to show per commit (-1 for all lines)")
logFlagSet.IntVar(&maxCommits, "n", 0, "max number of commits to display (0 for all commits)")
logFlagSet.BoolVar(&oneline, "oneline", false, "show a summary of each commit on a single line")
logFlagSet.BoolVar(&showGraph, "graph", false, "show ascii-based commit hierarcy on left side of output")
logFlagSet.BoolVar(&showGraph, "graph", false, "show ascii-based commit hierarchy on left side of output")
logFlagSet.BoolVar(&showValue, "show-value", false, "show commit value rather than diff information -- this is temporary")
outputpager.RegisterOutputpagerFlags(logFlagSet)
return logFlagSet

@@ -55,7 +55,7 @@ func (s *nomsSyncTestSuite) TestSync() {
source1HeadRef := source1.Head().Hash()
source2.Database().Close() // Close Database backing both Datasets

sourceSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "#" + source1HeadRef.String())
sourceSpec := spec.CreateValueSpecString("ldb", s.LdbDir, "#"+source1HeadRef.String())
ldb2dir := path.Join(s.TempDir, "ldb2")
sinkDatasetSpec := spec.CreateValueSpecString("ldb", ldb2dir, "dest")
sout, _ := s.Run(main, []string{"sync", sourceSpec, sinkDatasetSpec})

@@ -13,11 +13,11 @@ import (
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/hash"
"github.com/golang/snappy"
flag "github.com/juju/gnuflag"
"github.com/syndtr/goleveldb/leveldb"
"github.com/syndtr/goleveldb/leveldb/errors"
"github.com/syndtr/goleveldb/leveldb/filter"
"github.com/syndtr/goleveldb/leveldb/opt"
flag "github.com/juju/gnuflag"
)

const (

@@ -29,7 +29,7 @@ func TestNewCommit(t *testing.T) {
})
assertTypeEquals(et, at)

// Commiting another Number
// Committing another Number
commit2 := NewCommit(types.Number(2), types.NewSet(types.NewRef(commit)), types.EmptyStruct)
at2 := commit2.Type()
et2 := et

@@ -99,11 +99,11 @@ func (ds *databaseCommon) doCommit(datasetID string, commit types.Struct) error
currentRootRef, currentDatasets := ds.getRootAndDatasets()
commitRef := ds.WriteValue(commit) // will be orphaned if the tryUpdateRoot() below fails

// First commit in store is always fast-foward.
// First commit in store is always fast-forward.
if !currentRootRef.IsEmpty() {
r, hasHead := currentDatasets.MaybeGet(types.String(datasetID))

// First commit in dataset is always fast-foward.
// First commit in dataset is always fast-forward.
if hasHead {
currentHeadRef := r.(types.Ref)
// Allow only fast-forward commits.

@@ -41,7 +41,7 @@ func TestHandleWriteValue(t *testing.T) {
listChunk := types.EncodeValue(l2, nil)

body := &bytes.Buffer{}
serializeHints(body, map[hash.Hash]struct{}{hint: struct{}{}})
serializeHints(body, map[hash.Hash]struct{}{hint: {}})
chunks.Serialize(itemChunk, body)
chunks.Serialize(listChunk, body)

@@ -75,7 +75,7 @@ func TestHandleWriteValueBackpressure(t *testing.T) {
listChunk := types.EncodeValue(l2, nil)

body := &bytes.Buffer{}
serializeHints(body, map[hash.Hash]struct{}{hint: struct{}{}})
serializeHints(body, map[hash.Hash]struct{}{hint: {}})
chunks.Serialize(itemChunk, body)
chunks.Serialize(listChunk, body)

@@ -103,8 +103,8 @@ func TestBuildWriteValueRequest(t *testing.T) {
close(inChunkChan)

hints := map[hash.Hash]struct{}{
hash.Parse("00000000000000000000000000000002"): struct{}{},
hash.Parse("00000000000000000000000000000003"): struct{}{},
hash.Parse("00000000000000000000000000000002"): {},
hash.Parse("00000000000000000000000000000003"): {},
}
compressed := buildWriteValueRequest(inChunkChan, hints)
gr := snappy.NewReader(compressed)
@@ -139,8 +139,8 @@ func serializeChunks(chnx []chunks.Chunk, assert *assert.Assertions) io.Reader {
func TestBuildHashesRequest(t *testing.T) {
assert := assert.New(t)
hashes := map[hash.Hash]struct{}{
hash.Parse("00000000000000000000000000000002"): struct{}{},
hash.Parse("00000000000000000000000000000003"): struct{}{},
hash.Parse("00000000000000000000000000000002"): {},
hash.Parse("00000000000000000000000000000003"): {},
}
r := buildHashesRequest(hashes)
b, err := ioutil.ReadAll(r)

@@ -15,10 +15,10 @@ import (
func TestHintRoundTrip(t *testing.T) {
b := &bytes.Buffer{}
input := map[hash.Hash]struct{}{
hash.Parse("00000000000000000000000000000000"): struct{}{},
hash.Parse("00000000000000000000000000000001"): struct{}{},
hash.Parse("00000000000000000000000000000002"): struct{}{},
hash.Parse("00000000000000000000000000000003"): struct{}{},
hash.Parse("00000000000000000000000000000000"): {},
hash.Parse("00000000000000000000000000000001"): {},
hash.Parse("00000000000000000000000000000002"): {},
hash.Parse("00000000000000000000000000000003"): {},
}
serializeHints(b, input)
output := deserializeHints(b)

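The serializeHints hunks above replace `struct{}{}` map values with the `{}` form gofmt -s produces when the map type already names the value type. A small self-contained sketch of the same set idiom, using a plain string key instead of the real hash.Hash type:

```go
package main

import "fmt"

func main() {
	// A map with struct{} values is the usual Go idiom for a set.
	// Because the map type already spells out struct{}, gofmt -s lets
	// each value be written as {}.
	hints := map[string]struct{}{
		"00000000000000000000000000000002": {},
		"00000000000000000000000000000003": {},
	}

	_, ok := hints["00000000000000000000000000000002"]
	fmt.Println(len(hints), ok) // 2 true
}
```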
@@ -63,7 +63,7 @@ func (ds *Dataset) HeadValue() types.Value {
return c.Get(datas.ValueField)
}

// MaybeHeadValue returns the Value field of the current head Commit, if avaliable. If not it
// MaybeHeadValue returns the Value field of the current head Commit, if available. If not it
// returns nil and 'false'.
func (ds *Dataset) MaybeHeadValue() (types.Value, bool) {
c, ok := ds.Database().MaybeHead(ds.id)

@@ -523,8 +523,8 @@ func TestDecodeRecursive(t *testing.T) {

assert.Equal(Node{
1, []Node{
Node{2, []Node{}},
Node{3, []Node{}},
{2, []Node{}},
{3, []Node{}},
},
}, n)
}

@@ -280,8 +280,8 @@ func TestEncodeRecursive(t *testing.T) {
}
v, err := Marshal(Node{
1, []Node{
Node{2, []Node{}},
Node{3, []Node(nil)},
{2, []Node{}},
{3, []Node(nil)},
},
})
assert.NoError(err)

@@ -16,8 +16,8 @@ import (

"github.com/codahale/blake2"
humanize "github.com/dustin/go-humanize"
"github.com/kch42/buzhash"
flag "github.com/juju/gnuflag"
"github.com/kch42/buzhash"
)

func main() {

@@ -175,16 +175,16 @@ func TestDatabaseSpecs(t *testing.T) {
}

testCases := []testCase{
testCase{"http://localhost:8000", "http", "//localhost:8000", ""},
testCase{"http://localhost:8000/fff", "http", "//localhost:8000/fff", ""},
testCase{"https://local.attic.io/john/doe", "https", "//local.attic.io/john/doe", ""},
testCase{"ldb:/filesys/john/doe", "ldb", "/filesys/john/doe", ""},
testCase{"./john/doe", "ldb", "./john/doe", ""},
testCase{"john/doe", "ldb", "john/doe", ""},
testCase{"/john/doe", "ldb", "/john/doe", ""},
testCase{"mem", "mem", "", ""},
testCase{"http://server.com/john/doe?access_token=jane", "http", "//server.com/john/doe?access_token=jane", "jane"},
testCase{"https://server.com/john/doe/?arg=2&qp1=true&access_token=jane", "https", "//server.com/john/doe/?arg=2&qp1=true&access_token=jane", "jane"},
{"http://localhost:8000", "http", "//localhost:8000", ""},
{"http://localhost:8000/fff", "http", "//localhost:8000/fff", ""},
{"https://local.attic.io/john/doe", "https", "//local.attic.io/john/doe", ""},
{"ldb:/filesys/john/doe", "ldb", "/filesys/john/doe", ""},
{"./john/doe", "ldb", "./john/doe", ""},
{"john/doe", "ldb", "john/doe", ""},
{"/john/doe", "ldb", "/john/doe", ""},
{"mem", "mem", "", ""},
{"http://server.com/john/doe?access_token=jane", "http", "//server.com/john/doe?access_token=jane", "jane"},
{"https://server.com/john/doe/?arg=2&qp1=true&access_token=jane", "https", "//server.com/john/doe/?arg=2&qp1=true&access_token=jane", "jane"},
}

for _, tc := range testCases {
@@ -220,13 +220,13 @@ func TestDatasetSpecs(t *testing.T) {
}

testCases := []testCase{
testCase{"http://localhost:8000::ds1", "http", "//localhost:8000", "ds1", ""},
testCase{"http://localhost:8000/john/doe/::ds2", "http", "//localhost:8000/john/doe/", "ds2", ""},
testCase{"https://local.attic.io/john/doe::ds3", "https", "//local.attic.io/john/doe", "ds3", ""},
testCase{"http://local.attic.io/john/doe::ds1", "http", "//local.attic.io/john/doe", "ds1", ""},
testCase{"ldb:/filesys/john/doe::ds/one", "ldb", "/filesys/john/doe", "ds/one", ""},
testCase{"http://localhost:8000/john/doe?access_token=abc::ds/one", "http", "//localhost:8000/john/doe?access_token=abc", "ds/one", "abc"},
testCase{"https://localhost:8000?qp1=x&access_token=abc&qp2=y::ds/one", "https", "//localhost:8000?qp1=x&access_token=abc&qp2=y", "ds/one", "abc"},
{"http://localhost:8000::ds1", "http", "//localhost:8000", "ds1", ""},
{"http://localhost:8000/john/doe/::ds2", "http", "//localhost:8000/john/doe/", "ds2", ""},
{"https://local.attic.io/john/doe::ds3", "https", "//local.attic.io/john/doe", "ds3", ""},
{"http://local.attic.io/john/doe::ds1", "http", "//local.attic.io/john/doe", "ds1", ""},
{"ldb:/filesys/john/doe::ds/one", "ldb", "/filesys/john/doe", "ds/one", ""},
{"http://localhost:8000/john/doe?access_token=abc::ds/one", "http", "//localhost:8000/john/doe?access_token=abc", "ds/one", "abc"},
{"https://localhost:8000?qp1=x&access_token=abc&qp2=y::ds/one", "https", "//localhost:8000?qp1=x&access_token=abc&qp2=y", "ds/one", "abc"},
}

for _, tc := range testCases {
@@ -251,11 +251,11 @@ func TestPathSpec(t *testing.T) {
}

testCases := []testCase{
testCase{"http://local.attic.io/john/doe::#0123456789abcdefghijklmnopqrstuv", "http", "//local.attic.io/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
testCase{"ldb:/filesys/john/doe::#0123456789abcdefghijklmnopqrstuv", "ldb", "/filesys/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
testCase{"mem::#0123456789abcdefghijklmnopqrstuv", "mem", "", "#0123456789abcdefghijklmnopqrstuv"},
testCase{"http://local.attic.io/john/doe::#0123456789abcdefghijklmnopqrstuv", "http", "//local.attic.io/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
testCase{"http://localhost:8000/john/doe/::ds1", "http", "//localhost:8000/john/doe/", "ds1"},
{"http://local.attic.io/john/doe::#0123456789abcdefghijklmnopqrstuv", "http", "//local.attic.io/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
{"ldb:/filesys/john/doe::#0123456789abcdefghijklmnopqrstuv", "ldb", "/filesys/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
{"mem::#0123456789abcdefghijklmnopqrstuv", "mem", "", "#0123456789abcdefghijklmnopqrstuv"},
{"http://local.attic.io/john/doe::#0123456789abcdefghijklmnopqrstuv", "http", "//local.attic.io/john/doe", "#0123456789abcdefghijklmnopqrstuv"},
{"http://localhost:8000/john/doe/::ds1", "http", "//localhost:8000/john/doe/", "ds1"},
}

for _, tc := range testCases {

@@ -88,16 +88,16 @@ func calcSplices(previousLength uint64, currentLength uint64, maxSpliceMatrixSiz
}

if currentStart == currentEnd {
return []Splice{Splice{previousStart, previousEnd - previousStart, 0, 0}}
return []Splice{{previousStart, previousEnd - previousStart, 0, 0}}
} else if previousStart == previousEnd {
return []Splice{Splice{previousStart, 0, currentEnd - currentStart, currentStart}}
return []Splice{{previousStart, 0, currentEnd - currentStart, currentStart}}
}

previousLength = previousEnd - previousStart
currentLength = currentEnd - currentStart

if previousLength*currentLength > maxSpliceMatrixSize {
return []Splice{Splice{0, previousLength, currentLength, 0}}
return []Splice{{0, previousLength, currentLength, 0}}
}

splices := make([]Splice, 0)

@@ -22,7 +22,7 @@ func TestEditDistanceAppend(t *testing.T) {
assertDiff(assert,
[]uint64{0, 1, 2},
[]uint64{0, 1, 2, 3, 4, 5},
[]Splice{Splice{3, 0, 3, 3}},
[]Splice{{3, 0, 3, 3}},
)
}

@@ -32,7 +32,7 @@ func TestEditDistancePrepend(t *testing.T) {
assertDiff(assert,
[]uint64{3, 4, 5, 6},
[]uint64{0, 1, 2, 3, 4, 5, 6},
[]Splice{Splice{0, 0, 3, 0}},
[]Splice{{0, 0, 3, 0}},
)
}

@@ -42,7 +42,7 @@ func TestEditDistanceChopFromEnd(t *testing.T) {
assertDiff(assert,
[]uint64{0, 1, 2, 3, 4, 5},
[]uint64{0, 1, 2},
[]Splice{Splice{3, 3, 0, 0}},
[]Splice{{3, 3, 0, 0}},
)
}

@@ -52,7 +52,7 @@ func TestEditDistanceChopFromStart(t *testing.T) {
assertDiff(assert,
[]uint64{0, 1, 2, 3, 4, 5},
[]uint64{3, 4, 5},
[]Splice{Splice{0, 3, 0, 0}},
[]Splice{{0, 3, 0, 0}},
)
}

@@ -62,7 +62,7 @@ func TestEditDistanceChopFromMiddle(t *testing.T) {
assertDiff(assert,
[]uint64{0, 1, 2, 3, 4, 5},
[]uint64{0, 5},
[]Splice{Splice{1, 4, 0, 0}},
[]Splice{{1, 4, 0, 0}},
)
}

@@ -73,8 +73,8 @@ func TestEditDistanceA(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8},
[]uint64{0, 1, 2, 4, 5, 6, 8},
[]Splice{
Splice{3, 1, 0, 0},
Splice{7, 1, 0, 0},
{3, 1, 0, 0},
{7, 1, 0, 0},
},
)
}
@@ -86,10 +86,10 @@ func TestEditDistanceRemoveABunch(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{1, 2, 4, 5, 7, 8, 10},
[]Splice{
Splice{0, 1, 0, 0},
Splice{3, 1, 0, 0},
Splice{6, 1, 0, 0},
Splice{9, 1, 0, 0},
{0, 1, 0, 0},
{3, 1, 0, 0},
{6, 1, 0, 0},
{9, 1, 0, 0},
},
)
}
@@ -101,11 +101,11 @@ func TestEditDistanceAddABunch(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{0, 'a', 1, 2, 3, 'b', 'c', 'd', 4, 5, 6, 7, 'e', 8, 9, 'f', 10, 'g'},
[]Splice{
Splice{1, 0, 1, 1},
Splice{4, 0, 3, 5},
Splice{8, 0, 1, 12},
Splice{10, 0, 1, 15},
Splice{11, 0, 1, 17},
{1, 0, 1, 1},
{4, 0, 3, 5},
{8, 0, 1, 12},
{10, 0, 1, 15},
{11, 0, 1, 17},
},
)
}
@@ -117,9 +117,9 @@ func TestEditDistanceUpdateABunch(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{'a', 1, 2, 'b', 'c', 'd', 6, 7, 'e', 9, 10},
[]Splice{
Splice{0, 1, 1, 0},
Splice{3, 3, 3, 3},
Splice{8, 1, 1, 8},
{0, 1, 1, 0},
{3, 3, 3, 3},
{8, 1, 1, 8},
},
)
}
@@ -131,7 +131,7 @@ func TestEditDistanceLeftOverlap(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{0, 1, 2, 3, 'a', 'b', 8, 9, 10},
[]Splice{
Splice{4, 4, 2, 4},
{4, 4, 2, 4},
},
)
}
@@ -143,7 +143,7 @@ func TestEditDistanceRightOverlap(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{0, 1, 2, 3, 4, 5, 'a', 'b', 10},
[]Splice{
Splice{6, 4, 2, 6},
{6, 4, 2, 6},
},
)
}
@@ -155,7 +155,7 @@ func TestEditDistanceWithin(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{0, 1, 2, 3, 'a', 'b', 10},
[]Splice{
Splice{4, 6, 2, 4},
{4, 6, 2, 4},
},
)
}
@@ -167,7 +167,7 @@ func TestEditDistanceWithout(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{0, 1, 2, 3, 4, 5, 'a', 'b', 'c', 'd', 8, 9, 10},
[]Splice{
Splice{6, 2, 4, 6},
{6, 2, 4, 6},
},
)
}
@@ -179,11 +179,11 @@ func TestEditDistanceMix1(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
[]uint64{0, 'a', 1, 'b', 3, 'c', 4, 6, 7, 'e', 'f', 10},
[]Splice{
Splice{1, 0, 1, 1},
Splice{2, 1, 1, 3},
Splice{4, 0, 1, 5},
Splice{5, 1, 0, 0},
Splice{8, 2, 2, 9},
{1, 0, 1, 1},
{2, 1, 1, 3},
{4, 0, 1, 5},
{5, 1, 0, 0},
{8, 2, 2, 9},
},
)
}
@@ -195,8 +195,8 @@ func TestEditDistanceReverse(t *testing.T) {
[]uint64{0, 1, 2, 3, 4, 5, 6, 7},
[]uint64{7, 6, 5, 4, 3, 2, 1, 0},
[]Splice{
Splice{0, 3, 4, 0},
Splice{4, 4, 3, 5},
{0, 3, 4, 0},
{4, 4, 3, 5},
},
)
}

@@ -370,7 +370,7 @@ func TestListValidateInsertAtZero(t *testing.T) {
count--
v := values[count]
s = s.Insert(uint64(0), v)
validateList(t, s, values[count:len(values)])
validateList(t, s, values[count:])
}
}

@@ -749,7 +749,7 @@ func TestListDiffVersusEmpty(t *testing.T) {

assert.Equal(len(diff2), len(diff1))
diffExpected := []Splice{
Splice{0, 0, 5, 0},
{0, 0, 5, 0},
}
assert.Equal(diffExpected, diff1, "expected diff is wrong")
}
@@ -771,7 +771,7 @@ func TestListDiffReverse(t *testing.T) {
diff2 := accumulateDiffSplices(l2, l1)

diffExpected := []Splice{
Splice{0, 5000, 5000, 0},
{0, 5000, 5000, 0},
}
assert.Equal(diffExpected, diff1, "expected diff is wrong")
assert.Equal(diffExpected, diff2, "expected diff is wrong")
@@ -796,8 +796,8 @@ func TestListDiffReverseWithLargerLimit(t *testing.T) {

assert.Equal(len(diff2), len(diff1))
diffExpected := []Splice{
Splice{0, 2499, 2500, 0},
Splice{2500, 2500, 2499, 2501},
{0, 2499, 2500, 0},
{2500, 2500, 2499, 2501},
}
assert.Equal(diffExpected, diff1, "expected diff is wrong")
assert.Equal(diffExpected, diff2, "expected diff is wrong")
@@ -825,11 +825,11 @@ func TestListDiffRemove5x100(t *testing.T) {

assert.Equal(len(diff1), len(diff2))
diff2Expected := []Splice{
Splice{0, 100, 0, 0},
Splice{1000, 100, 0, 0},
Splice{2000, 100, 0, 0},
Splice{3000, 100, 0, 0},
Splice{4000, 100, 0, 0},
{0, 100, 0, 0},
{1000, 100, 0, 0},
{2000, 100, 0, 0},
{3000, 100, 0, 0},
{4000, 100, 0, 0},
}
assert.Equal(diff2Expected, diff2, "expected diff is wrong")
}
@@ -856,11 +856,11 @@ func TestListDiffAdd5x5(t *testing.T) {

assert.Equal(len(diff1), len(diff2))
diff2Expected := []Splice{
Splice{5, 0, 5, 5},
Splice{1000, 0, 5, 1005},
Splice{2000, 0, 5, 2010},
Splice{3000, 0, 5, 3015},
Splice{4000, 0, 5, 4020},
{5, 0, 5, 5},
{1000, 0, 5, 1005},
{2000, 0, 5, 2010},
{3000, 0, 5, 3015},
{4000, 0, 5, 4020},
}
assert.Equal(diff2Expected, diff2, "expected diff is wrong")
}
@@ -885,16 +885,16 @@ func TestListDiffReplaceReverse5x100(t *testing.T) {
diff := accumulateDiffSplices(l2, l1)

diffExpected := []Splice{
Splice{0, 49, 50, 0},
Splice{50, 50, 49, 51},
Splice{1000, 49, 50, 1000},
Splice{1050, 50, 49, 1051},
Splice{2000, 49, 50, 2000},
Splice{2050, 50, 49, 2051},
Splice{3000, 49, 50, 3000},
Splice{3050, 50, 49, 3051},
Splice{4000, 49, 50, 4000},
Splice{4050, 50, 49, 4051},
{0, 49, 50, 0},
{50, 50, 49, 51},
{1000, 49, 50, 1000},
{1050, 50, 49, 1051},
{2000, 49, 50, 2000},
{2050, 50, 49, 2051},
{3000, 49, 50, 3000},
{3050, 50, 49, 3051},
{4000, 49, 50, 4000},
{4050, 50, 49, 4051},
}
assert.Equal(diffExpected, diff, "expected diff is wrong")
}
@@ -925,7 +925,7 @@ func TestListDiffString2(t *testing.T) {
diff := accumulateDiffSplices(l2, l1)

diffExpected := []Splice{
Splice{3, 0, 1, 3},
{3, 0, 1, 3},
}
assert.Equal(diffExpected, diff, "expected diff is wrong")
}
@@ -942,7 +942,7 @@ func TestListDiffString3(t *testing.T) {
diff := accumulateDiffSplices(l2, l1)

diffExpected := []Splice{
Splice{2, 1, 1, 2},
{2, 1, 1, 2},
}
assert.Equal(diffExpected, diff, "expected diff is wrong")
}
@@ -1005,7 +1005,7 @@ func TestListDiffAllValuesInSequenceRemoved(t *testing.T) {
diff := accumulateDiffSplices(l2, l1)

expected := []Splice{
Splice{3, 0, 5, 3},
{3, 0, 5, 3},
}

assert.Equal(expected, diff)

@@ -15,7 +15,7 @@ import (
const (
defaultChunkPattern = uint32(1<<12 - 1) // Avg Chunk Size of 4k

// The window size to use for computing the rolling hash. This is way more than neccessary assuming random data (two bytes would be sufficient with a target chunk size of 4k). The benefit of a larger window is it allows for better distribution on input with lower entropy. At a target chunk size of 4k, any given byte changing has roughly a 1.5% chance of affecting an existing boundary, which seems like an acceptable trade-off.
// The window size to use for computing the rolling hash. This is way more than necessary assuming random data (two bytes would be sufficient with a target chunk size of 4k). The benefit of a larger window is it allows for better distribution on input with lower entropy. At a target chunk size of 4k, any given byte changing has roughly a 1.5% chance of affecting an existing boundary, which seems like an acceptable trade-off.
defaultChunkWindow = uint32(64)
)

@@ -92,7 +92,7 @@ func (rv *rollingValueHasher) HashValue(v Value) {
rv.enc.writeValue(v)
}

// nomsWriter interface. Note: It's unfortunate to have another implimentation of nomsWriter and this one must be kept in sync with binaryNomsWriter, but hashing values is a red-hot code path and it's worth alot to avoid the allocations for literally encoding values.
// nomsWriter interface. Note: It's unfortunate to have another implementation of nomsWriter and this one must be kept in sync with binaryNomsWriter, but hashing values is a red-hot code path and it's worth a lot to avoid the allocations for literally encoding values.
func (rv *rollingValueHasher) writeBytes(v []byte) {
for _, b := range v {
rv.HashByte(b)

@@ -86,7 +86,7 @@ func (sc *sequenceChunker) resume() {
}
}

// If the hash window won't be filled by the preceeding items in the current chunk, walk further back until they will.
// If the hash window won't be filled by the preceding items in the current chunk, walk further back until they will.
for primeHashBytes > 0 && retreater.retreatMaybeAllowBeforeStart(false) {
primeHashCount++
sc.rv.ClearLastBoundary()
@@ -243,7 +243,6 @@ func (sc *sequenceChunker) Done() sequence {

mt = child.getItem(0).(metaTuple)
}
panic("not reached")
}

// If we are mutating an existing sequence, appending subsequent items in the sequence until we reach a pre-existing chunk boundary or the end of the sequence.

@@ -127,7 +127,6 @@ func (u *UnionIterator) Next() Value {
return u.bState.Next()
}
panic("Unreachable")
return nil
}

func (u *UnionIterator) SkipTo(v Value) Value {
@@ -154,7 +153,6 @@ func (u *UnionIterator) SkipTo(v Value) Value {
return u.bState.Next()
}
panic("Unreachable")
return nil
}

// IntersectionIterator only returns values that are returned in both of its child iterators.

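The two UnionIterator hunks above delete a `return nil` that follows a `panic`, one of the unreachable statements the commit message attributes to golint. A hedged sketch of the pattern with an invented function, showing that the function still compiles once the dead return is removed because a call to panic counts as a terminating statement:

```go
package main

import "fmt"

func next(done bool) int {
	if done {
		return 42
	}
	panic("Unreachable")
	// Any statement placed after panic here (e.g. a trailing `return 0`)
	// can never execute and is flagged as unreachable, so it is deleted.
}

func main() {
	fmt.Println(next(true))
}
```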
@@ -28,7 +28,7 @@ type Struct struct {
func NewStruct(name string, data StructData) Struct {
fieldNames := make(sort.StringSlice, len(data))
i := 0
for fn, _ := range data {
for fn := range data {
fieldNames[i] = fn
i++
}

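The NewStruct hunk above rewrites `for fn, _ := range data` as `for fn := range data`; when only the keys are needed, the blank identifier is redundant. A small sketch with made-up field data:

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	data := map[string]int{"b": 2, "a": 1, "c": 3}

	// Ranging with a single variable yields just the keys;
	// `for fn, _ := range data` is the redundant form being cleaned up.
	fieldNames := make([]string, 0, len(data))
	for fn := range data {
		fieldNames = append(fieldNames, fn)
	}

	sort.Strings(fieldNames)
	fmt.Println(fieldNames) // [a b c]
}
```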
@@ -189,7 +189,7 @@ func resolveStructCycles(t *Type, parentStructTypes []*Type) *Type {
}

// We normalize structs during their construction iff they have no unresolved cycles. Normalizing applies a canonical ordering to the composite types of a union and serializes all types under the struct. To ensure a consistent ordering of the composite types of a union, we generate a unique "order id" or OID for each of those types. The OID is the hash of a unique type encoding that is independant of the extant order of types within any subordinate unions. This encoding for most types is a straightforward serialization of its components; for unions the encoding is a bytewise XOR of the hashes of each of its composite type encodings.
// We require a consistent order of types within a union to ensure that equivalent types have a single persistent encoding and, therefore, a single hash. The method described above fails for "unrolled" cycles whereby two equivalent, but uniquely described structures, would have different OIDs. Consider for example the following two types that, while equivalent, do not yeild the same OID:
// We require a consistent order of types within a union to ensure that equivalent types have a single persistent encoding and, therefore, a single hash. The method described above fails for "unrolled" cycles whereby two equivalent, but uniquely described structures, would have different OIDs. Consider for example the following two types that, while equivalent, do not yield the same OID:
// Struct A { a: Cycle<0> }
// Struct A { a: Struct A { a: Cycle<1> } }
// We explicitly disallow this sort of redundantly expressed type. If a non-Byzantine use of such a construction arises, we can attempt to simplify the expansive type or find another means of comparison.
@@ -276,7 +276,7 @@ func encodeForOID(t *Type, buf nomsWriter, allowUnresolvedCycles bool, root *Typ

buf.writeUint32(uint32(len(desc.ElemTypes)))

// This is the only subtle case: encode each subordinate type, generate the hash, remove duplicates, and xor the results together to form an order indepedant encoding.
// This is the only subtle case: encode each subordinate type, generate the hash, remove duplicates, and xor the results together to form an order independent encoding.
mbuf := newBinaryNomsWriter()
oids := make(map[hash.Hash]struct{})
for _, tt := range desc.ElemTypes {
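The encodeForOID comment above describes XOR-folding the hashes of a union's element types so the resulting order id does not depend on element order. A toy sketch of that property, using SHA-256 in place of the Noms hash package:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// xorFold combines a set of digests into one value that is independent of
// visit order, since XOR is commutative and associative.
func xorFold(items []string) []byte {
	out := make([]byte, sha256.Size)
	for _, it := range items {
		d := sha256.Sum256([]byte(it))
		for i := range out {
			out[i] ^= d[i]
		}
	}
	return out
}

func main() {
	a := xorFold([]string{"Number", "String", "Bool"})
	b := xorFold([]string{"Bool", "Number", "String"})
	fmt.Println(hex.EncodeToString(a) == hex.EncodeToString(b)) // true
}
```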
@@ -286,7 +286,7 @@ func encodeForOID(t *Type, buf nomsWriter, allowUnresolvedCycles bool, root *Typ
}

data := make([]byte, hash.ByteLen)
for o, _ := range oids {
for o := range oids {
digest := o.Digest()
for i := 0; i < len(data); i++ {
data[i] ^= digest[i]

@@ -25,22 +25,22 @@ type testSuite struct {
// please update Go and JS to keep them in sync - see js/src//xp-test.js
func newTestSuite() *testSuite {
testValues := []*testValue{
&testValue{Bool(true), "g19moobgrm32dn083bokhksuobulq28c", "bool - true"},
&testValue{Bool(false), "bqjhrhmgmjqnnssqln87o84c6no6pklq", "bool - false"},
&testValue{Number(-1), "hq0jvv1enraehfggfk8s27ll1rmirt96", "num - -1"},
&testValue{Number(0), "elie88b5iouak7onvi2mpkcgoqqr771l", "num - 0"},
&testValue{Number(1), "6h9ldndhjoq0r5sbn1955gaearq5dovc", "num - 1"},
&testValue{Number(-122.411912027329), "hcdjnev3lccjplue6pb0fkhgeehv6oec", "num - -122.411912027329"},
{Bool(true), "g19moobgrm32dn083bokhksuobulq28c", "bool - true"},
{Bool(false), "bqjhrhmgmjqnnssqln87o84c6no6pklq", "bool - false"},
{Number(-1), "hq0jvv1enraehfggfk8s27ll1rmirt96", "num - -1"},
{Number(0), "elie88b5iouak7onvi2mpkcgoqqr771l", "num - 0"},
{Number(1), "6h9ldndhjoq0r5sbn1955gaearq5dovc", "num - 1"},
{Number(-122.411912027329), "hcdjnev3lccjplue6pb0fkhgeehv6oec", "num - -122.411912027329"},
// JS Number.MAX_SAFE_INTEGER
&testValue{Number(9007199254740991), "3fpnjghte4v4q8qogl4bga0qldetlo7b", "num - 9007199254740991"},
{Number(9007199254740991), "3fpnjghte4v4q8qogl4bga0qldetlo7b", "num - 9007199254740991"},
// JS Number.MIN_SAFE_INTEGER
&testValue{Number(-9007199254740991), "jd80frddd2fs3q567tledcgmfs85dvke", "num - -9007199254740991"},
{Number(-9007199254740991), "jd80frddd2fs3q567tledcgmfs85dvke", "num - -9007199254740991"},
// JS Number.EPSILON
&testValue{Number(2.220446049250313e-16), "qapetp8502l672v2vie52nd4qjviq5je", "num - 2.220446049250313e-16"},
&testValue{Number(math.MaxFloat64), "9bqr7ofsvhutqo5ue1iqpmsu70e85ll6", "num - 1.7976931348623157e+308"},
&testValue{String(""), "ssfs0o2eq3kg50p37q2crhhqhjcs2391", "str - empty"},
&testValue{String("0"), "jngc7d11d2h0c6s2f15l10rckvu753rb", "str - 0"},
&testValue{String("false"), "1v3a1t4to25kkohm1bhh2thebmls0lp0", "str - false"},
{Number(2.220446049250313e-16), "qapetp8502l672v2vie52nd4qjviq5je", "num - 2.220446049250313e-16"},
{Number(math.MaxFloat64), "9bqr7ofsvhutqo5ue1iqpmsu70e85ll6", "num - 1.7976931348623157e+308"},
{String(""), "ssfs0o2eq3kg50p37q2crhhqhjcs2391", "str - empty"},
{String("0"), "jngc7d11d2h0c6s2f15l10rckvu753rb", "str - 0"},
{String("false"), "1v3a1t4to25kkohm1bhh2thebmls0lp0", "str - false"},
}

// TODO: add these types too

@@ -13,7 +13,7 @@ import (

type ProcessFn func(in interface{}) (out interface{})

// Creates a pool of |parallelism| goroutines to process values off of |input| by calling |fn| and guarentees that results of each call will be sent on |out| in the order the corresponding input was received.
// Creates a pool of |parallelism| goroutines to process values off of |input| by calling |fn| and guarantees that results of each call will be sent on |out| in the order the corresponding input was received.
func New(input chan interface{}, fn ProcessFn, parallelism int) chan interface{} {
d.Chk.True(parallelism > 0)


@@ -11,8 +11,8 @@ import (
"sync"

"github.com/attic-labs/noms/go/d"
goisatty "github.com/mattn/go-isatty"
flag "github.com/juju/gnuflag"
goisatty "github.com/mattn/go-isatty"
)

var (

@@ -34,9 +34,9 @@ func createTestData(s *testSuite, buildAsMap bool) []types.Value {
s.header = []string{"a", "b", "c"}
structName := "SomeStruct"
s.payload = [][]string{
[]string{"4", "10", "255"},
[]string{"5", "7", "100"},
[]string{"512", "12", "55"},
{"4", "10", "255"},
{"5", "7", "100"},
{"512", "12", "55"},
}

sliceLen := len(s.payload)

@@ -24,16 +24,16 @@ func TestSchemaDetection(t *testing.T) {
}
test(
[][]string{
[]string{"foo", "1", "5"},
[]string{"bar", "0", "10"},
[]string{"true", "1", "23"},
[]string{"1", "1", "60"},
[]string{"1.1", "false", "75"},
{"foo", "1", "5"},
{"bar", "0", "10"},
{"true", "1", "23"},
{"1", "1", "60"},
{"1.1", "false", "75"},
},
[]KindSlice{
KindSlice{types.StringKind},
KindSlice{types.BoolKind, types.StringKind},
KindSlice{
{types.StringKind},
{types.BoolKind, types.StringKind},
{
types.NumberKind,
types.StringKind,
},
@@ -41,61 +41,61 @@ func TestSchemaDetection(t *testing.T) {
)
test(
[][]string{
[]string{"foo"},
[]string{"bar"},
[]string{"true"},
[]string{"1"},
[]string{"1.1"},
{"foo"},
{"bar"},
{"true"},
{"1"},
{"1.1"},
},
[]KindSlice{
KindSlice{types.StringKind},
{types.StringKind},
},
)
test(
[][]string{
[]string{"true"},
[]string{"1"},
[]string{"1.1"},
{"true"},
{"1"},
{"1.1"},
},
[]KindSlice{
KindSlice{types.StringKind},
{types.StringKind},
},
)
test(
[][]string{
[]string{"true"},
[]string{"false"},
[]string{"True"},
[]string{"False"},
[]string{"TRUE"},
[]string{"FALSE"},
[]string{"1"},
[]string{"0"},
{"true"},
{"false"},
{"True"},
{"False"},
{"TRUE"},
{"FALSE"},
{"1"},
{"0"},
},
[]KindSlice{
KindSlice{types.BoolKind, types.StringKind},
{types.BoolKind, types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"1.1"},
{"1"},
{"1.1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"1.1"},
[]string{"4.940656458412465441765687928682213723651e-50"},
[]string{"-4.940656458412465441765687928682213723651e-50"},
{"1"},
{"1.1"},
{"4.940656458412465441765687928682213723651e-50"},
{"-4.940656458412465441765687928682213723651e-50"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
@@ -103,36 +103,36 @@ func TestSchemaDetection(t *testing.T) {

test(
[][]string{
[]string{"1"},
[]string{"1.1"},
[]string{"1.797693134862315708145274237317043567981e+102"},
[]string{"-1.797693134862315708145274237317043567981e+102"},
{"1"},
{"1.1"},
{"1.797693134862315708145274237317043567981e+102"},
{"-1.797693134862315708145274237317043567981e+102"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"1.1"},
[]string{"1.797693134862315708145274237317043567981e+309"},
[]string{"-1.797693134862315708145274237317043567981e+309"},
{"1"},
{"1.1"},
{"1.797693134862315708145274237317043567981e+309"},
{"-1.797693134862315708145274237317043567981e+309"},
},
[]KindSlice{
KindSlice{
{
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"0"},
{"1"},
{"0"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.BoolKind,
types.StringKind},
@@ -140,123 +140,123 @@ func TestSchemaDetection(t *testing.T) {
)
test(
[][]string{
[]string{"1"},
[]string{"0"},
[]string{"-1"},
{"1"},
{"0"},
{"-1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"0"},
[]string{"-0"},
{"0"},
{"-0"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"280"},
[]string{"0"},
[]string{"-1"},
{"1"},
{"280"},
{"0"},
{"-1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"-180"},
[]string{"0"},
[]string{"-1"},
{"1"},
{"-180"},
{"0"},
{"-1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"33000"},
[]string{"0"},
[]string{"-1"},
{"1"},
{"33000"},
{"0"},
{"-1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"-44000"},
[]string{"0"},
[]string{"-1"},
{"1"},
{"-44000"},
{"0"},
{"-1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"2547483648"},
[]string{"0"},
[]string{"-1"},
{"1"},
{"2547483648"},
{"0"},
{"-1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{"1"},
[]string{"-4347483648"},
[]string{"0"},
[]string{"-1"},
{"1"},
{"-4347483648"},
{"0"},
{"-1"},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{fmt.Sprintf("%d", uint64(1<<63))},
[]string{fmt.Sprintf("%d", uint64(1<<63)+1)},
{fmt.Sprintf("%d", uint64(1<<63))},
{fmt.Sprintf("%d", uint64(1<<63)+1)},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
)
test(
[][]string{
[]string{fmt.Sprintf("%d", uint64(1<<32))},
[]string{fmt.Sprintf("%d", uint64(1<<32)+1)},
{fmt.Sprintf("%d", uint64(1<<32))},
{fmt.Sprintf("%d", uint64(1<<32)+1)},
},
[]KindSlice{
KindSlice{
{
types.NumberKind,
types.StringKind},
},
@@ -274,37 +274,37 @@ func TestCombinationsWithLength(t *testing.T) {
assert.Equal(expect, combinations)
}
test([]int{0}, 1, [][]int{
[]int{0},
{0},
})
test([]int{1}, 1, [][]int{
[]int{1},
{1},
})
test([]int{0, 1}, 1, [][]int{
[]int{0},
[]int{1},
{0},
{1},
})
test([]int{0, 1}, 2, [][]int{
[]int{0, 1},
{0, 1},
})
test([]int{70, 80, 90, 100}, 1, [][]int{
[]int{70},
[]int{80},
[]int{90},
[]int{100},
{70},
{80},
{90},
{100},
})
test([]int{70, 80, 90, 100}, 2, [][]int{
[]int{70, 80},
[]int{70, 90},
[]int{70, 100},
[]int{80, 90},
[]int{80, 100},
[]int{90, 100},
{70, 80},
{70, 90},
{70, 100},
{80, 90},
{80, 100},
{90, 100},
})
test([]int{70, 80, 90, 100}, 3, [][]int{
[]int{70, 80, 90},
[]int{70, 80, 100},
[]int{70, 90, 100},
[]int{80, 90, 100},
{70, 80, 90},
{70, 80, 100},
{70, 90, 100},
{80, 90, 100},
})
}

@@ -319,33 +319,33 @@ func TestCombinationsWithLengthFromTo(t *testing.T) {
assert.Equal(expect, combinations)
}
test([]int{0}, 1, 1, [][]int{
[]int{0},
{0},
})
test([]int{1}, 1, 1, [][]int{
[]int{1},
{1},
})
test([]int{0, 1}, 1, 2, [][]int{
[]int{0},
[]int{1},
[]int{0, 1},
{0},
{1},
{0, 1},
})
test([]int{0, 1}, 2, 2, [][]int{
[]int{0, 1},
{0, 1},
})
test([]int{70, 80, 90, 100}, 1, 3, [][]int{
[]int{70},
[]int{80},
[]int{90},
[]int{100},
[]int{70, 80},
[]int{70, 90},
[]int{70, 100},
[]int{80, 90},
[]int{80, 100},
[]int{90, 100},
[]int{70, 80, 90},
[]int{70, 80, 100},
[]int{70, 90, 100},
[]int{80, 90, 100},
{70},
{80},
{90},
{100},
{70, 80},
{70, 90},
{70, 100},
{80, 90},
{80, 100},
{90, 100},
{70, 80, 90},
{70, 80, 100},
{70, 90, 100},
{80, 90, 100},
})
}

@@ -110,7 +110,7 @@ func TestWriteValue(t *testing.T) {
assert.Equal(http.StatusOK, w.Code, string(w.Body.Bytes()))

whash := wval.Hash()
hints := map[hash.Hash]struct{}{whash: struct{}{}}
hints := map[hash.Hash]struct{}{whash: {}}
rdr := buildGetRefsRequestBody(hints)
r, _ = newRequest("POST", dbName+constants.GetRefsPath, rdr)
r.Header.Add("Content-Type", "application/x-www-form-urlencoded")

@@ -30,23 +30,23 @@ func TestQueryScanner(t *testing.T) {
s := NewQueryScanner(`9 (99.9) -9 0x7F "99.9" and or http://localhost:8000/cli-tour::yo <= >= < > = _`)

scannerResults := []scannerResult{
scannerResult{tok: scanner.Int, text: "9"},
scannerResult{tok: int('('), text: "("},
scannerResult{tok: scanner.Float, text: "99.9"},
scannerResult{tok: int(')'), text: ")"},
scannerResult{tok: '-', text: "-"},
scannerResult{tok: scanner.Int, text: "9"},
scannerResult{tok: scanner.Int, text: "0x7F"},
scannerResult{tok: scanner.String, text: `"99.9"`},
scannerResult{tok: scanner.Ident, text: "and"},
scannerResult{tok: scanner.Ident, text: "or"},
scannerResult{tok: scanner.Ident, text: "http://localhost:8000/cli-tour::yo"},
scannerResult{tok: scanner.Ident, text: "<="},
scannerResult{tok: scanner.Ident, text: ">="},
scannerResult{tok: scanner.Ident, text: "<"},
scannerResult{tok: scanner.Ident, text: ">"},
scannerResult{tok: int('='), text: "="},
scannerResult{tok: int('_'), text: "_"},
{tok: scanner.Int, text: "9"},
{tok: int('('), text: "("},
{tok: scanner.Float, text: "99.9"},
{tok: int(')'), text: ")"},
{tok: '-', text: "-"},
{tok: scanner.Int, text: "9"},
{tok: scanner.Int, text: "0x7F"},
{tok: scanner.String, text: `"99.9"`},
{tok: scanner.Ident, text: "and"},
{tok: scanner.Ident, text: "or"},
{tok: scanner.Ident, text: "http://localhost:8000/cli-tour::yo"},
{tok: scanner.Ident, text: "<="},
{tok: scanner.Ident, text: ">="},
{tok: scanner.Ident, text: "<"},
{tok: scanner.Ident, text: ">"},
{tok: int('='), text: "="},
{tok: int('_'), text: "_"},
}

for _, sr := range scannerResults {
@@ -63,10 +63,10 @@ func TestPeek(t *testing.T) {

s := NewQueryScanner(`_ < "one"`)
scannerResults := []scannerResult{
scannerResult{tok: int('_'), text: "_"},
scannerResult{tok: scanner.Ident, text: "<"},
scannerResult{tok: scanner.String, text: `"one"`},
scannerResult{tok: scanner.EOF, text: ""},
{tok: int('_'), text: "_"},
{tok: scanner.Ident, text: "<"},
{tok: scanner.String, text: `"one"`},
{tok: scanner.EOF, text: ""},
}

for _, sr := range scannerResults {
@@ -86,17 +86,17 @@ func TestParsing(t *testing.T) {
re4 := compExpr{"index1", lt, types.Number(-2030)}

queries := []parseResult{
parseResult{`index1 = 2015`, re1},
parseResult{`(index1 = 2015 )`, re1},
parseResult{`(((index1 = 2015 ) ))`, re1},
parseResult{`index1 = 2015 or index1 >= 2020`, logExpr{or, re1, re2, "index1"}},
parseResult{`(index1 = 2015) or index1 >= 2020`, logExpr{or, re1, re2, "index1"}},
parseResult{`index1 = 2015 or (index1 >= 2020)`, logExpr{or, re1, re2, "index1"}},
parseResult{`(index1 = 2015 or index1 >= 2020)`, logExpr{or, re1, re2, "index1"}},
parseResult{`(index1 = 2015 or index1 >= 2020) and index1 <= 2022`, logExpr{and, logExpr{or, re1, re2, "index1"}, re3, "index1"}},
parseResult{`index1 = 2015 or index1 >= 2020 and index1 <= 2022`, logExpr{or, re1, logExpr{and, re2, re3, "index1"}, "index1"}},
parseResult{`index1 = 2015 or index1 >= 2020 and index1 <= 2022 or index1 < -2030`, logExpr{or, re1, logExpr{and, re2, logExpr{or, re3, re4, "index1"}, "index1"}, "index1"}},
parseResult{`(index1 = 2015 or index1 >= 2020) and (index1 <= 2022 or index1 < -2030)`, logExpr{and, logExpr{or, re1, re2, "index1"}, logExpr{or, re3, re4, "index1"}, "index1"}},
{`index1 = 2015`, re1},
{`(index1 = 2015 )`, re1},
{`(((index1 = 2015 ) ))`, re1},
{`index1 = 2015 or index1 >= 2020`, logExpr{or, re1, re2, "index1"}},
{`(index1 = 2015) or index1 >= 2020`, logExpr{or, re1, re2, "index1"}},
{`index1 = 2015 or (index1 >= 2020)`, logExpr{or, re1, re2, "index1"}},
{`(index1 = 2015 or index1 >= 2020)`, logExpr{or, re1, re2, "index1"}},
{`(index1 = 2015 or index1 >= 2020) and index1 <= 2022`, logExpr{and, logExpr{or, re1, re2, "index1"}, re3, "index1"}},
{`index1 = 2015 or index1 >= 2020 and index1 <= 2022`, logExpr{or, re1, logExpr{and, re2, re3, "index1"}, "index1"}},
{`index1 = 2015 or index1 >= 2020 and index1 <= 2022 or index1 < -2030`, logExpr{or, re1, logExpr{and, re2, logExpr{or, re3, re4, "index1"}, "index1"}, "index1"}},
{`(index1 = 2015 or index1 >= 2020) and (index1 <= 2022 or index1 < -2030)`, logExpr{and, logExpr{or, re1, re2, "index1"}, logExpr{or, re3, re4, "index1"}, "index1"}},
}

db := datas.NewDatabase(chunks.NewMemoryStore())

@@ -96,7 +96,7 @@ func (r queryRange) and(o queryRange) (rangeDescs queryRangeSlice) {

lower := r.lower.maxValue(o.lower)
upper := r.upper.minValue(o.upper)
return []queryRange{queryRange{lower, upper}}
return []queryRange{{lower, upper}}
}

func (r queryRange) or(o queryRange) (rSlice queryRangeSlice) {

@@ -43,11 +43,11 @@ func main() {
db := os.Args[1]

rs := []runner{
runner{"diff", runDiff},
runner{"log diff", runLogDiff},
runner{"log show", runLogShow},
runner{"show", runShow},
runner{"sync", runSync},
{"diff", runDiff},
{"log diff", runLogDiff},
{"log show", runLogShow},
{"show", runShow},
{"sync", runSync},
}

for ds := range streamDs(db) {

@@ -131,8 +131,8 @@ func (suite *SerialRunnerTestSuite) TestFailure() {
path, expected string
}
tests := []testCase{
testCase{suite.uniqueBuildFile(), "Scoobaz"},
testCase{suite.uniqueBuildFile(), "at the disco"},
{suite.uniqueBuildFile(), "Scoobaz"},
{suite.uniqueBuildFile(), "at the disco"},
}
goodOne := testCase{suite.uniqueBuildFile(), "All's well"}