Merge remote-tracking branch 'origin/main' into aaron/remotesrv-cleanups

This commit is contained in:
Aaron Son
2022-09-02 12:21:42 -07:00
18 changed files with 801 additions and 162 deletions
+2 -2
View File
@@ -63,9 +63,9 @@ func (cmd VersionCmd) ArgParser() *argparser.ArgParser {
func (cmd VersionCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv) int {
cli.Println("dolt version", cmd.VersionStr)
if dEnv.HasDoltDir() && !cli.CheckEnvIsValid(dEnv) {
if dEnv.HasDoltDir() && dEnv.RSLoadErr == nil && !cli.CheckEnvIsValid(dEnv) {
return 2
} else if dEnv.HasDoltDir() {
} else if dEnv.HasDoltDir() && dEnv.RSLoadErr == nil {
nbf := dEnv.DoltDB.Format()
cli.Printf("database storage format: %s\n", dfunctions.GetStorageFormatDisplayString(nbf))
} else {
+1 -1
View File
@@ -56,7 +56,7 @@ require (
)
require (
github.com/dolthub/go-mysql-server v0.12.1-0.20220830220815-1e1f50eaae23
github.com/dolthub/go-mysql-server v0.12.1-0.20220831202020-aad33a5f02f4
github.com/google/flatbuffers v2.0.6+incompatible
github.com/gosuri/uilive v0.0.4
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6
+2 -2
View File
@@ -175,8 +175,8 @@ github.com/dolthub/flatbuffers v1.13.0-dh.1 h1:OWJdaPep22N52O/0xsUevxJ6Qfw1M2txC
github.com/dolthub/flatbuffers v1.13.0-dh.1/go.mod h1:CorYGaDmXjHz1Z7i50PYXG1Ricn31GcA2wNOTFIQAKE=
github.com/dolthub/fslock v0.0.3 h1:iLMpUIvJKMKm92+N1fmHVdxJP5NdyDK5bK7z7Ba2s2U=
github.com/dolthub/fslock v0.0.3/go.mod h1:QWql+P17oAAMLnL4HGB5tiovtDuAjdDTPbuqx7bYfa0=
github.com/dolthub/go-mysql-server v0.12.1-0.20220830220815-1e1f50eaae23 h1:whaYiETfwIMm1LBOsZrMlbnD5H+LPjsbGqCA/CxoNZs=
github.com/dolthub/go-mysql-server v0.12.1-0.20220830220815-1e1f50eaae23/go.mod h1:F/UtM55EgpqIiWoXwPg0jx8euPYwUpJsKHLiFtEjKwM=
github.com/dolthub/go-mysql-server v0.12.1-0.20220831202020-aad33a5f02f4 h1:OYhOUli8IeEls+s3WO7fBFB0Hcb7c3vGs3XS3i9Bu+Y=
github.com/dolthub/go-mysql-server v0.12.1-0.20220831202020-aad33a5f02f4/go.mod h1:F/UtM55EgpqIiWoXwPg0jx8euPYwUpJsKHLiFtEjKwM=
github.com/dolthub/ishell v0.0.0-20220112232610-14e753f0f371 h1:oyPHJlzumKta1vnOQqUnfdz+pk3EmnHS3Nd0cCT0I2g=
github.com/dolthub/ishell v0.0.0-20220112232610-14e753f0f371/go.mod h1:dhGBqcCEfK5kuFmeO5+WOx3hqc1k3M29c1oS/R7N4ms=
github.com/dolthub/jsonpath v0.0.0-20210609232853-d49537a30474 h1:xTrR+l5l+1Lfq0NvhiEsctylXinUMFhhsqaEcl414p8=
+21 -2
View File
@@ -32,8 +32,10 @@ import (
)
const (
doltDir = dbfactory.DoltDir
nomsDir = dbfactory.DataDir
doltDir = dbfactory.DoltDir
nomsDir = dbfactory.DataDir
oldGenDir = "oldgen"
manifestFile = "manifest"
migrationRef = "migration"
)
@@ -217,12 +219,29 @@ func swapManifests(ctx context.Context, src, dest filesys.Filesys) (err error) {
return err
}
// backup the current oldgen manifest, if one exists
gcManifest := filepath.Join(doltDir, nomsDir, oldGenDir, manifestFile)
oldGen, _ := dest.Exists(gcManifest)
if oldGen {
bak = filepath.Join(doltDir, nomsDir, oldGenDir, manifestFile+".bak")
if err = filesys.CopyFile(gcManifest, bak, dest, dest); err != nil {
return err
}
}
// copy manifest to |dest| under temporary name
tmp := filepath.Join(doltDir, nomsDir, "temp-manifest")
if err = filesys.CopyFile(manifest, tmp, src, dest); err != nil {
return err
}
// delete current oldgen manifest
if oldGen {
if err = dest.Delete(gcManifest, true); err != nil {
return err
}
}
// atomically swap the manifests
return dest.MoveFile(tmp, manifest)
// exit immediately!
@@ -1186,7 +1186,6 @@ func TestDiffSystemTable(t *testing.T) {
}
func TestDiffSystemTablePrepared(t *testing.T) {
t.Skip()
harness := newDoltHarness(t)
harness.Setup(setup.MydbData)
for _, test := range DiffSystemTableScriptTests {
@@ -1003,6 +1003,29 @@ var HistorySystemTableScriptTests = []queries.ScriptTest{
{3, 4},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where pk = 3",
Expected: []sql.Row{
{"Exchange"},
{" └─ Filter(dolt_history_t1.pk = 3)"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.pk]"},
{" ├─ filters: [{[3, 3]}]"},
{" └─ columns: [pk c]"},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where pk = 3 and committer = 'someguy'",
Expected: []sql.Row{
{"Exchange"},
{" └─ Project(dolt_history_t1.pk, dolt_history_t1.c)"},
{" └─ Filter((dolt_history_t1.pk = 3) AND (dolt_history_t1.committer = 'someguy'))"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.pk]"},
{" ├─ filters: [{[3, 3]}]"},
{" └─ columns: [pk c committer]"},
},
},
},
},
{
@@ -1050,6 +1073,29 @@ var HistorySystemTableScriptTests = []queries.ScriptTest{
{9, 10},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where c = 4",
Expected: []sql.Row{
{"Exchange"},
{" └─ Filter(dolt_history_t1.c = 4)"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.c]"},
{" ├─ filters: [{[4, 4]}]"},
{" └─ columns: [pk c]"},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where c = 10 and committer = 'someguy'",
Expected: []sql.Row{
{"Exchange"},
{" └─ Project(dolt_history_t1.pk, dolt_history_t1.c)"},
{" └─ Filter((dolt_history_t1.c = 10) AND (dolt_history_t1.committer = 'someguy'))"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.c]"},
{" ├─ filters: [{[10, 10]}]"},
{" └─ columns: [pk c committer]"},
},
},
},
},
{
@@ -1206,83 +1252,6 @@ var HistorySystemTableScriptTests = []queries.ScriptTest{
},
},
},
{
SkipPrepared: true,
Name: "index by primary key",
SetUpScript: []string{
"create table t1 (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t1 values (1,2), (3,4)",
"set @Commit1 = dolt_commit('-am', 'initial table');",
"insert into t1 values (5,6), (7,8)",
"set @Commit2 = dolt_commit('-am', 'two more rows');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "explain select pk, c from dolt_history_t1 where pk = 3",
Expected: []sql.Row{
{"Exchange"},
{" └─ Filter(dolt_history_t1.pk = 3)"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.pk]"},
{" ├─ filters: [{[3, 3]}]"},
{" └─ columns: [pk c]"},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where pk = 3 and committer = 'someguy'",
Expected: []sql.Row{
{"Exchange"},
{" └─ Project(dolt_history_t1.pk, dolt_history_t1.c)"},
{" └─ Filter((dolt_history_t1.pk = 3) AND (dolt_history_t1.committer = 'someguy'))"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.pk]"},
{" ├─ filters: [{[3, 3]}]"},
{" └─ columns: [pk c committer]"},
},
},
},
},
{
SkipPrepared: true,
Name: "adding an index",
SetUpScript: []string{
"create table t1 (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t1 values (1,2), (3,4)",
"set @Commit1 = dolt_commit('-am', 'initial table');",
"insert into t1 values (5,6), (7,8)",
"set @Commit2 = dolt_commit('-am', 'two more rows');",
"insert into t1 values (9,10), (11,12)",
"create index t1_c on t1(c)",
"set @Commit2 = dolt_commit('-am', 'two more rows and an index');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "explain select pk, c from dolt_history_t1 where c = 4",
Expected: []sql.Row{
{"Exchange"},
{" └─ Filter(dolt_history_t1.c = 4)"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.c]"},
{" ├─ filters: [{[4, 4]}]"},
{" └─ columns: [pk c]"},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where c = 10 and committer = 'someguy'",
Expected: []sql.Row{
{"Exchange"},
{" └─ Project(dolt_history_t1.pk, dolt_history_t1.c)"},
{" └─ Filter((dolt_history_t1.c = 10) AND (dolt_history_t1.committer = 'someguy'))"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.c]"},
{" ├─ filters: [{[10, 10]}]"},
{" └─ columns: [pk c committer]"},
},
},
},
},
{
SkipPrepared: true,
Name: "dolt_history table with AS OF",
@@ -1312,81 +1281,6 @@ var HistorySystemTableScriptTests = []queries.ScriptTest{
// BrokenHistorySystemTableScriptTests contains tests that work for non-prepared, but don't work
// for prepared queries.
var BrokenHistorySystemTableScriptTests = []queries.ScriptTest{
{
Name: "index by primary key",
SetUpScript: []string{
"create table t1 (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t1 values (1,2), (3,4)",
"set @Commit1 = dolt_commit('-am', 'initial table');",
"insert into t1 values (5,6), (7,8)",
"set @Commit2 = dolt_commit('-am', 'two more rows');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "explain select pk, c from dolt_history_t1 where pk = 3",
Expected: []sql.Row{
{"Exchange"},
{" └─ Filter(dolt_history_t1.pk = 3)"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.pk]"},
{" ├─ filters: [{[3, 3]}]"},
{" └─ columns: [pk c]"},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where pk = 3 and committer = 'someguy'",
Expected: []sql.Row{
{"Exchange"},
{" └─ Project(dolt_history_t1.pk, dolt_history_t1.c)"},
{" └─ Filter((dolt_history_t1.pk = 3) AND (dolt_history_t1.committer = 'someguy'))"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.pk]"},
{" ├─ filters: [{[3, 3]}]"},
{" └─ columns: [pk c committer]"},
},
},
},
},
{
Name: "adding an index",
SetUpScript: []string{
"create table t1 (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t1 values (1,2), (3,4)",
"set @Commit1 = dolt_commit('-am', 'initial table');",
"insert into t1 values (5,6), (7,8)",
"set @Commit2 = dolt_commit('-am', 'two more rows');",
"insert into t1 values (9,10), (11,12)",
"create index t1_c on t1(c)",
"set @Commit2 = dolt_commit('-am', 'two more rows and an index');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "explain select pk, c from dolt_history_t1 where c = 4",
Expected: []sql.Row{
{"Exchange"},
{" └─ Filter(dolt_history_t1.c = 4)"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.c]"},
{" ├─ filters: [{[4, 4]}]"},
{" └─ columns: [pk c]"},
},
},
{
Query: "explain select pk, c from dolt_history_t1 where c = 10 and committer = 'someguy'",
Expected: []sql.Row{
{"Exchange"},
{" └─ Project(dolt_history_t1.pk, dolt_history_t1.c)"},
{" └─ Filter((dolt_history_t1.c = 10) AND (dolt_history_t1.committer = 'someguy'))"},
{" └─ IndexedTableAccess(dolt_history_t1)"},
{" ├─ index: [dolt_history_t1.c]"},
{" ├─ filters: [{[10, 10]}]"},
{" └─ columns: [pk c committer]"},
},
},
},
},
{
Name: "dolt_history table with AS OF",
SetUpScript: []string{
+28
View File
@@ -124,10 +124,19 @@ func DiffMaps(ctx context.Context, from, to Map, cb DiffFn) error {
return diffOrderedTrees(ctx, from.tuples, to.tuples, cb)
}
// RangeDiffMaps returns diffs within a Range. See Range for which diffs are
// returned. |cb| is invoked once per diff; if |cb| returns an error,
// iteration stops and that error is returned.
func RangeDiffMaps(ctx context.Context, from, to Map, rng Range, cb DiffFn) error {
	return rangeDiffOrderedTrees(ctx, from.tuples, to.tuples, rng, cb)
}
// DiffMapsKeyRange returns diffs within a physical key range. The key range is
// specified by |start| (inclusive) and |stop| (exclusive). If |start| and/or
// |stop| is nil, then the range is unbounded towards that end.
func DiffMapsKeyRange(ctx context.Context, from, to Map, start, stop val.Tuple, cb DiffFn) error {
	return diffKeyRangeOrderedTrees(ctx, from.tuples, to.tuples, start, stop, cb)
}
func MergeMaps(ctx context.Context, left, right, base Map, cb tree.CollisionFn) (Map, error) {
serializer := message.NewProllyMapSerializer(left.valDesc, base.NodeStore().Pool())
tuples, err := mergeOrderedTrees(ctx, left.tuples, right.tuples, base.tuples, cb, serializer, base.valDesc)
@@ -233,6 +242,25 @@ func (m Map) IterRange(ctx context.Context, rng Range) (MapIter, error) {
return filteredIter{iter: iter, rng: rng}, nil
}
// IterKeyRange iterates over a physical key range defined by |start| and
// |stop|. If |start| and/or |stop| is nil, the range will be open
// towards that end.
func (m Map) IterKeyRange(ctx context.Context, start, stop val.Tuple) (MapIter, error) {
	return m.tuples.iterKeyRange(ctx, start, stop)
}
// GetOrdinalForKey returns the smallest ordinal position at which the key >=
// |query|. If every key in the map sorts before |query|, the returned ordinal
// is the map's cardinality.
func (m Map) GetOrdinalForKey(ctx context.Context, query val.Tuple) (uint64, error) {
	return m.tuples.getOrdinalForKey(ctx, query)
}
// GetKeyRangeCardinality returns the number of key-value tuples between
// |startInclusive| and |endExclusive|. If |startInclusive| and/or
// |endExclusive| is nil, that end of the range is unbounded.
func (m Map) GetKeyRangeCardinality(ctx context.Context, startInclusive val.Tuple, endExclusive val.Tuple) (uint64, error) {
	return m.tuples.getKeyRangeCardinality(ctx, startInclusive, endExclusive)
}
// Node returns the root tree.Node of the map's underlying prolly tree.
func (m Map) Node() tree.Node {
	return m.tuples.root
}
+331
View File
@@ -0,0 +1,331 @@
// Copyright 2022 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package prolly
import (
"context"
"errors"
"fmt"
"io"
"math/rand"
"sort"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/store/prolly/tree"
"github.com/dolthub/dolt/go/store/val"
)
// TestDiffKeyRangeMaps exercises DiffMapsKeyRange over maps of several
// scales, using fully-bounded ranges, ranges whose bounds are absent from
// the map, and ranges unbounded at one end.
func TestDiffKeyRangeMaps(t *testing.T) {
	for _, scale := range []int{10, 100, 1000, 10000} {
		name := fmt.Sprintf("test map RangeDiff at scale %d", scale)
		t.Run(name, func(t *testing.T) {
			om, tuples := makeProllyMap(t, scale)
			require.Equal(t, scale, len(tuples))
			m := om.(Map)
			kd, vd := m.Descriptors()

			t.Run("BoundedKeyRange", func(t *testing.T) {
				runDiffTestsWithKeyRange(t, scale, m, tuples, makeRandomBoundedKeyRange(kd, tuples))
			})
			t.Run("BoundedKeyRangeWithMissingKeys", func(t *testing.T) {
				runDiffTestsWithKeyRange(t, scale, m, tuples, makeBoundedKeyRangeWithMissingKeys(t, m, kd, vd, tuples))
			})
			t.Run("UnboundedLowerKeyRange", func(t *testing.T) {
				runDiffTestsWithKeyRange(t, scale, m, tuples, makeRandomUnboundedLowerKeyRange(kd, tuples))
			})
			t.Run("UnboundedUpperKeyRange", func(t *testing.T) {
				runDiffTestsWithKeyRange(t, scale, m, tuples, makeRandomUnboundedUpperKeyRange(kd, tuples))
			})
		})
	}
}
// runDiffTestsWithKeyRange runs the full key-range diff suite (error
// propagation, equal maps, deletes, inserts, updates) against |prollyMap|
// with the key range described by |rngTest|. |s| is the map's scale.
func runDiffTestsWithKeyRange(t *testing.T, s int, prollyMap testMap, tuples [][2]val.Tuple, rngTest keyRangeDiffTest) {
	m := prollyMap.(Map)

	t.Run("map diff error handling", func(t *testing.T) {
		testKeyRngMapDiffErrorHandling(t, m, rngTest)
	})
	t.Run("equal map diff", func(t *testing.T) {
		testKeyRngEqualMapDiff(t, m, rngTest)
	})
	t.Run("map diff against empty", func(t *testing.T) {
		testKeyRngMapDiffAgainstEmpty(t, s, rngTest)
	})

	// deletes
	t.Run("single delete diff", func(t *testing.T) {
		for i := 0; i < 100; i++ {
			testKeyRngDeleteDiffs(t, m, tuples, 1, rngTest)
		}
	})
	t.Run("many delete diffs", func(t *testing.T) {
		for i := 0; i < 10; i++ {
			testKeyRngDeleteDiffs(t, m, tuples, s/10, rngTest)
			testKeyRngDeleteDiffs(t, m, tuples, s/2, rngTest)
		}
	})
	t.Run("diff against empty map", func(t *testing.T) {
		testKeyRngDeleteDiffs(t, m, tuples, s, rngTest)
	})

	// inserts
	t.Run("single insert diff", func(t *testing.T) {
		for i := 0; i < 100; i++ {
			testKeyRngInsertDiffs(t, m, tuples, 1, rngTest)
		}
	})
	t.Run("many insert diffs", func(t *testing.T) {
		for i := 0; i < 10; i++ {
			testKeyRngInsertDiffs(t, m, tuples, s/10, rngTest)
			testKeyRngInsertDiffs(t, m, tuples, s/2, rngTest)
		}
	})

	// updates
	t.Run("single update diff", func(t *testing.T) {
		for i := 0; i < 100; i++ {
			testKeyRngUpdateDiffs(t, m, tuples, 1, rngTest)
		}
	})
	t.Run("many update diffs", func(t *testing.T) {
		for i := 0; i < 10; i++ {
			testKeyRngUpdateDiffs(t, m, tuples, s/10, rngTest)
			testKeyRngUpdateDiffs(t, m, tuples, s/2, rngTest)
		}
	})
}
// testKeyRngMapDiffErrorHandling verifies that DiffMapsKeyRange terminates
// with an error: either the callback's error is propagated, or (when the
// maps are identical and the callback never fires) the differ's io.EOF.
func testKeyRngMapDiffErrorHandling(t *testing.T, m Map, test keyRangeDiffTest) {
	ctx := context.Background()
	expErr := errors.New("error case")
	err := DiffMapsKeyRange(ctx, m, m, test.keyRange.start, test.keyRange.stop, func(ctx context.Context, diff tree.Diff) error {
		return expErr
	})
	// BUGFIX: the original `require.Error(t, expErr, err)` passed |expErr| as
	// the error under test, so it only asserted that expErr != nil — a vacuous
	// check that ignored |err| entirely. Assert on |err| itself instead.
	require.Error(t, err)
}
// testKeyRngEqualMapDiff diffs a map against itself and asserts that no
// diffs are produced within the key range.
func testKeyRngEqualMapDiff(t *testing.T, m Map, rngTest keyRangeDiffTest) {
	ctx := context.Background()
	var counter int
	err := DiffMapsKeyRange(ctx, m, m, rngTest.keyRange.start, rngTest.keyRange.stop, func(ctx context.Context, diff tree.Diff) error {
		counter++
		return nil
	})
	// BUGFIX: the original `require.Error(t, io.EOF, err)` only asserted that
	// io.EOF != nil (always true) and never inspected |err|. The differ
	// signals exhaustion with io.EOF, so assert that explicitly.
	require.ErrorIs(t, err, io.EOF)
	assert.Equal(t, 0, counter)
}
// testKeyRngMapDiffAgainstEmpty diffs a populated map against an empty map in
// both directions, asserting every in-range tuple is reported exactly once —
// as a removal in one direction and as an addition in the other.
func testKeyRngMapDiffAgainstEmpty(t *testing.T, scale int, rngTest keyRangeDiffTest) {
	ctx := context.Background()
	m, tuples := makeProllyMap(t, scale)
	empty, _ := makeProllyMap(t, 0)

	inRange := getPairsInKeyRange(tuples, rngTest.keyRange)

	// |m| -> |empty|: every in-range pair surfaces with From set and To nil.
	cnt := 0
	err := DiffMapsKeyRange(ctx, m.(Map), empty.(Map), rngTest.keyRange.start, rngTest.keyRange.stop, func(ctx context.Context, diff tree.Diff) error {
		assert.Equal(t, inRange[cnt][0], val.Tuple(diff.Key))
		assert.Equal(t, inRange[cnt][1], val.Tuple(diff.From))
		assert.Nil(t, val.Tuple(diff.To))
		assert.True(t, rngTest.keyRange.includes(val.Tuple(diff.Key)))
		cnt++
		return nil
	})
	// BUGFIX: `require.Error(t, io.EOF, err)` only checked io.EOF != nil and
	// ignored |err|; the differ terminates with io.EOF, so assert that.
	require.ErrorIs(t, err, io.EOF)
	assert.Equal(t, len(inRange), cnt)

	// |empty| -> |m|: the same pairs surface with To set and From nil.
	cnt = 0
	err = DiffMapsKeyRange(ctx, empty.(Map), m.(Map), rngTest.keyRange.start, rngTest.keyRange.stop, func(ctx context.Context, diff tree.Diff) error {
		assert.Equal(t, inRange[cnt][0], val.Tuple(diff.Key))
		assert.Equal(t, inRange[cnt][1], val.Tuple(diff.To))
		assert.Nil(t, val.Tuple(diff.From))
		assert.True(t, rngTest.keyRange.includes(val.Tuple(diff.Key)))
		cnt++
		return nil
	})
	require.ErrorIs(t, err, io.EOF)
	assert.Equal(t, len(inRange), cnt)
}
// testKeyRngDeleteDiffs deletes |numDeletes| randomly-chosen tuples from
// |from| and asserts DiffMapsKeyRange reports exactly the in-range deletions,
// in key order.
func testKeyRngDeleteDiffs(t *testing.T, from Map, tups [][2]val.Tuple, numDeletes int, rngTest keyRangeDiffTest) {
	ctx := context.Background()

	// pick a random subset to delete, then sort it by key so expected diffs
	// arrive in iteration order
	rand.Shuffle(len(tups), func(i, j int) {
		tups[i], tups[j] = tups[j], tups[i]
	})
	deletes := tups[:numDeletes]
	sort.Slice(deletes, func(i, j int) bool {
		return from.keyDesc.Compare(deletes[i][0], deletes[j][0]) < 0
	})
	inRange := getPairsInKeyRange(deletes, rngTest.keyRange)
	to := makeMapWithDeletes(t, from, deletes...)

	cnt := 0
	err := DiffMapsKeyRange(ctx, from, to, rngTest.keyRange.start, rngTest.keyRange.stop, func(ctx context.Context, diff tree.Diff) error {
		assert.Equal(t, tree.RemovedDiff, diff.Type)
		assert.Equal(t, inRange[cnt][0], val.Tuple(diff.Key))
		assert.True(t, rngTest.keyRange.includes(val.Tuple(diff.Key)))
		cnt++
		return nil
	})
	// BUGFIX: `require.Error(t, io.EOF, err)` only checked io.EOF != nil and
	// ignored |err|; the differ terminates with io.EOF, so assert that.
	require.ErrorIs(t, err, io.EOF)
	assert.Equal(t, len(inRange), cnt)
}
// testKeyRngInsertDiffs inserts |numInserts| new tuples into |from| and
// asserts DiffMapsKeyRange reports exactly the in-range insertions.
func testKeyRngInsertDiffs(t *testing.T, from Map, tups [][2]val.Tuple, numInserts int, rngTest keyRangeDiffTest) {
	ctx := context.Background()
	to, inserts := makeMapWithInserts(t, from, numInserts)
	inRange := getPairsInKeyRange(inserts, rngTest.keyRange)

	cnt := 0
	err := DiffMapsKeyRange(ctx, from, to, rngTest.keyRange.start, rngTest.keyRange.stop, func(ctx context.Context, diff tree.Diff) error {
		// CLEANUP: removed leftover debug scaffolding
		// (`if !assert.Equal(...) { fmt.Println("") }`)
		assert.Equal(t, tree.AddedDiff, diff.Type)
		assert.Equal(t, inRange[cnt][0], val.Tuple(diff.Key))
		assert.Equal(t, inRange[cnt][1], val.Tuple(diff.To))
		assert.True(t, rngTest.keyRange.includes(val.Tuple(diff.Key)))
		cnt++
		return nil
	})
	// BUGFIX: `require.Error(t, io.EOF, err)` only checked io.EOF != nil and
	// ignored |err|; the differ terminates with io.EOF, so assert that.
	require.ErrorIs(t, err, io.EOF)
	assert.Equal(t, len(inRange), cnt)
}
// testKeyRngUpdateDiffs updates the values of |numUpdates| randomly-chosen
// tuples and asserts DiffMapsKeyRange reports exactly the in-range
// modifications with correct before/after values.
func testKeyRngUpdateDiffs(t *testing.T, from Map, tups [][2]val.Tuple, numUpdates int, rngTest keyRangeDiffTest) {
	ctx := context.Background()

	// pick a random subset to update, sorted by key so expected diffs arrive
	// in iteration order
	rand.Shuffle(len(tups), func(i, j int) {
		tups[i], tups[j] = tups[j], tups[i]
	})
	sub := tups[:numUpdates]
	sort.Slice(sub, func(i, j int) bool {
		return from.keyDesc.Compare(sub[i][0], sub[j][0]) < 0
	})

	kd, vd := from.Descriptors()
	updates := makeUpdatesToTuples(kd, vd, sub...)
	to := makeMapWithUpdates(t, from, updates...)

	// updates are (key, oldValue, newValue) triples; keep only in-range keys
	var inRange [][3]val.Tuple
	for _, pair := range updates {
		if rngTest.keyRange.includes(pair[0]) {
			inRange = append(inRange, pair)
		}
	}

	var cnt int
	err := DiffMapsKeyRange(ctx, from, to, rngTest.keyRange.start, rngTest.keyRange.stop, func(ctx context.Context, diff tree.Diff) error {
		assert.Equal(t, tree.ModifiedDiff, diff.Type)
		assert.Equal(t, inRange[cnt][0], val.Tuple(diff.Key))
		assert.Equal(t, inRange[cnt][1], val.Tuple(diff.From))
		assert.Equal(t, inRange[cnt][2], val.Tuple(diff.To))
		assert.True(t, rngTest.keyRange.includes(val.Tuple(diff.Key)))
		cnt++
		return nil
	})
	// BUGFIX: `require.Error(t, io.EOF, err)` only checked io.EOF != nil and
	// ignored |err|; the differ terminates with io.EOF, so assert that.
	require.ErrorIs(t, err, io.EOF)
	assert.Equal(t, len(inRange), cnt)
}
// makeRandomBoundedKeyRange builds a test case whose key range is bounded on
// both ends by keys randomly chosen from |tuples|.
func makeRandomBoundedKeyRange(kd val.TupleDesc, tuples [][2]val.Tuple) keyRangeDiffTest {
	lo, hi := rand.Intn(len(tuples)), rand.Intn(len(tuples))
	if hi < lo {
		lo, hi = hi, lo
	}
	return keyRangeDiffTest{
		tuples:   tuples,
		keyRange: keyRange{kd: kd, start: tuples[lo][0], stop: tuples[hi][0]},
	}
}
// makeRandomUnboundedLowerKeyRange builds a test case whose key range has no
// lower bound and a randomly chosen upper bound.
func makeRandomUnboundedLowerKeyRange(kd val.TupleDesc, tuples [][2]val.Tuple) keyRangeDiffTest {
	stop := tuples[rand.Intn(len(tuples))][0]
	return keyRangeDiffTest{
		tuples:   tuples,
		keyRange: keyRange{kd: kd, stop: stop},
	}
}
// makeRandomUnboundedUpperKeyRange builds a test case whose key range has a
// randomly chosen lower bound and no upper bound.
func makeRandomUnboundedUpperKeyRange(kd val.TupleDesc, tuples [][2]val.Tuple) keyRangeDiffTest {
	start := tuples[rand.Intn(len(tuples))][0]
	return keyRangeDiffTest{
		tuples:   tuples,
		keyRange: keyRange{kd: kd, start: start},
	}
}
// makeBoundedKeyRangeWithMissingKeys builds a test case whose key range is
// bounded by two keys that do NOT exist in |m|, exercising range endpoints
// that fall between stored keys.
func makeBoundedKeyRangeWithMissingKeys(t *testing.T, m Map, kd val.TupleDesc, vd val.TupleDesc, tuples [][2]val.Tuple) keyRangeDiffTest {
	// generateInserts returns tuples whose keys are absent from |m|
	inserts := generateInserts(t, m, kd, vd, 2)
	lo, hi := inserts[0][0], inserts[1][0]
	if kd.Compare(lo, hi) > 0 {
		lo, hi = hi, lo
	}
	return keyRangeDiffTest{
		tuples:   tuples,
		keyRange: keyRange{kd: kd, start: lo, stop: hi},
	}
}
// getPairsInKeyRange filters |tuples|, returning only the pairs whose key
// falls within |rng|. Order is preserved.
func getPairsInKeyRange(tuples [][2]val.Tuple, rng keyRange) (keys [][2]val.Tuple) {
	for i := range tuples {
		if rng.includes(tuples[i][0]) {
			keys = append(keys, tuples[i])
		}
	}
	return keys
}
// keyRange describes a half-open key interval [start, stop). A zero-length
// bound leaves that end of the range open (see includes).
type keyRange struct {
	// start is the inclusive lower bound; empty means unbounded below
	start val.Tuple
	// stop is the exclusive upper bound; empty means unbounded above
	stop val.Tuple
	// kd orders keys for bound comparisons
	kd val.TupleDesc
}
// includes reports whether |k| falls within the half-open range
// [start, stop). An empty bound is treated as unbounded at that end.
func (kR keyRange) includes(k val.Tuple) bool {
	if len(kR.start) != 0 && kR.kd.Compare(k, kR.start) < 0 {
		// below the inclusive lower bound
		return false
	}
	// within range unless at or above the exclusive upper bound
	return len(kR.stop) == 0 || kR.kd.Compare(k, kR.stop) < 0
}
// keyRangeDiffTest bundles a map's tuples with the key range under test.
type keyRangeDiffTest struct {
	// tuples is the full (sorted) key-value content of the map under test
	tuples [][2]val.Tuple
	// keyRange restricts which diffs the test expects to observe
	keyRange keyRange
}
+1 -1
View File
@@ -245,7 +245,7 @@ func makeMapWithInserts(t *testing.T, m Map, numInserts int) (Map, [][2]val.Tupl
}
// generates tuple pairs not currently in |m|
func generateInserts(t *testing.T, m Map, kd, vd val.TupleDesc, numInserts int) [][2]val.Tuple {
func generateInserts(t *testing.T, m testMap, kd, vd val.TupleDesc, numInserts int) [][2]val.Tuple {
ctx := context.Background()
ns := tree.NewTestNodeStore()
tups := tree.RandomTuplePairs(numInserts*2, kd, vd, ns)
+37 -1
View File
@@ -62,6 +62,9 @@ func TestMap(t *testing.T) {
t.Run("iter ordinal range", func(t *testing.T) {
testIterOrdinalRange(t, prollyMap.(Map), tuples)
})
t.Run("iter key range", func(t *testing.T) {
testIterKeyRange(t, prollyMap.(Map), tuples)
})
indexMap, tuples2 := makeProllySecondaryIndex(t, s)
t.Run("iter prefix range", func(t *testing.T) {
@@ -302,7 +305,7 @@ func testGet(t *testing.T, om testMap, tuples [][2]val.Tuple) {
ctx := context.Background()
// test get
for _, kv := range tuples {
for i, kv := range tuples {
err := om.Get(ctx, kv[0], func(key, val val.Tuple) (err error) {
assert.NotNil(t, kv[0])
expKey, expVal := kv[0], kv[1]
@@ -311,6 +314,39 @@ func testGet(t *testing.T, om testMap, tuples [][2]val.Tuple) {
return
})
require.NoError(t, err)
if m, ok := om.(Map); ok {
ord, err := m.GetOrdinalForKey(ctx, kv[0])
require.NoError(t, err)
assert.Equal(t, uint64(i), ord)
}
}
// test get with non-existent keys
kd, vd := om.Descriptors()
inserts := generateInserts(t, om, kd, vd, len(tuples)/2)
for _, kv := range inserts {
err := om.Get(ctx, kv[0], func(key, val val.Tuple) (err error) {
assert.Equal(t, 0, len(key), "Got %s", kd.Format(key))
assert.Equal(t, 0, len(val), "Got %s", vd.Format(val))
return nil
})
require.NoError(t, err)
if m, ok := om.(Map); ok {
// find the expected ordinal return value for this non-existent key
exp := len(tuples)
for i := 0; i < len(tuples); i++ {
if kd.Compare(tuples[i][0], kv[0]) >= 0 {
exp = i
break
}
}
ord, err := m.GetOrdinalForKey(ctx, kv[0])
require.NoError(t, err)
assert.Equal(t, uint64(exp), ord)
}
}
desc := keyDescFromMap(om)
+5
View File
@@ -142,6 +142,11 @@ func (mut MutableMap) HasEdits() bool {
return mut.tuples.edits.Count() > 0
}
// Descriptors returns the key and value val.TupleDesc describing the
// layout of this map's keys and values.
func (mut MutableMap) Descriptors() (val.TupleDesc, val.TupleDesc) {
	return mut.keyDesc, mut.valDesc
}
func debugFormat(ctx context.Context, m MutableMap) (string, error) {
kd, vd := m.keyDesc, m.valDesc
+153
View File
@@ -115,6 +115,81 @@ func rangeDiffOrderedTrees[K, V ~[]byte, O ordering[K]](
return err
}
// diffKeyRangeOrderedTrees streams the diffs between |from| and |to| that
// fall within the key range [start, stop), invoking |cb| once per diff.
// Zero-length |start| / |stop| leave the corresponding end of the range
// unbounded. Iteration ends when the differ is exhausted or |cb| returns a
// non-nil error; the terminating error (including the differ's sentinel)
// is returned to the caller.
func diffKeyRangeOrderedTrees[K, V ~[]byte, O ordering[K]](
	ctx context.Context,
	from, to orderedTree[K, V, O],
	start, stop K,
	cb DiffFn,
) error {
	var fromStart, fromStop, toStart, toStop *tree.Cursor
	var err error

	// position the lower-bound cursors: at the tree start when unbounded,
	// otherwise at |start| in each tree
	if len(start) == 0 {
		fromStart, err = tree.NewCursorAtStart(ctx, from.ns, from.root)
		if err != nil {
			return err
		}
		toStart, err = tree.NewCursorAtStart(ctx, to.ns, to.root)
		if err != nil {
			return err
		}
	} else {
		fromStart, err = tree.NewCursorAtItem(ctx, from.ns, from.root, tree.Item(start), from.searchNode)
		if err != nil {
			return err
		}
		toStart, err = tree.NewCursorAtItem(ctx, to.ns, to.root, tree.Item(start), to.searchNode)
		if err != nil {
			return err
		}
	}

	// position the upper-bound cursors: past the tree end when unbounded,
	// otherwise at |stop| (exclusive) in each tree
	if len(stop) == 0 {
		fromStop, err = tree.NewCursorPastEnd(ctx, from.ns, from.root)
		if err != nil {
			return err
		}
		toStop, err = tree.NewCursorPastEnd(ctx, to.ns, to.root)
		if err != nil {
			return err
		}
	} else {
		fromStop, err = tree.NewCursorAtItem(ctx, from.ns, from.root, tree.Item(stop), from.searchNode)
		if err != nil {
			return err
		}
		toStop, err = tree.NewCursorAtItem(ctx, to.ns, to.root, tree.Item(stop), to.searchNode)
		if err != nil {
			return err
		}
	}

	// both trees share a key ordering, so |from|'s comparator suffices
	cfn := func(left, right tree.Item) int {
		return from.order.Compare(K(left), K(right))
	}

	differ, err := tree.DifferFromCursors(fromStart, toStart, fromStop, toStop, cfn)
	if err != nil {
		return err
	}

	// drain the differ, forwarding each diff to |cb|; either source of error
	// (differ exhaustion or callback) terminates the loop and is returned
	for {
		var diff tree.Diff
		if diff, err = differ.Next(ctx); err != nil {
			break
		}
		if err = cb(ctx, diff); err != nil {
			break
		}
	}
	return err
}
func mergeOrderedTrees[K, V ~[]byte, O ordering[K], S message.Serializer](
ctx context.Context,
l, r, base orderedTree[K, V, O],
@@ -330,6 +405,74 @@ func (t orderedTree[K, V, O]) fetchOrdinalRange(ctx context.Context, start, stop
}, nil
}
// iterKeyRange returns an iterator over the tuples in [start, stop).
// Zero-length bounds leave the corresponding end of the range open.
func (t orderedTree[K, V, O]) iterKeyRange(ctx context.Context, start, stop K) (*orderedTreeIter[K, V], error) {
	first, last, err := t.getKeyRangeCursors(ctx, start, stop)
	if err != nil {
		return nil, err
	}
	done := func(curr *tree.Cursor) bool {
		return curr.Compare(last) >= 0
	}
	if done(first) {
		// empty range: an iterator with a nil cursor yields io.EOF immediately
		return &orderedTreeIter[K, V]{curr: nil}, nil
	}
	return &orderedTreeIter[K, V]{curr: first, stop: done, step: first.Advance}, nil
}
// getKeyRangeCardinality returns the number of tuples in [start, stop),
// computed as the difference of the bounds' ordinal positions. Zero-length
// bounds leave that end of the range open.
func (t orderedTree[K, V, O]) getKeyRangeCardinality(ctx context.Context, start, stop K) (uint64, error) {
	lo, hi, err := t.getKeyRangeCursors(ctx, start, stop)
	if err != nil {
		return 0, err
	}
	first, err := tree.GetOrdinalOfCursor(lo)
	if err != nil {
		return 0, err
	}
	last, err := tree.GetOrdinalOfCursor(hi)
	if err != nil {
		return 0, err
	}
	if last < first {
		// |stop| sorts before |start|; the range is empty
		return 0, nil
	}
	return last - first, nil
}
// getKeyRangeCursors returns a pair of cursors bracketing the key range
// [startInclusive, stopExclusive). A zero-length bound positions the
// corresponding cursor at the tree's start (resp. past its end).
func (t orderedTree[K, V, O]) getKeyRangeCursors(ctx context.Context, startInclusive, stopExclusive K) (lo, hi *tree.Cursor, err error) {
	if len(startInclusive) != 0 {
		lo, err = tree.NewCursorAtItem(ctx, t.ns, t.root, tree.Item(startInclusive), t.searchNode)
	} else {
		lo, err = tree.NewCursorAtStart(ctx, t.ns, t.root)
	}
	if err != nil {
		return nil, nil, err
	}

	if len(stopExclusive) != 0 {
		hi, err = tree.NewCursorAtItem(ctx, t.ns, t.root, tree.Item(stopExclusive), t.searchNode)
	} else {
		hi, err = tree.NewCursorPastEnd(ctx, t.ns, t.root)
	}
	if err != nil {
		return nil, nil, err
	}

	return lo, hi, nil
}
// searchNode returns the smallest index where nd[i] >= query
// Adapted from search.Sort to inline comparison.
func (t orderedTree[K, V, O]) searchNode(query tree.Item, nd tree.Node) int {
@@ -356,6 +499,16 @@ func (t orderedTree[K, V, O]) compareItems(left, right tree.Item) int {
return t.order.Compare(K(left), K(right))
}
// getOrdinalForKey returns the smallest ordinal position at which the key >=
// |query|. The cursor is positioned via searchNode and its ordinal computed
// by tree.GetOrdinalOfCursor.
func (t orderedTree[K, V, O]) getOrdinalForKey(ctx context.Context, query K) (uint64, error) {
	cur, err := tree.NewCursorAtItem(ctx, t.ns, t.root, tree.Item(query), t.searchNode)
	if err != nil {
		return 0, err
	}
	return tree.GetOrdinalOfCursor(cur)
}
var _ tree.ItemSearchFn = orderedTree[tree.Item, tree.Item, ordering[tree.Item]]{}.searchNode
var _ tree.CompareFn = orderedTree[tree.Item, tree.Item, ordering[tree.Item]]{}.compareItems
+33
View File
@@ -113,6 +113,7 @@ func testIterRange(t *testing.T, om testMap, tuples [][2]val.Tuple) {
}
}
assert.Equal(t, io.EOF, err)
if !assert.Equal(t, test.expCount, actCount) {
fmt.Println("here")
}
@@ -492,6 +493,38 @@ func testIterOrdinalRangeWithBounds(t *testing.T, om Map, tuples [][2]val.Tuple,
})
}
// testIterKeyRange validates IterKeyRange against a brute-force scan of
// |tuples|, using a random key range whose endpoints are absent from |m|.
func testIterKeyRange(t *testing.T, m Map, tuples [][2]val.Tuple) {
	ctx := context.Background()
	t.Run("RandomKeyRange", func(t *testing.T) {
		// pick two keys not present in the map and order them
		bounds := generateInserts(t, m, m.keyDesc, m.valDesc, 2)
		lo, hi := bounds[0][0], bounds[1][0]
		if m.keyDesc.Compare(lo, hi) > 0 {
			lo, hi = hi, lo
		}
		rng := keyRange{kd: m.keyDesc, start: lo, stop: hi}

		// brute-force the expected keys
		var want []val.Tuple
		for i := range tuples {
			if rng.includes(tuples[i][0]) {
				want = append(want, tuples[i][0])
			}
		}

		iter, err := m.IterKeyRange(ctx, lo, hi)
		require.NoError(t, err)
		for _, expected := range want {
			actual, _, err := iter.Next(ctx)
			require.NoError(t, err)
			assert.Equal(t, expected, actual)
		}
		// the iterator must be exhausted exactly at the range's end
		_, _, err = iter.Next(ctx)
		require.Equal(t, io.EOF, err)
	})
}
func iterOrdinalRange(t *testing.T, ctx context.Context, iter MapIter) (actual [][2]val.Tuple) {
for {
k, v, err := iter.Next(ctx)
+41
View File
@@ -24,6 +24,7 @@ package tree
import (
"context"
"errors"
"fmt"
"sort"
"github.com/dolthub/dolt/go/store/hash"
@@ -139,6 +140,46 @@ func NewCursorAtOrdinal(ctx context.Context, ns NodeStore, nd Node, ord uint64)
})
}
// GetOrdinalOfCursor returns the ordinal position of a Cursor. The cursor
// must be positioned at a leaf; the ordinal is the count of leaf items that
// precede the cursor's current position, computed by summing the cursor's
// index within its leaf with the subtree cardinalities of every left-hand
// sibling along the path to the root.
func GetOrdinalOfCursor(curr *Cursor) (ord uint64, err error) {
	leaf, err := curr.isLeaf()
	if err != nil {
		return 0, err
	}
	if !leaf {
		return 0, fmt.Errorf("|cur| must be at a leaf")
	}

	// items preceding the cursor within its own leaf
	ord += uint64(curr.idx)

	// walk toward the root, accumulating the sizes of subtrees to the left
	for curr.parent != nil {
		curr = curr.parent

		// If a parent has been invalidated past end, act like we were at the
		// last subtree.
		if curr.idx >= curr.nd.Count() {
			curr.skipToNodeEnd()
		} else if curr.idx < 0 {
			return 0, fmt.Errorf("found invalid parent cursor behind node start")
		}

		// subtree counts are materialized lazily; load before reading
		curr.nd, err = curr.nd.loadSubtrees()
		if err != nil {
			return 0, err
		}

		// add the cardinality of every subtree left of this node's position
		for idx := curr.idx - 1; idx >= 0; idx-- {
			cnt, err := curr.nd.getSubtreeCount(idx)
			if err != nil {
				return 0, err
			}
			ord += cnt
		}
	}
	return ord, nil
}
func NewCursorFromSearchFn(ctx context.Context, ns NodeStore, nd Node, search SearchFn) (cur *Cursor, err error) {
cur = &Cursor{nd: nd, nrw: ns}
+66
View File
@@ -16,6 +16,7 @@ package tree
import (
"context"
"fmt"
"sort"
"testing"
@@ -34,6 +35,15 @@ func TestNodeCursor(t *testing.T) {
testNewCursorAtItem(t, 10_000)
})
t.Run("get ordinal at item", func(t *testing.T) {
counts := []int{10, 100, 1000, 10_000}
for _, c := range counts {
t.Run(fmt.Sprintf("%d", c), func(t *testing.T) {
testGetOrdinalOfCursor(t, c)
})
}
})
t.Run("retreat past beginning", func(t *testing.T) {
ctx := context.Background()
root, _, ns := randomTree(t, 10_000)
@@ -78,6 +88,62 @@ func testNewCursorAtItem(t *testing.T, count int) {
validateTreeItems(t, ns, root, items)
}
// testGetOrdinalOfCursor builds a tree of |count| ascending uint tuples and
// verifies GetOrdinalOfCursor for every key, for a key beyond the last, and
// for a cursor positioned past the end of the tree.
func testGetOrdinalOfCursor(t *testing.T, count int) {
	tuples, d := AscendingUintTuples(count)

	// binary search returning the smallest index where nd key >= item
	search := func(item Item, nd Node) (idx int) {
		return sort.Search(int(nd.count), func(i int) bool {
			l, r := val.Tuple(item), val.Tuple(nd.GetKey(i))
			return d.Compare(l, r) <= 0
		})
	}

	// chunk the tuples into a prolly tree
	ctx := context.Background()
	ns := NewTestNodeStore()
	serializer := message.NewProllyMapSerializer(d, ns.Pool())
	chkr, err := newEmptyChunker(ctx, ns, serializer)
	require.NoError(t, err)
	for _, item := range tuples {
		err = chkr.AddPair(ctx, Item(item[0]), Item(item[1]))
		assert.NoError(t, err)
	}
	nd, err := chkr.Done(ctx)
	assert.NoError(t, err)

	// every stored key's cursor must report its insertion index as ordinal
	for i := 0; i < len(tuples); i++ {
		curr, err := NewCursorAtItem(ctx, ns, nd, Item(tuples[i][0]), search)
		require.NoError(t, err)
		ord, err := GetOrdinalOfCursor(curr)
		require.NoError(t, err)
		assert.Equal(t, uint64(i), ord)
	}

	// a key greater than every stored key yields ordinal == len(tuples)
	b := val.NewTupleBuilder(d)
	b.PutUint32(0, uint32(len(tuples)))
	aboveItem := b.Build(sharedPool)
	curr, err := NewCursorAtItem(ctx, ns, nd, Item(aboveItem), search)
	require.NoError(t, err)
	ord, err := GetOrdinalOfCursor(curr)
	require.NoError(t, err)
	require.Equal(t, uint64(len(tuples)), ord)

	// A cursor past the end should return an ordinal equal to the number of
	// stored tuples.
	curr, err = NewCursorPastEnd(ctx, ns, nd)
	require.NoError(t, err)
	ord, err = GetOrdinalOfCursor(curr)
	require.NoError(t, err)
	require.Equal(t, uint64(len(tuples)), ord)
}
func randomTree(t *testing.T, count int) (Node, [][2]Item, NodeStore) {
ctx := context.Background()
ns := NewTestNodeStore()
+2
View File
@@ -29,9 +29,11 @@ import (
// testMap is a utility type that allows us to create a common test
// harness for Map, memoryMap, and MutableMap.
type testMap interface {
	// Has reports whether |key| is present.
	Has(ctx context.Context, key val.Tuple) (bool, error)
	// Get invokes |cb| with the pair stored for |key|; absent keys are
	// reported via the callback (empty tuples — see testGet usage).
	Get(ctx context.Context, key val.Tuple, cb KeyValueFn[val.Tuple, val.Tuple]) (err error)
	// IterAll returns an iterator over every key-value pair.
	IterAll(ctx context.Context) (MapIter, error)
	// IterRange returns an iterator over the pairs within |rng|.
	IterRange(ctx context.Context, rng Range) (MapIter, error)
	// Descriptors returns the key and value tuple descriptors.
	Descriptors() (val.TupleDesc, val.TupleDesc)
}
var _ testMap = Map{}
@@ -38,6 +38,32 @@ teardown() {
[[ "$output" =~ "1,ACADEMY DINOSAUR" ]] || false
}
# Running `dolt gc` before `dolt migrate` exercises the migration path for a
# database with an oldgen manifest. Tag hashes are compared before and after
# to confirm the storage-format rewrite took effect.
@test "migration-integration: first-hour-db after garbage collection" {
    dolt clone dolthub/first-hour-db
    cd first-hour-db
    dolt gc

    # first invocation prints tags for debugging; the checked run follows
    dolt tag -v
    run dolt tag -v
    [ "$status" -eq 0 ]
    # pre-migration: only the old-format tag hash is present
    [[ "$output" =~ "r9jv07tf9un3fm1fg72v7ad9er89oeo7" ]] || false
    [[ ! "$output" =~ "eu5pgaa5kgsapqts6sar19isnegmqpqn" ]] || false

    dolt migrate
    # the manifest's second field records the storage format version
    [[ $(cat ./.dolt/noms/manifest | cut -f 2 -d :) = "$TARGET_NBF" ]] || false

    dolt tag -v
    run dolt tag -v
    [ "$status" -eq 0 ]
    # post-migration: the tag hash has been rewritten to the new format
    [[ "$output" =~ "eu5pgaa5kgsapqts6sar19isnegmqpqn" ]] || false
    [[ ! "$output" =~ "r9jv07tf9un3fm1fg72v7ad9er89oeo7" ]] || false

    # validate TEXT migration
    run dolt sql -q "select film_id, title from film order by film_id limit 1" -r csv
    [ "$status" -eq 0 ]
    [[ "$output" =~ "1,ACADEMY DINOSAUR" ]] || false
}
@test "migration-integration: us-jails" {
dolt clone dolthub/us-jails
cd us-jails
+6
View File
@@ -116,6 +116,12 @@ teardown() {
chmod 755 .
}
# `dolt version` must succeed even when a .dolt directory exists but holds no
# repository data (regression test for the RSLoadErr check in VersionCmd.Exec).
@test "no-repo: dolt version does not fail on empty .dolt dir" {
    mkdir .dolt
    run dolt version
    [ "$status" -eq 0 ]
}
# Tests for dolt commands outside of a dolt repository
NOT_VALID_REPO_ERROR="The current directory is not a valid dolt repository."
@test "no-repo: dolt status outside of a dolt repository" {