Merge branch 'aaron/sql-cluster-transitions-kill-connections' into aaron/sql-cluster-standby-is-read-only

This commit is contained in:
Aaron Son
2022-09-30 13:49:47 -07:00
70 changed files with 728 additions and 957 deletions
+2 -2
View File
@@ -22,10 +22,10 @@ jobs:
fail-fast: true
matrix:
os: [ ubuntu-22.04, macos-latest ]
dolt_fmt: [ "", "__DOLT_DEV__", "__DOLT__" ]
dolt_fmt: [ "__DOLT__", "__DOLT_DEV__", "__LD_1__" ]
exclude:
- os: "macos-latest"
dolt_fmt: ["__DOLT_DEV__", "__DOLT__" ]
dolt_fmt: ["__DOLT_DEV__", "__LD_1__" ]
env:
use_credentials: ${{ secrets.AWS_SECRET_ACCESS_KEY != '' && secrets.AWS_ACCESS_KEY_ID != '' }}
steps:
@@ -1,38 +0,0 @@
name: __DOLT__ Enginetests
on:
pull_request:
branches: [ main ]
paths:
- 'go/**'
workflow_dispatch:
concurrency:
group: ci-dolt1-format-go-tests-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
test:
name: Go tests (new format)
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-latest, ubuntu-22.04, windows-latest]
steps:
- name: Set up Go 1.x
uses: actions/setup-go@v2
with:
go-version: ^1.19
id: go
- uses: actions/checkout@v2
- name: Test All with New Format
working-directory: ./go
run: |
go test -timeout 30m "./libraries/doltcore/sqle/enginetest/..."
env:
MATRIX_OS: ${{ matrix.os }}
DOLT_DEFAULT_BIN_FORMAT: "__DOLT__"
@@ -18,7 +18,7 @@ jobs:
fail-fast: false
matrix:
os: [ ubuntu-22.04 ]
dolt_fmt: [ "", "__DOLT_DEV__" ]
dolt_fmt: [ "__DOLT__", "__LD_1__", "__DOLT_DEV__" ]
steps:
- name: Set up Go 1.x
uses: actions/setup-go@v2
+2 -2
View File
@@ -22,7 +22,7 @@ jobs:
fail-fast: false
matrix:
os: [macos-latest, ubuntu-22.04, windows-latest]
dolt_fmt: [ "" ]
dolt_fmt: [ "__DOLT__", "__LD_1__" ]
include:
- os: "ubuntu-22.04"
dolt_fmt: "__DOLT_DEV__"
@@ -74,7 +74,7 @@ jobs:
fail-fast: false
matrix:
os: [macos-latest, ubuntu-22.04, windows-latest]
dolt_fmt: [ "" ]
dolt_fmt: [ "__DOLT__", "__LD_1__" ]
include:
- os: "ubuntu-22.04"
dolt_fmt: "__DOLT_DEV__"
-1
View File
@@ -100,7 +100,6 @@ const (
BranchParam = "branch"
TrackFlag = "track"
AmendFlag = "amend"
NewFormatFlag = "new-format"
CommitFlag = "commit"
NoCommitFlag = "no-commit"
NoEditFlag = "no-edit"
+2 -2
View File
@@ -19,8 +19,6 @@ import (
"encoding/json"
"strings"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/store/datas/pull"
"github.com/dolthub/dolt/go/store/types"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
@@ -28,7 +26,9 @@ import (
eventsapi "github.com/dolthub/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/store/datas/pull"
)
var backupDocs = cli.CommandDocumentationContent{
+2 -1
View File
@@ -19,6 +19,8 @@ import (
"path"
"strings"
"github.com/dolthub/dolt/go/store/types"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/cmd/dolt/errhand"
eventsapi "github.com/dolthub/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
@@ -29,7 +31,6 @@ import (
"github.com/dolthub/dolt/go/libraries/events"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/earl"
"github.com/dolthub/dolt/go/store/types"
)
var cloneDocs = cli.CommandDocumentationContent{
+12 -3
View File
@@ -32,6 +32,8 @@ const (
emailParamName = "email"
usernameParamName = "name"
initBranchParamName = "initial-branch"
newFormatFlag = "new-format"
oldFormatFlag = "old-format"
)
var initDocs = cli.CommandDocumentationContent{
@@ -75,7 +77,8 @@ func (cmd InitCmd) ArgParser() *argparser.ArgParser {
ap.SupportsString(emailParamName, "", "email", fmt.Sprintf("The email address used. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config.", env.UserEmailKey))
ap.SupportsString(cli.DateParam, "", "date", "Specify the date used in the initial commit. If not specified the current system time is used.")
ap.SupportsString(initBranchParamName, "b", "branch", fmt.Sprintf("The branch name used to initialize this database. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config. If unset, the default initialized branch will be named '%s'.", env.InitBranchName, env.DefaultInitBranch))
ap.SupportsFlag(cli.NewFormatFlag, "", fmt.Sprintf("Specify this flag to use the new storage format (%s).", types.Format_DOLT.VersionString()))
ap.SupportsFlag(newFormatFlag, "", fmt.Sprintf("Specify this flag to use the new storage format (%s).", types.Format_DOLT.VersionString()))
ap.SupportsFlag(oldFormatFlag, "", fmt.Sprintf("Specify this flag to use the old storage format (%s).", types.Format_LD_1.VersionString()))
return ap
}
@@ -89,9 +92,15 @@ func (cmd InitCmd) Exec(ctx context.Context, commandStr string, args []string, d
cli.PrintErrln(color.RedString("This directory has already been initialized."))
return 1
}
if apr.Contains(cli.NewFormatFlag) {
if apr.Contains(newFormatFlag) && apr.Contains(oldFormatFlag) {
e := fmt.Sprintf("options %s and %s are mutually exclusive", newFormatFlag, oldFormatFlag)
cli.PrintErrln(color.RedString(e))
return 1
}
if apr.Contains(newFormatFlag) {
types.Format_Default = types.Format_DOLT
} else if apr.Contains(oldFormatFlag) {
types.Format_Default = types.Format_LD_1
}
name, _ := apr.GetValue(usernameParamName)
+23 -41
View File
@@ -16,6 +16,7 @@ package commands
import (
"context"
"fmt"
"testing"
"github.com/dolthub/go-mysql-server/sql"
@@ -25,11 +26,8 @@ import (
"github.com/dolthub/dolt/go/cmd/dolt/commands/engine"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/libraries/doltcore/table"
"github.com/dolthub/dolt/go/store/types"
)
//var UUIDS = []uuid.UUID{
@@ -46,7 +44,7 @@ var tableName = "people"
// Smoke test: Console opens and exits
func TestSqlConsole(t *testing.T) {
t.Run("SQL console opens and exits", func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{}
commandStr := "dolt sql"
@@ -71,7 +69,7 @@ func TestSqlBatchMode(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-b", "-q", test.query}
@@ -108,7 +106,7 @@ func TestSqlSelect(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-q", test.query}
@@ -132,7 +130,7 @@ func TestSqlShow(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-q", test.query}
@@ -203,7 +201,7 @@ func TestShowTables(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-q", test.query}
commandStr := "dolt sql"
@@ -232,7 +230,7 @@ func TestAlterTable(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-q", test.query}
commandStr := "dolt sql"
@@ -257,7 +255,7 @@ func TestDropTable(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-q", test.query}
commandStr := "dolt sql"
@@ -352,7 +350,7 @@ func TestInsert(t *testing.T) {
},
{
name: "missing required column",
query: `insert into people (id, name, age) values
query: `insert into people (id, title, age) values
('00000000-0000-0000-0000-000000000005', 'Frank Frankerson', 10)`,
expectedRes: 1,
},
@@ -373,7 +371,7 @@ func TestInsert(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
ctx := context.Background()
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-q", test.query}
@@ -387,15 +385,10 @@ func TestInsert(t *testing.T) {
// Assert that all expected IDs exist after the insert
for _, expectedid := range test.expectedIds {
tbl, _, err := root.GetTable(ctx, tableName)
q := fmt.Sprintf("SELECT * FROM %s WHERE id = '%s'", tableName, expectedid.String())
rows, err := sqle.ExecuteSelect(t, dEnv, root, q)
assert.NoError(t, err)
taggedVals := row.TaggedValues{dtestutils.IdTag: types.String(expectedid.String())}
key := taggedVals.NomsTupleForPKCols(types.Format_Default, dtestutils.TypedSchema.GetPKCols())
kv, err := key.Value(ctx)
assert.NoError(t, err)
_, ok, err := table.GetRow(ctx, tbl, dtestutils.TypedSchema, kv.(types.Tuple))
assert.NoError(t, err)
assert.True(t, ok, "expected id not found")
assert.True(t, len(rows) > 0)
}
}
})
@@ -457,7 +450,7 @@ func TestUpdate(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
ctx := context.Background()
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
args := []string{"-q", test.query}
@@ -471,17 +464,11 @@ func TestUpdate(t *testing.T) {
// Assert that all rows have been updated
for i, expectedid := range test.expectedIds {
tbl, _, err := root.GetTable(ctx, tableName)
q := fmt.Sprintf("SELECT * FROM %s WHERE id = '%s'", tableName, expectedid.String())
rows, err := sqle.ExecuteSelect(t, dEnv, root, q)
assert.NoError(t, err)
taggedVals := row.TaggedValues{dtestutils.IdTag: types.String(expectedid.String())}
key := taggedVals.NomsTupleForPKCols(types.Format_Default, dtestutils.TypedSchema.GetPKCols())
kv, err := key.Value(ctx)
assert.NoError(t, err)
row, ok, err := table.GetRow(ctx, tbl, dtestutils.TypedSchema, kv.(types.Tuple))
assert.NoError(t, err)
assert.True(t, ok, "expected id not found")
ageVal, _ := row.GetColVal(dtestutils.AgeTag)
assert.Equal(t, test.expectedAges[i], uint(ageVal.(types.Uint)))
assert.True(t, len(rows) > 0)
assert.Equal(t, uint32(test.expectedAges[i]), rows[0][2])
}
}
})
@@ -535,7 +522,7 @@ func TestDelete(t *testing.T) {
for _, test := range tests {
t.Run(test.query, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
ctx := context.Background()
args := []string{"-q", test.query}
@@ -550,15 +537,10 @@ func TestDelete(t *testing.T) {
// Assert that all rows have been deleted
for _, expectedid := range test.deletedIds {
tbl, _, err := root.GetTable(ctx, tableName)
q := fmt.Sprintf("SELECT * FROM %s WHERE id = '%s'", tableName, expectedid.String())
rows, err := sqle.ExecuteSelect(t, dEnv, root, q)
assert.NoError(t, err)
taggedVals := row.TaggedValues{dtestutils.IdTag: types.UUID(expectedid)}
key := taggedVals.NomsTupleForPKCols(types.Format_Default, dtestutils.TypedSchema.GetPKCols())
kv, err := key.Value(ctx)
assert.NoError(t, err)
_, ok, err := table.GetRow(ctx, tbl, dtestutils.TypedSchema, kv.(types.Tuple))
assert.NoError(t, err)
assert.False(t, ok, "row not deleted")
assert.True(t, len(rows) == 0)
}
}
})
@@ -566,7 +548,7 @@ func TestDelete(t *testing.T) {
}
func TestCommitHooksNoErrors(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
sqle.AddDoltSystemVariables()
sql.SystemVariables.SetGlobal(dsess.SkipReplicationErrors, true)
sql.SystemVariables.SetGlobal(dsess.ReplicateToRemote, "unknown")
+1 -1
View File
@@ -270,7 +270,7 @@ func Serve(
clusterController.ManageQueryConnections(
mySQLServer.SessionManager().Iter,
sqlEngine.GetUnderlyingEngine().ProcessList.Kill,
func(uint32) {}, // TODO: mySQLServer.SessionManager().KillConnection,
mySQLServer.SessionManager().KillConnection,
)
} else {
lgr.Errorf("error creating SQL engine context for remotesapi server: %v", err)
+11 -8
View File
@@ -27,9 +27,9 @@ import (
"github.com/stretchr/testify/require"
"golang.org/x/net/context"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils/testcommands"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/libraries/utils/config"
)
@@ -70,7 +70,7 @@ func TestServerArgs(t *testing.T) {
"-t", "5",
"-l", "info",
"-r",
}, dtestutils.CreateEnvWithSeedData(t), serverController)
}, sqle.CreateEnvWithSeedData(t), serverController)
}()
err := serverController.WaitForStart()
require.NoError(t, err)
@@ -102,7 +102,7 @@ listener:
`
serverController := NewServerController()
go func() {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
dEnv.FS.WriteFile("config.yaml", []byte(yamlConfig))
startServer(context.Background(), "0.0.0", "dolt sql-server", []string{
"--config", "config.yaml",
@@ -120,7 +120,7 @@ listener:
}
func TestServerBadArgs(t *testing.T) {
env := dtestutils.CreateEnvWithSeedData(t)
env := sqle.CreateEnvWithSeedData(t)
tests := [][]string{
{"-H", "127.0.0.0.1"},
@@ -148,7 +148,7 @@ func TestServerBadArgs(t *testing.T) {
}
func TestServerGoodParams(t *testing.T) {
env := dtestutils.CreateEnvWithSeedData(t)
env := sqle.CreateEnvWithSeedData(t)
tests := []ServerConfig{
DefaultServerConfig(),
@@ -186,7 +186,7 @@ func TestServerGoodParams(t *testing.T) {
}
func TestServerSelect(t *testing.T) {
env := dtestutils.CreateEnvWithSeedData(t)
env := sqle.CreateEnvWithSeedData(t)
serverConfig := DefaultServerConfig().withLogLevel(LogLevel_Fatal).WithPort(15300)
sc := NewServerController()
@@ -253,7 +253,7 @@ func TestServerFailsIfPortInUse(t *testing.T) {
"-t", "5",
"-l", "info",
"-r",
}, dtestutils.CreateEnvWithSeedData(t), serverController)
}, sqle.CreateEnvWithSeedData(t), serverController)
}()
err := serverController.WaitForStart()
require.Error(t, err)
@@ -261,7 +261,7 @@ func TestServerFailsIfPortInUse(t *testing.T) {
}
func TestServerSetDefaultBranch(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := sqle.CreateEnvWithSeedData(t)
serverConfig := DefaultServerConfig().withLogLevel(LogLevel_Fatal).WithPort(15302)
sc := NewServerController()
@@ -414,6 +414,9 @@ func TestReadReplica(t *testing.T) {
sc := NewServerController()
serverConfig := DefaultServerConfig().withLogLevel(LogLevel_Fatal).WithPort(15303)
// set socket to nil to force tcp
serverConfig = serverConfig.WithHost("127.0.0.1").WithSocket("")
func() {
os.Chdir(multiSetup.DbPaths[readReplicaDbName])
go func() {
+1 -1
View File
@@ -57,7 +57,7 @@ import (
)
const (
Version = "0.41.7"
Version = "0.50.0"
)
var dumpDocsCommand = &commands.DumpDocsCmd{}
+1 -1
View File
@@ -57,7 +57,7 @@ require (
require (
github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible
github.com/cenkalti/backoff/v4 v4.1.3
github.com/dolthub/go-mysql-server v0.12.1-0.20220929062247-323a847921de
github.com/dolthub/go-mysql-server v0.12.1-0.20220929211840-02a9c38c169f
github.com/google/flatbuffers v2.0.6+incompatible
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6
github.com/mitchellh/go-ps v1.0.0
+2 -2
View File
@@ -178,8 +178,8 @@ github.com/dolthub/flatbuffers v1.13.0-dh.1 h1:OWJdaPep22N52O/0xsUevxJ6Qfw1M2txC
github.com/dolthub/flatbuffers v1.13.0-dh.1/go.mod h1:CorYGaDmXjHz1Z7i50PYXG1Ricn31GcA2wNOTFIQAKE=
github.com/dolthub/fslock v0.0.3 h1:iLMpUIvJKMKm92+N1fmHVdxJP5NdyDK5bK7z7Ba2s2U=
github.com/dolthub/fslock v0.0.3/go.mod h1:QWql+P17oAAMLnL4HGB5tiovtDuAjdDTPbuqx7bYfa0=
github.com/dolthub/go-mysql-server v0.12.1-0.20220929062247-323a847921de h1:YkKR9AOt/Mta3suApA5bEwwTF/GbdYLva3zVbP0lxi0=
github.com/dolthub/go-mysql-server v0.12.1-0.20220929062247-323a847921de/go.mod h1:Ndof+jmKE/AISRWgeyx+RUvNlAtMOPSUzTM/iCOfx70=
github.com/dolthub/go-mysql-server v0.12.1-0.20220929211840-02a9c38c169f h1:+7jgFuHeF3Vj2eG7thWUprQNFz18Dg6f1Y/yow35KVk=
github.com/dolthub/go-mysql-server v0.12.1-0.20220929211840-02a9c38c169f/go.mod h1:Ndof+jmKE/AISRWgeyx+RUvNlAtMOPSUzTM/iCOfx70=
github.com/dolthub/ishell v0.0.0-20220112232610-14e753f0f371 h1:oyPHJlzumKta1vnOQqUnfdz+pk3EmnHS3Nd0cCT0I2g=
github.com/dolthub/ishell v0.0.0-20220112232610-14e753f0f371/go.mod h1:dhGBqcCEfK5kuFmeO5+WOx3hqc1k3M29c1oS/R7N4ms=
github.com/dolthub/jsonpath v0.0.0-20210609232853-d49537a30474 h1:xTrR+l5l+1Lfq0NvhiEsctylXinUMFhhsqaEcl414p8=
+1 -1
View File
@@ -37,7 +37,7 @@ func (fact MemFactory) PrepareDB(ctx context.Context, nbf *types.NomsBinFormat,
func (fact MemFactory) CreateDB(ctx context.Context, nbf *types.NomsBinFormat, urlObj *url.URL, params map[string]interface{}) (datas.Database, types.ValueReadWriter, tree.NodeStore, error) {
var db datas.Database
storage := &chunks.MemoryStorage{}
cs := storage.NewViewWithDefaultFormat()
cs := storage.NewViewWithFormat(nbf.VersionString())
vrw := types.NewValueStore(cs)
ns := tree.NewNodeStore(cs)
db = datas.NewTypesDatabase(vrw, ns)
@@ -27,6 +27,7 @@ import (
"go.uber.org/zap/buffer"
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb/durable"
"github.com/dolthub/dolt/go/libraries/doltcore/ref"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/libraries/utils/test"
@@ -105,7 +106,7 @@ func TestPushOnWriteHook(t *testing.T) {
}
tSchema := createTestSchema(t)
rowData, _ := createTestRowData(t, ddb.vrw, tSchema)
rowData := createTestRowData(t, ddb.vrw, ddb.ns, tSchema)
tbl, err := CreateTestTable(ddb.vrw, ddb.ns, tSchema, rowData)
if err != nil {
@@ -243,8 +244,10 @@ func TestAsyncPushOnWrite(t *testing.T) {
assert.NoError(t, err)
tSchema := createTestSchema(t)
rowData, _ := createTestRowData(t, ddb.vrw, tSchema)
rowData, err := durable.NewEmptyIndex(ctx, ddb.vrw, ddb.ns, tSchema)
require.NoError(t, err)
tbl, err := CreateTestTable(ddb.vrw, ddb.ns, tSchema, rowData)
require.NoError(t, err)
if err != nil {
t.Fatal("Failed to create test table with data")
+19 -18
View File
@@ -25,6 +25,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb/durable"
"github.com/dolthub/dolt/go/libraries/doltcore/ref"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
@@ -70,8 +71,8 @@ func createTestSchema(t *testing.T) schema.Schema {
return sch
}
func CreateTestTable(vrw types.ValueReadWriter, ns tree.NodeStore, tSchema schema.Schema, rowData types.Map) (*Table, error) {
tbl, err := NewNomsTable(context.Background(), vrw, ns, tSchema, rowData, nil, nil)
func CreateTestTable(vrw types.ValueReadWriter, ns tree.NodeStore, tSchema schema.Schema, rowData durable.Index) (*Table, error) {
tbl, err := NewTable(context.Background(), vrw, ns, tSchema, rowData, nil, nil)
if err != nil {
return nil, err
@@ -80,20 +81,20 @@ func CreateTestTable(vrw types.ValueReadWriter, ns tree.NodeStore, tSchema schem
return tbl, nil
}
func createTestRowData(t *testing.T, vrw types.ValueReadWriter, sch schema.Schema) (types.Map, []row.Row) {
return createTestRowDataFromTaggedValues(t, vrw, sch,
row.TaggedValues{
idTag: types.UUID(id0), firstTag: types.String("bill"), lastTag: types.String("billerson"), ageTag: types.Uint(53)},
row.TaggedValues{
idTag: types.UUID(id1), firstTag: types.String("eric"), lastTag: types.String("ericson"), isMarriedTag: types.Bool(true), ageTag: types.Uint(21)},
row.TaggedValues{
idTag: types.UUID(id2), firstTag: types.String("john"), lastTag: types.String("johnson"), isMarriedTag: types.Bool(false), ageTag: types.Uint(53)},
row.TaggedValues{
idTag: types.UUID(id3), firstTag: types.String("robert"), lastTag: types.String("robertson"), ageTag: types.Uint(36)},
)
}
func createTestRowData(t *testing.T, vrw types.ValueReadWriter, ns tree.NodeStore, sch schema.Schema) durable.Index {
if types.Format_Default == types.Format_DOLT {
idx, err := durable.NewEmptyIndex(context.Background(), vrw, ns, sch)
require.NoError(t, err)
return idx
}
vals := []row.TaggedValues{
{idTag: types.UUID(id0), firstTag: types.String("bill"), lastTag: types.String("billerson"), ageTag: types.Uint(53)},
{idTag: types.UUID(id1), firstTag: types.String("eric"), lastTag: types.String("ericson"), isMarriedTag: types.Bool(true), ageTag: types.Uint(21)},
{idTag: types.UUID(id2), firstTag: types.String("john"), lastTag: types.String("johnson"), isMarriedTag: types.Bool(false), ageTag: types.Uint(53)},
{idTag: types.UUID(id3), firstTag: types.String("robert"), lastTag: types.String("robertson"), ageTag: types.Uint(36)},
}
func createTestRowDataFromTaggedValues(t *testing.T, vrw types.ValueReadWriter, sch schema.Schema, vals ...row.TaggedValues) (types.Map, []row.Row) {
var err error
rows := make([]row.Row, len(vals))
@@ -110,8 +111,7 @@ func createTestRowDataFromTaggedValues(t *testing.T, vrw types.ValueReadWriter,
m, err = ed.Map(context.Background())
assert.NoError(t, err)
return m, rows
return durable.IndexFromNomsMap(m, vrw, ns)
}
func TestIsValidTableName(t *testing.T) {
@@ -290,8 +290,9 @@ func TestLDNoms(t *testing.T) {
t.Fatal("There should be no tables in empty db")
}
ctx := context.Background()
tSchema := createTestSchema(t)
rowData, _ := createTestRowData(t, ddb.vrw, tSchema)
rowData, err := durable.NewEmptyIndex(ctx, ddb.vrw, ddb.ns, tSchema)
tbl, err = CreateTestTable(ddb.vrw, ddb.ns, tSchema, rowData)
if err != nil {
+1 -1
View File
@@ -138,7 +138,7 @@ func testGarbageCollection(t *testing.T, test gcTest) {
working, err = dEnv.WorkingRoot(ctx)
require.NoError(t, err)
// assert all out rows are present after gc
actual, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, working, test.query)
actual, err := sqle.ExecuteSelect(t, dEnv, working, test.query)
require.NoError(t, err)
assert.Equal(t, test.expected, actual)
}
+6 -1
View File
@@ -78,6 +78,7 @@ type Table struct {
}
// NewNomsTable creates a noms Struct which stores row data, index data, and schema.
// Deprecated: use NewTable instead.
func NewNomsTable(ctx context.Context, vrw types.ValueReadWriter, ns tree.NodeStore, sch schema.Schema, rows types.Map, indexes durable.IndexSet, autoIncVal types.Value) (*Table, error) {
dt, err := durable.NewNomsTable(ctx, vrw, ns, sch, rows, indexes, autoIncVal)
if err != nil {
@@ -401,7 +402,7 @@ func (t *Table) SetConstraintViolations(ctx context.Context, violationsMap types
return &Table{table: table}, nil
}
// GetSchema will retrieve the schema being referenced from the table in noms and unmarshal it.
// GetSchema returns the schema.Schema for this Table.
func (t *Table) GetSchema(ctx context.Context) (schema.Schema, error) {
return t.table.GetSchema(ctx)
}
@@ -431,6 +432,7 @@ func (t *Table) HashOf() (hash.Hash, error) {
// UpdateNomsRows replaces the current row data and returns and updated Table.
// Calls to UpdateNomsRows will not be written to the database. The root must
// be updated with the updated table, and the root must be committed or written.
// Deprecated: use Table.UpdateRows() instead.
func (t *Table) UpdateNomsRows(ctx context.Context, updatedRows types.Map) (*Table, error) {
table, err := t.table.SetTableRows(ctx, durable.IndexFromNomsMap(updatedRows, t.ValueReadWriter(), t.NodeStore()))
if err != nil {
@@ -451,6 +453,7 @@ func (t *Table) UpdateRows(ctx context.Context, updatedRows durable.Index) (*Tab
}
// GetNomsRowData retrieves the underlying map which is a map from a primary key to a list of field values.
// Deprecated: use Table.GetRowData() instead.
func (t *Table) GetNomsRowData(ctx context.Context) (types.Map, error) {
idx, err := t.table.GetTableRows(ctx)
if err != nil {
@@ -531,6 +534,7 @@ func (t *Table) SetIndexSet(ctx context.Context, indexes durable.IndexSet) (*Tab
}
// GetNomsIndexRowData retrieves the underlying map of an index, in which the primary key consists of all indexed columns.
// Deprecated: use Table.GetIndexRowData() instead.
func (t *Table) GetNomsIndexRowData(ctx context.Context, indexName string) (types.Map, error) {
sch, err := t.GetSchema(ctx)
if err != nil {
@@ -581,6 +585,7 @@ func (t *Table) SetIndexRows(ctx context.Context, indexName string, idx durable.
}
// SetNomsIndexRows replaces the current row data for the given index and returns an updated Table.
// Deprecated: use Table.SetIndexRows() instead.
func (t *Table) SetNomsIndexRows(ctx context.Context, indexName string, idx types.Map) (*Table, error) {
indexes, err := t.GetIndexSet(ctx)
if err != nil {
@@ -16,13 +16,9 @@ package dtestutils
import (
"context"
"testing"
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/table/editor"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/store/types"
)
@@ -64,43 +60,3 @@ func CreateTestEnvWithName(envName string) *env.DoltEnv {
return dEnv
}
func CreateEnvWithSeedData(t *testing.T) *env.DoltEnv {
dEnv := CreateTestEnv()
imt, sch := CreateTestDataTable(true)
ctx := context.Background()
vrw := dEnv.DoltDB.ValueReadWriter()
ns := dEnv.DoltDB.NodeStore()
rowMap, err := types.NewMap(ctx, vrw)
require.NoError(t, err)
me := rowMap.Edit()
for i := 0; i < imt.NumRows(); i++ {
r, err := imt.GetRow(i)
require.NoError(t, err)
k, v := r.NomsMapKey(sch), r.NomsMapValue(sch)
me.Set(k, v)
}
rowMap, err = me.Map(ctx)
require.NoError(t, err)
ai := sch.Indexes().AllIndexes()
sch.Indexes().Merge(ai...)
tbl, err := doltdb.NewNomsTable(ctx, vrw, ns, sch, rowMap, nil, nil)
require.NoError(t, err)
tbl, err = editor.RebuildAllIndexes(ctx, tbl, editor.TestEditorOptions(vrw))
require.NoError(t, err)
sch, err = tbl.GetSchema(ctx)
require.NoError(t, err)
rows, err := tbl.GetRowData(ctx)
require.NoError(t, err)
indexes, err := tbl.GetIndexSet(ctx)
require.NoError(t, err)
err = putTableToWorking(ctx, dEnv, sch, rows, indexes, TableName, nil)
require.NoError(t, err)
return dEnv
}
@@ -98,6 +98,28 @@ var TimestampComparer = cmp.Comparer(func(x, y types.Timestamp) bool {
return x.Equals(y)
})
// CreateEmptyTestTable creates a new test table with the name, schema, and rows given.
func CreateEmptyTestTable(t *testing.T, dEnv *env.DoltEnv, tableName string, sch schema.Schema) {
ctx := context.Background()
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
vrw := dEnv.DoltDB.ValueReadWriter()
ns := dEnv.DoltDB.NodeStore()
rows, err := durable.NewEmptyIndex(ctx, vrw, ns, sch)
require.NoError(t, err)
indexSet, err := durable.NewIndexSetWithEmptyIndexes(ctx, vrw, ns, sch)
require.NoError(t, err)
tbl, err := doltdb.NewTable(ctx, vrw, ns, sch, rows, indexSet, nil)
require.NoError(t, err)
newRoot, err := root.PutTable(ctx, tableName, tbl)
require.NoError(t, err)
err = dEnv.UpdateWorkingRoot(ctx, newRoot)
require.NoError(t, err)
}
// CreateTestTable creates a new test table with the name, schema, and rows given.
func CreateTestTable(t *testing.T, dEnv *env.DoltEnv, tableName string, sch schema.Schema, rs ...row.Row) {
imt := table.NewInMemTable(sch)
@@ -28,7 +28,6 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/table/editor"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/store/datas"
"github.com/dolthub/dolt/go/store/types"
@@ -283,7 +282,9 @@ func (mr *MultiRepoTestSetup) CreateTable(dbName, tblName string) {
}
rows[i] = r
}
createTestTable(dEnv, tblName, sch, mr.Errhand, rows...)
if err := createTestTable(dEnv, tblName, sch); err != nil {
mr.Errhand(err)
}
}
func (mr *MultiRepoTestSetup) StageAll(dbName string) {
@@ -335,66 +336,31 @@ func (mr *MultiRepoTestSetup) PushToRemote(dbName, remoteName, branchName string
}
// createTestTable creates a new test table with the name, schema, and rows given.
func createTestTable(dEnv *env.DoltEnv, tableName string, sch schema.Schema, errhand func(args ...interface{}), rs ...row.Row) {
func createTestTable(dEnv *env.DoltEnv, tableName string, sch schema.Schema) error {
ctx := context.Background()
vrw := dEnv.DoltDB.ValueReadWriter()
ns := dEnv.DoltDB.NodeStore()
rowMap, err := types.NewMap(ctx, vrw)
idx, err := durable.NewEmptyIndex(ctx, vrw, ns, sch)
if err != nil {
errhand(err)
return err
}
me := rowMap.Edit()
for _, r := range rs {
k, v := r.NomsMapKey(sch), r.NomsMapValue(sch)
me.Set(k, v)
}
rowMap, err = me.Map(ctx)
tbl, err := doltdb.NewTable(ctx, vrw, ns, sch, idx, nil, nil)
if err != nil {
errhand(err)
}
tbl, err := doltdb.NewNomsTable(ctx, vrw, ns, sch, rowMap, nil, nil)
if err != nil {
errhand(err)
}
tbl, err = editor.RebuildAllIndexes(ctx, tbl, editor.TestEditorOptions(vrw))
if err != nil {
errhand(err)
return err
}
sch, err = tbl.GetSchema(ctx)
if err != nil {
errhand(err)
return err
}
rows, err := tbl.GetNomsRowData(ctx)
if err != nil {
errhand(err)
}
indexes, err := tbl.GetIndexSet(ctx)
if err != nil {
errhand(err)
}
err = putTableToWorking(ctx, dEnv, sch, rows, indexes, tableName, nil)
if err != nil {
errhand(err)
}
}
func putTableToWorking(ctx context.Context, dEnv *env.DoltEnv, sch schema.Schema, rows types.Map, indexData durable.IndexSet, tableName string, autoVal types.Value) error {
root, err := dEnv.WorkingRoot(ctx)
if err != nil {
return fmt.Errorf("%w: %v", doltdb.ErrNomsIO, err)
}
vrw := dEnv.DoltDB.ValueReadWriter()
ns := dEnv.DoltDB.NodeStore()
tbl, err := doltdb.NewNomsTable(ctx, vrw, ns, sch, rows, indexData, autoVal)
if err != nil {
return err
}
newRoot, err := root.PutTable(ctx, tableName, tbl)
if err != nil {
return err
@@ -412,6 +378,5 @@ func putTableToWorking(ctx context.Context, dEnv *env.DoltEnv, sch schema.Schema
if rootHash == newRootHash {
return nil
}
return dEnv.UpdateWorkingRoot(ctx, newRoot)
}
@@ -132,7 +132,7 @@ func TestMerge(t *testing.T) {
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
actRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, test.query)
actRows, err := sqle.ExecuteSelect(t, dEnv, root, test.query)
require.NoError(t, err)
require.Equal(t, len(test.expected), len(actRows))
@@ -251,7 +251,7 @@ func TestMergeConflicts(t *testing.T) {
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
actRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, test.query)
actRows, err := sqle.ExecuteSelect(t, dEnv, root, test.query)
require.NoError(t, err)
require.Equal(t, len(test.expected), len(actRows))
@@ -116,12 +116,10 @@ func TestMigration(t *testing.T) {
ctx := context.Background()
preEnv := setupMigrationTest(t, ctx, test)
postEnv := runMigration(t, ctx, preEnv)
ddb := postEnv.DoltDB
root, err := postEnv.WorkingRoot(ctx)
require.NoError(t, err)
for _, a := range test.asserts {
actual, err := sqle.ExecuteSelect(t, postEnv, ddb, root, a.query)
actual, err := sqle.ExecuteSelect(t, postEnv, root, a.query)
assert.NoError(t, err)
assert.Equal(t, a.expected, actual)
}
@@ -227,7 +227,7 @@ func testFilterBranch(t *testing.T, test filterBranchTest) {
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
actRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, a.query)
actRows, err := sqle.ExecuteSelect(t, dEnv, root, a.query)
require.NoError(t, err)
require.Equal(t, a.rows, actRows)
+104 -185
View File
@@ -28,7 +28,9 @@ import (
"gopkg.in/src-d/go-errors.v1"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb/durable"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/schema/typeinfo"
@@ -37,83 +39,81 @@ import (
)
func TestRenameTable(t *testing.T) {
otherTable := "other"
cc := schema.NewColCollection(
schema.NewColumn("id", uint64(100), types.StringKind, true, schema.NotNullConstraint{}),
)
otherSch, err := schema.SchemaFromCols(cc)
require.NoError(t, err)
setup := `
CREATE TABLE people (
id varchar(36) primary key,
name varchar(40) not null,
age int unsigned,
is_married int,
title varchar(40),
INDEX idx_name (name)
);
INSERT INTO people VALUES
('00000000-0000-0000-0000-000000000000', 'Bill Billerson', 32, 1, 'Senior Dufus'),
('00000000-0000-0000-0000-000000000001', 'John Johnson', 25, 0, 'Dufus'),
('00000000-0000-0000-0000-000000000002', 'Rob Robertson', 21, 0, '');
CREATE TABLE other (c0 int, c1 int);`
tests := []struct {
name string
tableName string
newTableName string
expectedSchema schema.Schema
expectedRows []row.Row
expectedErr string
description string
oldName string
newName string
expectedErr string
}{
{
name: "rename table",
tableName: "people",
newTableName: "newPeople",
expectedSchema: dtestutils.TypedSchema,
expectedRows: dtestutils.TypedRows,
description: "rename table",
oldName: "people",
newName: "newPeople",
},
{
name: "table not found",
tableName: "notFound",
newTableName: "newNotfound",
expectedErr: doltdb.ErrTableNotFound.Error(),
description: "table not found",
oldName: "notFound",
newName: "newNotfound",
expectedErr: doltdb.ErrTableNotFound.Error(),
},
{
name: "name already in use",
tableName: "people",
newTableName: otherTable,
expectedErr: doltdb.ErrTableExists.Error(),
description: "name already in use",
oldName: "people",
newName: "other",
expectedErr: doltdb.ErrTableExists.Error(),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
t.Run(tt.description, func(t *testing.T) {
ctx := context.Background()
dtestutils.CreateTestTable(t, dEnv, otherTable, otherSch)
dEnv := dtestutils.CreateTestEnv()
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
updatedRoot, err := renameTable(ctx, root, tt.tableName, tt.newTableName)
// setup tests
root, err = ExecuteSql(t, dEnv, root, setup)
require.NoError(t, err)
schemas, err := root.GetAllSchemas(ctx)
require.NoError(t, err)
beforeSch := schemas[tt.oldName]
updatedRoot, err := renameTable(ctx, root, tt.oldName, tt.newName)
if len(tt.expectedErr) > 0 {
require.Error(t, err)
assert.Error(t, err)
assert.Contains(t, err.Error(), tt.expectedErr)
return
} else {
require.NoError(t, err)
}
assert.NoError(t, err)
err = dEnv.UpdateWorkingRoot(ctx, root)
require.NoError(t, err)
has, err := updatedRoot.HasTable(ctx, tt.tableName)
has, err := updatedRoot.HasTable(ctx, tt.oldName)
require.NoError(t, err)
assert.False(t, has)
newTable, ok, err := updatedRoot.GetTable(ctx, tt.newTableName)
has, err = updatedRoot.HasTable(ctx, tt.newName)
require.NoError(t, err)
require.True(t, ok)
assert.True(t, has)
sch, err := newTable.GetSchema(ctx)
schemas, err = updatedRoot.GetAllSchemas(ctx)
require.NoError(t, err)
require.Equal(t, tt.expectedSchema, sch)
rowData, err := newTable.GetNomsRowData(ctx)
require.NoError(t, err)
var foundRows []row.Row
err = rowData.Iter(ctx, func(key, value types.Value) (stop bool, err error) {
tpl, err := row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple))
foundRows = append(foundRows, tpl)
return false, err
})
assert.NoError(t, err)
assert.Equal(t, tt.expectedRows, foundRows)
require.Equal(t, beforeSch, schemas[tt.newName])
})
}
}
@@ -121,6 +121,7 @@ func TestRenameTable(t *testing.T) {
const tableName = "people"
func TestAddColumnToTable(t *testing.T) {
tests := []struct {
name string
tag uint64
@@ -221,12 +222,14 @@ func TestAddColumnToTable(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
ctx := context.Background()
dEnv, err := makePeopleTable(ctx, dtestutils.CreateTestEnv())
require.NoError(t, err)
root, err := dEnv.WorkingRoot(ctx)
assert.NoError(t, err)
tbl, _, err := root.GetTable(ctx, tableName)
require.NoError(t, err)
tbl, ok, err := root.GetTable(ctx, tableName)
assert.True(t, ok)
assert.NoError(t, err)
updatedTable, err := addColumnToTable(ctx, root, tbl, tableName, tt.tag, tt.newColName, typeinfo.FromKind(tt.colKind), tt.nullable, tt.defaultVal, "", tt.order)
@@ -244,34 +247,40 @@ func TestAddColumnToTable(t *testing.T) {
index := sch.Indexes().GetByName(dtestutils.IndexName)
assert.NotNil(t, index)
tt.expectedSchema.Indexes().AddIndex(index)
tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
_, err = tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
require.NoError(t, err)
require.Equal(t, tt.expectedSchema, sch)
rowData, err := updatedTable.GetNomsRowData(ctx)
require.NoError(t, err)
var foundRows []row.Row
err = rowData.Iter(ctx, func(key, value types.Value) (stop bool, err error) {
tpl, err := row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple))
if err != nil {
return false, err
}
foundRows = append(foundRows, tpl)
return false, nil
})
assert.NoError(t, err)
assert.Equal(t, tt.expectedRows, foundRows)
indexRowData, err := updatedTable.GetNomsIndexRowData(ctx, dtestutils.IndexName)
require.NoError(t, err)
assert.Greater(t, indexRowData.Len(), uint64(0))
})
}
}
// makePeopleTable installs an empty "people" table, built from the shared
// typed test schema, into dEnv's working root and returns the updated
// environment. Any failure along the way is returned unmodified.
func makePeopleTable(ctx context.Context, dEnv *env.DoltEnv) (*env.DoltEnv, error) {
	working, err := dEnv.WorkingRoot(ctx)
	if err != nil {
		return nil, err
	}

	// Build an empty primary index plus an empty secondary-index set for the schema.
	emptyRows, err := durable.NewEmptyIndex(ctx, working.VRW(), working.NodeStore(), dtestutils.TypedSchema)
	if err != nil {
		return nil, err
	}
	emptyIndexes, err := durable.NewIndexSetWithEmptyIndexes(ctx, working.VRW(), working.NodeStore(), dtestutils.TypedSchema)
	if err != nil {
		return nil, err
	}

	table, err := doltdb.NewTable(ctx, working.VRW(), working.NodeStore(), dtestutils.TypedSchema, emptyRows, emptyIndexes, nil)
	if err != nil {
		return nil, err
	}

	// Register the new table under the package-level tableName and persist
	// the resulting root as the environment's working root.
	working, err = working.PutTable(ctx, tableName, table)
	if err != nil {
		return nil, err
	}
	if err = dEnv.UpdateWorkingRoot(ctx, working); err != nil {
		return nil, err
	}
	return dEnv, nil
}
func mustStringToColumnDefault(defaultString string) *sql.ColumnDefaultValue {
def, err := parse.StringToColumnDefaultValue(sql.NewEmptyContext(), defaultString)
if err != nil {
@@ -291,20 +300,17 @@ func TestDropColumn(t *testing.T) {
name string
colName string
expectedSchema schema.Schema
expectedRows []row.Row
expectedErr string
}{
{
name: "remove int",
colName: "age",
expectedSchema: dtestutils.RemoveColumnFromSchema(dtestutils.TypedSchema, dtestutils.AgeTag),
expectedRows: dtestutils.TypedRows,
},
{
name: "remove string",
colName: "title",
expectedSchema: dtestutils.RemoveColumnFromSchema(dtestutils.TypedSchema, dtestutils.TitleTag),
expectedRows: dtestutils.TypedRows,
},
{
name: "column not found",
@@ -320,8 +326,9 @@ func TestDropColumn(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
ctx := context.Background()
dEnv, err := makePeopleTable(ctx, dtestutils.CreateTestEnv())
require.NoError(t, err)
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
@@ -343,22 +350,9 @@ func TestDropColumn(t *testing.T) {
require.NoError(t, err)
index := originalSch.Indexes().GetByName(dtestutils.IndexName)
tt.expectedSchema.Indexes().AddIndex(index)
tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
require.Equal(t, tt.expectedSchema, sch)
rowData, err := updatedTable.GetNomsRowData(ctx)
_, err = tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
require.NoError(t, err)
var foundRows []row.Row
err = rowData.Iter(ctx, func(key, value types.Value) (stop bool, err error) {
tpl, err := row.FromNoms(dtestutils.TypedSchema, key.(types.Tuple), value.(types.Tuple))
assert.NoError(t, err)
foundRows = append(foundRows, tpl)
return false, nil
})
assert.NoError(t, err)
assert.Equal(t, tt.expectedRows, foundRows)
require.Equal(t, tt.expectedSchema, sch)
})
}
}
@@ -369,35 +363,32 @@ func TestDropColumnUsedByIndex(t *testing.T) {
colName string
expectedIndex bool
expectedSchema schema.Schema
expectedRows []row.Row
}{
{
name: "remove int",
colName: "age",
expectedIndex: true,
expectedSchema: dtestutils.RemoveColumnFromSchema(dtestutils.TypedSchema, dtestutils.AgeTag),
expectedRows: dtestutils.TypedRows,
},
{
name: "remove string",
colName: "title",
expectedIndex: true,
expectedSchema: dtestutils.RemoveColumnFromSchema(dtestutils.TypedSchema, dtestutils.TitleTag),
expectedRows: dtestutils.TypedRows,
},
{
name: "remove name",
colName: "name",
expectedIndex: false,
expectedSchema: dtestutils.RemoveColumnFromSchema(dtestutils.TypedSchema, dtestutils.NameTag),
expectedRows: dtestutils.TypedRows,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
ctx := context.Background()
dEnv, err := makePeopleTable(ctx, dtestutils.CreateTestEnv())
require.NoError(t, err)
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
@@ -411,35 +402,22 @@ func TestDropColumnUsedByIndex(t *testing.T) {
require.NoError(t, err)
originalSch, err := tbl.GetSchema(ctx)
require.NoError(t, err)
tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
_, err = tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
require.NoError(t, err)
index := originalSch.Indexes().GetByName(dtestutils.IndexName)
assert.NotNil(t, index)
if tt.expectedIndex {
tt.expectedSchema.Indexes().AddIndex(index)
indexRowData, err := updatedTable.GetNomsIndexRowData(ctx, dtestutils.IndexName)
require.NoError(t, err)
assert.Greater(t, indexRowData.Len(), uint64(0))
indexRowData, err := updatedTable.GetIndexRowData(ctx, dtestutils.IndexName)
assert.NoError(t, err)
assert.NotNil(t, indexRowData)
} else {
assert.Nil(t, sch.Indexes().GetByName(dtestutils.IndexName))
_, err := updatedTable.GetNomsIndexRowData(ctx, dtestutils.IndexName)
_, err = updatedTable.GetIndexRowData(ctx, dtestutils.IndexName)
assert.Error(t, err)
}
require.Equal(t, tt.expectedSchema, sch)
rowData, err := updatedTable.GetNomsRowData(ctx)
require.NoError(t, err)
var foundRows []row.Row
err = rowData.Iter(ctx, func(key, value types.Value) (stop bool, err error) {
tpl, err := row.FromNoms(dtestutils.TypedSchema, key.(types.Tuple), value.(types.Tuple))
assert.NoError(t, err)
foundRows = append(foundRows, tpl)
return false, nil
})
assert.NoError(t, err)
assert.Equal(t, tt.expectedRows, foundRows)
})
}
}
@@ -802,7 +780,6 @@ func TestModifyColumn(t *testing.T) {
newColumn schema.Column
order *sql.ColumnOrder
expectedSchema schema.Schema
expectedRows []row.Row
expectedErr string
}{
{
@@ -816,7 +793,6 @@ func TestModifyColumn(t *testing.T) {
schema.NewColumn("is_married", dtestutils.IsMarriedTag, types.IntKind, false, schema.NotNullConstraint{}),
schema.NewColumn("title", dtestutils.TitleTag, types.StringKind, false),
),
expectedRows: dtestutils.TypedRows,
},
{
name: "remove null constraint",
@@ -829,7 +805,6 @@ func TestModifyColumn(t *testing.T) {
schema.NewColumn("is_married", dtestutils.IsMarriedTag, types.IntKind, false, schema.NotNullConstraint{}),
schema.NewColumn("title", dtestutils.TitleTag, types.StringKind, false),
),
expectedRows: dtestutils.TypedRows,
},
{
name: "reorder first",
@@ -843,7 +818,6 @@ func TestModifyColumn(t *testing.T) {
schema.NewColumn("is_married", dtestutils.IsMarriedTag, types.IntKind, false, schema.NotNullConstraint{}),
schema.NewColumn("title", dtestutils.TitleTag, types.StringKind, false),
),
expectedRows: dtestutils.TypedRows,
},
{
name: "reorder middle",
@@ -857,7 +831,6 @@ func TestModifyColumn(t *testing.T) {
schema.NewColumn("newAge", dtestutils.AgeTag, types.UintKind, false, schema.NotNullConstraint{}),
schema.NewColumn("title", dtestutils.TitleTag, types.StringKind, false),
),
expectedRows: dtestutils.TypedRows,
},
{
name: "tag collision",
@@ -876,55 +849,25 @@ func TestModifyColumn(t *testing.T) {
existingColumn: schema.NewColumn("id", dtestutils.IdTag, types.StringKind, true, schema.NotNullConstraint{}),
newColumn: schema.NewColumn("newId", dtestutils.IdTag, types.StringKind, true, schema.NotNullConstraint{}),
expectedSchema: alteredTypeSch,
expectedRows: []row.Row{
dtestutils.NewRow(
alteredTypeSch,
types.String("00000000-0000-0000-0000-000000000000"),
types.String("Bill Billerson"),
types.Uint(32),
types.Int(1),
types.String("Senior Dufus"),
),
dtestutils.NewRow(
alteredTypeSch,
types.String("00000000-0000-0000-0000-000000000001"),
types.String("John Johnson"),
types.Uint(25),
types.Int(0),
types.String("Dufus"),
),
dtestutils.NewRow(
alteredTypeSch,
types.String("00000000-0000-0000-0000-000000000002"),
types.String("Rob Robertson"),
types.Uint(21),
types.Int(0),
types.String(""),
),
},
},
{
name: "type change same tag",
existingColumn: schema.NewColumn("name", dtestutils.NameTag, types.StringKind, false, schema.NotNullConstraint{}),
newColumn: newNameColSameTag,
expectedSchema: alteredTypeSch2,
expectedRows: dtestutils.TypedRows,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
ctx := context.Background()
dEnv, err := makePeopleTable(ctx, dtestutils.CreateTestEnv())
require.NoError(t, err)
root, err := dEnv.WorkingRoot(ctx)
assert.NoError(t, err)
tbl, _, err := root.GetTable(ctx, tableName)
assert.NoError(t, err)
tmpDir, err := dEnv.TempTableFilesDir()
require.NoError(t, err)
opts := editor.Options{Deaf: dEnv.DbEaFactory(), Tempdir: tmpDir}
updatedTable, err := modifyColumn(ctx, tbl, tt.existingColumn, tt.newColumn, tt.order)
if len(tt.expectedErr) > 0 {
require.Error(t, err)
@@ -939,35 +882,11 @@ func TestModifyColumn(t *testing.T) {
index := sch.Indexes().GetByName(dtestutils.IndexName)
assert.NotNil(t, index)
tt.expectedSchema.Indexes().AddIndex(index)
tt.expectedSchema.SetPkOrdinals(sch.GetPkOrdinals())
tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
err = tt.expectedSchema.SetPkOrdinals(sch.GetPkOrdinals())
require.NoError(t, err)
_, err = tt.expectedSchema.Checks().AddCheck("test-check", "age < 123", true)
require.NoError(t, err)
require.Equal(t, tt.expectedSchema, sch)
rowData, err := updatedTable.GetNomsRowData(ctx)
require.NoError(t, err)
var foundRows []row.Row
err = rowData.Iter(ctx, func(key, value types.Value) (stop bool, err error) {
tpl, err := row.FromNoms(tt.expectedSchema, key.(types.Tuple), value.(types.Tuple))
if err != nil {
return false, err
}
foundRows = append(foundRows, tpl)
return false, nil
})
assert.NoError(t, err)
assert.Equal(t, tt.expectedRows, foundRows)
updatedIndexRows, err := updatedTable.GetNomsIndexRowData(context.Background(), index.Name())
require.NoError(t, err)
expectedIndexRows, err := editor.RebuildIndex(context.Background(), updatedTable, index.Name(), opts)
require.NoError(t, err)
if uint64(len(foundRows)) != updatedIndexRows.Len() || !updatedIndexRows.Equals(expectedIndexRows) {
t.Error("index contents are incorrect")
}
})
}
}
@@ -56,7 +56,7 @@ type Controller struct {
provider dbProvider
iterSessions IterSessions
killQuery func(uint32)
killConnection func(uint32)
killConnection func(uint32) error
}
type sqlvars interface {
@@ -145,7 +145,7 @@ func (c *Controller) ManageDatabaseProvider(p dbProvider) {
c.provider.SetIsStandby(c.role == RoleStandby)
}
func (c *Controller) ManageQueryConnections(iterSessions IterSessions, killQuery, killConnection func(uint32)) {
func (c *Controller) ManageQueryConnections(iterSessions IterSessions, killQuery func(uint32), killConnection func(uint32) error) {
if c == nil {
return
}
+8 -17
View File
@@ -180,29 +180,20 @@ func assertSchemasEqual(t *testing.T, expected, actual sql.Schema) {
}
// CreateTableFn returns a SetupFunc that creates a table with the rows given
// todo(andy): replace with ExecuteSetupSQL
func CreateTableFn(tableName string, tableSchema schema.Schema, initialRows ...row.Row) SetupFn {
return func(t *testing.T, dEnv *env.DoltEnv) {
dtestutils.CreateTestTable(t, dEnv, tableName, tableSchema, initialRows...)
}
}
// CreateTableWithRowsFn returns a SetupFunc that creates a table with the rows given, creating the rows on the fly
// from Value types conforming to the schema given.
func CreateTableWithRowsFn(tableName string, tableSchema schema.Schema, initialRows ...[]types.Value) SetupFn {
func ExecuteSetupSQL(ctx context.Context, queries string) SetupFn {
return func(t *testing.T, dEnv *env.DoltEnv) {
rows := make([]row.Row, len(initialRows))
for i, r := range initialRows {
rows[i] = NewRowWithSchema(tableSchema, r...)
}
dtestutils.CreateTestTable(t, dEnv, tableName, tableSchema, rows...)
}
}
// Compose takes an arbitrary number of SetupFns and composes them into a single func which executes all funcs given.
func Compose(fns ...SetupFn) SetupFn {
return func(t *testing.T, dEnv *env.DoltEnv) {
for _, f := range fns {
f(t, dEnv)
}
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
root, err = ExecuteSql(t, dEnv, root, queries)
require.NoError(t, err)
err = dEnv.UpdateWorkingRoot(ctx, root)
require.NoError(t, err)
}
}
@@ -23,7 +23,6 @@ import (
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/cmd/dolt/errhand"
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
@@ -492,7 +491,7 @@ func (p DoltDatabaseProvider) cloneDatabaseFromRemote(
// TODO: params for AWS, others that need them
r := env.NewRemote(remoteName, remoteUrl, nil)
srcDB, err := getRemoteDb(ctx, r, p.remoteDialer)
srcDB, err := r.GetRemoteDB(ctx, types.Format_Default, p.remoteDialer)
if err != nil {
return err
}
@@ -539,18 +538,6 @@ func (p DoltDatabaseProvider) cloneDatabaseFromRemote(
return sess.AddDB(ctx, dbstate)
}
// TODO: extract a shared library for this functionality
// TODO: this method only adds error handling. Remove?
func getRemoteDb(ctx *sql.Context, r env.Remote, dialer dbfactory.GRPCDialProvider) (*doltdb.DoltDB, error) {
ddb, err := r.GetRemoteDB(ctx, types.Format_Default, dialer)
if err != nil {
bdr := errhand.BuildDError("error: failed to get remote db").AddCause(err)
return nil, bdr.Build()
}
return ddb, nil
}
// DropDatabase implements the sql.MutableDatabaseProvider interface
func (p DoltDatabaseProvider) DropDatabase(ctx *sql.Context, name string) error {
isRevisionDatabase, err := p.IsRevisionDatabase(ctx, name)
@@ -57,7 +57,7 @@ func TestInsertIntoQueryCatalogTable(t *testing.T) {
err = dEnv.UpdateWorkingRoot(ctx, root)
require.NoError(t, err)
rows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, "select display_order, query, name, description from "+doltdb.DoltQueryCatalogTableName)
rows, err := sqle.ExecuteSelect(t, dEnv, root, "select display_order, query, name, description from "+doltdb.DoltQueryCatalogTableName)
require.NoError(t, err)
expectedRows := []sql.Row{
{uint64(1), "select 1 from dual", "name", "description"},
@@ -80,7 +80,7 @@ func TestInsertIntoQueryCatalogTable(t *testing.T) {
err = dEnv.UpdateWorkingRoot(ctx, root)
require.NoError(t, err)
rows, err = sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, "select display_order, query, name, description from "+doltdb.DoltQueryCatalogTableName+" order by display_order")
rows, err = sqle.ExecuteSelect(t, dEnv, root, "select display_order, query, name, description from "+doltdb.DoltQueryCatalogTableName+" order by display_order")
require.NoError(t, err)
expectedRows = []sql.Row{
{uint64(1), "select 1 from dual", "name", "description"},
@@ -89,7 +89,7 @@ func TestInsertIntoQueryCatalogTable(t *testing.T) {
assert.Equal(t, expectedRows, rows)
rows, err = sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, "select id from "+doltdb.DoltQueryCatalogTableName)
rows, err = sqle.ExecuteSelect(t, dEnv, root, "select id from "+doltdb.DoltQueryCatalogTableName)
require.NoError(t, err)
for _, r := range rows {
assert.NotEmpty(t, r)
@@ -701,6 +701,23 @@ var DoltScripts = []queries.ScriptTest{
},
},
},
{
Name: "dolt_schemas schema",
SetUpScript: []string{
"CREATE TABLE viewtest(v1 int, v2 int)",
"CREATE VIEW view1 AS SELECT v1 FROM viewtest",
"CREATE VIEW view2 AS SELECT v2 FROM viewtest",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT type, name, fragment, id FROM dolt_schemas ORDER BY 1, 2",
Expected: []sql.Row{
{"view", "view1", "SELECT v1 FROM viewtest", int64(1)},
{"view", "view2", "SELECT v2 FROM viewtest", int64(2)},
},
},
},
},
}
func makeLargeInsert(sz int) string {
@@ -25,6 +25,7 @@ import (
"time"
"github.com/dolthub/go-mysql-server/sql"
"github.com/shopspring/decimal"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -173,11 +174,11 @@ var typesTests = []struct {
}
var (
typesTableRow1 = sql.Row{int32(-3), uint64(1), forceParseTime("2020-05-14 12:00:00"), "-3.30000", uint16(2), -3.3, uint64(1), sql.Timespan(-183000000), "a", int16(1980)}
typesTableRow2 = sql.Row{int32(-1), uint64(2), forceParseTime("2020-05-14 12:00:01"), "-1.10000", uint16(3), -1.1, uint64(3), sql.Timespan(-61000000), "b", int16(1990)}
typesTableRow3 = sql.Row{int32(0), uint64(3), forceParseTime("2020-05-14 12:00:02"), "0.00000", uint16(4), 0.0, uint64(4), sql.Timespan(0), "c", int16(2000)}
typesTableRow4 = sql.Row{int32(1), uint64(4), forceParseTime("2020-05-14 12:00:03"), "1.10000", uint16(5), 1.1, uint64(5), sql.Timespan(61000000), "d", int16(2010)}
typesTableRow5 = sql.Row{int32(3), uint64(5), forceParseTime("2020-05-14 12:00:04"), "3.30000", uint16(6), 3.3, uint64(6), sql.Timespan(183000000), "e", int16(2020)}
typesTableRow1 = sql.Row{int32(-3), uint64(1), mustTime("2020-05-14 12:00:00"), mustDecimal("-3.30000"), uint16(2), -3.3, uint64(1), sql.Timespan(-183000000), "a", int16(1980)}
typesTableRow2 = sql.Row{int32(-1), uint64(2), mustTime("2020-05-14 12:00:01"), mustDecimal("-1.10000"), uint16(3), -1.1, uint64(3), sql.Timespan(-61000000), "b", int16(1990)}
typesTableRow3 = sql.Row{int32(0), uint64(3), mustTime("2020-05-14 12:00:02"), mustDecimal("0.00000"), uint16(4), 0.0, uint64(4), sql.Timespan(0), "c", int16(2000)}
typesTableRow4 = sql.Row{int32(1), uint64(4), mustTime("2020-05-14 12:00:03"), mustDecimal("1.10000"), uint16(5), 1.1, uint64(5), sql.Timespan(61000000), "d", int16(2010)}
typesTableRow5 = sql.Row{int32(3), uint64(5), mustTime("2020-05-14 12:00:04"), mustDecimal("3.30000"), uint16(6), 3.3, uint64(6), sql.Timespan(183000000), "e", int16(2020)}
)
func TestDoltIndexEqual(t *testing.T) {
@@ -1067,12 +1068,6 @@ func TestDoltIndexBetween(t *testing.T) {
indexIter, err := index.RowIterForIndexLookup(ctx, NoCacheTableable{dt}, indexLookup, pkSch, nil)
require.NoError(t, err)
// If this is a primary index assert that a covering index was used
if idx.ID() == "PRIMARY" {
_, ok := indexIter.(*index.CoveringIndexRowIterAdapter)
require.True(t, ok)
}
var readRows []sql.Row
var nextRow sql.Row
for nextRow, err = indexIter.Next(ctx); err == nil; nextRow, err = indexIter.Next(ctx) {
@@ -1080,7 +1075,7 @@ func TestDoltIndexBetween(t *testing.T) {
}
require.Equal(t, io.EOF, err)
requireUnorderedRowsEqual(t, expectedRows, readRows)
requireUnorderedRowsEqual(t, pkSch.Schema, expectedRows, readRows)
})
}
}
@@ -1185,6 +1180,19 @@ func (r *rowSlice) Less(i, j int) bool {
return false
}
// equals reports whether r and other contain the same rows in the same
// order, comparing element-wise under schema sch. A comparison error is
// treated as a mismatch.
func (r *rowSlice) equals(other *rowSlice, sch sql.Schema) bool {
	if len(r.rows) != len(other.rows) {
		return false
	}
	for i, row := range r.rows {
		eq, err := row.Equals(other.rows[i], sch)
		if err != nil {
			return false
		}
		if !eq {
			return false
		}
	}
	return true
}
func signedCompare(n1 int64, c interface{}) (int, error) {
var n2 int64
switch typedVal := c.(type) {
@@ -1264,7 +1272,7 @@ func (r *rowSlice) Swap(i, j int) {
r.rows[i], r.rows[j] = r.rows[j], r.rows[i]
}
func requireUnorderedRowsEqual(t *testing.T, rows1, rows2 []sql.Row) {
func requireUnorderedRowsEqual(t *testing.T, s sql.Schema, rows1, rows2 []sql.Row) {
slice1 := &rowSlice{rows: rows1}
sort.Stable(slice1)
require.NoError(t, slice1.sortErr)
@@ -1273,7 +1281,7 @@ func requireUnorderedRowsEqual(t *testing.T, rows1, rows2 []sql.Row) {
sort.Stable(slice2)
require.NoError(t, slice2.sortErr)
require.Equal(t, rows1, rows2)
assert.True(t, slice1.equals(slice2, s))
}
func testDoltIndex(t *testing.T, ctx *sql.Context, root *doltdb.RootValue, keys []interface{}, expectedRows []sql.Row, idx index.DoltIndex, cmp indexComp) {
@@ -1317,7 +1325,7 @@ func testDoltIndex(t *testing.T, ctx *sql.Context, root *doltdb.RootValue, keys
}
require.Equal(t, io.EOF, err)
requireUnorderedRowsEqual(t, convertSqlRowToInt64(expectedRows), readRows)
requireUnorderedRowsEqual(t, pkSch.Schema, convertSqlRowToInt64(expectedRows), readRows)
}
func doltIndexSetup(t *testing.T) (*sql.Context, *doltdb.RootValue, map[string]index.DoltIndex) {
@@ -1406,9 +1414,20 @@ func NewTestSQLCtx(ctx context.Context) *sql.Context {
return sqlCtx
}
func forceParseTime(timeString string) time.Time {
tim, _ := time.Parse("2006-01-02 15:04:05", timeString)
return tim
func mustTime(timeString string) time.Time {
t, err := time.Parse("2006-01-02 15:04:05", timeString)
if err != nil {
panic(err)
}
return t
}
// mustDecimal converts s to a decimal.Decimal and panics when s is not a
// valid decimal literal. For test fixtures only.
func mustDecimal(s string) decimal.Decimal {
	parsed, err := decimal.NewFromString(s)
	if err != nil {
		panic(err)
	}
	return parsed
}
func convertSqlRowToInt64(sqlRows []sql.Row) []sql.Row {
@@ -74,7 +74,7 @@ func setupIndexes(t *testing.T, tableName, insertQuery string) (*sqle.Engine, *e
table := dsqle.DoltTableFromAlterableTable(sqlCtx, tbl)
idxv1RowData, err := table.GetNomsIndexRowData(context.Background(), idxv1.Name())
idxv1RowData, err := table.GetIndexRowData(context.Background(), idxv1.Name())
require.NoError(t, err)
idxv1Cols := make([]schema.Column, idxv1.Count())
for i, tag := range idxv1.IndexedColumnTags() {
@@ -30,6 +30,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/index"
"github.com/dolthub/dolt/go/libraries/doltcore/table/typed/noms"
"github.com/dolthub/dolt/go/store/types"
)
// This tests mergeable indexes by using the SQL engine and intercepting specific calls. This way, we can verify that
@@ -37,6 +38,10 @@ import (
// they're converted into a format that Noms understands to verify that they were handled correctly. Lastly, we ensure
// that the final output is as expected.
func TestMergeableIndexes(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip()
}
engine, denv, root, db, indexTuples := setupIndexes(t, "test", `INSERT INTO test VALUES
(-3, NULL, NULL),
(-2, NULL, NULL),
@@ -1373,6 +1378,10 @@ func TestMergeableIndexes(t *testing.T) {
// ranges may be incorrect.
// TODO: disassociate NULL ranges from value ranges and fix the intermediate ranges (finalRanges).
func TestMergeableIndexesNulls(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip()
}
engine, denv, root, db, indexTuples := setupIndexes(t, "test", `INSERT INTO test VALUES
(0, 10, 20),
(1, 11, 21),
@@ -181,7 +181,7 @@ func TestDbRevision(t *testing.T) {
func populateCommitHashes(t *testing.T, dEnv *env.DoltEnv, root *doltdb.RootValue) (cm1, cm2, cm3 hash.Hash) {
q := "SELECT commit_hash FROM dolt_log;"
rows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, q)
rows, err := sqle.ExecuteSelect(t, dEnv, root, q)
require.NoError(t, err)
assert.Len(t, rows, 4)
cm3 = hash.Parse(rows[0][0].(string))
@@ -191,7 +191,7 @@ func populateCommitHashes(t *testing.T, dEnv *env.DoltEnv, root *doltdb.RootValu
}
func makeTestAssertion(t *testing.T, a testAssert, dEnv *env.DoltEnv, root *doltdb.RootValue) {
actRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, a.query)
actRows, err := sqle.ExecuteSelect(t, dEnv, root, a.query)
require.NoError(t, err)
require.Equal(t, len(a.rows), len(actRows))
@@ -212,7 +212,7 @@ func setupHistoryTests(t *testing.T) *env.DoltEnv {
// get commit hashes from the log table
q := "select commit_hash, date from dolt_log order by date desc;"
rows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, q)
rows, err := sqle.ExecuteSelect(t, dEnv, root, q)
require.NoError(t, err)
require.Equal(t, 5, len(rows))
HEAD = rows[0][0].(string)
@@ -234,7 +234,7 @@ func testHistoryTable(t *testing.T, test historyTableTest, dEnv *env.DoltEnv) {
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
actRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, test.query)
actRows, err := sqle.ExecuteSelect(t, dEnv, root, test.query)
require.NoError(t, err)
require.Equal(t, len(test.rows), len(actRows))
@@ -33,6 +33,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/json"
"github.com/dolthub/dolt/go/store/types"
)
type jsonValueTest struct {
@@ -43,6 +44,10 @@ type jsonValueTest struct {
}
func TestJsonValues(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetests
}
sqle.SkipByDefaultInCI(t)
setupCommon := []testCommand{
{cmd.SqlCmd{}, args{"-q", `create table js (pk int primary key, js json);`}},
@@ -131,7 +136,7 @@ func testJsonValue(t *testing.T, test jsonValueTest, setupCommon []testCommand)
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
actRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, test.query)
actRows, err := sqle.ExecuteSelect(t, dEnv, root, test.query)
require.NoError(t, err)
require.Equal(t, len(test.rows), len(actRows))
@@ -154,6 +159,10 @@ func testJsonValue(t *testing.T, test jsonValueTest, setupCommon []testCommand)
// round-trips large random JSON objects through the SQL engine
func TestLargeJsonObjects(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetests
}
sqle.SkipByDefaultInCI(t)
setupCommon := []testCommand{
{cmd.SqlCmd{}, args{"-q", `create table js (pk int primary key, js json);`}},
@@ -25,6 +25,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle"
"github.com/dolthub/dolt/go/store/types"
)
// This tests running queries against a modified subset of the stockmarket data set found here:
@@ -20144,6 +20145,9 @@ func TestCreateTables(t *testing.T) {
}
func TestInserts(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetests
}
sqle.SkipByDefaultInCI(t)
dEnv := dtestutils.CreateTestEnv()
ctx := context.Background()
@@ -20170,6 +20174,9 @@ func TestInserts(t *testing.T) {
}
func TestInsertsWithIndexes(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetests
}
sqle.SkipByDefaultInCI(t)
dEnv := dtestutils.CreateTestEnv()
ctx := context.Background()
@@ -20217,12 +20224,12 @@ func TestJoin(t *testing.T) {
root, err = sqle.ExecuteSql(t, dEnv, root, insertRows)
require.NoError(t, err)
rows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, `select Type, d.Symbol, Country, TradingDate, Open, High, Low, Close, Volume, OpenInt, Name, Sector, IPOYear
rows, err := sqle.ExecuteSelect(t, dEnv, root, `select Type, d.Symbol, Country, TradingDate, Open, High, Low, Close, Volume, OpenInt, Name, Sector, IPOYear
from daily_summary d join symbols t on d.Symbol = t.Symbol order by d.Symbol, Country, TradingDate`)
require.NoError(t, err)
assert.Equal(t, 5210, len(rows))
expectedJoinRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, `select * from join_result order by symbol, country, TradingDate`)
expectedJoinRows, err := sqle.ExecuteSelect(t, dEnv, root, `select * from join_result order by symbol, country, TradingDate`)
require.NoError(t, err)
assertResultRowsEqual(t, expectedJoinRows, rows)
}
@@ -20262,7 +20269,7 @@ func TestExplain(t *testing.T) {
root, err = sqle.ExecuteSql(t, dEnv, root, createTables)
require.NoError(t, err)
rows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, "explain select * from daily_summary d join symbols t on d.Symbol = t.Symbol")
rows, err := sqle.ExecuteSelect(t, dEnv, root, "explain select * from daily_summary d join symbols t on d.Symbol = t.Symbol")
require.NoError(t, err)
rowStrings := make([]string, len(rows))
for i, row := range rows {
@@ -73,25 +73,25 @@ func TestDoltHarness(t *testing.T) {
queryTests := []queryTest{
{
query: "SELECT a,c,e FROM t1;",
query: "SELECT a,c,e FROM t1 ORDER BY 1, 2, 3;",
expErr: nil,
expSchema: "III",
expResults: []string{"104", "102", "NULL", "107", "106", "109"},
},
{
query: "SELECT b,d FROM t1;",
query: "SELECT b,d FROM t1 ORDER BY 1, 2;",
expErr: nil,
expSchema: "II",
expResults: []string{"NULL", "101", "105", "108"},
},
{
query: "SELECT * FROM t1 WHERE d < 107;",
query: "SELECT * FROM t1 WHERE d < 107 ORDER BY 1, 2, 3, 4;",
expErr: nil,
expSchema: "IIIII",
expResults: []string{"104", "NULL", "102", "101", "NULL"},
},
{
query: "SELECT * FROM t1 WHERE d > 102;",
query: "SELECT * FROM t1 WHERE d > 102 ORDER BY 1, 2, 3, 4;",
expErr: nil,
expSchema: "IIIII",
expResults: []string{"107", "105", "106", "108", "109"},
@@ -23,13 +23,12 @@ import (
"go.uber.org/zap/buffer"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/ref"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
)
func TestCommitHooksNoErrors(t *testing.T) {
dEnv := dtestutils.CreateEnvWithSeedData(t)
dEnv := CreateEnvWithSeedData(t)
AddDoltSystemVariables()
sql.SystemVariables.SetGlobal(dsess.SkipReplicationErrors, true)
sql.SystemVariables.SetGlobal(dsess.ReplicateToRemote, "unknown")
@@ -32,6 +32,9 @@ import (
)
func TestSchemaTableRecreationOlder(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // schema table migrations predate NBF __DOLT__
}
ctx := NewTestSQLCtx(context.Background())
dEnv := dtestutils.CreateTestEnv()
tmpDir, err := dEnv.TempTableFilesDir()
@@ -112,6 +115,9 @@ func TestSchemaTableRecreationOlder(t *testing.T) {
}
func TestSchemaTableRecreation(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // schema table migrations predate NBF __DOLT__
}
ctx := NewTestSQLCtx(context.Background())
dEnv := dtestutils.CreateTestEnv()
tmpDir, err := dEnv.TempTableFilesDir()
+16 -12
View File
@@ -33,6 +33,10 @@ import (
)
func TestSqlBatchInserts(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetests
}
insertStatements := []string{
`insert into people (id, first_name, last_name, is_married, age, rating, uuid, num_episodes) values
(7, "Maggie", "Simpson", false, 1, 5.1, '00000000-0000-0000-0000-000000000007', 677)`,
@@ -58,11 +62,9 @@ func TestSqlBatchInserts(t *testing.T) {
insertStatements[i], insertStatements[j] = insertStatements[j], insertStatements[i]
})
dEnv := dtestutils.CreateTestEnv()
ctx := context.Background()
CreateTestDatabase(dEnv, t)
root, _ := dEnv.WorkingRoot(ctx)
dEnv := CreateTestDatabase(t)
root, err := dEnv.WorkingRoot(ctx)
tmpDir, err := dEnv.TempTableFilesDir()
require.NoError(t, err)
@@ -152,11 +154,10 @@ func TestSqlBatchInsertIgnoreReplace(t *testing.T) {
(2, "Milhouse", "VanHouten", false, 1, 5.1, '00000000-0000-0000-0000-000000000008', 677)`,
}
dEnv := dtestutils.CreateTestEnv()
ctx := context.Background()
CreateTestDatabase(dEnv, t)
root, _ := dEnv.WorkingRoot(ctx)
dEnv := CreateTestDatabase(t)
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
tmpDir, err := dEnv.TempTableFilesDir()
require.NoError(t, err)
@@ -196,11 +197,14 @@ func TestSqlBatchInsertIgnoreReplace(t *testing.T) {
}
func TestSqlBatchInsertErrors(t *testing.T) {
dEnv := dtestutils.CreateTestEnv()
ctx := context.Background()
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetests
}
CreateTestDatabase(dEnv, t)
root, _ := dEnv.WorkingRoot(ctx)
ctx := context.Background()
dEnv := CreateTestDatabase(t)
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
tmpDir, err := dEnv.TempTableFilesDir()
require.NoError(t, err)
+28 -15
View File
@@ -255,13 +255,12 @@ func TestCreateTable(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateTestEnv()
dtestutils.CreateTestTable(t, dEnv, PeopleTableName, PeopleTestSchema, AllPeopleRows...)
ctx := context.Background()
root, _ := dEnv.WorkingRoot(ctx)
dEnv := CreateEmptyTestDatabase(t)
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
updatedRoot, err := ExecuteSql(t, dEnv, root, tt.query)
if tt.expectedErr == "" {
require.NoError(t, err)
} else {
@@ -329,10 +328,10 @@ func TestDropTable(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
ctx := context.Background()
root, _ := dEnv.WorkingRoot(ctx)
dEnv := CreateTestDatabase(t)
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
updatedRoot, err := ExecuteSql(t, dEnv, root, tt.query)
@@ -502,8 +501,7 @@ func TestAddColumn(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
dEnv := CreateTestDatabase(t)
ctx := context.Background()
root, _ := dEnv.WorkingRoot(ctx)
@@ -525,6 +523,10 @@ func TestAddColumn(t *testing.T) {
assert.NoError(t, err)
equalSchemas(t, tt.expectedSchema, sch)
if types.Format_Default != types.Format_LD_1 {
return // todo: convert these to enginetests
}
updatedTable, ok, err := updatedRoot.GetTable(ctx, "people")
assert.NoError(t, err)
require.True(t, ok)
@@ -617,8 +619,7 @@ func TestRenameColumn(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
dEnv := CreateTestDatabase(t)
ctx := context.Background()
root, _ := dEnv.WorkingRoot(ctx)
@@ -639,6 +640,10 @@ func TestRenameColumn(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, tt.expectedSchema, sch)
if types.Format_Default != types.Format_LD_1 {
return // todo: convert these to enginetests
}
updatedTable, ok, err := updatedRoot.GetTable(ctx, "people")
assert.NoError(t, err)
require.True(t, ok)
@@ -727,8 +732,7 @@ func TestRenameTableStatements(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
dEnv := dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
dEnv := CreateTestDatabase(t)
ctx := context.Background()
root, _ := dEnv.WorkingRoot(ctx)
@@ -745,6 +749,7 @@ func TestRenameTableStatements(t *testing.T) {
has, err := updatedRoot.HasTable(ctx, tt.oldTableName)
require.NoError(t, err)
assert.False(t, has)
newTable, ok, err := updatedRoot.GetTable(ctx, tt.newTableName)
require.NoError(t, err)
require.True(t, ok)
@@ -753,6 +758,10 @@ func TestRenameTableStatements(t *testing.T) {
require.NoError(t, err)
require.Equal(t, tt.expectedSchema, sch)
if types.Format_Default != types.Format_LD_1 {
return // todo: convert these to enginetests
}
rowData, err := newTable.GetNomsRowData(ctx)
require.NoError(t, err)
var foundRows []row.Row
@@ -773,13 +782,17 @@ func TestRenameTableStatements(t *testing.T) {
}
func TestAlterSystemTables(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip("") // todo: convert to enginetests
}
systemTableNames := []string{"dolt_log", "dolt_history_people", "dolt_diff_people", "dolt_commit_diff_people"} // "dolt_docs",
reservedTableNames := []string{"dolt_schemas", "dolt_query_catalog"}
var dEnv *env.DoltEnv
setup := func() {
dEnv = dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
dEnv = CreateTestDatabase(t)
dtestutils.CreateEmptyTestTable(t, dEnv, "dolt_docs", doltdb.DocsSchema)
dtestutils.CreateEmptyTestTable(t, dEnv, doltdb.SchemasTableName, SchemasTableSchema())
dtestutils.CreateTestTable(t, dEnv, "dolt_docs",
doltdb.DocsSchema,
+4 -4
View File
@@ -23,7 +23,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dtables"
"github.com/dolthub/dolt/go/store/types"
@@ -181,6 +180,9 @@ func TestExecuteDelete(t *testing.T) {
}
func TestExecuteDeleteSystemTables(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetest
}
for _, test := range systemTableDeleteTests {
t.Run(test.Name, func(t *testing.T) {
testDeleteQuery(t, test)
@@ -232,9 +234,7 @@ func testDeleteQuery(t *testing.T, test DeleteTest) {
t.Skip("Skipping tests until " + singleDeleteQueryTest)
}
dEnv := dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
dEnv := CreateTestDatabase(t)
if test.AdditionalSetup != nil {
test.AdditionalSetup(t, dEnv)
}
+7 -6
View File
@@ -24,7 +24,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dtables"
"github.com/dolthub/dolt/go/store/types"
@@ -238,9 +237,9 @@ var BasicInsertTests = []InsertTest{
},
{
Name: "insert partial columns existing pk",
AdditionalSetup: CreateTableWithRowsFn("temppeople",
NewSchema("id", types.IntKind, "first_name", types.StringKind, "last_name", types.StringKind),
[]types.Value{types.Int(2), types.String("Bart"), types.String("Simpson")}),
AdditionalSetup: ExecuteSetupSQL(context.Background(), `
CREATE TABLE temppeople (id bigint primary key, first_name varchar(16383), last_name varchar(16383));
INSERT INTO temppeople VALUES (2, 'Bart', 'Simpson');`),
InsertQuery: "insert into temppeople (id, first_name, last_name) values (2, 'Bart', 'Simpson')",
ExpectedErr: "duplicate primary key",
},
@@ -415,6 +414,9 @@ var systemTableInsertTests = []InsertTest{
}
func TestInsertIntoSystemTables(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetest
}
for _, test := range systemTableInsertTests {
t.Run(test.Name, func(t *testing.T) {
testInsertQuery(t, test)
@@ -437,8 +439,7 @@ func testInsertQuery(t *testing.T, test InsertTest) {
t.Skip("Skipping test broken on SQL engine")
}
dEnv := dtestutils.CreateTestEnv()
CreateEmptyTestDatabase(dEnv, t)
dEnv := CreateEmptyTestDatabase(t)
if test.AdditionalSetup != nil {
test.AdditionalSetup(t, dEnv)
@@ -22,7 +22,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/store/types"
@@ -95,8 +94,7 @@ func TestExecutePersist(t *testing.T) {
// Tests the given query on a freshly created dataset, asserting that the result has the given schema and rows. If
// expectedErr is set, asserts instead that the execution returns an error that matches.
func testPersistQuery(t *testing.T, test PersistTest) {
dEnv := dtestutils.CreateTestEnv()
CreateEmptyTestDatabase(dEnv, t)
dEnv := CreateEmptyTestDatabase(t)
if test.AdditionalSetup != nil {
test.AdditionalSetup(t, dEnv)
+16 -15
View File
@@ -24,7 +24,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dtables"
"github.com/dolthub/dolt/go/store/types"
@@ -207,18 +206,6 @@ var BasicReplaceTests = []ReplaceTest{
),
ExpectedSchema: NewResultSetSchema("id", types.IntKind, "first_name", types.StringKind, "last_name", types.StringKind),
},
{
Name: "replace partial columns existing pk",
AdditionalSetup: CreateTableFn("temppeople",
NewSchema("id", types.IntKind, "first_name", types.StringKind, "last_name", types.StringKind, "num", types.IntKind),
NewRow(types.Int(2), types.String("Bart"), types.String("Simpson"), types.Int(44))),
ReplaceQuery: "replace into temppeople (id, first_name, last_name, num) values (2, 'Bart', 'Simpson', 88)",
SelectQuery: "select id, first_name, last_name, num from temppeople where id = 2 ORDER BY id",
ExpectedRows: ToSqlRows(
NewResultSetSchema("id", types.IntKind, "first_name", types.StringKind, "last_name", types.StringKind, "num", types.IntKind),
NewResultSetRow(types.Int(2), types.String("Bart"), types.String("Simpson"), types.Int(88))),
ExpectedSchema: NewResultSetSchema("id", types.IntKind, "first_name", types.StringKind, "last_name", types.StringKind, "num", types.IntKind),
},
{
Name: "replace partial columns multiple rows replace existing pk",
ReplaceQuery: `replace into people (id, first_name, last_name, is_married, age, rating) values
@@ -240,6 +227,18 @@ var BasicReplaceTests = []ReplaceTest{
(7, "Maggie", null, false, 1, 5.1)`,
ExpectedErr: "Constraint failed for column 'last_name': Not null",
},
{
Name: "replace partial columns existing pk",
AdditionalSetup: ExecuteSetupSQL(context.Background(), `
CREATE TABLE temppeople (id bigint primary key, first_name varchar(16383), last_name varchar(16383), num bigint);
INSERT INTO temppeople VALUES (2, 'Bart', 'Simpson', 44);`),
ReplaceQuery: "replace into temppeople (id, first_name, last_name, num) values (2, 'Bart', 'Simpson', 88)",
SelectQuery: "select id, first_name, last_name, num from temppeople where id = 2 ORDER BY id",
ExpectedRows: ToSqlRows(
NewResultSetSchema("id", types.IntKind, "first_name", types.StringKind, "last_name", types.StringKind, "num", types.IntKind),
NewResultSetRow(types.Int(2), types.String("Bart"), types.String("Simpson"), types.Int(88))),
ExpectedSchema: NewResultSetSchema("id", types.IntKind, "first_name", types.StringKind, "last_name", types.StringKind, "num", types.IntKind),
},
}
func TestExecuteReplace(t *testing.T) {
@@ -286,6 +285,9 @@ var systemTableReplaceTests = []ReplaceTest{
}
func TestReplaceIntoSystemTables(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetest
}
for _, test := range systemTableReplaceTests {
t.Run(test.Name, func(t *testing.T) {
testReplaceQuery(t, test)
@@ -304,8 +306,7 @@ func testReplaceQuery(t *testing.T, test ReplaceTest) {
t.Skip("Skipping tests until " + singleReplaceQueryTest)
}
dEnv := dtestutils.CreateTestEnv()
CreateEmptyTestDatabase(dEnv, t)
dEnv := CreateEmptyTestDatabase(t)
if test.AdditionalSetup != nil {
test.AdditionalSetup(t, dEnv)
+22 -224
View File
@@ -85,10 +85,16 @@ func LoadedLocalLocation() *time.Location {
// BasicSelectTests cover basic select statement features and error handling
func BasicSelectTests() []SelectTest {
headCommitHash := "73hc2robs4v0kt9taoe3m5hd49dmrgun"
if types.Format_Default == types.Format_DOLT_DEV {
var headCommitHash string
switch types.Format_Default {
case types.Format_DOLT:
headCommitHash = "4ej7hfduufg4o2837g3gc4p5uolrlmv9"
case types.Format_DOLT_DEV:
headCommitHash = "4ej7hfduufg4o2837g3gc4p5uolrlmv9"
case types.Format_LD_1:
headCommitHash = "73hc2robs4v0kt9taoe3m5hd49dmrgun"
}
return []SelectTest{
{
Name: "select * on primary key",
@@ -969,216 +975,6 @@ var AsOfTests = []SelectTest{
},
}
// SQL is supposed to be case insensitive. These are tests of that promise.
// Many of these are currently broken in go-mysql-server. The queries return the correct results in all cases, but the
// column names in the result schemas often have the wrong case. They sometimes use the case from the table, rather
// than the case of the expression in the query (the correct behavior). This is a minor issue, but we should fix it
// eventually.
var CaseSensitivityTests = []SelectTest{
{
Name: "table name has mixed case, select lower case",
AdditionalSetup: CreateTableWithRowsFn("MiXeDcAsE",
NewSchema("test", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select test from mixedcase",
ExpectedSchema: NewResultSetSchema("test", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
},
{
Name: "table name has mixed case, select upper case",
AdditionalSetup: CreateTableWithRowsFn("MiXeDcAsE",
NewSchema("test", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select test from MIXEDCASE",
ExpectedSchema: NewResultSetSchema("test", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
},
{
Name: "qualified select *",
AdditionalSetup: CreateTableWithRowsFn("MiXeDcAsE",
NewSchema("test", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select mixedcAse.* from MIXEDCASE",
ExpectedSchema: NewResultSetSchema("test", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
},
{
Name: "qualified select column",
AdditionalSetup: CreateTableWithRowsFn("MiXeDcAsE",
NewSchema("test", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select mixedcAse.TeSt from MIXEDCASE",
ExpectedSchema: NewResultSetSchema("TeSt", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
SkipOnSqlEngine: true,
},
{
Name: "table alias select *",
AdditionalSetup: CreateTableWithRowsFn("MiXeDcAsE",
NewSchema("test", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select Mc.* from MIXEDCASE as mc",
ExpectedSchema: NewResultSetSchema("test", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
},
{
Name: "table alias select column",
AdditionalSetup: CreateTableWithRowsFn("MiXeDcAsE",
NewSchema("test", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select mC.TeSt from MIXEDCASE as MC",
ExpectedSchema: NewResultSetSchema("TeSt", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
SkipOnSqlEngine: true,
},
{
Name: "multiple tables with the same case-insensitive name, exact match",
AdditionalSetup: Compose(
// the table name passed to NewSchemaForTable isn't important, except to get unique tags
CreateTableWithRowsFn("tableName", NewSchemaForTable("tableName1", "test", types.StringKind), []types.Value{types.String("1")}),
CreateTableWithRowsFn("TABLENAME", NewSchemaForTable("TABLENAME2", "test", types.StringKind)),
CreateTableWithRowsFn("tablename", NewSchemaForTable("tablename3", "test", types.StringKind)),
),
Query: "select test from tableName",
ExpectedSchema: NewResultSetSchema("test", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
},
{
Name: "alias with same name as table",
AdditionalSetup: Compose(
CreateTableWithRowsFn("tableName", NewSchema("test", types.StringKind)),
CreateTableWithRowsFn("other", NewSchema("othercol", types.StringKind)),
),
Query: "select other.test from tablename as other, other",
ExpectedErr: "Non-unique table name / alias: 'other'",
},
{
Name: "two table aliases with same name",
AdditionalSetup: Compose(
CreateTableWithRowsFn("tableName", NewSchema("test", types.StringKind)),
CreateTableWithRowsFn("other", NewSchema("othercol", types.StringKind)),
),
Query: "select bad.test from tablename as bad, other as bad",
ExpectedErr: "Non-unique table name / alias: 'bad'",
},
{
Name: "column name has mixed case, select lower case",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema("MiXeDcAsE", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select mixedcase from test",
ExpectedSchema: NewResultSetSchema("mixedcase", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
SkipOnSqlEngine: true,
},
{
Name: "column name has mixed case, select upper case",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema("MiXeDcAsE", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select MIXEDCASE from test",
ExpectedSchema: NewResultSetSchema("MIXEDCASE", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
SkipOnSqlEngine: true,
},
{
Name: "select with multiple matching columns, exact match",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema("MiXeDcAsE", types.StringKind, "mixedcase", types.StringKind),
[]types.Value{types.String("1"), types.String("2")}),
Query: "select mixedcase from test",
ExpectedSchema: NewResultSetSchema("mixedcase", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
SkipOnSqlEngine: true, // TODO: table should be illegal. field names cannot be the same case-insensitive
},
{
Name: "column is reserved word, select not backticked",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema(
"Timestamp", types.StringKind,
"and", types.StringKind,
"or", types.StringKind,
"select", types.StringKind),
[]types.Value{types.String("1"), types.String("1.1"), types.String("aaa"), types.String("create")}),
Query: "select Timestamp from test",
ExpectedRows: []sql.Row{{"1"}},
ExpectedSchema: NewResultSetSchema("Timestamp", types.StringKind),
},
{
Name: "column is reserved word, qualified with table alias",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema(
"Timestamp", types.StringKind,
"and", types.StringKind,
"or", types.StringKind,
"select", types.StringKind),
[]types.Value{types.String("1"), types.String("1.1"), types.String("aaa"), types.String("create")}),
Query: "select t.Timestamp from test as t",
ExpectedRows: []sql.Row{{"1"}},
ExpectedSchema: NewResultSetSchema("Timestamp", types.StringKind),
},
{
Name: "column is reserved word, select not backticked #2",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema("YeAr", types.StringKind),
[]types.Value{types.String("1")}),
Query: "select Year from test",
ExpectedSchema: NewResultSetSchema("Year", types.StringKind),
ExpectedRows: []sql.Row{{"1"}},
SkipOnSqlEngine: true,
},
{
Name: "column is reserved word, select backticked",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema(
"Timestamp", types.StringKind,
"and", types.StringKind,
"or", types.StringKind,
"select", types.StringKind),
[]types.Value{types.String("1"), types.String("1.1"), types.String("aaa"), types.String("create")}),
Query: "select `Timestamp` from test",
ExpectedRows: []sql.Row{{"1"}},
ExpectedSchema: NewResultSetSchema("Timestamp", types.StringKind),
SkipOnSqlEngine: true,
},
{
Name: "column is reserved word, select backticked #2",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema(
"Year", types.StringKind,
"and", types.StringKind,
"or", types.StringKind,
"select", types.StringKind),
[]types.Value{types.String("1"), types.String("1.1"), types.String("aaa"), types.String("create")}),
Query: "select `Year`, `OR`, `SELect`, `anD` from test",
ExpectedSchema: NewResultSetSchema(
"Year", types.StringKind,
"OR", types.StringKind,
"SELect", types.StringKind,
"anD", types.StringKind),
ExpectedRows: []sql.Row{{"1", "aaa", "create", "1.1"}},
SkipOnSqlEngine: true,
},
{
Name: "column is reserved word, table qualified",
AdditionalSetup: CreateTableWithRowsFn("test",
NewSchema(
"Year", types.StringKind,
"and", types.StringKind,
"or", types.StringKind,
"select", types.StringKind),
[]types.Value{types.String("1"), types.String("1.1"), types.String("aaa"), types.String("create")}),
Query: "select Year, t.OR, t.SELect, t.anD from test t",
ExpectedSchema: NewResultSetSchema(
"Year", types.StringKind,
"OR", types.StringKind,
"SELect", types.StringKind,
"anD", types.StringKind),
ExpectedRows: []sql.Row{{"1", "aaa", "create", "1.1"}},
SkipOnSqlEngine: true,
},
}
// Tests of join functionality, basically any query involving more than one table should go here for now.
var JoinTests = []SelectTest{
{
@@ -1452,6 +1248,9 @@ func TestSelect(t *testing.T) {
}
func TestDiffQueries(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip("") // todo: convert to enginetests
}
for _, test := range SelectDiffTests {
t.Run(test.Name, func(t *testing.T) {
testSelectDiffQuery(t, test)
@@ -1460,6 +1259,9 @@ func TestDiffQueries(t *testing.T) {
}
func TestAsOfQueries(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip("") // todo: convert to enginetests
}
for _, test := range AsOfTests {
t.Run(test.Name, func(t *testing.T) {
// AS OF queries use the same history as the diff tests, so exercise the same test setup
@@ -1478,15 +1280,6 @@ func TestJoins(t *testing.T) {
}
}
// TestCaseSensitivity runs each case-sensitivity test case as its own subtest.
func TestCaseSensitivity(t *testing.T) {
	for _, tc := range CaseSensitivityTests {
		tc := tc // capture loop variable for the subtest closure
		t.Run(tc.Name, func(t *testing.T) {
			testSelectQuery(t, tc)
		})
	}
}
var systemTableSelectTests = []SelectTest{
{
Name: "select from dolt_docs",
@@ -1545,6 +1338,9 @@ func CreateTestJSON() types.JSON {
}
func TestSelectSystemTables(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetest
}
for _, test := range systemTableSelectTests {
t.Run(test.Name, func(t *testing.T) {
testSelectQuery(t, test)
@@ -1582,9 +1378,7 @@ func testSelectQuery(t *testing.T, test SelectTest) {
cleanup := installTestCommitClock()
defer cleanup()
dEnv := dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
dEnv := CreateTestDatabase(t)
if test.AdditionalSetup != nil {
test.AdditionalSetup(t, dEnv)
}
@@ -1627,6 +1421,10 @@ func testSelectQuery(t *testing.T, test SelectTest) {
}
func testSelectDiffQuery(t *testing.T, test SelectTest) {
if types.Format_Default != types.Format_LD_1 {
t.Skip("") // todo: convert to enginetests
}
validateTest(t, test)
ctx := context.Background()
+4 -4
View File
@@ -24,7 +24,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dtables"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/json"
@@ -351,6 +350,9 @@ func TestExecuteUpdate(t *testing.T) {
}
func TestExecuteUpdateSystemTables(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip() // todo: convert to enginetest
}
for _, test := range systemTableUpdateTests {
t.Run(test.Name, func(t *testing.T) {
testUpdateQuery(t, test)
@@ -419,9 +421,7 @@ func testUpdateQuery(t *testing.T, test UpdateTest) {
t.Skip("Skipping tests until " + singleUpdateQueryTest)
}
dEnv := dtestutils.CreateTestEnv()
CreateTestDatabase(dEnv, t)
dEnv := CreateTestDatabase(t)
if test.AdditionalSetup != nil {
test.AdditionalSetup(t, dEnv)
}
@@ -25,44 +25,9 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/table/typed/noms"
"github.com/dolthub/dolt/go/store/types"
)
// mapSqlIter adapts a noms map reader to the go-mysql-server sql.RowIter
// interface, converting each Dolt row to a sql.Row as it is read.
type mapSqlIter struct {
	ctx context.Context // NOTE(review): never read by Next/Close and never set by MapToSqlIter — confirm this field is needed
	nmr *noms.NomsMapReader
	sch schema.Schema
}

var _ sql.RowIter = (*mapSqlIter)(nil)

// Next implements the interface sql.RowIter.
// It reads the next row from the underlying map reader and converts it to a
// sql.Row. Reader errors (presumably including io.EOF at exhaustion, per the
// usual RowIter convention — TODO confirm) are returned unchanged.
func (m *mapSqlIter) Next(ctx *sql.Context) (sql.Row, error) {
	dRow, err := m.nmr.ReadRow(ctx)
	if err != nil {
		return nil, err
	}
	return DoltRowToSqlRow(dRow, m.sch)
}

// Close implements the interface sql.RowIter.
// It closes the underlying noms map reader.
func (m *mapSqlIter) Close(ctx *sql.Context) error {
	return m.nmr.Close(ctx)
}

// MapToSqlIter returns a map reader that converts all rows to sql rows, creating a sql row iterator.
func MapToSqlIter(ctx context.Context, sch schema.Schema, data types.Map) (sql.RowIter, error) {
	mapReader, err := noms.NewNomsMapReader(ctx, data, sch)
	if err != nil {
		return nil, err
	}
	return &mapSqlIter{
		nmr: mapReader,
		sch: sch,
	}, nil
}
// DoltRowToSqlRow constructs a go-mysql-server sql.Row from a Dolt row.Row.
func DoltRowToSqlRow(doltRow row.Row, sch schema.Schema) (sql.Row, error) {
if doltRow == nil {
@@ -17,6 +17,7 @@ package sqle
import (
"context"
"fmt"
"io"
"math"
"reflect"
"sort"
@@ -28,11 +29,15 @@ import (
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb/durable"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/index"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/sqlutil"
"github.com/dolthub/dolt/go/store/types"
"github.com/dolthub/dolt/go/store/val"
)
func setupEditorFkTest(t *testing.T) (*env.DoltEnv, *doltdb.RootValue) {
@@ -655,25 +660,22 @@ func assertTableEditorRows(t *testing.T, root *doltdb.RootValue, expected []sql.
sch, err := tbl.GetSchema(context.Background())
require.NoError(t, err)
rowData, err := tbl.GetNomsRowData(context.Background())
rows, err := tbl.GetRowData(context.Background())
require.NoError(t, err)
var sqlRows []sql.Row
if len(expected) > 0 {
_ = rowData.IterAll(context.Background(), func(key, value types.Value) error {
r, err := row.FromNoms(sch, key.(types.Tuple), value.(types.Tuple))
assert.NoError(t, err)
sqlRow, err := sqlutil.DoltRowToSqlRow(r, sch)
assert.NoError(t, err)
sqlRows = append(sqlRows, sqlRow)
return nil
})
assert.Equal(t, convertSqlRowToInt64(expected), sqlRows)
sqlRows = sqlRowsFromDurableIndex(t, rows, sch)
expected = sortInt64Rows(convertSqlRowToInt64(expected))
sqlRows = sortInt64Rows(convertSqlRowToInt64(sqlRows))
if !assert.Equal(t, expected, sqlRows) {
t.Fail()
}
}
// we can verify that each index also has the proper contents
for _, index := range sch.Indexes().AllIndexes() {
indexRowData, err := tbl.GetNomsIndexRowData(context.Background(), index.Name())
indexRowData, err := tbl.GetIndexRowData(context.Background(), index.Name())
require.NoError(t, err)
indexSch := index.Schema()
@@ -702,61 +704,42 @@ func assertTableEditorRows(t *testing.T, root *doltdb.RootValue, expected []sql.
expectedIndexRows[rowIndex] = expectedIndex
}
expectedIndexRows = convertSqlRowToInt64(expectedIndexRows)
sort.Slice(expectedIndexRows, func(leftIndex, rightIndex int) bool {
a := expectedIndexRows[leftIndex]
b := expectedIndexRows[rightIndex]
for i := range a {
aVal, aNotNil := a[i].(int64)
bVal, bNotNil := b[i].(int64)
if !aNotNil {
aVal = math.MaxInt64
}
if !bNotNil {
bVal = math.MaxInt64
}
if aVal < bVal {
return true
}
}
return false
})
expectedIndexRows = sortInt64Rows(expectedIndexRows)
if len(expectedIndexRows) > 0 {
var sqlRows []sql.Row
_ = indexRowData.IterAll(context.Background(), func(key, value types.Value) error {
r, err := row.FromNoms(indexSch, key.(types.Tuple), value.(types.Tuple))
assert.NoError(t, err)
sqlRow, err := sqlutil.DoltRowToSqlRow(r, indexSch)
assert.NoError(t, err)
sqlRows = append(sqlRows, sqlRow)
return nil
})
sqlRows = sqlRowsFromDurableIndex(t, indexRowData, indexSch)
expected = sortInt64Rows(convertSqlRowToInt64(expected))
sqlRows = sortInt64Rows(convertSqlRowToInt64(sqlRows))
if !reflect.DeepEqual(expectedIndexRows, sqlRows) {
sort.Slice(sqlRows, func(leftIndex, rightIndex int) bool {
a := sqlRows[leftIndex]
b := sqlRows[rightIndex]
for i := range a {
aVal, aNotNil := a[i].(int64)
bVal, bNotNil := b[i].(int64)
if !aNotNil {
aVal = math.MaxInt64
}
if !bNotNil {
bVal = math.MaxInt64
}
if aVal < bVal {
return true
}
}
return false
})
sqlRows = sortInt64Rows(sqlRows)
}
assert.Equal(t, expectedIndexRows, sqlRows)
}
}
}
// sortInt64Rows sorts rows lexicographically by their column values, in
// place, and returns the same slice. Any value that is not an int64 (e.g. a
// NULL) sorts last, as if it were math.MaxInt64.
//
// Bug fix: the previous less-function returned true as soon as ANY column had
// a[i] < b[i], scanning left to right, but never returned false when
// a[i] > b[i]. That made it possible for less(a, b) and less(b, a) to both be
// true (e.g. [2,1] vs [1,5]), violating the strict-weak-ordering contract of
// sort.Slice and producing an undefined order. The comparison now decides on
// the first column where the rows differ.
func sortInt64Rows(rows []sql.Row) []sql.Row {
	sort.Slice(rows, func(l, r int) bool {
		a, b := rows[l], rows[r]
		for i := range a {
			aa, ok := a[i].(int64)
			if !ok {
				aa = math.MaxInt64
			}
			bb, ok := b[i].(int64)
			if !ok {
				bb = math.MaxInt64
			}
			if aa != bb {
				// Earlier columns take precedence.
				return aa < bb
			}
		}
		return false
	})
	return rows
}
func setupEditorKeylessFkTest(t *testing.T) (*env.DoltEnv, *doltdb.RootValue) {
dEnv := dtestutils.CreateTestEnv()
root, err := dEnv.WorkingRoot(context.Background())
@@ -900,3 +883,67 @@ ALTER TABLE three ADD FOREIGN KEY (v1, v2) REFERENCES two(v1, v2) ON DELETE CASC
})
}
}
// sqlRowsFromDurableIndex materializes every row stored in idx as a sql.Row,
// decoding with the codec appropriate for the default NomsBinFormat.
func sqlRowsFromDurableIndex(t *testing.T, idx durable.Index, sch schema.Schema) []sql.Row {
	ctx := context.Background()
	var out []sql.Row

	if types.Format_Default == types.Format_DOLT {
		// New format: walk the prolly map, decoding key/value tuples.
		m := durable.ProllyMapFromIndex(idx)
		kd, vd := m.Descriptors()
		it, err := m.IterAll(ctx)
		require.NoError(t, err)
		for {
			k, v, err := it.Next(ctx)
			if err == io.EOF {
				break
			}
			require.NoError(t, err)
			r, err := sqlRowFromTuples(sch, kd, vd, k, v)
			require.NoError(t, err)
			out = append(out, r)
		}
		return out
	}

	// types.Format_LD_1 and types.Format_DOLT_DEV: iterate the noms map.
	m := durable.NomsMapFromIndex(idx)
	_ = m.IterAll(ctx, func(key, value types.Value) error {
		dr, err := row.FromNoms(sch, key.(types.Tuple), value.(types.Tuple))
		assert.NoError(t, err)
		sr, err := sqlutil.DoltRowToSqlRow(dr, sch)
		assert.NoError(t, err)
		out = append(out, sr)
		return nil
	})
	return out
}
// sqlRowFromTuples decodes one (key, value) tuple pair into a sql.Row, placing
// each field at the position of its column in the schema's full column list.
func sqlRowFromTuples(sch schema.Schema, kd, vd val.TupleDesc, k, v val.Tuple) (sql.Row, error) {
	ctx := context.Background()
	sqlRow := make(sql.Row, sch.GetAllCols().Size())
	isKeyless := schema.IsKeyless(sch)
	for i, col := range sch.GetAllCols().GetColumns() {
		if keyIdx, inKey := sch.GetPKCols().TagToIdx[col.Tag]; inKey {
			field, err := index.GetField(ctx, kd, keyIdx, k, nil)
			if err != nil {
				return nil, err
			}
			sqlRow[i] = field
		}
		valIdx, inVal := sch.GetNonPKCols().TagToIdx[col.Tag]
		if isKeyless {
			valIdx++ // compensate for the cardinality field in keyless rows
		}
		if inVal {
			field, err := index.GetField(ctx, vd, valIdx, v, nil)
			if err != nil {
				return nil, err
			}
			sqlRow[i] = field
		}
	}
	return sqlRow, nil
}
@@ -145,6 +145,10 @@ UPDATE onepk SET pk1 = v1 + pk1 ORDER BY pk1 DESC;
idx_v2v1 := twopkSch.Indexes().GetByName("idx_v2v1")
require.NotNil(t, idx_v2v1)
if types.Format_Default != types.Format_LD_1 {
t.Skip("need a prolly sql row iter")
}
idx_v1RowData, err := onepk.GetNomsIndexRowData(context.Background(), idx_v1.Name())
require.NoError(t, err)
idx_v2v1RowData, err := twopk.GetNomsIndexRowData(context.Background(), idx_v2v1.Name())
@@ -310,6 +314,10 @@ UPDATE oneuni SET v1 = v1 + pk1;
idx_v1v2 := twouniSch.Indexes().GetByName("idx_v1v2")
require.NotNil(t, idx_v1v2)
if types.Format_Default != types.Format_LD_1 {
t.Skip("need a prolly sql row iter")
}
idx_v1RowData, err := oneuni.GetNomsIndexRowData(context.Background(), idx_v1.Name())
require.NoError(t, err)
idx_v1v2RowData, err := twouni.GetNomsIndexRowData(context.Background(), idx_v1v2.Name())
-14
View File
@@ -353,20 +353,6 @@ func GetAllRows(root *doltdb.RootValue, tableName string) ([]row.Row, error) {
return rows, nil
}
// Creates a test database with the test data set in it
func CreateTestDatabase(dEnv *env.DoltEnv, t *testing.T) {
dtestutils.CreateTestTable(t, dEnv, PeopleTableName, PeopleTestSchema, AllPeopleRows...)
dtestutils.CreateTestTable(t, dEnv, EpisodesTableName, EpisodesTestSchema, AllEpsRows...)
dtestutils.CreateTestTable(t, dEnv, AppearancesTableName, AppearancesTestSchema, AllAppsRows...)
}
// Creates a test database without any data in it
func CreateEmptyTestDatabase(dEnv *env.DoltEnv, t *testing.T) {
dtestutils.CreateTestTable(t, dEnv, PeopleTableName, PeopleTestSchema)
dtestutils.CreateTestTable(t, dEnv, EpisodesTableName, EpisodesTestSchema)
dtestutils.CreateTestTable(t, dEnv, AppearancesTableName, AppearancesTestSchema)
}
var idColTag0TypeUUID = schema.NewColumn("id", 0, types.IntKind, true)
var firstColTag1TypeStr = schema.NewColumn("first_name", 1, types.StringKind, false)
var lastColTag2TypeStr = schema.NewColumn("last_name", 2, types.StringKind, false)
+77 -11
View File
@@ -29,6 +29,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
@@ -176,10 +177,10 @@ func getDbState(t *testing.T, db sql.Database, dEnv *env.DoltEnv) dsess.InitialD
}
// ExecuteSelect executes the select statement given and returns the resulting rows, or an error if one is encountered.
func ExecuteSelect(t *testing.T, dEnv *env.DoltEnv, ddb *doltdb.DoltDB, root *doltdb.RootValue, query string) ([]sql.Row, error) {
func ExecuteSelect(t *testing.T, dEnv *env.DoltEnv, root *doltdb.RootValue, query string) ([]sql.Row, error) {
dbData := env.DbData{
Ddb: ddb,
Ddb: dEnv.DoltDB,
Rsw: dEnv.RepoStateWriter(),
Rsr: dEnv.RepoStateReader(),
}
@@ -279,15 +280,6 @@ func CompressSchemas(schs ...schema.Schema) schema.Schema {
return schema.UnkeyedSchemaFromCols(colCol)
}
// Compresses each of the rows given ala compressRow
func CompressRows(sch schema.Schema, rs ...row.Row) []row.Row {
compressed := make([]row.Row, len(rs))
for i := range rs {
compressed[i] = CompressRow(sch, rs[i])
}
return compressed
}
// Rewrites the tag numbers for the row given to begin at zero and be contiguous, just like result set schemas. We don't
// want to just use the field mappings in the result set schema used by sqlselect, since that would only demonstrate
// that the code was consistent with itself, not actually correct.
@@ -360,3 +352,77 @@ func drainIter(ctx *sql.Context, iter sql.RowIter) error {
}
return iter.Close(ctx)
}
// CreateEnvWithSeedData returns a test environment whose working root holds
// a `people` table (with a secondary index on name) seeded with three rows.
func CreateEnvWithSeedData(t *testing.T) *env.DoltEnv {
	const setupSQL = `
CREATE TABLE people (
id varchar(36) primary key,
name varchar(40) not null,
age int unsigned,
is_married int,
title varchar(40),
INDEX idx_name (name)
);
INSERT INTO people VALUES
('00000000-0000-0000-0000-000000000000', 'Bill Billerson', 32, 1, 'Senior Dufus'),
('00000000-0000-0000-0000-000000000001', 'John Johnson', 25, 0, 'Dufus'),
('00000000-0000-0000-0000-000000000002', 'Rob Robertson', 21, 0, '');`

	ctx := context.Background()
	testEnv := dtestutils.CreateTestEnv()

	// Run the seed script against the current working root, then persist
	// the resulting root back onto the environment.
	workingRoot, err := testEnv.WorkingRoot(ctx)
	require.NoError(t, err)
	workingRoot, err = ExecuteSql(t, testEnv, workingRoot, setupSQL)
	require.NoError(t, err)
	require.NoError(t, testEnv.UpdateWorkingRoot(ctx, workingRoot))

	return testEnv
}
// CreateEmptyTestDatabase creates a test database containing the standard
// test tables (people, episodes, appearances) with no rows in any of them.
func CreateEmptyTestDatabase(t *testing.T) *env.DoltEnv {
	dEnv := dtestutils.CreateTestEnv()
	// Each table is created empty, schema only, in a fixed order.
	addTable := func(name string, sch schema.Schema) {
		dtestutils.CreateEmptyTestTable(t, dEnv, name, sch)
	}
	addTable(PeopleTableName, PeopleTestSchema)
	addTable(EpisodesTableName, EpisodesTestSchema)
	addTable(AppearancesTableName, AppearancesTestSchema)
	return dEnv
}
// CreateTestDatabase creates a test database with the test data set in it:
// the standard people/episodes/appearances tables populated with the
// Simpsons fixture rows.
func CreateTestDatabase(t *testing.T) *env.DoltEnv {
	const fixtureRows = `
INSERT INTO people VALUES
(0, "Homer", "Simpson", 1, 40, 8.5, NULL, NULL),
(1, "Marge", "Simpson", 1, 38, 8, "00000000-0000-0000-0000-000000000001", 111),
(2, "Bart", "Simpson", 0, 10, 9, "00000000-0000-0000-0000-000000000002", 222),
(3, "Lisa", "Simpson", 0, 8, 10, "00000000-0000-0000-0000-000000000003", 333),
(4, "Moe", "Szyslak", 0, 48, 6.5, "00000000-0000-0000-0000-000000000004", 444),
(5, "Barney", "Gumble", 0, 40, 4, "00000000-0000-0000-0000-000000000005", 555);
INSERT INTO episodes VALUES
(1, "Simpsons Roasting On an Open Fire", "1989-12-18 03:00:00", 8.0),
(2, "Bart the Genius", "1990-01-15 03:00:00", 9.0),
(3, "Homer's Odyssey", "1990-01-22 03:00:00", 7.0),
(4, "There's No Disgrace Like Home", "1990-01-29 03:00:00", 8.5);
INSERT INTO appearances VALUES
(0, 1, "Homer is great in this one"),
(1, 1, "Marge is here too"),
(0, 2, "Homer is great in this one too"),
(2, 2, "This episode is named after Bart"),
(3, 2, "Lisa is here too"),
(4, 2, "I think there's a prank call scene"),
(0, 3, "Homer is in every episode"),
(1, 3, "Marge shows up a lot too"),
(3, 3, "Lisa is the best Simpson"),
(5, 3, "I'm making this all up");`

	ctx := context.Background()

	// Start from the schema-only database, then load the fixture data and
	// persist the updated working root.
	dEnv := CreateEmptyTestDatabase(t)
	workingRoot, err := dEnv.WorkingRoot(ctx)
	require.NoError(t, err)
	workingRoot, err = ExecuteSql(t, dEnv, workingRoot, fixtureRows)
	require.NoError(t, err)
	require.NoError(t, dEnv.UpdateWorkingRoot(ctx, workingRoot))

	return dEnv
}
@@ -22,7 +22,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/dtestutils"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
@@ -31,6 +30,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/sqlutil"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/writer"
"github.com/dolthub/dolt/go/libraries/doltcore/table/editor"
"github.com/dolthub/dolt/go/store/types"
)
type tableEditorTest struct {
@@ -47,6 +47,10 @@ type tableEditorTest struct {
}
func TestTableEditor(t *testing.T) {
if types.Format_Default != types.Format_LD_1 {
t.Skip()
}
edna := sqle.NewPeopleRow(10, "Edna", "Krabapple", false, 38, 8.0)
krusty := sqle.NewPeopleRow(11, "Krusty", "Klown", false, 48, 9.5)
smithers := sqle.NewPeopleRow(12, "Waylon", "Smithers", false, 44, 7.1)
@@ -157,9 +161,7 @@ func TestTableEditor(t *testing.T) {
t.Run(test.name, func(t *testing.T) {
expectedErr = nil
dEnv := dtestutils.CreateTestEnv()
sqle.CreateTestDatabase(dEnv, t)
dEnv := sqle.CreateTestDatabase(t)
ctx := sqle.NewTestSQLCtx(context.Background())
root, _ := dEnv.WorkingRoot(context.Background())
tmpDir, err := dEnv.TempTableFilesDir()
@@ -194,7 +196,7 @@ func TestTableEditor(t *testing.T) {
require.NoError(t, dEnv.UpdateWorkingRoot(context.Background(), root))
actualRows, err := sqle.ExecuteSelect(t, dEnv, dEnv.DoltDB, root, test.selectQuery)
actualRows, err := sqle.ExecuteSelect(t, dEnv, root, test.selectQuery)
require.NoError(t, err)
assert.Equal(t, test.expectedRows, actualRows)
@@ -52,8 +52,9 @@ var id2, _ = uuid.NewRandom()
var id3, _ = uuid.NewRandom()
func TestIndexEditorConcurrency(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, _, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.Equal(t, format, vrw.Format())
require.NoError(t, err)
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
@@ -147,8 +148,9 @@ func TestIndexEditorConcurrency(t *testing.T) {
}
func TestIndexEditorConcurrencyPostInsert(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, _, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.Equal(t, format, vrw.Format())
require.NoError(t, err)
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
@@ -239,8 +241,9 @@ func TestIndexEditorConcurrencyPostInsert(t *testing.T) {
}
func TestIndexEditorUniqueMultipleNil(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, _, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.Equal(t, format, vrw.Format())
require.NoError(t, err)
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
@@ -284,8 +287,9 @@ func TestIndexEditorUniqueMultipleNil(t *testing.T) {
}
func TestIndexEditorWriteAfterFlush(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, _, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.Equal(t, format, vrw.Format())
require.NoError(t, err)
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
@@ -352,8 +356,9 @@ func TestIndexEditorWriteAfterFlush(t *testing.T) {
}
func TestIndexEditorUniqueErrorDoesntPersist(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, _, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.Equal(t, format, vrw.Format())
require.NoError(t, err)
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
@@ -395,7 +400,8 @@ func TestIndexEditorUniqueErrorDoesntPersist(t *testing.T) {
}
func TestIndexRebuildingWithZeroIndexes(t *testing.T) {
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_Default, nil, nil)
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_LD_1, nil, nil)
require.Equal(t, types.Format_LD_1, vrw.Format())
tSchema := createTestSchema(t)
_, err := tSchema.Indexes().RemoveIndex(testSchemaIndexName)
require.NoError(t, err)
@@ -417,7 +423,8 @@ func TestIndexRebuildingWithZeroIndexes(t *testing.T) {
}
func TestIndexRebuildingWithOneIndex(t *testing.T) {
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_Default, nil, nil)
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_LD_1, nil, nil)
require.Equal(t, types.Format_LD_1, vrw.Format())
tSchema := createTestSchema(t)
_, err := tSchema.Indexes().RemoveIndex(testSchemaIndexAge)
require.NoError(t, err)
@@ -434,7 +441,7 @@ func TestIndexRebuildingWithOneIndex(t *testing.T) {
require.True(t, ok)
indexKey[tag] = val
}
indexExpectedRows[i], err = row.New(types.Format_Default, indexSch, indexKey)
indexExpectedRows[i], err = row.New(types.Format_LD_1, indexSch, indexKey)
require.NoError(t, err)
}
@@ -469,7 +476,8 @@ func TestIndexRebuildingWithOneIndex(t *testing.T) {
}
func TestIndexRebuildingWithTwoIndexes(t *testing.T) {
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_Default, nil, nil)
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_LD_1, nil, nil)
require.Equal(t, types.Format_LD_1, vrw.Format())
tSchema := createTestSchema(t)
indexName := tSchema.Indexes().GetByName(testSchemaIndexName)
@@ -593,7 +601,8 @@ func TestIndexRebuildingWithTwoIndexes(t *testing.T) {
}
func TestIndexRebuildingUniqueSuccessOneCol(t *testing.T) {
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_Default, nil, nil)
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_LD_1, nil, nil)
require.Equal(t, types.Format_LD_1, vrw.Format())
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
@@ -623,7 +632,8 @@ func TestIndexRebuildingUniqueSuccessOneCol(t *testing.T) {
}
func TestIndexRebuildingUniqueSuccessTwoCol(t *testing.T) {
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_Default, nil, nil)
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_LD_1, nil, nil)
require.Equal(t, types.Format_LD_1, vrw.Format())
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
@@ -653,7 +663,8 @@ func TestIndexRebuildingUniqueSuccessTwoCol(t *testing.T) {
}
func TestIndexRebuildingUniqueFailOneCol(t *testing.T) {
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_Default, nil, nil)
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_LD_1, nil, nil)
require.Equal(t, types.Format_LD_1, vrw.Format())
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
@@ -683,7 +694,8 @@ func TestIndexRebuildingUniqueFailOneCol(t *testing.T) {
}
func TestIndexRebuildingUniqueFailTwoCol(t *testing.T) {
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_Default, nil, nil)
_, vrw, ns, _ := dbfactory.MemFactory{}.CreateDB(context.Background(), types.Format_LD_1, nil, nil)
require.Equal(t, types.Format_LD_1, vrw.Format())
colColl := schema.NewColCollection(
schema.NewColumn("pk1", 1, types.IntKind, true, schema.NotNullConstraint{}),
schema.NewColumn("v1", 2, types.IntKind, false),
@@ -717,8 +729,9 @@ func TestIndexRebuildingUniqueFailTwoCol(t *testing.T) {
func TestIndexEditorCapacityExceeded(t *testing.T) {
// In the event that we reach the iea capacity on Undo, we need to verify that all code paths fail and remain failing
ctx := context.Background()
format := types.Format_Default
format := types.Format_LD_1
_, vrw, _, err := dbfactory.MemFactory{}.CreateDB(ctx, format, nil, nil)
require.Equal(t, format, vrw.Format())
require.NoError(t, err)
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
@@ -801,7 +814,7 @@ func createTestRowDataFromTaggedValues(t *testing.T, vrw types.ValueReadWriter,
ed := m.Edit()
for i, val := range vals {
r, err := row.New(types.Format_Default, sch, val)
r, err := row.New(types.Format_LD_1, sch, val)
require.NoError(t, err)
rows[i] = r
ed = ed.Set(r.NomsMapKey(sch), r.NomsMapValue(sch))
@@ -848,7 +861,7 @@ func rowsToIndexRows(t *testing.T, rows []row.Row, indexName schema.Index, index
require.True(t, ok)
indexNameKey[tag] = val
}
indexNameExpectedRows[i], err = row.New(types.Format_Default, indexNameSch, indexNameKey)
indexNameExpectedRows[i], err = row.New(types.Format_LD_1, indexNameSch, indexNameKey)
require.NoError(t, err)
indexAgeKey := make(row.TaggedValues)
@@ -857,7 +870,7 @@ func rowsToIndexRows(t *testing.T, rows []row.Row, indexName schema.Index, index
require.True(t, ok)
indexAgeKey[tag] = val
}
indexAgeExpectedRows[i], err = row.New(types.Format_Default, indexAgeSch, indexAgeKey)
indexAgeExpectedRows[i], err = row.New(types.Format_LD_1, indexAgeSch, indexAgeKey)
require.NoError(t, err)
}
return
@@ -30,7 +30,7 @@ import (
)
func TestKeylessTableEditorConcurrency(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl := schema.NewColCollection(
@@ -138,7 +138,7 @@ func TestKeylessTableEditorConcurrency(t *testing.T) {
}
func TestKeylessTableEditorConcurrencyPostInsert(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl := schema.NewColCollection(
@@ -245,7 +245,7 @@ func TestKeylessTableEditorConcurrencyPostInsert(t *testing.T) {
}
func TestKeylessTableEditorWriteAfterFlush(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl := schema.NewColCollection(
@@ -326,7 +326,7 @@ func TestKeylessTableEditorWriteAfterFlush(t *testing.T) {
}
func TestKeylessTableEditorDuplicateKeyHandling(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl := schema.NewColCollection(
@@ -416,7 +416,7 @@ func TestKeylessTableEditorDuplicateKeyHandling(t *testing.T) {
func TestKeylessTableEditorMultipleIndexErrorHandling(t *testing.T) {
ctx := context.Background()
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(ctx, format, nil, nil)
require.NoError(t, err)
opts := TestEditorOptions(vrw)
@@ -573,7 +573,7 @@ func TestKeylessTableEditorMultipleIndexErrorHandling(t *testing.T) {
func TestKeylessTableEditorIndexCardinality(t *testing.T) {
ctx := context.Background()
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(ctx, format, nil, nil)
require.NoError(t, err)
opts := TestEditorOptions(vrw)
@@ -41,7 +41,7 @@ const (
)
func TestTableEditorConcurrency(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
opts := TestEditorOptions(vrw)
@@ -136,7 +136,7 @@ func TestTableEditorConcurrency(t *testing.T) {
}
func TestTableEditorConcurrencyPostInsert(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
opts := TestEditorOptions(vrw)
@@ -229,7 +229,7 @@ func TestTableEditorConcurrencyPostInsert(t *testing.T) {
}
func TestTableEditorWriteAfterFlush(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
opts := TestEditorOptions(vrw)
@@ -303,7 +303,7 @@ func handleDuplicateKeyError(newKeyString, indexName string, existingKey, existi
}
func TestTableEditorDuplicateKeyHandling(t *testing.T) {
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
opts := TestEditorOptions(vrw)
@@ -382,7 +382,7 @@ func TestTableEditorDuplicateKeyHandling(t *testing.T) {
func TestTableEditorMultipleIndexErrorHandling(t *testing.T) {
ctx := context.Background()
format := types.Format_Default
format := types.Format_LD_1
_, vrw, ns, err := dbfactory.MemFactory{}.CreateDB(ctx, format, nil, nil)
require.NoError(t, err)
opts := TestEditorOptions(vrw)
@@ -25,7 +25,7 @@ import (
"github.com/dolthub/dolt/go/store/types"
)
var emptyTpl = types.EmptyTuple(types.Format_Default)
var emptyTpl = types.EmptyTuple(types.Format_LD_1)
func newTestTEAF() *dbEaFactory {
dir := os.TempDir()
@@ -37,28 +37,28 @@ func newTestTEAF() *dbEaFactory {
}
func newTuple(t *testing.T, vals ...types.Value) types.Tuple {
tpl, err := types.NewTuple(types.Format_Default, vals...)
tpl, err := types.NewTuple(types.Format_LD_1, vals...)
require.NoError(t, err)
return tpl
}
func teaInsert(t *testing.T, tea TableEditAccumulator, key types.Tuple) {
h, err := key.Hash(types.Format_Default)
h, err := key.Hash(types.Format_LD_1)
require.NoError(t, err)
tea.Insert(h, key, emptyTpl)
}
func teaDelete(t *testing.T, tea TableEditAccumulator, key types.Tuple) {
h, err := key.Hash(types.Format_Default)
h, err := key.Hash(types.Format_LD_1)
require.NoError(t, err)
tea.Delete(h, key)
}
func requireGet(ctx context.Context, t *testing.T, tea TableEditAccumulator, key types.Tuple, expected bool) {
h, err := key.Hash(types.Format_Default)
h, err := key.Hash(types.Format_LD_1)
require.NoError(t, err)
_, has, err := tea.Get(ctx, h, key)
require.NoError(t, err)
@@ -73,7 +73,7 @@ func TestIndexEditAccumulatorStableOrder(t *testing.T) {
indexFlushThreshold = 1
ctx := context.Background()
nbf := types.Format_Default
nbf := types.Format_LD_1
teaf := newTestTEAF()
m, err := types.NewMap(ctx, teaf.vrw)
require.NoError(t, err)
@@ -117,7 +117,7 @@ func TestTableEditAccumulatorStableOrder(t *testing.T) {
flushThreshold = 2
ctx := context.Background()
nbf := types.Format_Default
nbf := types.Format_LD_1
teaf := newTestTEAF()
m, err := types.NewMap(ctx, teaf.vrw)
require.NoError(t, err)
@@ -151,7 +151,7 @@ func TestTableEditAccumulatorStableOrder(t *testing.T) {
func TestGet(t *testing.T) {
ctx := context.Background()
nbf := types.Format_Default
nbf := types.Format_LD_1
teaf := newTestTEAF()
m, err := types.NewMap(ctx, teaf.vrw)
require.NoError(t, err)
-20
View File
@@ -19,29 +19,9 @@ import (
"errors"
"io"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/store/types"
)
// GetRow returns a row from |tbl| corresponding to |key| if it exists.
func GetRow(ctx context.Context, tbl *doltdb.Table, sch schema.Schema, key types.Tuple) (r row.Row, ok bool, err error) {
rowMap, err := tbl.GetNomsRowData(ctx)
if err != nil {
return nil, false, err
}
var fields types.Value
fields, ok, err = rowMap.MaybeGet(ctx, key)
if err != nil || !ok {
return nil, ok, err
}
r, err = row.FromNoms(sch, key, fields.(types.Tuple))
return
}
// PipeRows will read a row from given TableReader and write it to the provided RowWriter. It will do this
// for every row until the TableReader's ReadRow method returns io.EOF or encounters an error in either reading
// or writing. The caller will need to handle closing the tables as necessary. If contOnBadRow is true, errors reading
@@ -29,7 +29,6 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/row"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/sqlfmt"
"github.com/dolthub/dolt/go/store/types"
)
type StringBuilderCloser struct {
@@ -90,16 +89,17 @@ func TestEndToEnd(t *testing.T) {
root, err := dEnv.WorkingRoot(ctx)
require.NoError(t, err)
empty, err := types.NewMap(ctx, root.VRW())
empty, err := durable.NewEmptyIndex(ctx, root.VRW(), root.NodeStore(), tt.sch)
require.NoError(t, err)
indexes, err := durable.NewIndexSet(ctx, root.VRW(), root.NodeStore())
require.NoError(t, err)
indexes, err = indexes.PutNomsIndex(ctx, dtestutils.IndexName, empty)
indexes, err = indexes.PutIndex(ctx, dtestutils.IndexName, empty)
require.NoError(t, err)
tbl, err := doltdb.NewNomsTable(ctx, root.VRW(), root.NodeStore(), tt.sch, empty, indexes, nil)
tbl, err := doltdb.NewTable(ctx, root.VRW(), root.NodeStore(), tt.sch, empty, indexes, nil)
require.NoError(t, err)
root, err = root.PutTable(ctx, tableName, tbl)
require.NoError(t, err)
+5 -3
View File
@@ -25,6 +25,8 @@ import (
"syscall"
"time"
"github.com/dolthub/dolt/go/store/types"
"golang.org/x/sync/errgroup"
)
@@ -51,7 +53,7 @@ func BenchmarkDolt(ctx context.Context, config *Config, serverConfig *ServerConf
return nil, err
}
testRepo, err := initDoltRepo(ctx, serverConfig, config.InitBigRepo, config.NomsBinFormat)
testRepo, err := initDoltRepo(ctx, serverConfig, config.NomsBinFormat)
if err != nil {
return nil, err
}
@@ -134,14 +136,14 @@ func DoltVersion(ctx context.Context, serverExec string) error {
}
// initDoltRepo initializes a dolt repo and returns the repo path
func initDoltRepo(ctx context.Context, config *ServerConfig, initBigRepo bool, nbf string) (string, error) {
func initDoltRepo(ctx context.Context, config *ServerConfig, nbf string) (string, error) {
cwd, err := os.Getwd()
if err != nil {
return "", err
}
testRepo := filepath.Join(cwd, dbName)
if initBigRepo {
if nbf == types.Format_LD_1.VersionString() {
err := ExecCommand(ctx, config.ServerExec, "clone", bigEmptyRepo, dbName).Run()
if err != nil {
return "", err
+11 -3
View File
@@ -61,11 +61,19 @@ func (s *nomsDsTestSuite) TestNomsDs() {
cs, err := nbs.NewLocalStore(context.Background(), types.Format_Default.VersionString(), dir, clienttest.DefaultMemTableSize, nbs.NewUnlimitedMemQuotaProvider())
golden1 := "oetp3jigkp5pid2f5c4mknpo17mso31b"
golden2 := "tsbj1qq88llk3k8qqqb5n3188sbpiu7r"
if types.Format_Default == types.Format_DOLT_DEV {
var golden1, golden2 string
switch types.Format_Default {
case types.Format_DOLT:
golden1 = "c7g244286kom2a1326kkgs85pi97cjs7"
golden2 = "rn7dsl1146qr2n4chtg41n24n0jqgnte"
case types.Format_LD_1:
golden1 = "oetp3jigkp5pid2f5c4mknpo17mso31b"
golden2 = "tsbj1qq88llk3k8qqqb5n3188sbpiu7r"
case types.Format_DOLT_DEV:
golden1 = "c7g244286kom2a1326kkgs85pi97cjs7"
golden2 = "rn7dsl1146qr2n4chtg41n24n0jqgnte"
default:
s.Fail("no golden values exist for NBF %s", types.Format_Default.VersionString())
}
s.NoError(err)
+11 -3
View File
@@ -52,11 +52,19 @@ func (s *nomsRootTestSuite) TestBasic() {
dbSpecStr := spec.CreateDatabaseSpecString("nbs", s.DBDir)
db := ds.Database()
goldenHello := "u8g2r4qg97kkqn42lvao77st2mv3bpl0\n"
goldenGoodbye := "70b9adi6amrab3a5t4hcibdob0cq49m0\n"
if types.Format_Default == types.Format_DOLT_DEV {
var goldenHello, goldenGoodbye string
switch types.Format_Default {
case types.Format_DOLT:
goldenHello = "sf173aaa57qjoakme0iufkg4c17beoqe\n"
goldenGoodbye = "gjcehnn4v0sbtt1hste082hfv1kg0hqv\n"
case types.Format_LD_1:
goldenHello = "u8g2r4qg97kkqn42lvao77st2mv3bpl0\n"
goldenGoodbye = "70b9adi6amrab3a5t4hcibdob0cq49m0\n"
case types.Format_DOLT_DEV:
goldenHello = "sf173aaa57qjoakme0iufkg4c17beoqe\n"
goldenGoodbye = "gjcehnn4v0sbtt1hste082hfv1kg0hqv\n"
default:
s.Fail("no golden values exist for NBF %s", types.Format_Default.VersionString())
}
ds, _ = datas.CommitValue(context.Background(), db, ds, types.String("hello!"))
+1 -1
View File
@@ -73,7 +73,7 @@ func (s *nomsShowTestSuite) writeTestData(str string, value types.Value) types.R
}
func (s *nomsShowTestSuite) TestNomsShow() {
if types.Format_Default == types.Format_DOLT_DEV {
if types.Format_Default != types.Format_LD_1 {
s.T().Skip()
}
datasetName := "dsTest"
+1 -1
View File
@@ -50,4 +50,4 @@ const FormatDoltString = "__DOLT__"
// data maps.
const FormatDoltDevString = "__DOLT_DEV__"
var FormatDefaultString = FormatLD1String
var FormatDefaultString = FormatDoltString
+3 -1
View File
@@ -64,7 +64,9 @@ assert_feature_version() {
}
skip_nbf_dolt() {
if [ "$DOLT_DEFAULT_BIN_FORMAT" = "__DOLT__" ]; then
if [ ! "$DOLT_DEFAULT_BIN_FORMAT" = "__LD_1__" ] &&
[ ! "$DOLT_DEFAULT_BIN_FORMAT" = "__DOLT_DEV__" ];
then
skip "skipping test for nomsBinFormat __DOLT__"
fi
}
+2 -1
View File
@@ -257,7 +257,7 @@ teardown() {
# Default format is OLD (__LD_1__) when DOLT_DEFAULT_BIN_FORMAT is undefined
if [ "$DOLT_DEFAULT_BIN_FORMAT" = "" ]
then
orig_bin_format="__LD_1__"
orig_bin_format="__DOLT__"
else
orig_bin_format=$DOLT_DEFAULT_BIN_FORMAT
fi
@@ -281,6 +281,7 @@ teardown() {
[[ $output =~ "test" ]] || false
cd test
dolt version
run dolt version
[ "$status" -eq 0 ]
[[ "$output" =~ "$orig_bin_format" ]] || false
+2 -2
View File
@@ -2,11 +2,11 @@
load $BATS_TEST_DIRNAME/helper/common.bash
setup() {
skip_nbf_dolt
skip_nbf_dolt_dev
TARGET_NBF="__DOLT__"
setup_common
setup_no_dolt_init
dolt init --old-format
}
teardown() {
@@ -130,9 +130,8 @@ cluster:
server_query_with_port "${SERVERONE_MYSQL_PORT}" repo1 1 dolt "" "call dolt_assume_cluster_role('standby', '10');" "status\n0"
# same role, new epoch
server_query_with_port "${SERVERONE_MYSQL_PORT}" repo1 1 dolt "" "call dolt_assume_cluster_role('standby', '12'); select @@GLOBAL.dolt_cluster_role, @@GLOBAL.dolt_cluster_role_epoch;" "status\n0;@@GLOBAL.dolt_cluster_role,@@GLOBAL.dolt_cluster_role_epoch\nstandby,12"
# new role, new epoch
# new role, new epoch (this can drop the connection, so check the results in a new connection)
run server_query_with_port "${SERVERONE_MYSQL_PORT}" repo1 1 dolt "" "call dolt_assume_cluster_role('primary', '13');" "" 1
# we assert on a new connection, since the server may have killed the old one on the transition.
server_query_with_port "${SERVERONE_MYSQL_PORT}" repo1 1 dolt "" "select @@GLOBAL.dolt_cluster_role, @@GLOBAL.dolt_cluster_role_epoch;" "@@GLOBAL.dolt_cluster_role,@@GLOBAL.dolt_cluster_role_epoch\nprimary,13"
# Server comes back up with latest assumed role.