Merge remote-tracking branch 'origin/main' into andy/locking-journal-manifest

Andy Arthur
2023-03-27 09:21:41 -07:00
21 changed files with 1656 additions and 86 deletions

View File

@@ -455,6 +455,7 @@ func getConfigFromServerConfig(serverConfig ServerConfig) (server.Config, error,
serverConf.TLSConfig = tlsConfig
serverConf.RequireSecureTransport = serverConfig.RequireSecureTransport()
serverConf.MaxLoggedQueryLen = serverConfig.MaxLoggedQueryLen()
serverConf.EncodeLoggedQuery = serverConfig.ShouldEncodeLoggedQuery()
return serverConf, nil, nil
}

View File

@@ -60,6 +60,7 @@ const (
defaultAllowCleartextPasswords = false
defaultUnixSocketFilePath = "/tmp/mysql.sock"
defaultMaxLoggedQueryLen = 0
defaultEncodeLoggedQuery = false
)
const (
@@ -129,6 +130,10 @@ type ServerConfig interface {
// If this value is 0 then the query is not truncated and will be written to the logs in its entirety. If the value
// is less than 0 then the queries will be omitted from the logs completely
MaxLoggedQueryLen() int
// ShouldEncodeLoggedQuery determines if logged queries are base64 encoded.
// If true, queries will be logged as base64 encoded strings.
// If false (default behavior), queries will be logged as strings, but newlines and tabs will be replaced with spaces.
ShouldEncodeLoggedQuery() bool
// PersistenceBehavior is "load" if we include persisted system globals on server init
PersistenceBehavior() string
// DisableClientMultiStatements is true if we want the server to not
@@ -186,6 +191,7 @@ type commandLineServerConfig struct {
tlsCert string
requireSecureTransport bool
maxLoggedQueryLen int
shouldEncodeLoggedQuery bool
persistenceBehavior string
privilegeFilePath string
branchControlFilePath string
@@ -279,6 +285,13 @@ func (cfg *commandLineServerConfig) MaxLoggedQueryLen() int {
return cfg.maxLoggedQueryLen
}
// ShouldEncodeLoggedQuery determines if logged queries are base64 encoded.
// If true, queries will be logged as base64 encoded strings.
// If false (default behavior), queries will be logged as strings, but newlines and tabs will be replaced with spaces.
func (cfg *commandLineServerConfig) ShouldEncodeLoggedQuery() bool {
return cfg.shouldEncodeLoggedQuery
}
// DisableClientMultiStatements is true if we want the server to not
// process incoming ComQuery packets as if they had multiple queries in
// them, even if the client advertises support for MULTI_STATEMENTS.
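
The ShouldEncodeLoggedQuery doc comments above describe both log formats. As a reference point, here is a minimal standalone sketch (not part of this PR) of how a log consumer could recover the original query text; treating the encoded form as standard base64 is an assumption based on those comments.

package main

import (
	"encoding/base64"
	"fmt"
)

// decodeLoggedQuery returns the original query text from a logged value.
// When encoded is true the value is assumed to be standard base64, per the
// ShouldEncodeLoggedQuery doc comment above; otherwise it is returned as-is
// (newlines and tabs were already replaced with spaces at logging time).
func decodeLoggedQuery(logged string, encoded bool) (string, error) {
	if !encoded {
		return logged, nil
	}
	b, err := base64.StdEncoding.DecodeString(logged)
	if err != nil {
		return "", fmt.Errorf("decoding logged query: %w", err)
	}
	return string(b), nil
}

func main() {
	q, _ := decodeLoggedQuery("U0VMRUNUIDE7", true)
	fmt.Println(q) // prints: SELECT 1;
}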

View File

@@ -100,7 +100,7 @@ type ListenerYAMLConfig struct {
// AllowCleartextPasswords enables use of cleartext passwords.
AllowCleartextPasswords *bool `yaml:"allow_cleartext_passwords"`
// Socket is unix socket file path
Socket *string `yaml:"socket"`
Socket *string `yaml:"socket,omitempty"`
}
// PerformanceYAMLConfig contains configuration parameters for performance tweaking
@@ -129,23 +129,24 @@ type UserSessionVars struct {
// YAMLConfig is a ServerConfig implementation which is read from a yaml file
type YAMLConfig struct {
LogLevelStr *string `yaml:"log_level"`
MaxQueryLenInLogs *int `yaml:"max_logged_query_len"`
LogLevelStr *string `yaml:"log_level,omitempty"`
MaxQueryLenInLogs *int `yaml:"max_logged_query_len,omitempty"`
EncodeLoggedQuery *bool `yaml:"encode_logged_query,omitempty"`
BehaviorConfig BehaviorYAMLConfig `yaml:"behavior"`
UserConfig UserYAMLConfig `yaml:"user"`
ListenerConfig ListenerYAMLConfig `yaml:"listener"`
DatabaseConfig []DatabaseYAMLConfig `yaml:"databases"`
PerformanceConfig PerformanceYAMLConfig `yaml:"performance"`
DataDirStr *string `yaml:"data_dir"`
CfgDirStr *string `yaml:"cfg_dir"`
DataDirStr *string `yaml:"data_dir,omitempty"`
CfgDirStr *string `yaml:"cfg_dir,omitempty"`
MetricsConfig MetricsYAMLConfig `yaml:"metrics"`
RemotesapiConfig RemotesapiYAMLConfig `yaml:"remotesapi"`
ClusterCfg *ClusterYAMLConfig `yaml:"cluster"`
PrivilegeFile *string `yaml:"privilege_file"`
BranchControlFile *string `yaml:"branch_control_file"`
ClusterCfg *ClusterYAMLConfig `yaml:"cluster,omitempty"`
PrivilegeFile *string `yaml:"privilege_file,omitempty"`
BranchControlFile *string `yaml:"branch_control_file,omitempty"`
Vars []UserSessionVars `yaml:"user_session_vars"`
Jwks []engine.JwksConfig `yaml:"jwks"`
GoldenMysqlConn *string `yaml:"golden_mysql_conn"`
GoldenMysqlConn *string `yaml:"golden_mysql_conn,omitempty"`
}
var _ ServerConfig = YAMLConfig{}
@@ -442,6 +443,14 @@ func (cfg YAMLConfig) MaxLoggedQueryLen() int {
return *cfg.MaxQueryLenInLogs
}
func (cfg YAMLConfig) ShouldEncodeLoggedQuery() bool {
if cfg.EncodeLoggedQuery == nil {
return defaultEncodeLoggedQuery
}
return *cfg.EncodeLoggedQuery
}
// PersistenceBehavior is "load" if we include persisted system globals on server init
func (cfg YAMLConfig) PersistenceBehavior() string {
if cfg.BehaviorConfig.PersistenceBehavior == nil {
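
The `,omitempty` tags added above, together with nil-means-default accessors like ShouldEncodeLoggedQuery, mean an unset option falls back to its default on read and is dropped entirely when the config is marshaled back to YAML. A standalone sketch of that marshaling behavior (not dolt code; gopkg.in/yaml.v3 is assumed here for illustration):

package main

import (
	"fmt"

	"gopkg.in/yaml.v3" // assumed YAML library for this sketch
)

// sketchConfig mirrors the pointer-field pattern used by YAMLConfig above.
type sketchConfig struct {
	LogLevel          *string `yaml:"log_level,omitempty"`
	EncodeLoggedQuery *bool   `yaml:"encode_logged_query,omitempty"`
	DataDir           *string `yaml:"data_dir"` // no omitempty: emitted as null when unset
}

func main() {
	enc := true
	out, err := yaml.Marshal(sketchConfig{EncodeLoggedQuery: &enc})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// Prints:
	//   encode_logged_query: true
	//   data_dir: null
}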

View File

@@ -57,7 +57,7 @@ import (
)
const (
Version = "0.75.6"
Version = "0.75.7"
)
var dumpDocsCommand = &commands.DumpDocsCmd{}

View File

@@ -58,7 +58,7 @@ require (
github.com/cenkalti/backoff/v4 v4.1.3
github.com/cespare/xxhash v1.1.0
github.com/creasty/defaults v1.6.0
github.com/dolthub/go-mysql-server v0.14.1-0.20230321172208-6b3260e96b9c
github.com/dolthub/go-mysql-server v0.14.1-0.20230323180110-e8b040614c18
github.com/google/flatbuffers v2.0.6+incompatible
github.com/jmoiron/sqlx v1.3.4
github.com/kch42/buzhash v0.0.0-20160816060738-9bdec3dec7c6

View File

@@ -166,8 +166,8 @@ github.com/dolthub/flatbuffers v1.13.0-dh.1 h1:OWJdaPep22N52O/0xsUevxJ6Qfw1M2txC
github.com/dolthub/flatbuffers v1.13.0-dh.1/go.mod h1:CorYGaDmXjHz1Z7i50PYXG1Ricn31GcA2wNOTFIQAKE=
github.com/dolthub/fslock v0.0.3 h1:iLMpUIvJKMKm92+N1fmHVdxJP5NdyDK5bK7z7Ba2s2U=
github.com/dolthub/fslock v0.0.3/go.mod h1:QWql+P17oAAMLnL4HGB5tiovtDuAjdDTPbuqx7bYfa0=
github.com/dolthub/go-mysql-server v0.14.1-0.20230321172208-6b3260e96b9c h1:OT8c7iUrWTG3LI2qGJEU+nNzkcDWPGk4bqgvgryDKaE=
github.com/dolthub/go-mysql-server v0.14.1-0.20230321172208-6b3260e96b9c/go.mod h1:Mo0dPxaaVFWQoxLRBH7UXKO2H6yHXq3dRmq4/vvARbI=
github.com/dolthub/go-mysql-server v0.14.1-0.20230323180110-e8b040614c18 h1:ENKqy8+GxNrzuZx0GM7p/9oKsmz8A8JxMaMWpL9YVbM=
github.com/dolthub/go-mysql-server v0.14.1-0.20230323180110-e8b040614c18/go.mod h1:Mo0dPxaaVFWQoxLRBH7UXKO2H6yHXq3dRmq4/vvARbI=
github.com/dolthub/ishell v0.0.0-20221214210346-d7db0b066488 h1:0HHu0GWJH0N6a6keStrHhUAK5/o9LVfkh44pvsV4514=
github.com/dolthub/ishell v0.0.0-20221214210346-d7db0b066488/go.mod h1:ehexgi1mPxRTk0Mok/pADALuHbvATulTh6gzr7NzZto=
github.com/dolthub/jsonpath v0.0.0-20210609232853-d49537a30474 h1:xTrR+l5l+1Lfq0NvhiEsctylXinUMFhhsqaEcl414p8=

View File

@@ -268,6 +268,9 @@ const (
// DiffTableName is the name of the table with a map of commits to tables changed
DiffTableName = "dolt_diff"
// ColumnDiffTableName is the name of the table with a map of commits to tables and columns changed
ColumnDiffTableName = "dolt_column_diff"
// TableOfTablesInConflictName is the conflicts system table name
TableOfTablesInConflictName = "dolt_conflicts"

View File

@@ -178,6 +178,12 @@ func WithArgs(args ...string) SqlServerOpt {
}
}
func WithEnvs(envs ...string) SqlServerOpt {
return func(s *SqlServer) {
s.Cmd.Env = append(s.Cmd.Env, envs...)
}
}
func WithPort(port int) SqlServerOpt {
return func(s *SqlServer) {
s.Port = port
@@ -235,7 +241,7 @@ func (s *SqlServer) ErrorStop() error {
return s.Cmd.Wait()
}
func (s *SqlServer) Restart(newargs *[]string) error {
func (s *SqlServer) Restart(newargs *[]string, newenvs *[]string) error {
err := s.GracefulStop()
if err != nil {
return err
@@ -245,6 +251,9 @@ func (s *SqlServer) Restart(newargs *[]string) error {
args = append([]string{"sql-server"}, (*newargs)...)
}
s.Cmd = s.RecreateCmd(args...)
if newenvs != nil {
s.Cmd.Env = append(s.Cmd.Env, (*newenvs)...)
}
stdout, err := s.Cmd.StdoutPipe()
if err != nil {
return err

View File

@@ -75,6 +75,7 @@ func (c Connection) Password() (string, error) {
// example, to change server config on a restart.
type RestartArgs struct {
Args *[]string `yaml:"args"`
Envs *[]string `yaml:"envs"`
}
// |TestRepo| represents an init'd dolt repository that is available to a
@@ -155,6 +156,7 @@ func (f WithFile) WriteAtDir(dir string) error {
type Server struct {
Name string `yaml:"name"`
Args []string `yaml:"args"`
Envs []string `yaml:"envs"`
// The |Port| which the server will be running on. For now, it is up to
// the |Args| to make sure this is true. Defaults to 3308.

View File

@@ -426,6 +426,16 @@ func (db Database) getTableInsensitive(ctx *sql.Context, head *doltdb.Commit, ds
}
dt, found = dtables.NewUnscopedDiffTable(ctx, db.name, db.ddb, head), true
case doltdb.ColumnDiffTableName:
if head == nil {
var err error
head, err = ds.GetHeadCommit(ctx, db.Name())
if err != nil {
return nil, false, err
}
}
dt, found = dtables.NewColumnDiffTable(ctx, db.name, db.ddb, head), true
case doltdb.TableOfTablesInConflictName:
dt, found = dtables.NewTableOfTablesInConflict(ctx, db.name, db.ddb), true
case doltdb.TableOfTablesWithViolationsName:

View File

@@ -0,0 +1,656 @@
// Copyright 2022 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dtables
import (
"bytes"
"context"
"fmt"
"io"
"time"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/go-mysql-server/sql/plan"
"github.com/dolthub/go-mysql-server/sql/types"
"github.com/dolthub/dolt/go/libraries/doltcore/diff"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/schema"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/index"
"github.com/dolthub/dolt/go/store/datas"
"github.com/dolthub/dolt/go/store/hash"
dtypes "github.com/dolthub/dolt/go/store/types"
)
var _ sql.FilteredTable = (*ColumnDiffTable)(nil)
// ColumnDiffTable is a sql.Table implementation of a system table that shows which tables and columns have
// changed in each commit, across all branches.
type ColumnDiffTable struct {
dbName string
ddb *doltdb.DoltDB
head *doltdb.Commit
partitionFilters []sql.Expression
commitCheck doltdb.CommitFilter
}
// NewColumnDiffTable creates a ColumnDiffTable
func NewColumnDiffTable(_ *sql.Context, dbName string, ddb *doltdb.DoltDB, head *doltdb.Commit) sql.Table {
return &ColumnDiffTable{dbName: dbName, ddb: ddb, head: head}
}
// Name is a sql.Table interface function which returns the name of the table which is defined by the constant
// ColumnDiffTableName
func (dt *ColumnDiffTable) Name() string {
return doltdb.ColumnDiffTableName
}
// String is a sql.Table interface function which returns the name of the table which is defined by the constant
// ColumnDiffTableName
func (dt *ColumnDiffTable) String() string {
return doltdb.ColumnDiffTableName
}
// Schema is a sql.Table interface function that returns the sql.Schema for this system table.
func (dt *ColumnDiffTable) Schema() sql.Schema {
return []*sql.Column{
{Name: "commit_hash", Type: types.Text, Source: doltdb.ColumnDiffTableName, PrimaryKey: true},
{Name: "table_name", Type: types.Text, Source: doltdb.ColumnDiffTableName, PrimaryKey: true},
{Name: "column_name", Type: types.Text, Source: doltdb.ColumnDiffTableName, PrimaryKey: true},
{Name: "committer", Type: types.Text, Source: doltdb.ColumnDiffTableName, PrimaryKey: false},
{Name: "email", Type: types.Text, Source: doltdb.ColumnDiffTableName, PrimaryKey: false},
{Name: "date", Type: types.Datetime, Source: doltdb.ColumnDiffTableName, PrimaryKey: false},
{Name: "message", Type: types.Text, Source: doltdb.ColumnDiffTableName, PrimaryKey: false},
{Name: "diff_type", Type: types.Text, Source: doltdb.ColumnDiffTableName, PrimaryKey: false},
}
}
// Filters returns the list of filters that are applied to this table.
func (dt *ColumnDiffTable) Filters() []sql.Expression {
return dt.partitionFilters
}
// HandledFilters returns the list of filters that will be handled by the table itself
func (dt *ColumnDiffTable) HandledFilters(filters []sql.Expression) []sql.Expression {
filters = append(filters, dt.partitionFilters...)
dt.partitionFilters = FilterFilters(filters, ColumnPredicate(filterColumnNameSet))
return dt.partitionFilters
}
// WithFilters returns a new sql.Table instance with the filters applied
func (dt *ColumnDiffTable) WithFilters(_ *sql.Context, filters []sql.Expression) sql.Table {
dt.partitionFilters = FilterFilters(filters, ColumnPredicate(filterColumnNameSet))
commitCheck, err := commitFilterForDiffTableFilterExprs(dt.partitionFilters)
if err != nil {
return nil
}
dt.commitCheck = commitCheck
return dt
}
// Partitions is a sql.Table interface function that returns a partition of the data. Returns one
// partition for working set changes and one partition for all commit history.
func (dt *ColumnDiffTable) Partitions(ctx *sql.Context) (sql.PartitionIter, error) {
return NewSliceOfPartitionsItr([]sql.Partition{
newDoltDiffPartition(workingSetPartitionKey),
newDoltDiffPartition(commitHistoryPartitionKey),
}), nil
}
// PartitionRows is a sql.Table interface function that gets a row iterator for a partition.
func (dt *ColumnDiffTable) PartitionRows(ctx *sql.Context, partition sql.Partition) (sql.RowIter, error) {
switch p := partition.(type) {
case *doltdb.CommitPart:
return dt.newCommitHistoryRowItrFromCommits(ctx, []*doltdb.Commit{p.Commit()})
default:
if bytes.Equal(partition.Key(), workingSetPartitionKey) {
return dt.newWorkingSetRowItr(ctx)
} else if bytes.Equal(partition.Key(), commitHistoryPartitionKey) {
cms, hasCommitHashEquality := getCommitsFromCommitHashEquality(ctx, dt.ddb, dt.partitionFilters)
if hasCommitHashEquality {
return dt.newCommitHistoryRowItrFromCommits(ctx, cms)
}
iter := doltdb.CommitItrForRoots(dt.ddb, dt.head)
if dt.commitCheck != nil {
iter = doltdb.NewFilteringCommitItr(iter, dt.commitCheck)
}
return dt.newCommitHistoryRowItrFromItr(ctx, iter)
} else {
return nil, fmt.Errorf("unexpected partition: %v", partition)
}
}
}
// GetIndexes implements sql.IndexAddressable
func (dt *ColumnDiffTable) GetIndexes(ctx *sql.Context) ([]sql.Index, error) {
return index.DoltCommitIndexes(dt.Name(), dt.ddb, true)
}
// IndexedAccess implements sql.IndexAddressable
func (dt *ColumnDiffTable) IndexedAccess(lookup sql.IndexLookup) sql.IndexedTable {
nt := *dt
return &nt
}
// Collation implements the sql.Table interface.
func (dt *ColumnDiffTable) Collation() sql.CollationID {
return sql.Collation_Default
}
func (dt *ColumnDiffTable) LookupPartitions(ctx *sql.Context, lookup sql.IndexLookup) (sql.PartitionIter, error) {
if lookup.Index.ID() == index.CommitHashIndexId {
hs, ok := index.LookupToPointSelectStr(lookup)
if !ok {
return nil, fmt.Errorf("failed to parse commit lookup ranges: %s", sql.DebugString(lookup.Ranges))
}
hashes, commits, metas := index.HashesToCommits(ctx, dt.ddb, hs, dt.head, false)
if len(hashes) == 0 {
return sql.PartitionsToPartitionIter(), nil
}
headHash, err := dt.head.HashOf()
if err != nil {
return nil, err
}
var partitions []sql.Partition
for i, h := range hashes {
if h == headHash && commits[i] == nil {
partitions = append(partitions, newDoltDiffPartition(workingSetPartitionKey))
} else {
partitions = append(partitions, doltdb.NewCommitPart(h, commits[i], metas[i]))
}
}
return sql.PartitionsToPartitionIter(partitions...), nil
}
return dt.Partitions(ctx)
}
type doltColDiffWorkingSetRowItr struct {
ddb *doltdb.DoltDB
stagedIndex int
unstagedIndex int
colIndex int
changeSet string
stagedTableDeltas []diff.TableDelta
unstagedTableDeltas []diff.TableDelta
currentTableDelta *diff.TableDelta
tableName string
colNames []string
diffTypes []string
}
func (dt *ColumnDiffTable) newWorkingSetRowItr(ctx *sql.Context) (sql.RowIter, error) {
sess := dsess.DSessFromSess(ctx.Session)
roots, ok := sess.GetRoots(ctx, dt.dbName)
if !ok {
return nil, fmt.Errorf("unable to lookup roots for database %s", dt.dbName)
}
staged, unstaged, err := diff.GetStagedUnstagedTableDeltas(ctx, roots)
if err != nil {
return nil, err
}
var ri sql.RowIter
ri = &doltColDiffWorkingSetRowItr{
ddb: dt.ddb,
stagedTableDeltas: staged,
unstagedTableDeltas: unstaged,
}
for _, filter := range dt.partitionFilters {
ri = plan.NewFilterIter(filter, ri)
}
return ri, nil
}
// incrementColIndex increments the column index. When the end of the column names array is
// reached, it resets the column index and advances to the next table delta (staged or working set).
func (d *doltColDiffWorkingSetRowItr) incrementColIndex() {
d.colIndex++
// move to next table once all modified columns are iterated through
if d.colIndex >= len(d.colNames) {
d.colIndex = 0
d.currentTableDelta = nil
if d.changeSet == "STAGED" {
d.stagedIndex++
} else {
d.unstagedIndex++
}
}
}
func (d *doltColDiffWorkingSetRowItr) Next(ctx *sql.Context) (sql.Row, error) {
defer d.incrementColIndex()
// only need to load new changes when we're finished iterating through the previous tableDelta
for d.currentTableDelta == nil {
if d.stagedIndex < len(d.stagedTableDeltas) {
d.changeSet = "STAGED"
d.currentTableDelta = &d.stagedTableDeltas[d.stagedIndex]
} else if d.unstagedIndex < len(d.unstagedTableDeltas) {
d.changeSet = "WORKING"
d.currentTableDelta = &d.unstagedTableDeltas[d.unstagedIndex]
} else {
return nil, io.EOF
}
change, err := processTableColDelta(ctx, d.ddb, *d.currentTableDelta)
if err != nil {
return nil, err
}
// ignore changes with no modified columns
if len(change.colNames) != 0 {
d.colNames = change.colNames
d.diffTypes = change.diffTypes
d.tableName = change.tableName
} else {
if d.changeSet == "STAGED" {
d.stagedIndex++
} else {
d.unstagedIndex++
}
d.currentTableDelta = nil
}
}
sqlRow := sql.NewRow(
d.changeSet,
d.tableName,
d.colNames[d.colIndex],
nil, // committer
nil, // email
nil, // date
nil, // message
d.diffTypes[d.colIndex],
)
return sqlRow, nil
}
func (d *doltColDiffWorkingSetRowItr) Close(c *sql.Context) error {
return nil
}
// doltColDiffCommitHistoryRowItr is a sql.RowIter implementation which iterates over each commit as if it's a row in the table.
type doltColDiffCommitHistoryRowItr struct {
ctx *sql.Context
ddb *doltdb.DoltDB
child doltdb.CommitItr
commits []*doltdb.Commit
meta *datas.CommitMeta
hash hash.Hash
tableChanges []tableColChange
tableChangesIdx int
colIdx int
}
// newCommitHistoryRowItrFromItr creates a doltColDiffCommitHistoryRowItr from a CommitItr.
func (dt *ColumnDiffTable) newCommitHistoryRowItrFromItr(ctx *sql.Context, iter doltdb.CommitItr) (*doltColDiffCommitHistoryRowItr, error) {
dchItr := &doltColDiffCommitHistoryRowItr{
ctx: ctx,
ddb: dt.ddb,
tableChangesIdx: -1,
child: iter,
}
return dchItr, nil
}
// newCommitHistoryRowItrFromCommits creates a doltColDiffCommitHistoryRowItr from a list of commits.
func (dt *ColumnDiffTable) newCommitHistoryRowItrFromCommits(ctx *sql.Context, commits []*doltdb.Commit) (*doltColDiffCommitHistoryRowItr, error) {
dchItr := &doltColDiffCommitHistoryRowItr{
ctx: ctx,
ddb: dt.ddb,
tableChangesIdx: -1,
commits: commits,
}
return dchItr, nil
}
// incrementIndexes increments the column index and table changes index. When the end of the column names array is
// reached, moves to the next table. When the end of the table changes array is reached, moves to the next commit,
// and resets the table changes index so that it can be populated when Next() is called.
func (itr *doltColDiffCommitHistoryRowItr) incrementIndexes(tableChange tableColChange) {
itr.colIdx++
if itr.colIdx >= len(tableChange.colNames) {
itr.tableChangesIdx++
itr.colIdx = 0
if itr.tableChangesIdx >= len(itr.tableChanges) {
itr.tableChangesIdx = -1
itr.tableChanges = nil
}
}
}
// Next retrieves the next row. It will return io.EOF if it's the last row.
// After retrieving the last row, Close will be automatically called.
func (itr *doltColDiffCommitHistoryRowItr) Next(ctx *sql.Context) (sql.Row, error) {
for itr.tableChanges == nil {
if itr.commits != nil {
for _, commit := range itr.commits {
err := itr.loadTableChanges(ctx, commit)
if err != nil {
return nil, err
}
}
itr.commits = nil
} else if itr.child != nil {
_, commit, err := itr.child.Next(ctx)
if err != nil {
return nil, err
}
err = itr.loadTableChanges(ctx, commit)
if err != nil {
return nil, err
}
} else {
return nil, io.EOF
}
}
tableChange := itr.tableChanges[itr.tableChangesIdx]
defer itr.incrementIndexes(tableChange)
meta := itr.meta
h := itr.hash
col := tableChange.colNames[itr.colIdx]
diffType := tableChange.diffTypes[itr.colIdx]
return sql.NewRow(
h.String(),
tableChange.tableName,
col,
meta.Name,
meta.Email,
meta.Time(),
meta.Description,
diffType,
), nil
}
// loadTableChanges loads the current commit's table changes and metadata into the iterator.
func (itr *doltColDiffCommitHistoryRowItr) loadTableChanges(ctx context.Context, commit *doltdb.Commit) error {
tableChanges, err := itr.calculateTableChanges(ctx, commit)
if err != nil {
return err
}
itr.tableChanges = tableChanges
itr.tableChangesIdx = 0
if len(tableChanges) == 0 {
return nil
}
meta, err := commit.GetCommitMeta(ctx)
if err != nil {
return err
}
itr.meta = meta
cmHash, err := commit.HashOf()
if err != nil {
return err
}
itr.hash = cmHash
return nil
}
// calculateTableChanges calculates the tables that changed in the specified commit, by comparing that
// commit with its immediate ancestor commit.
func (itr *doltColDiffCommitHistoryRowItr) calculateTableChanges(ctx context.Context, commit *doltdb.Commit) ([]tableColChange, error) {
if len(commit.DatasParents()) == 0 {
return nil, nil
}
toRootValue, err := commit.GetRootValue(ctx)
if err != nil {
return nil, err
}
parent, err := itr.ddb.ResolveParent(ctx, commit, 0)
if err != nil {
return nil, err
}
fromRootValue, err := parent.GetRootValue(ctx)
if err != nil {
return nil, err
}
deltas, err := diff.GetTableDeltas(ctx, fromRootValue, toRootValue)
if err != nil {
return nil, err
}
tableChanges := make([]tableColChange, 0)
for i := 0; i < len(deltas); i++ {
change, err := processTableColDelta(itr.ctx, itr.ddb, deltas[i])
if err != nil {
return nil, err
}
// only add changes that have modified columns
if len(change.colNames) != 0 {
tableChanges = append(tableChanges, *change)
}
}
// Not all commits mutate tables (e.g. empty commits)
if len(tableChanges) == 0 {
return nil, nil
}
return tableChanges, nil
}
// Close closes the iterator.
func (itr *doltColDiffCommitHistoryRowItr) Close(*sql.Context) error {
return nil
}
// tableColChange is an internal data structure used to hold the results of processing
// a diff.TableDelta structure into the output data for this system table.
type tableColChange struct {
tableName string
colNames []string
diffTypes []string
}
// processTableColDelta processes the specified TableDelta to determine what kind of change it was (i.e. table drop,
// table rename, table create, or data update) and returns a tableColChange struct representing the change.
func processTableColDelta(ctx *sql.Context, ddb *doltdb.DoltDB, delta diff.TableDelta) (*tableColChange, error) {
// Dropping a table is always a schema change, and also a data change if the table contained data
if delta.IsDrop() {
diffTypes := make([]string, delta.FromSch.GetAllCols().Size())
for i := range diffTypes {
diffTypes[i] = diffTypeRemoved
}
return &tableColChange{
tableName: delta.FromName,
colNames: delta.FromSch.GetAllCols().GetColumnNames(),
diffTypes: diffTypes,
}, nil
}
// Creating a table is always a schema change, and also a data change if data was inserted
if delta.IsAdd() {
diffTypes := make([]string, delta.ToSch.GetAllCols().Size())
for i := range diffTypes {
diffTypes[i] = diffTypeAdded
}
return &tableColChange{
tableName: delta.ToName,
colNames: delta.ToSch.GetAllCols().GetColumnNames(),
diffTypes: diffTypes,
}, nil
}
// NOTE: Renaming a table does not affect columns necessarily, if table data was changed it will be checked below
// calculate which columns have been modified
colSchDiff := calculateColSchemaDiff(delta.ToSch.GetAllCols(), delta.FromSch.GetAllCols())
colNames, diffTypes, err := calculateColDelta(ctx, ddb, &delta, colSchDiff)
if err != nil {
return nil, err
}
return &tableColChange{
tableName: delta.ToName,
colNames: colNames,
diffTypes: diffTypes,
}, nil
}
// calculateColDelta iterates through the rows of the given table delta and compares each cell in the to_ and from_
// columns to compile a list of modified columns
func calculateColDelta(ctx *sql.Context, ddb *doltdb.DoltDB, delta *diff.TableDelta, colSchDiff *colSchemaDiff) ([]string, []string, error) {
// initialize row iterator
diffTableSchema, j, err := GetDiffTableSchemaAndJoiner(delta.ToTable.Format(), delta.FromSch, delta.ToSch)
if err != nil {
return nil, nil, err
}
diffTableCols := diffTableSchema.GetAllCols()
now := time.Now() // accurate commit time returned elsewhere
dp := NewDiffPartition(delta.ToTable, delta.FromTable, delta.ToName, delta.FromName, (*dtypes.Timestamp)(&now), (*dtypes.Timestamp)(&now), delta.ToSch, delta.FromSch)
ri := NewDiffPartitionRowIter(*dp, ddb, j)
var resultColNames []string
var resultDiffTypes []string
// add all added/dropped columns to result
for _, col := range colSchDiff.addedCols {
resultColNames = append(resultColNames, col)
resultDiffTypes = append(resultDiffTypes, diffTypeAdded)
}
for _, col := range colSchDiff.droppedCols {
resultColNames = append(resultColNames, col)
resultDiffTypes = append(resultDiffTypes, diffTypeRemoved)
}
colNamesSet := make(map[string]struct{})
// check each row for diffs in modified columns
for {
r, err := ri.Next(ctx)
if err == io.EOF {
for col := range colNamesSet {
// append modified columns to result
resultColNames = append(resultColNames, col)
resultDiffTypes = append(resultDiffTypes, diffTypeModified)
}
return resultColNames, resultDiffTypes, nil
} else if err != nil {
return nil, nil, err
}
// only need to check modified columns
for _, col := range colSchDiff.modifiedCols {
toColTag := diffTableCols.NameToCol["to_"+col].Tag
fromColTag := diffTableCols.NameToCol["from_"+col].Tag
toIdx := diffTableCols.TagToIdx[toColTag]
fromIdx := diffTableCols.TagToIdx[fromColTag]
if r[toIdx] != r[fromIdx] {
colNamesSet[col] = struct{}{}
}
}
// can stop checking rows when we already have all modified columns in the result set
if len(colNamesSet) == len(colSchDiff.modifiedCols) {
for col := range colNamesSet {
// append modified columns to result
resultColNames = append(resultColNames, col)
resultDiffTypes = append(resultDiffTypes, diffTypeModified)
}
return resultColNames, resultDiffTypes, nil
}
}
}
// colSchemaDiff is a collection of column names that holds the results of a schema diff between the to/from schemas,
// i.e. a list of column names for each type of change, the total list of column names, and a corresponding list of
// diff_types for each column
type colSchemaDiff struct {
modifiedCols []string
addedCols []string
droppedCols []string
allCols []string
diffTypes []string
}
// calculateColSchemaDiff calculates which columns were modified, added, or dropped between to and from schemas and
// returns a colSchemaDiff to hold the results of the diff
func calculateColSchemaDiff(toCols *schema.ColCollection, fromCols *schema.ColCollection) *colSchemaDiff {
// put to/from columns into a set
toColTags := make(map[uint64]struct{})
fromColTags := make(map[uint64]struct{})
if toCols != nil {
for _, tag := range toCols.Tags {
toColTags[tag] = struct{}{}
}
}
if fromCols != nil {
for _, tag := range fromCols.Tags {
fromColTags[tag] = struct{}{}
}
}
var modifiedCols []string
var addedCols []string
var droppedCols []string
var allCols []string
var diffTypes []string
if toCols != nil {
for _, tag := range toCols.Tags {
if _, ok := fromColTags[tag]; ok {
// if the tag is also in fromColumnTags, this column was modified
modifiedCols = append(modifiedCols, toCols.TagToCol[tag].Name)
allCols = append(allCols, toCols.TagToCol[tag].Name)
diffTypes = append(diffTypes, diffTypeModified)
delete(fromColTags, tag)
} else {
// else if it isn't in fromColumnTags, this column was added
addedCols = append(addedCols, toCols.TagToCol[tag].Name)
allCols = append(allCols, toCols.TagToCol[tag].Name)
diffTypes = append(diffTypes, diffTypeAdded)
}
}
}
if fromCols != nil {
for tag := range fromColTags {
// all remaining tags are columns not in toColumnTags, i.e. dropped columns
droppedCols = append(droppedCols, fromCols.TagToCol[tag].Name)
allCols = append(allCols, fromCols.TagToCol[tag].Name)
diffTypes = append(diffTypes, diffTypeRemoved)
}
}
return &colSchemaDiff{
modifiedCols: modifiedCols,
addedCols: addedCols,
droppedCols: droppedCols,
allCols: allCols,
diffTypes: diffTypes,
}
}
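
Given the Schema() defined above, here is a hedged usage sketch (not part of this PR) of reading the new dolt_column_diff system table over a standard MySQL connection; the DSN, database name, and driver choice are illustrative.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed client driver for this sketch
)

func main() {
	// Connect to a running dolt sql-server; the DSN here is a placeholder.
	db, err := sql.Open("mysql", "root@tcp(127.0.0.1:3306)/mydb")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// commit_hash is a real hash for commit-history rows, or the strings
	// "STAGED"/"WORKING" for working-set rows, per the partitions above.
	rows, err := db.Query(
		"SELECT commit_hash, table_name, column_name, diff_type FROM dolt_column_diff")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var hash, table, col, diffType string
		if err := rows.Scan(&hash, &table, &col, &diffType); err != nil {
			log.Fatal(err)
		}
		fmt.Println(hash, table, col, diffType)
	}
	if err := rows.Err(); err != nil {
		log.Fatal(err)
	}
}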

View File

@@ -944,7 +944,7 @@ func CalculateDiffSchema(fromSch, toSch schema.Schema) (schema.Schema, error) {
j := toSch.GetAllCols().Size()
err = fromSch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
fromCol, err := schema.NewColumnWithTypeInfo(diff.FromColNamer(col.Name), uint64(i), col.TypeInfo, false, col.Default, false, col.Comment)
fromCol, err := schema.NewColumnWithTypeInfo(diff.FromColNamer(col.Name), uint64(j), col.TypeInfo, false, col.Default, false, col.Comment)
if err != nil {
return true, err
}

View File

@@ -81,7 +81,7 @@ func (dt *UnscopedDiffTable) WithFilters(_ *sql.Context, filters []sql.Expressio
}
// Name is a sql.Table interface function which returns the name of the table which is defined by the constant
// LogTableName
// DiffTableName
func (dt *UnscopedDiffTable) Name() string {
return doltdb.DiffTableName
}

View File

@@ -957,8 +957,10 @@ func TestDescribeTableAsOf(t *testing.T) {
enginetest.TestScript(t, newDoltHarness(t), DescribeTableAsOfScriptTest)
}
func TestShowCreateTableAsOf(t *testing.T) {
enginetest.TestScript(t, newDoltHarness(t), ShowCreateTableAsOfScriptTest)
func TestShowCreateTable(t *testing.T) {
for _, script := range ShowCreateTableScriptTests {
enginetest.TestScript(t, newDoltHarness(t), script)
}
}
func TestViewsWithAsOf(t *testing.T) {
@@ -1240,6 +1242,28 @@ func TestUnscopedDiffSystemTablePrepared(t *testing.T) {
}
}
func TestColumnDiffSystemTable(t *testing.T) {
if !types.IsFormat_DOLT(types.Format_Default) {
t.Skip("correct behavior of dolt_column_diff only guaranteed on new format")
}
for _, test := range ColumnDiffSystemTableScriptTests {
t.Run(test.Name, func(t *testing.T) {
enginetest.TestScriptPrepared(t, newDoltHarness(t), test)
})
}
}
func TestColumnDiffSystemTablePrepared(t *testing.T) {
if !types.IsFormat_DOLT(types.Format_Default) {
t.Skip("correct behavior of dolt_column_diff only guaranteed on new format")
}
for _, test := range ColumnDiffSystemTableScriptTests {
t.Run(test.Name, func(t *testing.T) {
enginetest.TestScriptPrepared(t, newDoltHarness(t), test)
})
}
}
func TestDiffTableFunction(t *testing.T) {
harness := newDoltHarness(t)
harness.Setup(setup.MydbData)

View File

@@ -90,73 +90,142 @@ var ViewsWithAsOfScriptTest = queries.ScriptTest{
},
}
var ShowCreateTableAsOfScriptTest = queries.ScriptTest{
Name: "Show create table as of",
SetUpScript: []string{
"set @Commit0 = '';",
"set @Commit1 = '';",
"set @Commit2 = '';",
"set @Commit3 = '';",
"set @Commit0 = hashof('main');",
"create table a (pk int primary key, c1 int);",
"call dolt_add('.');",
"call dolt_commit_hash_out(@Commit1, '-am', 'creating table a');",
"alter table a add column c2 varchar(20);",
"call dolt_commit_hash_out(@Commit2, '-am', 'adding column c2');",
"alter table a drop column c1;",
"alter table a add constraint unique_c2 unique(c2);",
"call dolt_commit_hash_out(@Commit3, '-am', 'dropping column c1');",
var ShowCreateTableScriptTests = []queries.ScriptTest{
{
Name: "Show create table as of",
SetUpScript: []string{
"set @Commit0 = '';",
"set @Commit1 = '';",
"set @Commit2 = '';",
"set @Commit3 = '';",
"set @Commit0 = hashof('main');",
"create table a (pk int primary key, c1 int);",
"call dolt_add('.');",
"call dolt_commit_hash_out(@Commit1, '-am', 'creating table a');",
"alter table a add column c2 varchar(20);",
"call dolt_commit_hash_out(@Commit2, '-am', 'adding column c2');",
"alter table a drop column c1;",
"alter table a add constraint unique_c2 unique(c2);",
"call dolt_commit_hash_out(@Commit3, '-am', 'dropping column c1');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "show create table a as of @Commit0;",
ExpectedErr: sql.ErrTableNotFound,
},
{
Query: "show create table a as of @Commit1;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c1` int,\n" +
" PRIMARY KEY (`pk`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
},
},
{
Query: "show create table a as of @Commit2;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c1` int,\n" +
" `c2` varchar(20),\n" +
" PRIMARY KEY (`pk`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
},
},
{
Query: "show create table a as of @Commit3;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c2` varchar(20),\n" +
" PRIMARY KEY (`pk`),\n" +
" UNIQUE KEY `unique_c2` (`c2`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
},
},
{
Query: "show create table a as of HEAD;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c2` varchar(20),\n" +
" PRIMARY KEY (`pk`),\n" +
" UNIQUE KEY `unique_c2` (`c2`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
},
},
},
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "show create table a as of @Commit0;",
ExpectedErr: sql.ErrTableNotFound,
{
// "https://github.com/dolthub/dolt/issues/5478"
Name: "show table for default types with unique indexes",
SetUpScript: []string{
`create table tbl (a int primary key,
b int not null default 42,
c int not null default (24),
d int not null default '-108',
e int not null default ((((7+11)))),
f int default (now()))`,
`call dolt_commit('-Am', 'new table');`,
`create index tbl_bc on tbl (b,c);`,
`create unique index tbl_cbd on tbl (c,b,d);`,
`create unique index tbl_c on tbl (c);`,
`create unique index tbl_e on tbl (e);`,
},
{
Query: "show create table a as of @Commit1;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c1` int,\n" +
" PRIMARY KEY (`pk`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "show create table tbl",
Expected: []sql.Row{sql.Row{"tbl", "CREATE TABLE `tbl` (\n" +
" `a` int NOT NULL,\n" +
" `b` int NOT NULL DEFAULT '42',\n" + //
" `c` int NOT NULL DEFAULT (24),\n" + // Ensure these match setup above.
" `d` int NOT NULL DEFAULT '-108',\n" + //
" `e` int NOT NULL DEFAULT ((7 + 11)),\n" + // Matches MySQL behavior.
" `f` int DEFAULT (NOW()),\n" + // MySql preserves now as lower case.
" PRIMARY KEY (`a`),\n" +
" KEY `tbl_bc` (`b`,`c`),\n" +
" UNIQUE KEY `tbl_c` (`c`),\n" +
" UNIQUE KEY `tbl_cbd` (`c`,`b`,`d`),\n" +
" UNIQUE KEY `tbl_e` (`e`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin"}},
},
},
{
Query: "show create table a as of @Commit2;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c1` int,\n" +
" `c2` varchar(20),\n" +
" PRIMARY KEY (`pk`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
},
},
{
// "https://github.com/dolthub/dolt/issues/5478"
Name: "show table for default types with unique indexes no PK",
SetUpScript: []string{
`create table tbl (a int not null default (now()),
b int not null default 42,
c int not null default (24),
d int not null default '-108',
e int not null default ((((7+11)))));`,
`call dolt_commit('-Am', 'new table');`,
`create index tbl_bc on tbl (b,c);`,
`create unique index tbl_cab on tbl (c,a,b);`,
`create unique index tbl_c on tbl (c);`,
`create unique index tbl_e on tbl (e);`,
},
{
Query: "show create table a as of @Commit3;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c2` varchar(20),\n" +
" PRIMARY KEY (`pk`),\n" +
" UNIQUE KEY `unique_c2` (`c2`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
},
},
{
Query: "show create table a as of HEAD;",
Expected: []sql.Row{
{"a", "CREATE TABLE `a` (\n" +
" `pk` int NOT NULL,\n" +
" `c2` varchar(20),\n" +
" PRIMARY KEY (`pk`),\n" +
" UNIQUE KEY `unique_c2` (`c2`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "show create table tbl",
Expected: []sql.Row{sql.Row{"tbl", "CREATE TABLE `tbl` (\n" +
" `a` int NOT NULL DEFAULT (NOW()),\n" + // MySql preserves now as lower case.
" `b` int NOT NULL DEFAULT '42',\n" + //
" `c` int NOT NULL DEFAULT (24),\n" + // Ensure these match setup above.
" `d` int NOT NULL DEFAULT '-108',\n" + //
" `e` int NOT NULL DEFAULT ((7 + 11)),\n" + // Matches MySQL behavior.
" KEY `tbl_bc` (`b`,`c`),\n" +
" UNIQUE KEY `tbl_c` (`c`),\n" +
" UNIQUE KEY `tbl_cab` (`c`,`a`,`b`),\n" +
" UNIQUE KEY `tbl_e` (`e`)\n" +
") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_bin"}},
},
},
},

View File

@@ -3701,6 +3701,740 @@ var UnscopedDiffSystemTableScriptTests = []queries.ScriptTest{
},
}
var ColumnDiffSystemTableScriptTests = []queries.ScriptTest{
{
Name: "table changes - commit history",
SetUpScript: []string{
"create table modifiedTable (a int primary key, b int);",
"insert into modifiedTable values (1, 2), (2, 3);",
"create table droppedTable (a int primary key, b int);",
"insert into droppedTable values (1, 2), (2, 3);",
"create table renamedTable (a int primary key, b int);",
"call dolt_add('.')",
"call dolt_commit('-am', 'creating tables');",
"update modifiedTable set b = 5 where a = 1;",
"drop table droppedTable;",
"rename table renamedTable to newRenamedTable;",
"create table addedTable (a int primary key, b int);",
"call dolt_add('.')",
"call dolt_commit('-am', 'make table changes');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'modifiedTable';",
Expected: []sql.Row{
{"modifiedTable", "a", "added"},
{"modifiedTable", "b", "added"},
{"modifiedTable", "b", "modified"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'droppedTable';",
Expected: []sql.Row{
{"droppedTable", "a", "added"},
{"droppedTable", "b", "added"},
{"droppedTable", "a", "removed"},
{"droppedTable", "b", "removed"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'renamedTable' OR table_name = 'newRenamedTable';",
Expected: []sql.Row{
{"renamedTable", "a", "added"},
{"renamedTable", "b", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'addedTable';",
Expected: []sql.Row{
{"addedTable", "a", "added"},
{"addedTable", "b", "added"},
},
},
},
},
{
Name: "table changes - working set",
SetUpScript: []string{
"create table modifiedTable (a int primary key, b int);",
"insert into modifiedTable values (1, 2), (2, 3);",
"create table droppedTable (a int primary key, b int);",
"insert into droppedTable values (1, 2), (2, 3);",
"create table renamedTable (a int primary key, b int);",
"call dolt_add('.')",
"update modifiedTable set b = 5 where a = 1;",
"drop table droppedTable;",
"rename table renamedTable to newRenamedTable;",
"create table addedTable (a int primary key, b int);",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT commit_hash, table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'modifiedTable' ORDER BY commit_hash, table_name, column_name;",
Expected: []sql.Row{
{"STAGED", "modifiedTable", "a", "added"},
{"STAGED", "modifiedTable", "b", "added"},
{"WORKING", "modifiedTable", "b", "modified"},
},
},
{
Query: "SELECT commit_hash, table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'droppedTable' ORDER BY commit_hash, table_name, column_name;",
Expected: []sql.Row{
{"STAGED", "droppedTable", "a", "added"},
{"STAGED", "droppedTable", "b", "added"},
{"WORKING", "droppedTable", "a", "removed"},
{"WORKING", "droppedTable", "b", "removed"},
},
},
{
Query: "SELECT commit_hash, table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'renamedTable' OR table_name = 'newRenamedTable' ORDER BY commit_hash, table_name, column_name;",
Expected: []sql.Row{
{"STAGED", "renamedTable", "a", "added"},
{"STAGED", "renamedTable", "b", "added"},
},
},
{
Query: "SELECT commit_hash, table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE table_name = 'addedTable' ORDER BY commit_hash, table_name, column_name;",
Expected: []sql.Row{
{"WORKING", "addedTable", "a", "added"},
{"WORKING", "addedTable", "b", "added"},
},
},
},
},
{
Name: "add column - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t values (1, 2), (3, 4);",
"set @Commit1 = '';",
"call dolt_commit_hash_out(@Commit1, '-am', 'creating table t');",
"alter table t add column d int;",
"set @Commit2 = '';",
"call dolt_add('.')",
"call dolt_commit_hash_out(@Commit2, '-m', 'updating d in t');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "select count(*) from dolt_column_diff where commit_hash = @Commit1;",
Expected: []sql.Row{{2}},
},
{
Query: "select table_name, column_name, diff_type from dolt_column_diff where commit_hash = @Commit2;",
Expected: []sql.Row{{"t", "d", "added"}},
},
},
},
{
Name: "add column - working set",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"insert into t values (1, 2), (3, 4);",
"call dolt_add('.')",
"alter table t add column d int;",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "select count(*) from dolt_column_diff where commit_hash = 'STAGED';",
Expected: []sql.Row{{2}},
},
{
Query: "select table_name, column_name, diff_type from dolt_column_diff where commit_hash = 'WORKING';",
Expected: []sql.Row{{"t", "d", "added"}},
},
},
},
{
Name: "modify column - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t values (1, 2), (3, 4);",
"set @Commit1 = '';",
"call dolt_commit_hash_out(@Commit1, '-am', 'creating table t');",
"update t set c = 5 where pk = 3;",
"call dolt_add('.')",
"set @Commit2 = '';",
"call dolt_commit_hash_out(@Commit2, '-am', 'updating value in t');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "select count(*) from dolt_column_diff where commit_hash = @Commit1;",
Expected: []sql.Row{{2}},
},
{
Query: "select table_name, column_name, diff_type from dolt_column_diff where commit_hash = @Commit2;",
Expected: []sql.Row{{"t", "c", "modified"}},
},
},
},
{
Name: "modify column - working set",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"insert into t values (1, 2), (3, 4);",
"call dolt_add('.')",
"update t set c = 5 where pk = 3;",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "select count(*) from dolt_column_diff where commit_hash = 'STAGED';",
Expected: []sql.Row{{2}},
},
{
Query: "select table_name, column_name, diff_type from dolt_column_diff where commit_hash = 'WORKING';",
Expected: []sql.Row{{"t", "c", "modified"}},
},
},
},
{
Name: "drop column - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t values (1, 2), (3, 4);",
"set @Commit1 = '';",
"call dolt_commit_hash_out(@Commit1, '-am', 'creating table t');",
"alter table t drop column c;",
"call dolt_add('.')",
"set @Commit2 = '';",
"call dolt_commit_hash_out(@Commit2, '-am', 'dropping column c in t');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "select count(*) from dolt_column_diff where commit_hash = @Commit1;",
Expected: []sql.Row{{2}},
},
{
Query: "select table_name, column_name, diff_type from dolt_column_diff where commit_hash = @Commit2;",
Expected: []sql.Row{{"t", "c", "removed"}},
},
},
},
{
Name: "drop column - working set",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"insert into t values (1, 2), (3, 4);",
"call dolt_add('.')",
"alter table t drop column c;",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "select count(*) from dolt_column_diff where commit_hash = 'STAGED';",
Expected: []sql.Row{{2}},
},
{
Query: "select table_name, column_name, diff_type from dolt_column_diff where commit_hash = 'WORKING';",
Expected: []sql.Row{{"t", "c", "removed"}},
},
},
},
{
Name: "drop column and recreate with same type - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t values (1, 2), (3, 4);",
"set @Commit1 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit1, '-am', 'creating table t');",
"alter table t drop column c;",
"set @Commit2 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit2, '-am', 'dropping column c');",
"alter table t add column c int;",
"insert into t values (100, 101);",
"set @Commit3 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit3, '-am', 'inserting into t');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{5}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit1;",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit2;",
Expected: []sql.Row{
{"t", "c", "removed"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit3",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c", "added"},
},
},
},
},
{
Name: "drop column and recreate with same type - working set",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"insert into t values (1, 2), (3, 4);",
"call dolt_add('.')",
"alter table t drop column c;",
"alter table t add column c int;",
"insert into t values (100, 101);",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{4}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='STAGED';",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='WORKING';",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c", "modified"},
},
},
},
},
{
Name: "drop column, then rename column with same type to same name - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c1 int, c2 int);",
"call dolt_add('.')",
"insert into t values (1, 2, 3), (4, 5, 6);",
"set @Commit1 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit1, '-am', 'creating table t');",
"alter table t drop column c1;",
"set @Commit2 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit2, '-am', 'dropping column c1');",
"alter table t rename column c2 to c1;",
"insert into t values (100, 101);",
"set @Commit3 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit3, '-am', 'inserting into t');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{6}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit1;",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c1", "added"},
{"t", "c2", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit2;",
Expected: []sql.Row{
{"t", "c1", "removed"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit3;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c1", "modified"},
},
},
},
},
{
Name: "drop column, then rename column with same type to same name - working set",
SetUpScript: []string{
"create table t (pk int primary key, c1 int, c2 int);",
"insert into t values (1, 2, 3), (4, 5, 6);",
"call dolt_add('.')",
"alter table t drop column c1;",
"alter table t rename column c2 to c1;",
"insert into t values (100, 101);",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{6}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='STAGED';",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c1", "added"},
{"t", "c2", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='WORKING';",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c1", "removed"},
{"t", "c1", "modified"},
},
},
},
},
{
Name: "column drop and recreate with different type that can be coerced (int -> string) - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"call dolt_add('.')",
"insert into t values (1, 2), (3, 4);",
"set @Commit1 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit1, '-am', 'creating table t');",
"alter table t drop column c;",
"set @Commit2 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit2, '-am', 'dropping column c');",
"alter table t add column c varchar(20);",
"insert into t values (100, '101');",
"set @Commit3 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit3, '-am', 're-adding column c');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{5}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit1;",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit2;",
Expected: []sql.Row{
{"t", "c", "removed"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit3;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c", "added"},
},
},
},
},
{
Name: "column drop and recreate with different type that can be coerced (int -> string) - working set",
SetUpScript: []string{
"create table t (pk int primary key, c int);",
"insert into t values (1, 2), (3, 4);",
"call dolt_add('.')",
"alter table t drop column c;",
"alter table t add column c varchar(20);",
"insert into t values (100, '101');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{5}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='STAGED';",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='WORKING';",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c", "removed"},
{"t", "c", "added"},
},
},
},
},
{
Name: "column drop and recreate with different type that can NOT be coerced (string -> int) - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c varchar(20));",
"call dolt_add('.')",
"insert into t values (1, 'two'), (3, 'four');",
"set @Commit1 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit1, '-am', 'creating table t');",
"alter table t drop column c;",
"set @Commit2 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit2, '-am', 'dropping column c');",
"alter table t add column c int;",
"insert into t values (100, 101);",
"set @Commit3 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit3, '-am', 're-adding column c');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{5}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit1;",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit2;",
Expected: []sql.Row{
{"t", "c", "removed"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit3;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c", "added"},
},
},
},
},
{
Name: "column drop and recreate with different type that can NOT be coerced (string -> int) - working set",
SetUpScript: []string{
"create table t (pk int primary key, c varchar(20));",
"insert into t values (1, 'two'), (3, 'four');",
"call dolt_add('.')",
"alter table t drop column c;",
"alter table t add column c int;",
"insert into t values (100, 101);",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{5}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='STAGED';",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='WORKING';",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c", "removed"},
{"t", "c", "added"},
},
},
},
},
{
Name: "multiple column renames - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c1 int);",
"call dolt_add('.')",
"insert into t values (1, 2);",
"set @Commit1 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit1, '-am', 'creating table t');",
"alter table t rename column c1 to c2;",
"insert into t values (3, 4);",
"set @Commit2 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit2, '-am', 'renaming c1 to c2');",
"alter table t drop column c2;",
"set @Commit3 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit3, '-am', 'dropping column c2');",
"alter table t add column c2 int;",
"insert into t values (100, '101');",
"set @Commit4 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit4, '-am', 'recreating column c2');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{7}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit1;",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c1", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit2;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c2", "modified"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit3;",
Expected: []sql.Row{
{"t", "c2", "removed"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit4;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c2", "added"},
},
},
},
},
{
Name: "multiple column renames - working set",
SetUpScript: []string{
"create table t (pk int primary key, c1 int);",
"insert into t values (1, 2);",
"call dolt_add('.')",
"alter table t rename column c1 to c2;",
"insert into t values (3, 4);",
"alter table t drop column c2;",
"alter table t add column c2 int;",
"insert into t values (100, '101');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{5}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='STAGED';",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c1", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='WORKING';",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c1", "removed"},
{"t", "c2", "added"},
},
},
},
},
{
Name: "primary key change - commit history",
SetUpScript: []string{
"create table t (pk int primary key, c1 int);",
"call dolt_add('.')",
"insert into t values (1, 2), (3, 4);",
"set @Commit1 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit1, '-am', 'creating table t');",
"alter table t drop primary key;",
"insert into t values (5, 6);",
"set @Commit2 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit2, '-am', 'dropping primary key');",
"alter table t add primary key (c1);",
"set @Commit3 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit3, '-am', 'adding primary key');",
"insert into t values (7, 8);",
"set @Commit4 = '';",
"CALL DOLT_COMMIT_HASH_OUT(@Commit4, '-am', 'adding more data');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{8}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit1;",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c1", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit2;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c1", "modified"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit3;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c1", "modified"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash=@Commit4;",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c1", "modified"},
},
},
},
},
{
Name: "primary key change - working set",
SetUpScript: []string{
"create table t (pk int primary key, c1 int);",
"insert into t values (1, 2), (3, 4);",
"call dolt_add('.')",
"alter table t drop primary key;",
"alter table t add primary key (c1);",
"insert into t values (7, 8);",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "SELECT COUNT(*) FROM DOLT_COLUMN_DIFF;",
Expected: []sql.Row{{4}},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='STAGED';",
Expected: []sql.Row{
{"t", "pk", "added"},
{"t", "c1", "added"},
},
},
{
Query: "SELECT table_name, column_name, diff_type FROM DOLT_COLUMN_DIFF WHERE commit_hash='WORKING';",
Expected: []sql.Row{
{"t", "pk", "modified"},
{"t", "c1", "modified"},
},
},
},
},
}
var CommitDiffSystemTableScriptTests = []queries.ScriptTest{
{
Name: "error handling",

View File

@@ -289,6 +289,9 @@ func (j *chunkJournal) Update(ctx context.Context, lastLock addr, next manifestC
func (j *chunkJournal) UpdateGCGen(ctx context.Context, lastLock addr, next manifestContents, stats *Stats, writeHook func() error) (manifestContents, error) {
if j.backing.readOnly() {
return j.contents, errReadOnlyManifest
} else if j.wr == nil {
// pass the update to |j.backing| if the journal is not initialized
return j.backing.UpdateGCGen(ctx, lastLock, next, stats, writeHook)
} else if j.contents.lock != lastLock {
return j.contents, nil // |next| is stale
}

View File

@@ -320,6 +320,19 @@ SQL
[[ "$output" =~ "WORKING,testWorking,,,,,false,true" ]] || false
}
@test "system-tables: query dolt_column_diff system table" {
dolt sql -q "CREATE TABLE testStaged (pk INT, c1 INT, PRIMARY KEY(pk))"
dolt add testStaged
dolt sql -q "CREATE TABLE testWorking (pk INT, c1 INT, PRIMARY KEY(pk))"
run dolt sql -r csv -q 'select * from dolt_column_diff'
[ "$status" -eq 0 ]
[[ "$output" =~ "STAGED,testStaged,pk,,,,,added" ]] || false
[[ "$output" =~ "STAGED,testStaged,c1,,,,,added" ]] || false
[[ "$output" =~ "WORKING,testWorking,pk,,,,,added" ]] || false
[[ "$output" =~ "WORKING,testWorking,c1,,,,,added" ]] || false
}
@test "system-tables: query dolt_diff_ system table" {
dolt sql -q "CREATE TABLE test (pk INT, c1 INT, PRIMARY KEY(pk))"
dolt add test

View File

@@ -223,5 +223,11 @@ func (gct gcTest) run(t *testing.T) {
require.NoError(t, eg.Wait())
// Recreate the connection pool here, since idle connections in the
// connection pool may be stale.
db.Close()
db, err = server.DB(driver.Connection{User: "root"})
require.NoError(t, err)
gct.finalize(t, context.Background(), db)
}

View File

@@ -37,10 +37,10 @@ type TestDef struct {
// any Servers defined within them will be started. The interactions and
// assertions defined in Conns will be run.
type Test struct {
Name string `yaml:"name"`
Repos []driver.TestRepo `yaml:"repos"`
MultiRepos []driver.MultiRepo `yaml:"multi_repos"`
Conns []driver.Connection `yaml:"connections"`
Name string `yaml:"name"`
Repos []driver.TestRepo `yaml:"repos"`
MultiRepos []driver.MultiRepo `yaml:"multi_repos"`
Conns []driver.Connection `yaml:"connections"`
// Skip the entire test with this reason.
Skip string `yaml:"skip"`
@@ -74,7 +74,7 @@ func MakeServer(t *testing.T, dc driver.DoltCmdable, s *driver.Server) *driver.S
if s == nil {
return nil
}
opts := []driver.SqlServerOpt{driver.WithArgs(s.Args...)}
opts := []driver.SqlServerOpt{driver.WithArgs(s.Args...), driver.WithEnvs(s.Envs...)}
if s.Port != 0 {
opts = append(opts, driver.WithPort(s.Port))
}
@@ -184,7 +184,7 @@ func (test Test) Run(t *testing.T) {
}()
}
if c.RestartServer != nil {
err := server.Restart(c.RestartServer.Args)
err := server.Restart(c.RestartServer.Args, c.RestartServer.Envs)
require.NoError(t, err)
}
}

View File

@@ -330,3 +330,21 @@ tests:
queries:
- exec: "INSERT INTO t1 VALUES (1, 1),(2, 2)"
error_match: "table not found"
- name: dolt_gc succeeds as first write on existing database without a journal after chunk journal is enabled
multi_repos:
- name: server1
server:
envs: ["DOLT_DISABLE_CHUNK_JOURNAL=true"]
connections:
- on: server1
queries:
- exec: "CREATE DATABASE mydb"
- exec: "USE mydb"
- exec: "CREATE TABLE vals (id int primary key, val int)"
- exec: "INSERT INTO vals VALUES (1, 1),(2, 2)"
restart_server:
envs: []
- on: server1
queries:
- exec: "USE mydb"
- exec: "CALL dolt_gc()"