mirror of https://github.com/dolthub/dolt.git (synced 2026-01-26 10:37:04 -06:00)
/go/{store,performance,libraries,cmd}: fix go tests after branch rename
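The commit is a mechanical sweep over the Go tests: every setup gains the new trailing initial-branch argument on InitRepo (an empty string means "use the configured default"), and every hard-coded "master" commit spec, checkout argument, or commit message becomes "main". A minimal sketch of the resulting setup shape, built only from calls that appear in this diff (the package name, import paths, and helper placement are assumptions, and createUninitializedEnv is a test helper from these files, not a public API):

    package commands

    import (
    	"context"
    	"testing"

    	"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
    	"github.com/dolthub/dolt/go/store/types"
    	"github.com/stretchr/testify/require"
    )

    // Sketch of the post-rename setup pattern used across these tests.
    func initAndResolveMain(t *testing.T) *doltdb.Commit {
    	dEnv := createUninitializedEnv()

    	// InitRepo now takes a trailing branch name; "" falls back to the configured default.
    	err := dEnv.InitRepo(context.Background(), types.Format_Default, "Bill Billerson", "bigbillieb@fake.horse", "")
    	require.NoError(t, err)

    	// Heads are resolved via "main" rather than "master".
    	cs, err := doltdb.NewCommitSpec("main")
    	require.NoError(t, err)

    	commit, err := dEnv.DoltDB.Resolve(context.Background(), cs, nil)
    	require.NoError(t, err)
    	return commit
    }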
@@ -25,13 +25,13 @@ import (
 func TestLog(t *testing.T) {
     dEnv := createUninitializedEnv()
-    err := dEnv.InitRepo(context.Background(), types.Format_7_18, "Bill Billerson", "bigbillieb@fake.horse")
+    err := dEnv.InitRepo(context.Background(), types.Format_7_18, "Bill Billerson", "bigbillieb@fake.horse", "")
 
     if err != nil {
         t.Error("Failed to init repo")
     }
 
-    cs, _ := doltdb.NewCommitSpec("master")
+    cs, _ := doltdb.NewCommitSpec("main")
     commit, _ := dEnv.DoltDB.Resolve(context.Background(), cs, nil)
 
     cli.Println(commit)
 
@@ -264,13 +264,15 @@ func TestServerSetDefaultBranch(t *testing.T) {
     require.NoError(t, err)
     sess := conn.NewSession(nil)
 
+    defaultBranch := "main"
+
     tests := []struct {
         query       *dbr.SelectStmt
         expectedRes []testBranch
     }{
         {
             query:       sess.Select("active_branch() as branch"),
-            expectedRes: []testBranch{{"master"}},
+            expectedRes: []testBranch{{defaultBranch}},
         },
         {
             query: sess.SelectBySql("set GLOBAL dolt_default_branch = 'refs/heads/new'"),
@@ -278,14 +280,14 @@ func TestServerSetDefaultBranch(t *testing.T) {
         },
         {
             query:       sess.Select("active_branch() as branch"),
-            expectedRes: []testBranch{{"master"}},
+            expectedRes: []testBranch{{defaultBranch}},
         },
         {
             query:       sess.Select("dolt_checkout('-b', 'new')"),
             expectedRes: []testBranch{{""}},
         },
         {
-            query:       sess.Select("dolt_checkout('master')"),
+            query:       sess.Select("dolt_checkout('main')"),
             expectedRes: []testBranch{{""}},
         },
     }
 
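For context, the queries above exercise the default-branch behavior end to end: active_branch() reports the branch a SQL session landed on, and changing the dolt_default_branch global only affects sessions opened afterwards. A minimal sketch of that check, reusing the conn connection and testBranch struct from the test above (the helper function itself is illustrative, and it assumes a running dolt sql-server):

    // Illustrative helper, not part of this diff.
    func assertActiveBranch(t *testing.T, conn *dbr.Connection, want string) {
    	sess := conn.NewSession(nil)

    	var got []testBranch
    	// active_branch() reports the branch this session is pinned to.
    	_, err := sess.Select("active_branch() as branch").Load(&got)
    	require.NoError(t, err)
    	require.Equal(t, []testBranch{{want}}, got)
    }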
@@ -32,9 +32,9 @@ import (
 func TestDocDiff(t *testing.T) {
     ctx := context.Background()
     ddb, _ := doltdb.LoadDoltDB(ctx, types.Format_Default, doltdb.InMemDoltDB, filesys2.LocalFS)
-    ddb.WriteEmptyRepo(ctx, "master", "billy bob", "bigbillieb@fake.horse")
+    ddb.WriteEmptyRepo(ctx, "main", "billy bob", "bigbillieb@fake.horse")
 
-    cs, _ := doltdb.NewCommitSpec("master")
+    cs, _ := doltdb.NewCommitSpec("main")
     cm, _ := ddb.Resolve(ctx, cs, nil)
 
     root, err := cm.GetRootValue()
 
@@ -71,7 +71,7 @@ var gcTests = []gcTest{
             return h
         },
         commands: []testCommand{
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
             {commands.BranchCmd{}, []string{"-D", "temp"}},
             {commands.SqlCmd{}, []string{"-q", "INSERT INTO test VALUES (4),(5),(6);"}},
         },
 
@@ -32,7 +32,7 @@ import (
 func TestAddNewerTextAndValueFromTable(t *testing.T) {
     ctx := context.Background()
     ddb, _ := doltdb.LoadDoltDB(ctx, types.Format_Default, doltdb.InMemDoltDB, filesys2.LocalFS)
-    ddb.WriteEmptyRepo(ctx, "master", "billy bob", "bigbillieb@fake.horse")
+    ddb.WriteEmptyRepo(ctx, "main", "billy bob", "bigbillieb@fake.horse")
 
     // If no tbl/schema is provided, doc Text and Value should be nil.
     doc1 := Doc{DocPk: LicenseDoc}
@@ -86,7 +86,7 @@ func TestAddNewerTextAndValueFromTable(t *testing.T) {
 func TestAddNewerTextAndDocPkFromRow(t *testing.T) {
     ctx := context.Background()
     ddb, _ := doltdb.LoadDoltDB(ctx, types.Format_Default, doltdb.InMemDoltDB, filesys2.LocalFS)
-    ddb.WriteEmptyRepo(ctx, "master", "billy bob", "bigbillieb@fake.horse")
+    ddb.WriteEmptyRepo(ctx, "main", "billy bob", "bigbillieb@fake.horse")
 
     sch := createTestDocsSchema()
 
@@ -51,7 +51,7 @@ func CreateTestEnv() *env.DoltEnv {
         env.UserNameKey:  name,
         env.UserEmailKey: email,
     })
-    err := dEnv.InitRepo(context.Background(), types.Format_Default, name, email)
+    err := dEnv.InitRepo(context.Background(), types.Format_Default, name, email, "")
 
     if err != nil {
         panic("Failed to initialize environment:" + err.Error())
 
go/libraries/doltcore/env/actions/branch.go (vendored, 2 changes)
@@ -28,7 +28,7 @@ import (
 
 var ErrAlreadyExists = errors.New("already exists")
 var ErrCOBranchDelete = errors.New("attempted to delete checked out branch")
-var ErrUnmergedBranchDelete = errors.New("attempted to delete a branch that is not fully merged into master; use `-f` to force")
+var ErrUnmergedBranchDelete = errors.New("attempted to delete a branch that is not fully merged into main; use `-f` to force")
 
 func RenameBranch(ctx context.Context, dEnv *env.DoltEnv, oldBranch, newBranch string, force bool) error {
     oldRef := ref.NewBranchRef(oldBranch)
 
@@ -156,7 +156,7 @@ func newQueue() *q {
 // concurrent commits --- higher commits appear first. Remaining
 // ties are broken by timestamp; newer commits appear first.
 //
-// Roughly mimics `git log master..feature`.
+// Roughly mimics `git log main..feature`.
 func GetDotDotRevisions(ctx context.Context, includedDB *doltdb.DoltDB, includedHead hash.Hash, excludedDB *doltdb.DoltDB, excludedHead hash.Hash, num int) ([]*doltdb.Commit, error) {
     commitList := make([]*doltdb.Commit, 0, num)
     q := newQueue()
 
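The doc comment above is the clearest statement of what GetDotDotRevisions does, so here is a minimal usage sketch built only from the signature shown and the calls in the test below. The wrapping function and the printing are illustrative; context and fmt imports are elided, and the sketch assumes it lives next to GetDotDotRevisions:

    // Sketch: commits reachable from the feature head but not from the main head,
    // capped at 100 results. Roughly `git log main..feature`.
    func logDotDot(ctx context.Context, ddb *doltdb.DoltDB, mainCm, featureCm *doltdb.Commit) error {
    	featureHash, err := featureCm.HashOf()
    	if err != nil {
    		return err
    	}
    	mainHash, err := mainCm.HashOf()
    	if err != nil {
    		return err
    	}
    	commits, err := GetDotDotRevisions(ctx, ddb, featureHash, ddb, mainHash, 100)
    	if err != nil {
    		return err
    	}
    	for _, cm := range commits {
    		h, _ := cm.HashOf()
    		fmt.Println(h.String())
    	}
    	return nil
    }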
@@ -48,10 +48,10 @@ func createUninitializedEnv() *env.DoltEnv {
 
 func TestGetDotDotRevisions(t *testing.T) {
     env := createUninitializedEnv()
-    err := env.InitRepo(context.Background(), types.Format_LD_1, "Bill Billerson", "bill@billerson.com")
+    err := env.InitRepo(context.Background(), types.Format_LD_1, "Bill Billerson", "bill@billerson.com", "")
     require.NoError(t, err)
 
-    cs, err := doltdb.NewCommitSpec("master")
+    cs, err := doltdb.NewCommitSpec("main")
     require.NoError(t, err)
     commit, err := env.DoltDB.Resolve(context.Background(), cs, nil)
     require.NoError(t, err)
@@ -61,52 +61,52 @@ func TestGetDotDotRevisions(t *testing.T) {
     rvh, err := env.DoltDB.WriteRootValue(context.Background(), rv)
     require.NoError(t, err)
 
-    // Create 5 commits on master.
-    masterCommits := make([]*doltdb.Commit, 6)
-    masterCommits[0] = commit
+    // Create 5 commits on main.
+    mainCommits := make([]*doltdb.Commit, 6)
+    mainCommits[0] = commit
     for i := 1; i < 6; i++ {
-        masterCommits[i] = mustCreateCommit(t, env.DoltDB, "master", rvh, masterCommits[i-1])
+        mainCommits[i] = mustCreateCommit(t, env.DoltDB, "main", rvh, mainCommits[i-1])
     }
 
     // Create a feature branch.
     bref := ref.NewBranchRef("feature")
-    err = env.DoltDB.NewBranchAtCommit(context.Background(), bref, masterCommits[5])
+    err = env.DoltDB.NewBranchAtCommit(context.Background(), bref, mainCommits[5])
     require.NoError(t, err)
 
     // Create 3 commits on feature branch.
     featureCommits := []*doltdb.Commit{}
-    featureCommits = append(featureCommits, masterCommits[5])
+    featureCommits = append(featureCommits, mainCommits[5])
     for i := 1; i < 4; i++ {
         featureCommits = append(featureCommits, mustCreateCommit(t, env.DoltDB, "feature", rvh, featureCommits[i-1]))
     }
 
-    // Create 1 commit on master.
-    masterCommits = append(masterCommits, mustCreateCommit(t, env.DoltDB, "master", rvh, masterCommits[5]))
+    // Create 1 commit on main.
+    mainCommits = append(mainCommits, mustCreateCommit(t, env.DoltDB, "main", rvh, mainCommits[5]))
 
-    // Merge master to feature branch.
-    featureCommits = append(featureCommits, mustCreateCommit(t, env.DoltDB, "feature", rvh, featureCommits[3], masterCommits[6]))
+    // Merge main to feature branch.
+    featureCommits = append(featureCommits, mustCreateCommit(t, env.DoltDB, "feature", rvh, featureCommits[3], mainCommits[6]))
 
     // Create 3 commits on feature branch.
     for i := 5; i < 8; i++ {
         featureCommits = append(featureCommits, mustCreateCommit(t, env.DoltDB, "feature", rvh, featureCommits[i-1]))
     }
 
-    // Create 3 commits on master.
+    // Create 3 commits on main.
     for i := 7; i < 10; i++ {
-        masterCommits = append(masterCommits, mustCreateCommit(t, env.DoltDB, "master", rvh, masterCommits[i-1]))
+        mainCommits = append(mainCommits, mustCreateCommit(t, env.DoltDB, "main", rvh, mainCommits[i-1]))
     }
 
     // Branches look like this:
     //
     // feature:  *--*--*--*--*--*--*
     //          /        /
-    // master: --*--*--*--*--*--------*--*--*--*
+    // main: --*--*--*--*--*--------*--*--*--*
 
     featureHash := mustGetHash(t, featureCommits[7])
-    masterHash := mustGetHash(t, masterCommits[6])
+    mainHash := mustGetHash(t, mainCommits[6])
     featurePreMergeHash := mustGetHash(t, featureCommits[3])
 
-    res, err := GetDotDotRevisions(context.Background(), env.DoltDB, featureHash, env.DoltDB, masterHash, 100)
+    res, err := GetDotDotRevisions(context.Background(), env.DoltDB, featureHash, env.DoltDB, mainHash, 100)
     require.NoError(t, err)
     assert.Len(t, res, 7)
     assertEqualHashes(t, featureCommits[7], res[0])
@@ -117,25 +117,25 @@ func TestGetDotDotRevisions(t *testing.T) {
     assertEqualHashes(t, featureCommits[2], res[5])
     assertEqualHashes(t, featureCommits[1], res[6])
 
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, masterHash, env.DoltDB, featureHash, 100)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, mainHash, env.DoltDB, featureHash, 100)
     require.NoError(t, err)
     assert.Len(t, res, 0)
 
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featureHash, env.DoltDB, masterHash, 3)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featureHash, env.DoltDB, mainHash, 3)
     require.NoError(t, err)
     assert.Len(t, res, 3)
     assertEqualHashes(t, featureCommits[7], res[0])
     assertEqualHashes(t, featureCommits[6], res[1])
     assertEqualHashes(t, featureCommits[5], res[2])
 
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featurePreMergeHash, env.DoltDB, masterHash, 3)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featurePreMergeHash, env.DoltDB, mainHash, 3)
     require.NoError(t, err)
     assert.Len(t, res, 3)
     assertEqualHashes(t, featureCommits[3], res[0])
     assertEqualHashes(t, featureCommits[2], res[1])
     assertEqualHashes(t, featureCommits[1], res[2])
 
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featurePreMergeHash, env.DoltDB, masterHash, 3)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featurePreMergeHash, env.DoltDB, mainHash, 3)
     require.NoError(t, err)
     assert.Len(t, res, 3)
     assertEqualHashes(t, featureCommits[3], res[0])
@@ -151,12 +151,12 @@ func TestGetDotDotRevisions(t *testing.T) {
     }
 
     featureHash = mustGetHash(t, featureCommits[7])
-    masterHash = mustGetHash(t, masterCommits[6])
+    mainHash = mustGetHash(t, mainCommits[6])
     featurePreMergeHash = mustGetHash(t, featureCommits[3])
 
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featureHash, env.DoltDB, masterHash, 100)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featureHash, env.DoltDB, mainHash, 100)
     require.Error(t, err)
-    res, err = GetDotDotRevisions(context.Background(), forkEnv.DoltDB, featureHash, env.DoltDB, masterHash, 100)
+    res, err = GetDotDotRevisions(context.Background(), forkEnv.DoltDB, featureHash, env.DoltDB, mainHash, 100)
     require.NoError(t, err)
     assert.Len(t, res, 7)
     assertEqualHashes(t, featureCommits[7], res[0])
@@ -167,27 +167,27 @@ func TestGetDotDotRevisions(t *testing.T) {
     assertEqualHashes(t, featureCommits[2], res[5])
     assertEqualHashes(t, featureCommits[1], res[6])
 
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, masterHash, env.DoltDB, featureHash, 100)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, mainHash, env.DoltDB, featureHash, 100)
     require.Error(t, err)
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, masterHash, forkEnv.DoltDB, featureHash, 100)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, mainHash, forkEnv.DoltDB, featureHash, 100)
     require.NoError(t, err)
     assert.Len(t, res, 0)
 
-    res, err = GetDotDotRevisions(context.Background(), forkEnv.DoltDB, featureHash, env.DoltDB, masterHash, 3)
+    res, err = GetDotDotRevisions(context.Background(), forkEnv.DoltDB, featureHash, env.DoltDB, mainHash, 3)
     require.NoError(t, err)
     assert.Len(t, res, 3)
     assertEqualHashes(t, featureCommits[7], res[0])
     assertEqualHashes(t, featureCommits[6], res[1])
     assertEqualHashes(t, featureCommits[5], res[2])
 
-    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featurePreMergeHash, env.DoltDB, masterHash, 3)
+    res, err = GetDotDotRevisions(context.Background(), env.DoltDB, featurePreMergeHash, env.DoltDB, mainHash, 3)
     require.NoError(t, err)
     assert.Len(t, res, 3)
     assertEqualHashes(t, featureCommits[3], res[0])
     assertEqualHashes(t, featureCommits[2], res[1])
     assertEqualHashes(t, featureCommits[1], res[2])
 
-    res, err = GetDotDotRevisions(context.Background(), forkEnv.DoltDB, featurePreMergeHash, env.DoltDB, masterHash, 3)
+    res, err = GetDotDotRevisions(context.Background(), forkEnv.DoltDB, featurePreMergeHash, env.DoltDB, mainHash, 3)
     require.NoError(t, err)
     assert.Len(t, res, 3)
     assertEqualHashes(t, featureCommits[3], res[0])
 
@@ -218,7 +218,7 @@ func mustForkDB(t *testing.T, fromDB *doltdb.DoltDB, bn string, cm *doltdb.Commi
     stref, err := cm.GetStRef()
     require.NoError(t, err)
     forkEnv := createUninitializedEnv()
-    err = forkEnv.InitRepo(context.Background(), types.Format_LD_1, "Bill Billerson", "bill@billerson.com")
+    err = forkEnv.InitRepo(context.Background(), types.Format_LD_1, "Bill Billerson", "bill@billerson.com", "")
     require.NoError(t, err)
     p1 := make(chan datas.PullProgress)
     p2 := make(chan datas.PullerEvent)
 
@@ -23,7 +23,7 @@ import (
     "github.com/dolthub/dolt/go/libraries/doltcore/ref"
 )
 
-var ErrUnmergedWorkspaceDelete = errors.New("attempted to delete a workspace that is not fully merged into master; use `-f` to force")
+var ErrUnmergedWorkspaceDelete = errors.New("attempted to delete a workspace that is not fully merged into main; use `-f` to force")
 var ErrCOWorkspaceDelete = errors.New("attempted to delete checked out workspace")
 var ErrBranchNameExists = errors.New("workspace name must not be existing branch name")
 
@@ -110,7 +110,7 @@ func DeleteWorkspaceOnDB(ctx context.Context, dEnv *env.DoltEnv, dref ref.DoltRe
         return err
     }
 
-    master, err := ddb.Resolve(ctx, ms, nil)
+    m, err := ddb.Resolve(ctx, ms, nil)
     if err != nil {
         return err
     }
@@ -125,7 +125,7 @@ func DeleteWorkspaceOnDB(ctx context.Context, dEnv *env.DoltEnv, dref ref.DoltRe
         return err
     }
 
-    isMerged, _ := master.CanFastReverseTo(ctx, cm)
+    isMerged, _ := m.CanFastReverseTo(ctx, cm)
     if err != nil && err != doltdb.ErrUpToDate {
         return err
     }
 
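The rename of the local variable above also makes the merged-ness check easier to read: the workspace head cm is treated as merged if the resolved default-branch head can fast reverse to it. A sketch of that check in isolation, using only the calls shown in this hunk (the wrapping function name and placement are illustrative):

    // Sketch, not the diff's code verbatim.
    func workspaceIsMerged(ctx context.Context, ddb *doltdb.DoltDB, ms *doltdb.CommitSpec, cm *doltdb.Commit) (bool, error) {
    	// Resolve the head of the default branch.
    	m, err := ddb.Resolve(ctx, ms, nil)
    	if err != nil {
    		return false, err
    	}
    	// Merged means the default-branch head can fast reverse to the workspace head.
    	isMerged, err := m.CanFastReverseTo(ctx, cm)
    	if err != nil && err != doltdb.ErrUpToDate {
    		return false, err
    	}
    	return isMerged, nil
    }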
go/libraries/doltcore/env/environment.go (vendored, 4 changes)
@@ -432,7 +432,7 @@ func (dEnv *DoltEnv) configureRepo(doltDir string) error {
 }
 
 // Inits the dolt DB of this environment with an empty commit at the time given and writes default docs to disk.
-// Writes new repo state with a master branch and current root hash.
+// Writes new repo state with a main branch and current root hash.
 func (dEnv *DoltEnv) InitDBAndRepoState(ctx context.Context, nbf *types.NomsBinFormat, name, email, branchName string, t time.Time) error {
     err := dEnv.InitDBWithTime(ctx, nbf, name, email, branchName, t)
     if err != nil {
@@ -464,7 +464,7 @@ func (dEnv *DoltEnv) InitDBWithTime(ctx context.Context, nbf *types.NomsBinForma
     return nil
 }
 
-// InitializeRepoState writes a default repo state to disk, consisting of a master branch and current root hash value.
+// InitializeRepoState writes a default repo state to disk, consisting of a main branch and current root hash value.
 func (dEnv *DoltEnv) InitializeRepoState(ctx context.Context, branchName string) error {
     initBranch := GetDefaultInitBranch(dEnv.Config)
     if branchName != "" {
 
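The two comment updates above go with the new branchName parameter: the repo state is initialized on an explicit branch when one is given, otherwise on the configured default. A sketch of that fallback, using only the GetDefaultInitBranch call and Config field the hunk shows (the helper function itself is illustrative, and calling it from outside the env package assumes GetDefaultInitBranch stays exported):

    // Illustrative only: mirrors the branch selection shown in InitializeRepoState.
    func resolveInitBranch(dEnv *env.DoltEnv, branchName string) string {
    	initBranch := env.GetDefaultInitBranch(dEnv.Config)
    	if branchName != "" {
    		initBranch = branchName
    	}
    	return initBranch
    }

This fallback is why the test changes in this commit can simply pass "" as the final InitRepo argument.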
@@ -53,8 +53,8 @@ func createTestEnv(isInitialized bool, hasLocalConfig bool) (*DoltEnv, *filesys.
     initialDirs = append(initialDirs, doltDir)
     initialDirs = append(initialDirs, doltDataDir)
 
-    masterRef := ref.NewBranchRef("master")
-    repoState := &RepoState{Head: ref.MarshalableRef{Ref: masterRef}}
+    mainRef := ref.NewBranchRef("main")
+    repoState := &RepoState{Head: ref.MarshalableRef{Ref: mainRef}}
     repoStateData, err := json.Marshal(repoState)
 
     if err != nil {
@@ -150,7 +150,7 @@ func TestRepoDirNoLocal(t *testing.T) {
 
 func TestInitRepo(t *testing.T) {
     dEnv, _ := createTestEnv(false, false)
-    err := dEnv.InitRepo(context.Background(), types.Format_Default, "aoeu aoeu", "aoeu@aoeu.org")
+    err := dEnv.InitRepo(context.Background(), types.Format_Default, "aoeu aoeu", "aoeu@aoeu.org", "")
     require.NoError(t, err)
 
     _, err = dEnv.WorkingRoot(context.Background())
@@ -182,7 +182,7 @@ func TestMigrateWorkingSet(t *testing.T) {
     dEnv := createFileTestEnv(t, working, homeDir)
     assert.NoError(t, dEnv.CfgLoadErr)
 
-    err = dEnv.InitRepo(context.Background(), types.Format_Default, "aoeu aoeu", "aoeu@aoeu.org")
+    err = dEnv.InitRepo(context.Background(), types.Format_Default, "aoeu aoeu", "aoeu@aoeu.org", "")
     require.NoError(t, err)
 
     ws, err := dEnv.WorkingSet(context.Background())
@@ -89,7 +89,7 @@ func initRepoWithRelativePath(t *testing.T, envPath string, hdp HomeDirProvider)
         UserEmailKey: email,
     })
 
-    err = dEnv.InitRepo(context.Background(), types.Format_Default, name, email)
+    err = dEnv.InitRepo(context.Background(), types.Format_Default, name, email, "")
     require.NoError(t, err)
 
     return Load(context.Background(), hdp, fs, urlStr, "test")
 
go/libraries/doltcore/env/remotes.go (vendored, 2 changes)
@@ -312,7 +312,7 @@ func parseRSFromArgs(remName string, args []string) ([]ref.RemoteRefSpec, error)
 }
 
 // if possible, convert refs to full spec names. prefer branches over tags.
-// eg "master" -> "refs/heads/master", "v1" -> "refs/tags/v1"
+// eg "main" -> "refs/heads/main", "v1" -> "refs/tags/v1"
 func disambiguateRefSpecStr(ctx context.Context, ddb *doltdb.DoltDB, refSpecStr string) (string, error) {
     brachRefs, err := ddb.GetBranches(ctx)
 
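The updated comment above is the whole contract, so a small sketch of the expected mapping may help. The function is unexported, so this only works from inside the env package; the wrapping function, the fmt printing, and the existence of a v1 tag are assumptions:

    // Sketch: bare names expand to full ref specs, preferring branches over tags.
    func printDisambiguated(ctx context.Context, ddb *doltdb.DoltDB) error {
    	branchSpec, err := disambiguateRefSpecStr(ctx, ddb, "main")
    	if err != nil {
    		return err
    	}
    	fmt.Println(branchSpec) // expected: refs/heads/main

    	tagSpec, err := disambiguateRefSpecStr(ctx, ddb, "v1")
    	if err != nil {
    		return err
    	}
    	fmt.Println(tagSpec) // expected: refs/tags/v1, assuming v1 exists only as a tag
    	return nil
    }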
@@ -60,7 +60,7 @@ type HistoryNode struct {
 // InitializeWithHistory will go through the provided historyNodes and create the intended commit graph
 func InitializeWithHistory(t *testing.T, ctx context.Context, dEnv *env.DoltEnv, historyNodes ...HistoryNode) {
     for _, node := range historyNodes {
-        cs, err := doltdb.NewCommitSpec("master")
+        cs, err := doltdb.NewCommitSpec(env.DefaultInitBranch)
         require.NoError(t, err)
 
         cm, err := dEnv.DoltDB.Resolve(ctx, cs, nil)
 
@@ -273,7 +273,7 @@ var RebaseTagTests = []RebaseTagTest{
     },
     // https://github.com/dolthub/dolt/issues/773
     /*{
-        Name: "create new column on master, insert to table on other branch, merge",
+        Name: "create new column on main, insert to table on other branch, merge",
         Commands: []tc.Command{
             tc.Query{Query: createPeopleTable},
             tc.CommitAll{Message: "made changes"},
@@ -284,7 +284,7 @@ var RebaseTagTests = []RebaseTagTest{
             tc.Checkout{BranchName: "newBranch"},
             tc.Query{Query: `insert into people (id, name, age) values (9, "Jacqueline Bouvier", 80);`},
             tc.CommitAll{Message: "made changes"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Merge{BranchName: "newBranch"},
             tc.CommitAll{Message: "made changes"},
         },
@@ -298,7 +298,7 @@ var RebaseTagTests = []RebaseTagTest{
         },
     },
     {
-        Name: "create new column on master; insert, update, delete on both branches; merge",
+        Name: "create new column on main; insert, update, delete on both branches; merge",
         Commands: []tc.Command{
             tc.Query{Query: createPeopleTable},
             tc.Query{Query: `insert into people (id, name, age) values
@@ -318,7 +318,7 @@ var RebaseTagTests = []RebaseTagTest{
             tc.Query{Query: `delete from people where id=8;`},
             tc.Query{Query: `update people set age=40 where id=9;`},
             tc.CommitAll{Message: "made changes"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Merge{BranchName: "newBranch"},
             tc.CommitAll{Message: "made changes"},
         },
@@ -334,7 +334,7 @@ var RebaseTagTests = []RebaseTagTest{
         },
     },
     {
-        Name: "create new column on other branch, merge into master",
+        Name: "create new column on other branch, merge into main",
         Commands: []tc.Command{
             tc.Query{Query: createPeopleTable},
             tc.CommitAll{Message: "made changes"},
@@ -343,7 +343,7 @@ var RebaseTagTests = []RebaseTagTest{
             tc.Query{Query: `alter table people add drip double;`},
             tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
             tc.CommitAll{Message: "made changes"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Query{Query: `insert into people (id, name, age) values (9, "Jacqueline Bouvier", 80);`},
             tc.CommitAll{Message: "made changes"},
             tc.Merge{BranchName: "newBranch"},
@@ -370,7 +370,7 @@ var RebaseTagTests = []RebaseTagTest{
             tc.Checkout{BranchName: "newBranch"},
             tc.Query{Query: `insert into people (id, name, age, drip) values (11, "Selma Bouvier", 40, 8.5);`},
             tc.CommitAll{Message: "made changes"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Query{Query: `insert into people (id, name, age, drip) values (10, "Patty Bouvier", 40, 8.5);`},
             tc.CommitAll{Message: "made changes"},
             tc.Merge{BranchName: "newBranch"},
@@ -430,7 +430,7 @@ func testRebaseTag(t *testing.T, test RebaseTagTest) {
         require.NoError(t, setupErr)
     }
 
-    bs, _ := dEnv.DoltDB.GetBranches(context.Background()) // master
+    bs, _ := dEnv.DoltDB.GetBranches(context.Background()) // main
     rebasedCommit, err := rebase.TagRebaseForRef(context.Background(), bs[0], dEnv.DoltDB, rebase.TagMapping{"people": map[uint64]uint64{test.OldTag: test.NewTag}})
 
     if test.ExpectedErrStr != "" {
@@ -440,10 +440,10 @@ func testRebaseTag(t *testing.T, test RebaseTagTest) {
         require.NoError(t, err)
         require.NotNil(t, rebasedCommit)
 
-        mcs, _ := doltdb.NewCommitSpec("master")
-        masterCm, _ := dEnv.DoltDB.Resolve(context.Background(), mcs, nil)
+        mcs, _ := doltdb.NewCommitSpec("main")
+        mainCm, _ := dEnv.DoltDB.Resolve(context.Background(), mcs, nil)
         rch, _ := rebasedCommit.HashOf()
-        mch, _ := masterCm.HashOf()
+        mch, _ := mainCm.HashOf()
         require.Equal(t, rch, mch)
 
         rebasedRoot, _ := rebasedCommit.GetRootValue()
@@ -457,10 +457,10 @@ func testRebaseTagHistory(t *testing.T) {
         tc.Query{Query: createPeopleTable},
         tc.Query{Query: `insert into people (id, name, age) values
        (7, "Maggie Simpson", 1);`},
-        tc.CommitAll{Message: "made changes"}, // common ancestor of (newMaster, oldMaster) and (newMaster, other)
+        tc.CommitAll{Message: "made changes"}, // common ancestor of (newMain, oldMain) and (newMain, other)
 
         tc.Query{Query: `alter table people add drip double;`},
-        tc.CommitAll{Message: "made changes"}, // common ancestor of (oldMaster, other)
+        tc.CommitAll{Message: "made changes"}, // common ancestor of (oldMain, other)
 
         tc.Branch{BranchName: "other"},
         tc.Query{Query: `insert into people (id, name, age, drip) values (10, "Patty Bouvier", 40, 8.5);`},
@@ -473,17 +473,17 @@ func testRebaseTagHistory(t *testing.T) {
         require.NoError(t, err)
     }
 
-    mcs, _ := doltdb.NewCommitSpec("master")
-    oldMasterCm, _ := dEnv.DoltDB.Resolve(context.Background(), mcs, nil)
+    mcs, _ := doltdb.NewCommitSpec("main")
+    oldMainCm, _ := dEnv.DoltDB.Resolve(context.Background(), mcs, nil)
     ocs, _ := doltdb.NewCommitSpec("other")
     otherCm, _ := dEnv.DoltDB.Resolve(context.Background(), ocs, nil)
 
-    bs, _ := dEnv.DoltDB.GetBranches(context.Background()) // master
-    newMasterCm, err := rebase.TagRebaseForRef(context.Background(), bs[0], dEnv.DoltDB, rebase.TagMapping{"people": map[uint64]uint64{DripTag: DripTagRebased}})
+    bs, _ := dEnv.DoltDB.GetBranches(context.Background()) // main
+    newMainCm, err := rebase.TagRebaseForRef(context.Background(), bs[0], dEnv.DoltDB, rebase.TagMapping{"people": map[uint64]uint64{DripTag: DripTagRebased}})
     require.NoError(t, err)
 
     expectedSch := schema.MustSchemaFromCols(peopleWithDrip)
-    rebasedRoot, _ := newMasterCm.GetRootValue()
+    rebasedRoot, _ := newMainCm.GetRootValue()
     checkSchema(t, rebasedRoot, "people", expectedSch)
     checkRows(t, dEnv, rebasedRoot, "people", expectedSch, "select * from people;", []row.Row{
         newRow(row.TaggedValues{IdTag: types.Int(7), NameTag: types.String("Maggie Simpson"), AgeTag: types.Int(1)}, people),
@@ -491,15 +491,15 @@ func testRebaseTagHistory(t *testing.T) {
     })
 
     // assert that histories have been forked
-    anc1, err := doltdb.GetCommitAncestor(context.Background(), oldMasterCm, otherCm)
+    anc1, err := doltdb.GetCommitAncestor(context.Background(), oldMainCm, otherCm)
     require.NoError(t, err)
     ancHash1, _ := anc1.HashOf()
 
-    anc2, err := doltdb.GetCommitAncestor(context.Background(), newMasterCm, oldMasterCm)
+    anc2, err := doltdb.GetCommitAncestor(context.Background(), newMainCm, oldMainCm)
     require.NoError(t, err)
     ancHash2, _ := anc2.HashOf()
 
-    anc3, err := doltdb.GetCommitAncestor(context.Background(), newMasterCm, otherCm)
+    anc3, err := doltdb.GetCommitAncestor(context.Background(), newMainCm, otherCm)
     require.NoError(t, err)
     ancHash3, _ := anc3.HashOf()
 
@@ -62,7 +62,7 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.Query{Query: testableDef},
             tc.CommitAll{Message: "created table testable"},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
         )),
@@ -76,7 +76,7 @@ var SuperSchemaTests = []SuperSchemaTest{
         Commands: []tc.Command{
             tc.Query{Query: testableDef},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
         )),
@@ -92,7 +92,7 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.CommitAll{Message: "created table testable"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
             newColTypeInfo("c0", c0Tag, typeinfo.Int32Type, false),
@@ -112,7 +112,7 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.Query{Query: "alter table testable drop column c0"},
             tc.CommitAll{Message: "dropped column c0"},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
         )),
@@ -130,7 +130,7 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.CommitAll{Message: "created table testable"},
             tc.Query{Query: "alter table testable drop column c0"},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
         )),
@@ -147,7 +147,7 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.Query{Query: fmt.Sprintf("alter table testable add column c0 int;")},
             tc.Query{Query: "alter table testable drop column c0"},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
         )),
@@ -167,7 +167,7 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.CommitStaged{Message: "adding staged column c0"},
             tc.ResetHard{},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
             newColTypeInfo("c0", c0Tag, typeinfo.Int32Type, false),
@@ -178,7 +178,7 @@ var SuperSchemaTests = []SuperSchemaTest{
         )),
     },
     {
-        Name:      "super schema on branch master",
+        Name:      "super schema on branch main",
         TableName: "testable",
         Commands: []tc.Command{
             tc.Query{Query: testableDef},
@@ -188,10 +188,10 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.Checkout{BranchName: "other"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int;")},
             tc.CommitAll{Message: "added column c11 on branch other"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
             newColTypeInfo("c0", c0Tag, typeinfo.Int32Type, false),
@@ -214,9 +214,9 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.Checkout{BranchName: "other"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int;")},
             tc.CommitAll{Message: "added column c11 on branch other"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
-            tc.CommitAll{Message: "added column c1 on branch master"},
+            tc.CommitAll{Message: "added column c1 on branch main"},
             tc.Checkout{BranchName: "other"},
         },
         ExpectedBranch: "other",
@@ -243,12 +243,12 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.Checkout{BranchName: "other"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c11 int;")},
             tc.CommitAll{Message: "added column c11 on branch other"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
-            tc.CommitAll{Message: "added column c1 on branch master"},
+            tc.CommitAll{Message: "added column c1 on branch main"},
             tc.Merge{BranchName: "other"},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
             newColTypeInfo("c0", c0Tag, typeinfo.Int32Type, false),
@@ -276,13 +276,13 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.CommitAll{Message: "added columns c11 and c12 on branch other"},
             tc.Query{Query: "alter table testable drop column c12;"},
             tc.CommitAll{Message: "dropped column c12 on branch other"},
-            tc.Checkout{BranchName: "master"},
+            tc.Checkout{BranchName: "main"},
             tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
-            tc.CommitAll{Message: "added column c1 on branch master"},
+            tc.CommitAll{Message: "added column c1 on branch main"},
             tc.Merge{BranchName: "other"},
-            tc.CommitAll{Message: "Merged other into master"},
+            tc.CommitAll{Message: "Merged other into main"},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
             newColTypeInfo("c0", c0Tag, typeinfo.Int32Type, false),
@@ -308,9 +308,9 @@ var SuperSchemaTests = []SuperSchemaTest{
             tc.Query{Query: fmt.Sprintf("alter table testable add column c1 int;")},
             tc.Query{Query: "create table qux (pk int not null primary key);"},
             tc.Query{Query: "drop table foo;"},
-            tc.CommitAll{Message: "added column c1 on branch master, created table qux, dropped table foo"},
+            tc.CommitAll{Message: "added column c1 on branch main, created table qux, dropped table foo"},
         },
-        ExpectedBranch: "master",
+        ExpectedBranch: "main",
         ExpectedSchema: schema.MustSchemaFromCols(columnCollection(
             newColTypeInfo("pk", pkTag, typeinfo.Int32Type, true, schema.NotNullConstraint{}),
             newColTypeInfo("c0", c0Tag, typeinfo.Int32Type, false),
 
@@ -53,7 +53,7 @@ func TestMerge(t *testing.T) {
             {cmd.CheckoutCmd{}, args{"-b", "other"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (1,1),(2,2);"}},
             {cmd.CommitCmd{}, args{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, args{"master"}},
+            {cmd.CheckoutCmd{}, args{"main"}},
             {cmd.MergeCmd{}, args{"other"}},
         },
         query: "SELECT * FROM test",
@@ -67,11 +67,11 @@ func TestMerge(t *testing.T) {
         setup: []testCommand{
             {cmd.BranchCmd{}, args{"other"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (11,11),(22,22);"}},
-            {cmd.CommitCmd{}, args{"-am", "added rows on master"}},
+            {cmd.CommitCmd{}, args{"-am", "added rows on main"}},
             {cmd.CheckoutCmd{}, args{"other"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (1,1),(2,2);"}},
             {cmd.CommitCmd{}, args{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, args{"master"}},
+            {cmd.CheckoutCmd{}, args{"main"}},
             {cmd.MergeCmd{}, args{"other"}},
         },
         query: "SELECT * FROM test",
@@ -88,12 +88,12 @@ func TestMerge(t *testing.T) {
             {cmd.BranchCmd{}, args{"other"}},
             {cmd.SqlCmd{}, args{"-q", "CREATE TABLE quiz (pk varchar(120) primary key);"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO quiz VALUES ('a'),('b'),('c');"}},
-            {cmd.CommitCmd{}, args{"-am", "added rows on master"}},
+            {cmd.CommitCmd{}, args{"-am", "added rows on main"}},
             {cmd.CheckoutCmd{}, args{"other"}},
             {cmd.SqlCmd{}, args{"-q", "CREATE TABLE quiz (pk varchar(120) primary key);"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO quiz VALUES ('x'),('y'),('z');"}},
             {cmd.CommitCmd{}, args{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, args{"master"}},
+            {cmd.CheckoutCmd{}, args{"main"}},
             {cmd.MergeCmd{}, args{"other"}},
         },
         query: "SELECT * FROM quiz ORDER BY pk",
@@ -153,9 +153,9 @@ func TestMergeConflicts(t *testing.T) {
             {cmd.CheckoutCmd{}, args{"-b", "other"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (1,1),(2,2);"}},
             {cmd.CommitCmd{}, args{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, args{"master"}},
+            {cmd.CheckoutCmd{}, args{"main"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (1,11),(2,22);"}},
-            {cmd.CommitCmd{}, args{"-am", "added the same rows on master"}},
+            {cmd.CommitCmd{}, args{"-am", "added the same rows on main"}},
             {cmd.MergeCmd{}, args{"other"}},
         },
         query: "SELECT * FROM dolt_conflicts",
@@ -169,9 +169,9 @@ func TestMergeConflicts(t *testing.T) {
             {cmd.CheckoutCmd{}, args{"-b", "other"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (1,1),(2,2);"}},
             {cmd.CommitCmd{}, args{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, args{"master"}},
+            {cmd.CheckoutCmd{}, args{"main"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (1,11),(2,22);"}},
-            {cmd.CommitCmd{}, args{"-am", "added the same rows on master"}},
+            {cmd.CommitCmd{}, args{"-am", "added the same rows on main"}},
             {cmd.MergeCmd{}, args{"other"}},
             {cnfcmds.ResolveCmd{}, args{"--ours", "test"}},
         },
@@ -188,10 +188,10 @@ func TestMergeConflicts(t *testing.T) {
             {cmd.SqlCmd{}, args{"-q", "CREATE TABLE quiz (pk int PRIMARY KEY, c0 int);"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO quiz VALUES (1,1),(2,2);"}},
             {cmd.CommitCmd{}, args{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, args{"master"}},
+            {cmd.CheckoutCmd{}, args{"main"}},
             {cmd.SqlCmd{}, args{"-q", "CREATE TABLE quiz (pk int PRIMARY KEY, c0 int);"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO quiz VALUES (1,11),(2,22);"}},
-            {cmd.CommitCmd{}, args{"-am", "added the same rows on master"}},
+            {cmd.CommitCmd{}, args{"-am", "added the same rows on main"}},
             {cmd.MergeCmd{}, args{"other"}},
         },
         query: "SELECT * FROM dolt_conflicts",
@@ -206,10 +206,10 @@ func TestMergeConflicts(t *testing.T) {
             {cmd.SqlCmd{}, args{"-q", "CREATE TABLE quiz (pk int PRIMARY KEY, c0 int);"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO quiz VALUES (1,1),(2,2);"}},
             {cmd.CommitCmd{}, args{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, args{"master"}},
+            {cmd.CheckoutCmd{}, args{"main"}},
             {cmd.SqlCmd{}, args{"-q", "CREATE TABLE quiz (pk int PRIMARY KEY, c0 int);"}},
             {cmd.SqlCmd{}, args{"-q", "INSERT INTO quiz VALUES (1,11),(2,22);"}},
-            {cmd.CommitCmd{}, args{"-am", "added the same rows on master"}},
+            {cmd.CommitCmd{}, args{"-am", "added the same rows on main"}},
             {cmd.MergeCmd{}, args{"other"}},
             {cnfcmds.ResolveCmd{}, args{"--theirs", "quiz"}},
         },
 
@@ -56,7 +56,7 @@ func TestKeylessMerge(t *testing.T) {
             {cmd.CheckoutCmd{}, []string{"-b", "other"}},
             {cmd.SqlCmd{}, []string{"-q", "insert into noKey values (3,4);"}},
             {cmd.CommitCmd{}, []string{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, []string{"master"}},
+            {cmd.CheckoutCmd{}, []string{"main"}},
             {cmd.MergeCmd{}, []string{"other"}},
         },
         expected: mustTupleSet(
@@ -72,9 +72,9 @@ func TestKeylessMerge(t *testing.T) {
             {cmd.CheckoutCmd{}, []string{"-b", "other"}},
             {cmd.SqlCmd{}, []string{"-q", "insert into noKey values (3,4);"}},
             {cmd.CommitCmd{}, []string{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, []string{"master"}},
+            {cmd.CheckoutCmd{}, []string{"main"}},
             {cmd.SqlCmd{}, []string{"-q", "insert into noKey values (5,6);"}},
-            {cmd.CommitCmd{}, []string{"-am", "added rows on master"}},
+            {cmd.CommitCmd{}, []string{"-am", "added rows on main"}},
             {cmd.MergeCmd{}, []string{"other"}},
         },
         expected: mustTupleSet(
@@ -91,9 +91,9 @@ func TestKeylessMerge(t *testing.T) {
             {cmd.CheckoutCmd{}, []string{"-b", "other"}},
             {cmd.SqlCmd{}, []string{"-q", "insert into noKey values (3,4), (3,4);"}},
             {cmd.CommitCmd{}, []string{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, []string{"master"}},
+            {cmd.CheckoutCmd{}, []string{"main"}},
             {cmd.SqlCmd{}, []string{"-q", "insert into noKey values (5,6), (5,6);"}},
-            {cmd.CommitCmd{}, []string{"-am", "added rows on master"}},
+            {cmd.CommitCmd{}, []string{"-am", "added rows on main"}},
             {cmd.MergeCmd{}, []string{"other"}},
         },
         expected: mustTupleSet(
@@ -154,9 +154,9 @@ func TestKeylessMergeConflicts(t *testing.T) {
             {cmd.CheckoutCmd{}, []string{"-b", "other"}},
             {cmd.SqlCmd{}, []string{"-q", "insert into noKey values (3,4);"}},
             {cmd.CommitCmd{}, []string{"-am", "added rows on other"}},
-            {cmd.CheckoutCmd{}, []string{"master"}},
+            {cmd.CheckoutCmd{}, []string{"main"}},
             {cmd.SqlCmd{}, []string{"-q", "insert into noKey values (3,4);"}},
-            {cmd.CommitCmd{}, []string{"-am", "added rows on master"}},
+            {cmd.CommitCmd{}, []string{"-am", "added rows on main"}},
             {cmd.MergeCmd{}, []string{"other"}},
         },
         conflicts: mustTupleSet(
@@ -183,9 +183,9 @@ func TestKeylessMergeConflicts(t *testing.T) {
             {cmd.CheckoutCmd{}, []string{"-b", "other"}},
             {cmd.SqlCmd{}, []string{"-q", "delete from noKey where (c1,c2) = (1,2) limit 1;"}},
             {cmd.CommitCmd{}, []string{"-am", "deleted 1 row on other"}},
-            {cmd.CheckoutCmd{}, []string{"master"}},
+            {cmd.CheckoutCmd{}, []string{"main"}},
             {cmd.SqlCmd{}, []string{"-q", "delete from noKey where (c1,c2) = (1,2) limit 2;"}},
-            {cmd.CommitCmd{}, []string{"-am", "deleted 2 rows on master"}},
+            {cmd.CommitCmd{}, []string{"-am", "deleted 2 rows on main"}},
             {cmd.MergeCmd{}, []string{"other"}},
         },
         conflicts: mustTupleSet(
@@ -210,9 +210,9 @@ func TestKeylessMergeConflicts(t *testing.T) {
             {cmd.CheckoutCmd{}, []string{"-b", "other"}},
             {cmd.SqlCmd{}, []string{"-q", "update noKey set c2 = 9 limit 1;"}},
             {cmd.CommitCmd{}, []string{"-am", "deleted 1 row on other"}},
-            {cmd.CheckoutCmd{}, []string{"master"}},
+            {cmd.CheckoutCmd{}, []string{"main"}},
             {cmd.SqlCmd{}, []string{"-q", "update noKey set c2 = 9 limit 2;"}},
-            {cmd.CommitCmd{}, []string{"-am", "deleted 2 rows on master"}},
+            {cmd.CommitCmd{}, []string{"-am", "deleted 2 rows on main"}},
             {cmd.MergeCmd{}, []string{"other"}},
         },
         conflicts: mustTupleSet(
 
@@ -267,11 +267,11 @@ func setupMergeTest(t *testing.T) (types.ValueReadWriter, *doltdb.Commit, *doltd
     ddb, _ := doltdb.LoadDoltDB(context.Background(), types.Format_Default, doltdb.InMemDoltDB, filesys2.LocalFS)
     vrw := ddb.ValueReadWriter()
 
-    err := ddb.WriteEmptyRepo(context.Background(), "master", name, email)
+    err := ddb.WriteEmptyRepo(context.Background(), "main", name, email)
     require.NoError(t, err)
 
-    masterHeadSpec, _ := doltdb.NewCommitSpec("master")
-    masterHead, err := ddb.Resolve(context.Background(), masterHeadSpec, nil)
+    mainHeadSpec, _ := doltdb.NewCommitSpec("main")
+    mainHead, err := ddb.Resolve(context.Background(), mainHeadSpec, nil)
     require.NoError(t, err)
 
     initialRows, err := types.NewMap(context.Background(), vrw,
@@ -367,7 +367,7 @@ func setupMergeTest(t *testing.T) (types.ValueReadWriter, *doltdb.Commit, *doltd
     mergeTbl, err = editor.RebuildAllIndexes(context.Background(), mergeTbl, editor.TestEditorOptions(vrw))
     require.NoError(t, err)
 
-    mRoot, err := masterHead.GetRootValue()
+    mRoot, err := mainHead.GetRootValue()
     require.NoError(t, err)
 
     mRoot, err = mRoot.PutTable(context.Background(), tableName, tbl)
@@ -379,7 +379,7 @@ func setupMergeTest(t *testing.T) (types.ValueReadWriter, *doltdb.Commit, *doltd
     mergeRoot, err := mRoot.PutTable(context.Background(), tableName, mergeTbl)
     require.NoError(t, err)
 
-    masterHash, err := ddb.WriteRootValue(context.Background(), mRoot)
+    mainHash, err := ddb.WriteRootValue(context.Background(), mRoot)
     require.NoError(t, err)
     hash, err := ddb.WriteRootValue(context.Background(), updatedRoot)
     require.NoError(t, err)
@@ -388,9 +388,9 @@ func setupMergeTest(t *testing.T) (types.ValueReadWriter, *doltdb.Commit, *doltd
 
     meta, err := doltdb.NewCommitMeta(name, email, "fake")
     require.NoError(t, err)
-    initialCommit, err := ddb.Commit(context.Background(), masterHash, ref.NewBranchRef("master"), meta)
+    initialCommit, err := ddb.Commit(context.Background(), mainHash, ref.NewBranchRef("main"), meta)
     require.NoError(t, err)
-    commit, err := ddb.Commit(context.Background(), hash, ref.NewBranchRef("master"), meta)
+    commit, err := ddb.Commit(context.Background(), hash, ref.NewBranchRef("main"), meta)
     require.NoError(t, err)
 
     err = ddb.NewBranchAtCommit(context.Background(), ref.NewBranchRef("to-merge"), initialCommit)
 
@@ -114,13 +114,13 @@ var mergeSchemaTests = []mergeSchemaTest{
             {commands.SqlCmd{}, []string{"-q", "alter table test drop column c2;"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c8 int;"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test drop column c3;"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c9 int;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         sch: schemaFromColsAndIdxs(
             colCollection(
@@ -136,12 +136,12 @@ var mergeSchemaTests = []mergeSchemaTest{
         setup: []testCommand{
             {commands.SqlCmd{}, []string{"-q", "alter table test modify c1 int null;"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test modify c2 int not null;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         sch: schemaFromColsAndIdxs(
             colCollection(
@@ -157,12 +157,12 @@ var mergeSchemaTests = []mergeSchemaTest{
         setup: []testCommand{
             {commands.SqlCmd{}, []string{"-q", "create index c3_idx on test(c3);"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test drop index c1_idx;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         sch: schemaFromColsAndIdxs(
             colCollection(
@@ -178,12 +178,12 @@ var mergeSchemaTests = []mergeSchemaTest{
         setup: []testCommand{
             {commands.SqlCmd{}, []string{"-q", "alter table test rename column c3 to c33;"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test rename column c2 to c22;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         sch: schemaFromColsAndIdxs(
             colCollection(
@@ -200,7 +200,7 @@ var mergeSchemaTests = []mergeSchemaTest{
             {commands.SqlCmd{}, []string{"-q", "alter table test drop index c1_idx;"}},
             {commands.SqlCmd{}, []string{"-q", "create index c1_index on test(c1);"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
         },
         sch: schemaFromColsAndIdxs(
             colCollection(
@@ -216,12 +216,12 @@ var mergeSchemaTests = []mergeSchemaTest{
         setup: []testCommand{
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c4 int;"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c4 int;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         sch: schemaFromColsAndIdxs(
             colCollection(
@@ -238,12 +238,12 @@ var mergeSchemaTests = []mergeSchemaTest{
         setup: []testCommand{
             {commands.SqlCmd{}, []string{"-q", "create index c3_idx on test(c3);"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "create index c3_idx on test(c3);"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         sch: schemaFromColsAndIdxs(
             colCollection(
 
@@ -270,13 +270,13 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
             {commands.SqlCmd{}, []string{"-q", "alter table test rename column c3 to c4;"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column C6 int;"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test rename column c2 to c4;"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c6 int;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         expConflict: merge.SchemaConflict{
             TableName: "test",
@@ -299,12 +299,12 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
         setup: []testCommand{
             {commands.SqlCmd{}, []string{"-q", "create index both on test (c1,c2);"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "create index both on test (c2, c3);"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         expConflict: merge.SchemaConflict{
             TableName: "test",
@@ -323,14 +323,14 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c40 int;"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c6 bigint;"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c40 int;"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test rename column c40 to c44;"}},
             {commands.SqlCmd{}, []string{"-q", "alter table test add column c6 tinyint;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         expConflict: merge.SchemaConflict{
             TableName: "test",
@@ -353,12 +353,12 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
         setup: []testCommand{
             {commands.SqlCmd{}, []string{"-q", "create index c3_idx on test(c3);"}},
             {commands.AddCmd{}, []string{"."}},
-            {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+            {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
             {commands.CheckoutCmd{}, []string{"other"}},
             {commands.SqlCmd{}, []string{"-q", "create index c3_index on test(c3);"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         expConflict: merge.SchemaConflict{
             TableName: "test",
@@ -378,7 +378,7 @@ var mergeSchemaConflictTests = []mergeSchemaConflictTest{
             {commands.SqlCmd{}, []string{"-q", "alter table test drop primary key;"}},
             {commands.AddCmd{}, []string{"."}},
             {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-            {commands.CheckoutCmd{}, []string{"master"}},
+            {commands.CheckoutCmd{}, []string{"main"}},
         },
         expectedErr: merge.ErrMergeWithDifferentPkSets,
     },
@@ -421,12 +421,12 @@ var mergeForeignKeyTests = []mergeForeignKeyTest{
     //  setup: []testCommand{
    //      {commands.SqlCmd{}, []string{"-q", "alter table quiz add constraint q2_fk foreign key (q2) references test(t2);"}},
    //      {commands.AddCmd{}, []string{"."}},
-   //      {commands.CommitCmd{}, []string{"-m", "modified branch master"}},
+   //      {commands.CommitCmd{}, []string{"-m", "modified branch main"}},
    //      {commands.CheckoutCmd{}, []string{"other"}},
    //      {commands.SqlCmd{}, []string{"-q", "alter table quiz drop constraint q1_fk;"}},
    //      {commands.AddCmd{}, []string{"."}},
    //      {commands.CommitCmd{}, []string{"-m", "modified branch other"}},
-   //      {commands.CheckoutCmd{}, []string{"master"}},
+   //      {commands.CheckoutCmd{}, []string{"main"}},
    //  },
    //  fkColl: fkCollection(
    //      &doltdb.ForeignKey{
@@ -478,8 +478,8 @@ func testMergeSchemas(t *testing.T, test mergeSchemaTest) {
         c.exec(t, ctx, dEnv)
     }
 
-    // assert that we're on master
-    exitCode := commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"master"}, dEnv)
+    // assert that we're on main
+    exitCode := commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"main"}, dEnv)
     require.Equal(t, 0, exitCode)
 
     // merge branches
@@ -523,18 +523,18 @@ func testMergeSchemasWithConflicts(t *testing.T, test mergeSchemaConflictTest) {
         c.exec(t, ctx, dEnv)
     }
 
-    // assert that we're on master
-    exitCode := commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"master"}, dEnv)
+    // assert that we're on main
+    exitCode := commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"main"}, dEnv)
     require.Equal(t, 0, exitCode)
 
-    masterSch := getSchema(t, dEnv)
+    mainSch := getSchema(t, dEnv)
 
     exitCode = commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"other"}, dEnv)
     require.Equal(t, 0, exitCode)
 
     otherSch := getSchema(t, dEnv)
 
-    _, actConflicts, err := merge.SchemaMerge(masterSch, otherSch, ancSch, "test")
+    _, actConflicts, err := merge.SchemaMerge(mainSch, otherSch, ancSch, "test")
     if test.expectedErr != nil {
         assert.True(t, errors.Is(err, test.expectedErr))
         return
@@ -572,11 +572,11 @@ func testMergeForeignKeys(t *testing.T, test mergeForeignKeyTest) {
         c.exec(t, ctx, dEnv)
     }
 
-    // assert that we're on master
-    exitCode := commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"master"}, dEnv)
+    // assert that we're on main
+    exitCode := commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"main"}, dEnv)
     require.Equal(t, 0, exitCode)
 
-    masterRoot, err := dEnv.WorkingRoot(ctx)
+    mainRoot, err := dEnv.WorkingRoot(ctx)
     require.NoError(t, err)
 
     exitCode = commands.CheckoutCmd{}.Exec(ctx, "checkout", []string{"other"}, dEnv)
@@ -586,7 +586,7 @@ func testMergeForeignKeys(t *testing.T, test mergeForeignKeyTest) {
     require.NoError(t, err)
 
     opts := editor.TestEditorOptions(dEnv.DoltDB.ValueReadWriter())
-    mergedRoot, _, err := merge.MergeRoots(ctx, masterRoot, otherRoot, ancRoot, opts)
+    mergedRoot, _, err := merge.MergeRoots(ctx, mainRoot, otherRoot, ancRoot, opts)
     assert.NoError(t, err)
 
     fkc, err := mergedRoot.GetForeignKeyCollection(ctx)
 
@@ -120,10 +120,10 @@ func filterBranchTests() []filterBranchTest {
{cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (4,4),(5,5),(6,6);"}},
{cmd.AddCmd{}, args{"-A"}},
{cmd.CommitCmd{}, args{"-m", "added more rows on other"}},
{cmd.CheckoutCmd{}, args{"master"}},
{cmd.CheckoutCmd{}, args{"main"}},
{cmd.SqlCmd{}, args{"-q", "INSERT INTO test VALUES (7,7),(8,8),(9,9);"}},
{cmd.AddCmd{}, args{"-A"}},
{cmd.CommitCmd{}, args{"-m", "added more rows on master"}},
{cmd.CommitCmd{}, args{"-m", "added more rows on main"}},
{cmd.FilterBranchCmd{}, args{"--all", "DELETE FROM test WHERE pk > 4;"}},
},
asserts: []testAssertion{

@@ -33,7 +33,7 @@ func (br BranchRef) GetPath() string {
return br.branch
}

// String returns the fully qualified reference name e.g. refs/heads/master
// String returns the fully qualified reference name e.g. refs/heads/main
func (br BranchRef) String() string {
return String(br)
}
@@ -42,7 +42,7 @@ func (br BranchRef) MarshalJSON() ([]byte, error) {
return MarshalJSON(br)
}

// NewBranchRef creates a reference to a local branch from a branch name or a branch ref e.g. master, or refs/heads/master
// NewBranchRef creates a reference to a local branch from a branch name or a branch ref e.g. main, or refs/heads/main
func NewBranchRef(branchName string) BranchRef {
if IsRef(branchName) {
prefix := PrefixForType(BranchRefType)

@@ -25,7 +25,7 @@ import (
var ErrInvalidRefSpec = errors.New("invalid ref spec")

// ErrInvalidMapping is the error returned when a refspec tries to do an invalid mapping, such as mapping
// refs/heads/master to refs/remotes/origin/*
// refs/heads/main to refs/remotes/origin/*
var ErrInvalidMapping = errors.New("invalid ref spec mapping")

// ErrUnsupportedMapping is returned when trying to do anything other than map local branches (refs/heads/*) to

@@ -21,10 +21,10 @@ import (
)

func TestStringPattern(t *testing.T) {
sp := strPattern("refs/heads/master")
sp := strPattern("refs/heads/main")

captured, matchesMaster := sp.matches("refs/heads/master")
assert.True(t, matchesMaster, "should match master branch ref")
captured, matchesMain := sp.matches("refs/heads/main")
assert.True(t, matchesMain, "should match main branch ref")
assert.True(t, captured == "", "nothing to capture")

captured, matchesFeature := sp.matches("refs/heads/feature")
@@ -46,7 +46,7 @@ func TestWildcardPattern(t *testing.T) {
{
"refs/heads/*",
[]patternTest{
{"refs/heads/master", "master", true},
{"refs/heads/main", "main", true},
{"refs/heads/feature", "feature", true},
{"refs/heads/bh/my/feature", "bh/my/feature", true},
},
@@ -54,16 +54,16 @@ func TestWildcardPattern(t *testing.T) {
{
"refs/heads/bh/*",
[]patternTest{
{"refs/heads/master", "", false},
{"refs/heads/main", "", false},
{"refs/heads/bh/my/feature", "my/feature", true},
},
},
{
"refs/heads/*/master",
"refs/heads/*/main",
[]patternTest{
{"refs/heads/master", "", false},
{"refs/heads/bh/master", "bh", true},
{"refs/heads/as/master", "as", true},
{"refs/heads/main", "", false},
{"refs/heads/bh/main", "bh", true},
{"refs/heads/as/main", "as", true},
},
},
}

@@ -28,46 +28,46 @@ func TestRefSpec(t *testing.T) {
"refs/heads/*:refs/remotes/origin/*",
true,
map[string]string{
"refs/heads/master": "refs/remotes/origin/master",
"refs/heads/main": "refs/remotes/origin/main",
"refs/heads/feature": "refs/remotes/origin/feature",
"refs/remotes/origin/master": "refs/nil/",
"refs/remotes/origin/main": "refs/nil/",
},
}, {
"borigin",
"refs/heads/master:refs/remotes/borigin/mymaster",
"refs/heads/main:refs/remotes/borigin/mymain",
true,
map[string]string{
"refs/heads/master": "refs/remotes/borigin/mymaster",
"refs/heads/main": "refs/remotes/borigin/mymain",
"refs/heads/feature": "refs/nil/",
},
}, {
"",
"refs/heads/*/master:refs/remotes/borigin/*/mymaster",
"refs/heads/*/main:refs/remotes/borigin/*/mymain",
true,
map[string]string{
"refs/heads/master": "refs/nil/",
"refs/heads/bh/master": "refs/remotes/borigin/bh/mymaster",
"refs/heads/as/master": "refs/remotes/borigin/as/mymaster",
"refs/heads/main": "refs/nil/",
"refs/heads/bh/main": "refs/remotes/borigin/bh/mymain",
"refs/heads/as/main": "refs/remotes/borigin/as/mymain",
},
}, {
"",
"master",
"main",
true,
map[string]string{
"refs/heads/master": "refs/heads/master",
"refs/heads/main": "refs/heads/main",
"refs/heads/feature": "refs/nil/",
},
}, {
"",
"master:master",
"main:main",
true,
map[string]string{
"refs/heads/master": "refs/heads/master",
"refs/heads/main": "refs/heads/main",
"refs/heads/feature": "refs/nil/",
},
}, {
"origin",
"refs/heads/master:refs/remotes/not_borigin/mymaster",
"refs/heads/main:refs/remotes/not_borigin/mymain",
false,
nil,
}, {

@@ -29,12 +29,12 @@ func TestJsonMarshalAndUnmarshal(t *testing.T) {
str string
}{
{
NewBranchRef("master"),
`{"test":"refs/heads/master"}`,
NewBranchRef("main"),
`{"test":"refs/heads/main"}`,
},
{
NewRemoteRef("origin", "master"),
`{"test":"refs/remotes/origin/master"}`,
NewRemoteRef("origin", "main"),
`{"test":"refs/remotes/origin/main"}`,
},
{
NewInternalRef("create"),
@@ -71,61 +71,61 @@ func TestJsonMarshalAndUnmarshal(t *testing.T) {
}

func TestEqualsStr(t *testing.T) {
om, _ := NewRemoteRefFromPathStr("origin/master")
rom, _ := NewRemoteRefFromPathStr("refs/remotes/origin/master")
om, _ := NewRemoteRefFromPathStr("origin/main")
rom, _ := NewRemoteRefFromPathStr("refs/remotes/origin/main")
tests := []struct {
dr DoltRef
cmp string
expected bool
}{
{
NewBranchRef("master"),
"refs/heads/master",
NewBranchRef("main"),
"refs/heads/main",
true,
},
{
NewBranchRef("refs/heads/master"),
"refs/heads/master",
NewBranchRef("refs/heads/main"),
"refs/heads/main",
true,
},
{
NewBranchRef("master"),
"refs/heads/notmaster",
NewBranchRef("main"),
"refs/heads/notmain",
false,
},
{
NewBranchRef("master"),
"refs/remotes/origin/master",
NewBranchRef("main"),
"refs/remotes/origin/main",
false,
},
{
NewRemoteRef("origin", "master"),
"refs/remotes/origin/master",
NewRemoteRef("origin", "main"),
"refs/remotes/origin/main",
true,
},
{
om,
"refs/remotes/origin/master",
"refs/remotes/origin/main",
true,
},
{
rom,
"refs/remotes/origin/master",
"refs/remotes/origin/main",
true,
},
{
NewRemoteRef("origin", "master"),
"refs/remotes/borigin/master",
NewRemoteRef("origin", "main"),
"refs/remotes/borigin/main",
false,
},
{
NewRemoteRef("origin", "master"),
"refs/remotes/origin/notmaster",
NewRemoteRef("origin", "main"),
"refs/remotes/origin/notmain",
false,
},
{
NewRemoteRef("origin", "master"),
"refs/notavalidtype/origin/notmaster",
NewRemoteRef("origin", "main"),
"refs/notavalidtype/origin/notmain",
false,
},
{

@@ -32,12 +32,12 @@ func (rr RemoteRef) GetType() RefType {
return RemoteRefType
}

// GetPath returns the remote name separated by the branch e.g. origin/master
// GetPath returns the remote name separated by the branch e.g. origin/main
func (rr RemoteRef) GetPath() string {
return path.Join(rr.remote, rr.branch)
}

// String returns the fully qualified reference e.g. refs/remotes/origin/master
// String returns the fully qualified reference e.g. refs/remotes/origin/main
func (rr RemoteRef) String() string {
return String(rr)
}
@@ -57,8 +57,8 @@ func NewRemoteRef(remote, branch string) RemoteRef {
return RemoteRef{remote, branch}
}

// NewRemoteRefFromPathString creates a DoltRef from a string in the format origin/master, or remotes/origin/master, or
// refs/remotes/origin/master
// NewRemoteRefFromPathString creates a DoltRef from a string in the format origin/main, or remotes/origin/main, or
// refs/remotes/origin/main
func NewRemoteRefFromPathStr(remoteAndPath string) (DoltRef, error) {
if IsRef(remoteAndPath) {
prefix := PrefixForType(RemoteRefType)

@@ -74,7 +74,7 @@ func (br TagRef) GetPath() string {
return br.tag
}

// String returns the fully qualified reference name e.g. refs/heads/master
// String returns the fully qualified reference name e.g. refs/heads/main
func (br TagRef) String() string {
return String(br)
}

@@ -28,8 +28,8 @@ type WorkingSetRef struct {

const WorkingSetRefPrefix = "workingSets"

// NewWorkingSetRef creates a working set ref from a name or a working set ref e.g. heads/master, or
// workingSets/heads/master
// NewWorkingSetRef creates a working set ref from a name or a working set ref e.g. heads/main, or
// workingSets/heads/main
func NewWorkingSetRef(workingSetName string) WorkingSetRef {
prefix := WorkingSetRefPrefix + "/"
if strings.HasPrefix(workingSetName, prefix) {

@@ -427,7 +427,7 @@ func CreateHistory(ctx context.Context, dEnv *env.DoltEnv, t *testing.T) []envte
Children: nil,
},
{
Branch: "master",
Branch: "main",
CommitMsg: "Adding string address to users with tag 3",
Updates: map[string]envtestutils.TableUpdate{
TableWithHistoryName: {
@@ -443,7 +443,7 @@ func CreateHistory(ctx context.Context, dEnv *env.DoltEnv, t *testing.T) []envte
},
Children: []envtestutils.HistoryNode{
{
Branch: "master",
Branch: "main",
CommitMsg: "Re-add age as a uint with tag 4",
Updates: map[string]envtestutils.TableUpdate{
TableWithHistoryName: {

@@ -97,7 +97,7 @@ func TestJsonValues(t *testing.T) {
{cmd.CheckoutCmd{}, args{"-b", "other"}},
{cmd.SqlCmd{}, args{"-q", `update js set js = '{"b":22}' where pk = 2;`}},
{cmd.CommitCmd{}, args{"-am", "update row pk = 2"}},
{cmd.CheckoutCmd{}, args{"master"}},
{cmd.CheckoutCmd{}, args{"main"}},
{cmd.SqlCmd{}, args{"-q", `update js set js = '{"a":11}' where pk = 1;`}},
{cmd.CommitCmd{}, args{"-am", "update row pk = 1"}},
{cmd.MergeCmd{}, args{"other"}},

@@ -122,12 +122,12 @@ func (h *DoltHarness) ExecuteQuery(statement string) (schema string, results []s

func innerInit(h *DoltHarness, dEnv *env.DoltEnv) error {
if !dEnv.HasDoltDir() {
err := dEnv.InitRepoWithTime(context.Background(), types.Format_Default, name, email, time.Now())
err := dEnv.InitRepoWithTime(context.Background(), types.Format_Default, name, email, "", time.Now())
if err != nil {
return err
}
} else {
err := dEnv.InitDBAndRepoState(context.Background(), types.Format_Default, name, email, time.Now())
err := dEnv.InitDBAndRepoState(context.Background(), types.Format_Default, name, email, "", time.Now())
if err != nil {
return err
}

@@ -33,7 +33,7 @@ func createTestEnvWithFS(fs filesys.Filesys, workingDir string) *env.DoltEnv {
const name = "test mcgibbins"
const email = "bigfakeytester@fake.horse"
dEnv := env.Load(context.Background(), testHomeDirFunc, fs, doltdb.InMemDoltDB, "test")
err := dEnv.InitRepo(context.Background(), types.Format_Default, name, email)
err := dEnv.InitRepo(context.Background(), types.Format_Default, name, email, "")
if err != nil {
panic("Failed to initialize environment")
}

@@ -761,7 +761,7 @@ var BasicSelectTests = []SelectTest{
Query: "select * from dolt_branches",
ExpectedRows: []sql.Row{
{
"master",
"main",
"so275enkvulb96mkckbun1kjo9seg7c9",
"billy bob", "bigbillieb@fake.horse",
time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC).In(LoadedLocalLocation()),
@@ -821,14 +821,14 @@ var SelectDiffTests = []SelectTest{
// TODO: fix dependencies to hashof function can be registered and used here, also create branches when generating the history so that different from and to commits can be tested.
/*{
Name: "select from diff system table with from and to commit and test insensitive name",
Query: "select to_id, to_first_name, to_last_name, to_addr, to_age_4, to_age_5, from_id, from_first_name, from_last_name, from_addr, from_age_4, from_age_5, diff_type from dolt_diff_TeSt_TaBlE where from_commit = 'add-age' and to_commit = 'master'",
Query: "select to_id, to_first_name, to_last_name, to_addr, to_age_4, to_age_5, from_id, from_first_name, from_last_name, from_addr, from_age_4, from_age_5, diff_type from dolt_diff_TeSt_TaBlE where from_commit = 'add-age' and to_commit = 'main'",
ExpectedRows: ToSqlRows(DiffSchema,
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(0), 8: types.String("Aaron"), 9: types.String("Son"), 11: types.Int(35), 0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St"), 5: types.Uint(35), 13: types.String("add-age"), 6: types.String("master"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(1), 8: types.String("Brian"), 9: types.String("Hendriks"), 11: types.Int(38), 0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln"), 5: types.Uint(38), 13: types.String("add-age"), 6: types.String("master"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(2), 8: types.String("Tim"), 9: types.String("Sehn"), 11: types.Int(37), 0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct"), 5: types.Uint(37), 13: types.String("add-age"), 6: types.String("master"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(3), 8: types.String("Zach"), 9: types.String("Musgrave"), 11: types.Int(37), 0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave"), 3: types.String("-1 Imaginary Wy"), 5: types.Uint(37), 13: types.String("add-age"), 6: types.String("master"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele"), 3: types.NullValue, 13: types.String("add-age"), 6: types.String("master"), 14: types.String("added")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{0: types.Int(5), 1: types.String("Daylon"), 2: types.String("Wilkins"), 3: types.NullValue, 13: types.String("add-age"), 6: types.String("master"), 14: types.String("added")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(0), 8: types.String("Aaron"), 9: types.String("Son"), 11: types.Int(35), 0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St"), 5: types.Uint(35), 13: types.String("add-age"), 6: types.String("main"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(1), 8: types.String("Brian"), 9: types.String("Hendriks"), 11: types.Int(38), 0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln"), 5: types.Uint(38), 13: types.String("add-age"), 6: types.String("main"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(2), 8: types.String("Tim"), 9: types.String("Sehn"), 11: types.Int(37), 0: types.Int(2), 1: types.String("Tim"), 2: types.String("Sehn"), 3: types.String("789 Not Real Ct"), 5: types.Uint(37), 13: types.String("add-age"), 6: types.String("main"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{7: types.Int(3), 8: types.String("Zach"), 9: types.String("Musgrave"), 11: types.Int(37), 0: types.Int(3), 1: types.String("Zach"), 2: types.String("Musgrave"), 3: types.String("-1 Imaginary Wy"), 5: types.Uint(37), 13: types.String("add-age"), 6: types.String("main"), 14: types.String("modified")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{0: types.Int(4), 1: types.String("Matt"), 2: types.String("Jesuele"), 3: types.NullValue, 13: types.String("add-age"), 6: types.String("main"), 14: types.String("added")})),
mustRow(row.New(types.Format_Default, DiffSchema, row.TaggedValues{0: types.Int(5), 1: types.String("Daylon"), 2: types.String("Wilkins"), 3: types.NullValue, 13: types.String("add-age"), 6: types.String("main"), 14: types.String("added")})),
),
ExpectedSqlSchema: sqlDiffSchema,
},*/
@@ -857,8 +857,8 @@ var AsOfTests = []SelectTest{
ExpectedSchema: AddAgeAt4HistSch,
},
{
Name: "select * from master branch",
Query: "select * from test_table as of 'master'",
Name: "select * from main branch",
Query: "select * from test_table as of 'main'",
ExpectedRows: ToSqlRows(ReaddAgeAt5HistSch,
mustRow(row.New(types.Format_Default, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St"), 5: types.Uint(35)})),
mustRow(row.New(types.Format_Default, ReaddAgeAt5HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln"), 5: types.Uint(38)})),
@@ -894,8 +894,8 @@ var AsOfTests = []SelectTest{
ExpectedSchema: AddAddrAt3HistSch,
},
{
Name: "select * from master^",
Query: "select * from test_table as of 'master^'",
Name: "select * from main^",
Query: "select * from test_table as of 'main^'",
ExpectedRows: ToSqlRows(AddAddrAt3HistSch,
mustRow(row.New(types.Format_Default, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(0), 1: types.String("Aaron"), 2: types.String("Son"), 3: types.String("123 Fake St")})),
mustRow(row.New(types.Format_Default, AddAddrAt3HistSch, row.TaggedValues{0: types.Int(1), 1: types.String("Brian"), 2: types.String("Hendriks"), 3: types.String("456 Bull Ln")})),
@@ -1611,7 +1611,7 @@ func testSelectDiffQuery(t *testing.T, test SelectTest) {
test.AdditionalSetup(t, dEnv)
}

cs, err := doltdb.NewCommitSpec("master")
cs, err := doltdb.NewCommitSpec("main")
require.NoError(t, err)

cm, err := dEnv.DoltDB.Resolve(ctx, cs, nil)

@@ -71,7 +71,7 @@ func createTestEnvWithFS(fs filesys.Filesys, workingDir string) *env.DoltEnv {
const name = "test mcgibbins"
const email = "bigfakeytester@fake.horse"
dEnv := env.Load(context.Background(), testHomeDirFunc, fs, doltdb.LocalDirDoltDB, "test")
err := dEnv.InitRepo(context.Background(), types.Format_7_18, name, email)
err := dEnv.InitRepo(context.Background(), types.Format_7_18, name, email, "")
if err != nil {
panic("Failed to initialize environment")
}

@@ -129,7 +129,7 @@ func getEnvAndConfig(ctx context.Context, b *testing.B) (dEnv *env.DoltEnv, cfg
}

dEnv = env.Load(ctx, os.UserHomeDir, fs, doltdb.LocalDirDoltDB, "bench")
err = dEnv.InitRepo(ctx, types.Format_7_18, name, email)
err = dEnv.InitRepo(ctx, types.Format_7_18, name, email, "")
if err != nil {
b.Fatal(err)
}

@@ -19,6 +19,7 @@ import (
"encoding/binary"
"errors"
"fmt"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"io"
"os"

@@ -71,7 +72,7 @@ func WriteValueFile(ctx context.Context, filepath string, store *FileValueStore,
// WriteToWriter writes the values out to the provided writer in the value file format
func WriteToWriter(ctx context.Context, wr io.Writer, store *FileValueStore, values ...types.Value) error {
db := datas.NewDatabase(store)
ds, err := db.GetDataset(ctx, "master")
ds, err := db.GetDataset(ctx, env.DefaultInitBranch) // TODO: DUSTIN

if err != nil {
return err