Merge branch 'main' into fulghum/dolt-7040

Jason Fulghum committed 2023-12-07 14:34:42 -08:00
78 changed files with 1626 additions and 804 deletions

View File

@@ -15,7 +15,7 @@ actorprefix="$5"
format="$6"
nomsBinFormat="$7"
precision="4"
precision="6"
if [ -n "$nomsBinFormat" ]; then
nomsBinFormat="\"--noms-bin-format=$nomsBinFormat\","

View File

@@ -93,9 +93,9 @@ jobs:
- name: Install Hadoop
working-directory: ./.ci_bin
run: |
curl -LO https://downloads.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz
tar xvf hadoop-3.3.5.tar.gz
echo "$(pwd)/hadoop-3.3.5/bin" >> $GITHUB_PATH
curl -LO https://downloads.apache.org/hadoop/common/hadoop-3.3.6/hadoop-3.3.6.tar.gz
tar xvf hadoop-3.3.6.tar.gz
echo "$(pwd)/hadoop-3.3.6/bin" >> $GITHUB_PATH
- name: Install parquet-cli
id: parquet_cli
working-directory: ./.ci_bin

View File

@@ -93,9 +93,9 @@ jobs:
- name: Install Hadoop
working-directory: ./.ci_bin
run: |
curl -LO https://downloads.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz
tar xvf hadoop-3.3.5.tar.gz
echo "$(pwd)/hadoop-3.3.5/bin" >> $GITHUB_PATH
curl -LO https://downloads.apache.org/hadoop/common/hadoop-3.3.6/hadoop-3.3.6.tar.gz
tar xvf hadoop-3.3.6.tar.gz
echo "$(pwd)/hadoop-3.3.6/bin" >> $GITHUB_PATH
- name: Install parquet-cli
id: parquet_cli
working-directory: ./.ci_bin

go/Godeps/LICENSES (generated, 97 lines changed)
View File

@@ -1996,35 +1996,6 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
= LICENSE 3f09359866e587619921288cca4607374451bbd3b3f3806bc70598b6 =
================================================================================
================================================================================
= github.com/cespare/xxhash licensed under: =
Copyright (c) 2016 Caleb Spare
MIT License
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
= LICENSE.txt 726f1b8f64f7e439b1b12c7cbde7b1427752a00ddea15019e4156465 =
================================================================================
================================================================================
= github.com/cespare/xxhash/v2 licensed under: =
@@ -5034,6 +5005,74 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
= LICENSE bda64ae869be18b50125d9cfe5c370eb7248e84a2324823e4d7f2295 =
================================================================================
================================================================================
= github.com/google/go-github/v57 licensed under: =
Copyright (c) 2013 The go-github AUTHORS. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
= LICENSE 4f95ee9c8c81d66113b4c4fe66b684ae243884b5947ee854319dd9cc =
================================================================================
================================================================================
= github.com/google/go-querystring licensed under: =
Copyright (c) 2013 Google. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
= LICENSE 7b97c9585df42dc638169348f6350b491fc35fe50884a7e6cf41aa58 =
================================================================================
================================================================================
= github.com/google/s2a-go licensed under: =

View File

@@ -125,6 +125,7 @@ func CreateCloneArgParser() *argparser.ArgParser {
ap.SupportsString(dbfactory.OSSCredsFileParam, "", "file", "OSS credentials file.")
ap.SupportsString(dbfactory.OSSCredsProfile, "", "profile", "OSS profile to use.")
ap.SupportsString(UserFlag, "u", "user", "User name to use when authenticating with the remote. Gets password from the environment variable {{.EmphasisLeft}}DOLT_REMOTE_PASSWORD{{.EmphasisRight}}.")
ap.SupportsFlag(SingleBranchFlag, "", "Clone only the history leading to the tip of a single branch, either specified by --branch or the remote's HEAD (default).")
return ap
}
@@ -149,6 +150,7 @@ func CreateCleanArgParser() *argparser.ArgParser {
func CreateCheckoutArgParser() *argparser.ArgParser {
ap := argparser.NewArgParserWithVariableArgs("checkout")
ap.SupportsString(CheckoutCreateBranch, "", "branch", "Create a new branch named {{.LessThan}}new_branch{{.GreaterThan}} and start it at {{.LessThan}}start_point{{.GreaterThan}}.")
ap.SupportsString(CreateResetBranch, "", "branch", "Similar to '-b'. Forcibly resets the branch to {{.LessThan}}start_point{{.GreaterThan}} if it exists.")
ap.SupportsFlag(ForceFlag, "f", "If there are any changes in the working set, the force flag will wipe out the current changes and check out the new branch.")
ap.SupportsString(TrackFlag, "t", "", "When creating a new branch, set up 'upstream' configuration.")
return ap

View File

@@ -29,6 +29,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/events"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
config "github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/store/nbs"
"github.com/dolthub/dolt/go/store/types"
)
@@ -293,14 +294,14 @@ fatal: empty ident name not allowed
// CheckUserNameAndEmail returns true if the user name and email are set for this environment, or prints an error and
// returns false if not.
func CheckUserNameAndEmail(config *env.DoltCliConfig) bool {
_, err := config.GetString(env.UserEmailKey)
func CheckUserNameAndEmail(cfg *env.DoltCliConfig) bool {
_, err := cfg.GetString(config.UserEmailKey)
if err != nil {
PrintErr(userNameRequiredError)
return false
}
_, err = config.GetString(env.UserNameKey)
_, err = cfg.GetString(config.UserNameKey)
if err != nil {
PrintErr(userNameRequiredError)
return false

View File

@@ -25,6 +25,7 @@ const (
BranchParam = "branch"
CachedFlag = "cached"
CheckoutCreateBranch = "b"
CreateResetBranch = "B"
CommitFlag = "commit"
CopyFlag = "copy"
DateParam = "date"
@@ -59,6 +60,7 @@ const (
ShallowFlag = "shallow"
ShowIgnoredFlag = "ignored"
SilentFlag = "silent"
SingleBranchFlag = "single-branch"
SkipEmptyFlag = "skip-empty"
SoftResetParam = "soft"
SquashParam = "squash"

View File

@@ -106,15 +106,20 @@ func (cmd CheckoutCmd) Exec(ctx context.Context, commandStr string, args []strin
return 1
}
branchOrTrack := apr.Contains(cli.CheckoutCreateBranch) || apr.Contains(cli.TrackFlag)
// Argument validation in the CLI is strictly nice to have. The stored procedure will do the same, but the errors
// won't be as nice.
branchOrTrack := apr.Contains(cli.CheckoutCreateBranch) || apr.Contains(cli.CreateResetBranch) || apr.Contains(cli.TrackFlag)
if (branchOrTrack && apr.NArg() > 1) || (!branchOrTrack && apr.NArg() == 0) {
usagePrt()
return 1
}
// Branch name retrieval here is strictly for messages. dolt_checkout procedure is the authority on logic around validation.
var branchName string
if apr.Contains(cli.CheckoutCreateBranch) {
branchName, _ = apr.GetValue(cli.CheckoutCreateBranch)
} else if apr.Contains(cli.CreateResetBranch) {
branchName, _ = apr.GetValue(cli.CreateResetBranch)
} else if apr.Contains(cli.TrackFlag) {
if apr.NArg() > 0 {
usagePrt()
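For context, the new -B option appears to mirror git checkout -B: an invocation along the lines of dolt checkout -B feature main would presumably create the branch feature at main, or forcibly reset it there if it already exists. As the comments above note, the CLI only validates arguments to give friendlier errors; the dolt_checkout stored procedure remains the authority on the actual behavior.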

View File

@@ -98,6 +98,7 @@ func (cmd CloneCmd) Exec(ctx context.Context, commandStr string, args []string,
func clone(ctx context.Context, apr *argparser.ArgParseResults, dEnv *env.DoltEnv) errhand.VerboseError {
remoteName := apr.GetValueOrDefault(cli.RemoteParam, "origin")
branch := apr.GetValueOrDefault(cli.BranchParam, "")
singleBranch := apr.Contains(cli.SingleBranchFlag)
dir, urlStr, verr := parseArgs(apr)
if verr != nil {
return verr
@@ -143,7 +144,7 @@ func clone(ctx context.Context, apr *argparser.ArgParseResults, dEnv *env.DoltEn
// Nil out the old Dolt env so we don't accidentally operate on the wrong database
dEnv = nil
err = actions.CloneRemote(ctx, srcDB, remoteName, branch, clonedEnv)
err = actions.CloneRemote(ctx, srcDB, remoteName, branch, singleBranch, clonedEnv)
if err != nil {
// If we're cloning into a directory that already exists do not erase it. Otherwise
// make best effort to delete the directory we created.
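Correspondingly, the --single-branch flag registered in the clone arg parser is threaded through here as the new singleBranch argument to actions.CloneRemote, so something like dolt clone --single-branch --branch main <remote-url> would presumably fetch only the history leading to the tip of main rather than every remote branch.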

View File

@@ -37,6 +37,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/libraries/utils/editor"
"github.com/dolthub/dolt/go/libraries/utils/iohelp"
"github.com/dolthub/dolt/go/libraries/utils/set"
@@ -275,17 +276,17 @@ func handleCommitErr(sqlCtx *sql.Context, queryist cli.Queryist, err error, usag
}
if err == datas.ErrNameNotConfigured {
bdr := errhand.BuildDError("Could not determine %s.", env.UserNameKey)
bdr := errhand.BuildDError("Could not determine %s.", config.UserNameKey)
bdr.AddDetails("Log into DoltHub: dolt login")
bdr.AddDetails("OR add name to config: dolt config [--global|--local] --add %[1]s \"FIRST LAST\"", env.UserNameKey)
bdr.AddDetails("OR add name to config: dolt config [--global|--local] --add %[1]s \"FIRST LAST\"", config.UserNameKey)
return HandleVErrAndExitCode(bdr.Build(), usage)
}
if err == datas.ErrEmailNotConfigured {
bdr := errhand.BuildDError("Could not determine %s.", env.UserEmailKey)
bdr := errhand.BuildDError("Could not determine %s.", config.UserEmailKey)
bdr.AddDetails("Log into DoltHub: dolt login")
bdr.AddDetails("OR add email to config: dolt config [--global|--local] --add %[1]s \"EMAIL_ADDRESS\"", env.UserEmailKey)
bdr.AddDetails("OR add email to config: dolt config [--global|--local] --add %[1]s \"EMAIL_ADDRESS\"", config.UserEmailKey)
return HandleVErrAndExitCode(bdr.Build(), usage)
}
@@ -355,7 +356,7 @@ func getCommitMessageFromEditor(sqlCtx *sql.Context, queryist cli.Queryist, sugg
backupEd = ed
}
// try getting Dolt config core.editor
editorStr := cliCtx.Config().GetStringOrDefault(env.DoltEditor, backupEd)
editorStr := cliCtx.Config().GetStringOrDefault(config.DoltEditor, backupEd)
cli.ExecuteWithStdioRestored(func() {
commitMsg, cErr := editor.OpenCommitEditor(editorStr, initialMsg)

View File

@@ -30,6 +30,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/grpcendpoint"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
)
var checkShortDesc = "Check authentication with a credential keypair against a doltremoteapi."
@@ -101,8 +102,8 @@ func loadEndpoint(dEnv *env.DoltEnv, apr *argparser.ArgParseResults) (string, st
return getHostFromEndpoint(earg), earg
}
host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost)
port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
host := dEnv.Config.GetStringOrDefault(config.RemotesApiHostKey, env.DefaultRemotesApiHost)
port := dEnv.Config.GetStringOrDefault(config.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
return host, fmt.Sprintf("%s:%s", host, port)
}

View File

@@ -32,6 +32,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/doltcore/grpcendpoint"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
)
var importDocs = cli.CommandDocumentationContent{
@@ -149,17 +150,17 @@ func updateProfileWithCredentials(ctx context.Context, dEnv *env.DoltEnv, c cred
panic("Should have global config here...")
}
if _, err := gcfg.GetString(env.UserNameKey); err == nil {
if _, err := gcfg.GetString(config.UserNameKey); err == nil {
// Already has a name...
return nil
}
if _, err := gcfg.GetString(env.UserEmailKey); err == nil {
if _, err := gcfg.GetString(config.UserEmailKey); err == nil {
// Already has an email...
return nil
}
host := dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost)
port := dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
host := dEnv.Config.GetStringOrDefault(config.RemotesApiHostKey, env.DefaultRemotesApiHost)
port := dEnv.Config.GetStringOrDefault(config.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
hostAndPort := fmt.Sprintf("%s:%s", host, port)
cfg, err := dEnv.GetGRPCDialParams(grpcendpoint.Config{
Endpoint: hostAndPort,
@@ -178,8 +179,8 @@ func updateProfileWithCredentials(ctx context.Context, dEnv *env.DoltEnv, c cred
return fmt.Errorf("error: unable to call WhoAmI endpoint: %w", err)
}
userUpdates := map[string]string{
env.UserNameKey: resp.DisplayName,
env.UserEmailKey: resp.EmailAddress,
config.UserNameKey: resp.DisplayName,
config.UserEmailKey: resp.EmailAddress,
}
return gcfg.SetStrings(userUpdates)
}

View File

@@ -100,9 +100,9 @@ func updateConfigToUseNewCredIfNoExistingCred(dEnv *env.DoltEnv, dCreds creds.Do
panic("global config not found. Should create it here if this is a thing.")
}
_, err := gcfg.GetString(env.UserCreds)
_, err := gcfg.GetString(config.UserCreds)
if err == config.ErrConfigParamNotFound {
return gcfg.SetStrings(map[string]string{env.UserCreds: dCreds.KeyIDBase32Str()})
return gcfg.SetStrings(map[string]string{config.UserCreds: dCreds.KeyIDBase32Str()})
} else {
return err
}

View File

@@ -25,6 +25,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
)
var useDocs = cli.CommandDocumentationContent{
@@ -95,7 +96,7 @@ func (cmd UseCmd) Exec(ctx context.Context, commandStr string, args []string, dE
if !hasGCfg {
panic("global config not found. Should create it here if this is a thing.")
}
err := gcfg.SetStrings(map[string]string{env.UserCreds: cred.KeyIDBase32Str()})
err := gcfg.SetStrings(map[string]string{config.UserCreds: cred.KeyIDBase32Str()})
if err != nil {
verr = errhand.BuildDError("error: updating user credentials in config").AddCause(err).Build()
}

View File

@@ -26,6 +26,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/dconfig"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/store/datas"
"github.com/dolthub/dolt/go/store/types"
)
@@ -76,10 +77,10 @@ func (cmd InitCmd) Docs() *cli.CommandDocumentation {
func (cmd InitCmd) ArgParser() *argparser.ArgParser {
ap := argparser.NewArgParserWithMaxArgs(cmd.Name(), 0)
ap.SupportsString(usernameParamName, "", "name", fmt.Sprintf("The name used in commits to this repo. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config.", env.UserNameKey))
ap.SupportsString(emailParamName, "", "email", fmt.Sprintf("The email address used. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config.", env.UserEmailKey))
ap.SupportsString(usernameParamName, "", "name", fmt.Sprintf("The name used in commits to this repo. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config.", config.UserNameKey))
ap.SupportsString(emailParamName, "", "email", fmt.Sprintf("The email address used. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config.", config.UserEmailKey))
ap.SupportsString(cli.DateParam, "", "date", "Specify the date used in the initial commit. If not specified the current system time is used.")
ap.SupportsString(initBranchParamName, "b", "branch", fmt.Sprintf("The branch name used to initialize this database. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config. If unset, the default initialized branch will be named '%s'.", env.InitBranchName, env.DefaultInitBranch))
ap.SupportsString(initBranchParamName, "b", "branch", fmt.Sprintf("The branch name used to initialize this database. If not provided will be taken from {{.EmphasisLeft}}%s{{.EmphasisRight}} in the global config. If unset, the default initialized branch will be named '%s'.", config.InitBranchName, env.DefaultInitBranch))
ap.SupportsFlag(newFormatFlag, "", fmt.Sprintf("Specify this flag to use the new storage format (%s).", types.Format_DOLT.VersionString()))
ap.SupportsFlag(oldFormatFlag, "", fmt.Sprintf("Specify this flag to use the old storage format (%s).", types.Format_LD_1.VersionString()))
ap.SupportsFlag(funHashFlag, "", "") // This flag is an easter egg. We can't currently prevent it from being listed in the help, but the description is deliberately left blank.
@@ -123,8 +124,8 @@ func (cmd InitCmd) Exec(ctx context.Context, commandStr string, args []string, d
}
}
name = dEnv.Config.IfEmptyUseConfig(name, env.UserNameKey)
email = dEnv.Config.IfEmptyUseConfig(email, env.UserEmailKey)
name = dEnv.Config.IfEmptyUseConfig(name, config.UserNameKey)
email = dEnv.Config.IfEmptyUseConfig(email, config.UserEmailKey)
if initBranch == "" {
initBranch = env.GetDefaultInitBranch(dEnv.Config)
}
@@ -133,14 +134,14 @@ func (cmd InitCmd) Exec(ctx context.Context, commandStr string, args []string, d
cli.PrintErrln(
color.RedString("Could not determine %[1]s. "+
"Use the init parameter --name \"FIRST LAST\" to set it for this repo, "+
"or dolt config --global --add %[1]s \"FIRST LAST\"", env.UserNameKey))
"or dolt config --global --add %[1]s \"FIRST LAST\"", config.UserNameKey))
usage()
return 1
} else if email == "" {
cli.PrintErrln(
color.RedString("Could not determine %[1]s. "+
"Use the init parameter --email \"EMAIL_ADDRESS\" to set it for this repo, "+
"or dolt config --global --add %[1]s \"EMAIL_ADDRESS\"", env.UserEmailKey))
"or dolt config --global --add %[1]s \"EMAIL_ADDRESS\"", config.UserEmailKey))
usage()
return 1
}
@@ -169,10 +170,10 @@ func (cmd InitCmd) Exec(ctx context.Context, commandStr string, args []string, d
configuration := make(map[string]string)
if apr.Contains(usernameParamName) {
configuration[env.UserNameKey] = name
configuration[config.UserNameKey] = name
}
if apr.Contains(emailParamName) {
configuration[env.UserEmailKey] = email
configuration[config.UserEmailKey] = email
}
if len(configuration) > 0 {
err = dEnv.Config.WriteableConfig().SetStrings(configuration)
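For illustration, with the handling above an invocation such as dolt init --name "Bill Billerson" --email bigbillieb@fake.horse would presumably record config.UserNameKey and config.UserEmailKey for the new repository, which is what the test fixtures in the next file set up through the global config.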

View File

@@ -24,6 +24,7 @@ import (
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
)
type initTest struct {
@@ -45,8 +46,8 @@ func TestInit(t *testing.T) {
"Global config name and email",
[]string{},
map[string]string{
env.UserNameKey: "Bill Billerson",
env.UserEmailKey: "bigbillieb@fake.horse",
config.UserNameKey: "Bill Billerson",
config.UserEmailKey: "bigbillieb@fake.horse",
},
true,
},
@@ -80,8 +81,8 @@ func TestInit(t *testing.T) {
if test.ExpectSuccess {
require.True(t, dEnv.HasDoltDir(), "- .dolt dir should exist after initialization")
testLocalConfigValue(t, dEnv, test, usernameParamName, env.UserNameKey)
testLocalConfigValue(t, dEnv, test, emailParamName, env.UserEmailKey)
testLocalConfigValue(t, dEnv, test, usernameParamName, config.UserNameKey)
testLocalConfigValue(t, dEnv, test, emailParamName, config.UserEmailKey)
} else {
require.False(t, dEnv.HasDoltDir(),
"- dolt directory shouldn't exist after failure to initialize")

View File

@@ -33,6 +33,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/doltcore/grpcendpoint"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
)
const (
@@ -81,7 +82,7 @@ func (cmd LoginCmd) Docs() *cli.CommandDocumentation {
func (cmd LoginCmd) ArgParser() *argparser.ArgParser {
ap := argparser.NewArgParserWithMaxArgs(cmd.Name(), 1)
ap.SupportsString(authEndpointParam, "e", "hostname:port", fmt.Sprintf("Specify the endpoint used to authenticate this client. Must be used with --%s OR set in the configuration file as `%s`", loginURLParam, env.AddCredsUrlKey))
ap.SupportsString(authEndpointParam, "e", "hostname:port", fmt.Sprintf("Specify the endpoint used to authenticate this client. Must be used with --%s OR set in the configuration file as `%s`", loginURLParam, config.AddCredsUrlKey))
ap.SupportsString(loginURLParam, "url", "url", "Specify the login url where the browser will add credentials.")
ap.SupportsFlag(insecureParam, "i", "If set, makes an insecure connection to the remote authentication server")
ap.ArgListHelp = append(ap.ArgListHelp, [2]string{"creds", "A specific credential to use for login. If omitted, new credentials will be generated."})
@@ -100,7 +101,7 @@ func (cmd LoginCmd) Exec(ctx context.Context, commandStr string, args []string,
apr := cli.ParseArgsOrDie(ap, args, help)
// use config values over defaults, flag values over config values
loginUrl := dEnv.Config.GetStringOrDefault(env.AddCredsUrlKey, env.DefaultLoginUrl)
loginUrl := dEnv.Config.GetStringOrDefault(config.AddCredsUrlKey, env.DefaultLoginUrl)
loginUrl = apr.GetValueOrDefault(loginURLParam, loginUrl)
var authHost string
@@ -114,8 +115,8 @@ func (cmd LoginCmd) Exec(ctx context.Context, commandStr string, args []string,
}
authEndpoint = fmt.Sprintf("%s:%s", authHost, authPort)
} else {
authHost = dEnv.Config.GetStringOrDefault(env.RemotesApiHostKey, env.DefaultRemotesApiHost)
authPort = dEnv.Config.GetStringOrDefault(env.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
authHost = dEnv.Config.GetStringOrDefault(config.RemotesApiHostKey, env.DefaultRemotesApiHost)
authPort = dEnv.Config.GetStringOrDefault(config.RemotesApiHostPortKey, env.DefaultRemotesApiPort)
authEndpoint = fmt.Sprintf("%s:%s", authHost, authPort)
}
@@ -128,10 +129,10 @@ func (cmd LoginCmd) Exec(ctx context.Context, commandStr string, args []string,
var err error
if !insecure {
insecureStr := dEnv.Config.GetStringOrDefault(env.DoltLabInsecureKey, "false")
insecureStr := dEnv.Config.GetStringOrDefault(config.DoltLabInsecureKey, "false")
insecure, err = strconv.ParseBool(insecureStr)
if err != nil {
HandleVErrAndExitCode(errhand.BuildDError(fmt.Sprintf("The config value of '%s' is '%s' which is not a valid true/false value", env.DoltLabInsecureKey, insecureStr)).Build(), usage)
HandleVErrAndExitCode(errhand.BuildDError(fmt.Sprintf("The config value of '%s' is '%s' which is not a valid true/false value", config.DoltLabInsecureKey, insecureStr)).Build(), usage)
}
}
@@ -269,9 +270,9 @@ func updateConfig(dEnv *env.DoltEnv, whoAmI *remotesapi.WhoAmIResponse, dCreds c
panic("global config not found. Should create it here if this is a thing.")
}
gcfg.SetStrings(map[string]string{env.UserCreds: dCreds.KeyIDBase32Str()})
gcfg.SetStrings(map[string]string{config.UserCreds: dCreds.KeyIDBase32Str()})
userUpdates := map[string]string{env.UserNameKey: whoAmI.DisplayName, env.UserEmailKey: whoAmI.EmailAddress}
userUpdates := map[string]string{config.UserNameKey: whoAmI.DisplayName, config.UserEmailKey: whoAmI.EmailAddress}
lcfg, hasLCfg := dEnv.Config.GetConfig(env.LocalConfig)
if hasLCfg {

View File

@@ -36,6 +36,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/merge"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/store/util/outputpager"
)
@@ -174,8 +175,8 @@ func validateDoltMergeArgs(apr *argparser.ArgParseResults, usage cli.UsagePrinte
if !cli.CheckUserNameAndEmail(cliCtx.Config()) {
bdr := errhand.BuildDError("Could not determine name and/or email.")
bdr.AddDetails("Log into DoltHub: dolt login")
bdr.AddDetails("OR add name to config: dolt config [--global|--local] --add %[1]s \"FIRST LAST\"", env.UserNameKey)
bdr.AddDetails("OR add email to config: dolt config [--global|--local] --add %[1]s \"EMAIL_ADDRESS\"", env.UserEmailKey)
bdr.AddDetails("OR add name to config: dolt config [--global|--local] --add %[1]s \"FIRST LAST\"", config.UserNameKey)
bdr.AddDetails("OR add email to config: dolt config [--global|--local] --add %[1]s \"EMAIL_ADDRESS\"", config.UserEmailKey)
return HandleVErrAndExitCode(bdr.Build(), usage)
}

View File

@@ -31,6 +31,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/store/util/outputpager"
)
@@ -93,8 +94,8 @@ func (cmd PullCmd) Exec(ctx context.Context, commandStr string, args []string, d
if !cli.CheckUserNameAndEmail(cliCtx.Config()) {
bdr := errhand.BuildDError("Could not determine name and/or email.")
bdr.AddDetails("Log into DoltHub: dolt login")
bdr.AddDetails("OR add name to config: dolt config [--global|--local] --add %[1]s \"FIRST LAST\"", env.UserNameKey)
bdr.AddDetails("OR add email to config: dolt config [--global|--local] --add %[1]s \"EMAIL_ADDRESS\"", env.UserEmailKey)
bdr.AddDetails("OR add name to config: dolt config [--global|--local] --add %[1]s \"FIRST LAST\"", config.UserNameKey)
bdr.AddDetails("OR add email to config: dolt config [--global|--local] --add %[1]s \"EMAIL_ADDRESS\"", config.UserEmailKey)
return HandleVErrAndExitCode(bdr.Build(), usage)
}

View File

@@ -59,7 +59,7 @@ func TestGetAbsRemoteUrl(t *testing.T) {
{
"ts/emp",
config.NewMapConfig(map[string]string{
env.RemotesApiHostKey: "host.dom",
config.RemotesApiHostKey: "host.dom",
}),
"https://host.dom/ts/emp",
"https",
@@ -75,7 +75,7 @@ func TestGetAbsRemoteUrl(t *testing.T) {
{
"https://test.org:443/ts/emp",
config.NewMapConfig(map[string]string{
env.RemotesApiHostKey: "host.dom",
config.RemotesApiHostKey: "host.dom",
}),
"https://test.org:443/ts/emp",
"https",
@@ -84,7 +84,7 @@ func TestGetAbsRemoteUrl(t *testing.T) {
{
"localhost/ts/emp",
config.NewMapConfig(map[string]string{
env.RemotesApiHostKey: "host.dom",
config.RemotesApiHostKey: "host.dom",
}),
"https://localhost/ts/emp",
"https",

View File

@@ -17,26 +17,29 @@ package commands
import (
"context"
"fmt"
"log"
"os"
"strconv"
"time"
"github.com/fatih/color"
"github.com/sirupsen/logrus"
"google.golang.org/grpc"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/cmd/dolt/errhand"
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/grpcendpoint"
"github.com/dolthub/dolt/go/libraries/events"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/libraries/utils/config"
)
// SendMetricsCommand is the command used for sending metrics
const (
SendMetricsCommand = "send-metrics"
outputFlag = "output"
sendMetricsShortDesc = "Send metrics to the events server or print them to stdout"
EventsOutputFormat = "output-format"
sendMetricsShortDesc = "Send usage metrics to the events server (default), or log them in another way"
)
type SendMetricsCmd struct{}
@@ -68,12 +71,16 @@ func (cmd SendMetricsCmd) Docs() *cli.CommandDocumentation {
func (cmd SendMetricsCmd) ArgParser() *argparser.ArgParser {
ap := argparser.NewArgParserWithMaxArgs(cmd.Name(), 0)
ap.SupportsFlag(outputFlag, "o", "Flush events to stdout.")
ap.SupportsString(
EventsOutputFormat,
"r",
"output-format",
"Format of the events output. Valid values are null, stdout, grpc, file, logger. Defaults to grpc.",
)
return ap
}
// Exec is the implementation of the command that flushes the events to the grpc service
// Exec executes the command
func (cmd SendMetricsCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, cliCtx cli.CliContext) int {
if dEnv.DoltDB != nil { // see go/cmd/dolt/dolt.go:interceptSendMetrics()
cli.PrintErrln("expected DoltEnv without DoltDB")
@@ -82,14 +89,13 @@ func (cmd SendMetricsCmd) Exec(ctx context.Context, commandStr string, args []st
ap := cmd.ArgParser()
help, _ := cli.HelpAndUsagePrinters(cli.CommandDocsForCommandString(commandStr, cli.CommandDocumentationContent{ShortDesc: sendMetricsShortDesc}, ap))
help, usage := cli.HelpAndUsagePrinters(cli.CommandDocsForCommandString(commandStr, cli.CommandDocumentationContent{ShortDesc: sendMetricsShortDesc}, ap))
apr := cli.ParseArgsOrDie(ap, args, help)
metricsDisabled := dEnv.Config.GetStringOrDefault(env.MetricsDisabled, "false")
metricsDisabled := dEnv.Config.GetStringOrDefault(config.MetricsDisabled, "false")
disabled, err := strconv.ParseBool(metricsDisabled)
if err != nil {
// log.Print(err)
return 1
}
@@ -98,74 +104,108 @@ func (cmd SendMetricsCmd) Exec(ctx context.Context, commandStr string, args []st
return 0
}
if !disabled {
ctx, cancel := context.WithTimeout(ctx, time.Minute)
defer cancel()
ctx, cancel := context.WithTimeout(ctx, time.Minute)
defer cancel()
root, err := dEnv.GetUserHomeDir()
if err != nil {
// log.Print(err)
return 1
}
dolt := dbfactory.DoltDir
var flusher events.Flusher
if apr.Contains(outputFlag) {
flusher = events.NewIOFlusher(dEnv.FS, root, dolt)
} else {
grpcEmitter := getGRPCEmitter(dEnv)
flusher = events.NewGrpcEventFlusher(dEnv.FS, root, dolt, grpcEmitter)
}
err = flusher.Flush(ctx)
if err != nil {
if err == events.ErrFileLocked {
return 2
}
return 1
}
return 0
userHomeDir, err := dEnv.GetUserHomeDir()
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
return 1
output := apr.GetValueOrDefault(EventsOutputFormat, events.EmitterTypeGrpc)
err = FlushLoggedEvents(ctx, dEnv, userHomeDir, output)
if err != nil {
cli.PrintErrf("Error flushing events: %s\n", err.Error())
if err == events.ErrFileLocked {
return 2
}
return 1
}
return 0
}
// getGRPCEmitter gets the connection to the events grpc service
func getGRPCEmitter(dEnv *env.DoltEnv) *events.GrpcEmitter {
host := dEnv.Config.GetStringOrDefault(env.MetricsHost, env.DefaultMetricsHost)
portStr := dEnv.Config.GetStringOrDefault(env.MetricsPort, env.DefaultMetricsPort)
insecureStr := dEnv.Config.GetStringOrDefault(env.MetricsInsecure, "false")
// FlushLoggedEvents flushes any logged events in the directory given to an appropriate event emitter
func FlushLoggedEvents(ctx context.Context, dEnv *env.DoltEnv, userHomeDir string, outputType string) error {
emitter, err := NewEmitter(outputType, dEnv)
if err != nil {
return err
}
flusher := events.NewFileFlusher(dEnv.FS, userHomeDir, dbfactory.DoltDir, emitter)
return flusher.Flush(ctx)
}
// NewEmitter returns an emitter for the given configuration provider, of the type named. If an empty name is provided,
// defaults to a file-based emitter.
func NewEmitter(emitterType string, pro EmitterConfigProvider) (events.Emitter, error) {
switch emitterType {
case events.EmitterTypeNull:
return events.NullEmitter{}, nil
case events.EmitterTypeStdout:
return events.WriterEmitter{Wr: os.Stdout}, nil
case events.EmitterTypeGrpc:
return GRPCEmitterForConfig(pro)
case events.EmitterTypeFile:
homeDir, err := pro.GetUserHomeDir()
if err != nil {
return nil, err
}
return events.NewFileEmitter(homeDir, dbfactory.DoltDir), nil
case events.EmitterTypeLogger:
return events.NewLoggerEmitter(logrus.DebugLevel), nil
default:
return nil, fmt.Errorf("unknown emitter type: %s", emitterType)
}
}
// GRPCEmitterForConfig returns a GRPC event emitter for the given configuration provider, or an error if one
// cannot be created
func GRPCEmitterForConfig(pro EmitterConfigProvider) (*events.GrpcEmitter, error) {
cfg, err := GRPCEventRemoteConfig(pro)
if err != nil {
return nil, err
}
conn, err := grpc.Dial(cfg.Endpoint, cfg.DialOptions...)
if err != nil {
return nil, err
}
return events.NewGrpcEmitter(conn), nil
}
// GRPCEventRemoteConfig returns a GRPCRemoteConfig for the given configuration provider
func GRPCEventRemoteConfig(pro EmitterConfigProvider) (dbfactory.GRPCRemoteConfig, error) {
host := pro.GetConfig().GetStringOrDefault(config.MetricsHost, events.DefaultMetricsHost)
portStr := pro.GetConfig().GetStringOrDefault(config.MetricsPort, events.DefaultMetricsPort)
insecureStr := pro.GetConfig().GetStringOrDefault(config.MetricsInsecure, "false")
port, err := strconv.ParseUint(portStr, 10, 16)
if err != nil {
log.Println(color.YellowString("The config value of '%s' is '%s' which is not a valid port.", env.MetricsPort, portStr))
return nil
return dbfactory.GRPCRemoteConfig{}, nil
}
insecure, err := strconv.ParseBool(insecureStr)
if err != nil {
log.Println(color.YellowString("The config value of '%s' is '%s' which is not a valid true/false value", env.MetricsInsecure, insecureStr))
}
insecure, _ := strconv.ParseBool(insecureStr)
hostAndPort := fmt.Sprintf("%s:%d", host, port)
cfg, err := dEnv.GetGRPCDialParams(grpcendpoint.Config{
cfg, err := pro.GetGRPCDialParams(grpcendpoint.Config{
Endpoint: hostAndPort,
Insecure: insecure,
})
if err != nil {
return nil
return dbfactory.GRPCRemoteConfig{}, nil
}
conn, err := grpc.Dial(cfg.Endpoint, cfg.DialOptions...)
if err != nil {
return nil
}
return events.NewGrpcEmitter(conn)
return cfg, nil
}
// EmitterConfigProvider is an interface used to get the configuration to create an emitter
type EmitterConfigProvider interface {
GetGRPCDialParams(config grpcendpoint.Config) (dbfactory.GRPCRemoteConfig, error)
GetConfig() config.ReadableConfig
GetUserHomeDir() (string, error)
}
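To make the factory concrete, here is a minimal sketch (not part of this commit) of how it might be used from inside this package. It assumes a loaded *env.DoltEnv, which satisfies EmitterConfigProvider (the heartbeat service added in the sql-server changes passes one in the same way), and it borrows the uuid and eventsapi imports used elsewhere in this change.

// emitStatusEvent is a hypothetical helper: it builds a stdout emitter through the
// factory above and logs a single client event with it.
func emitStatusEvent(version string, dEnv *env.DoltEnv) error {
    emitter, err := NewEmitter(events.EmitterTypeStdout, dEnv)
    if err != nil {
        return err
    }
    t := events.NowTimestamp()
    return emitter.LogEvents(version, []*eventsapi.ClientEvent{{
        Id:        uuid.New().String(),
        StartTime: t,
        EndTime:   t,
        Type:      eventsapi.ClientEventType_STATUS,
    }})
}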

View File

@@ -21,6 +21,7 @@ import (
"fmt"
"net"
"net/http"
"os"
"runtime"
"strconv"
"strings"
@@ -33,6 +34,7 @@ import (
"github.com/dolthub/go-mysql-server/sql/plan"
"github.com/dolthub/go-mysql-server/sql/types"
"github.com/dolthub/vitess/go/mysql"
"github.com/google/uuid"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/sirupsen/logrus"
goerrors "gopkg.in/src-d/go-errors.v1"
@@ -40,6 +42,7 @@ import (
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/cmd/dolt/commands"
"github.com/dolthub/dolt/go/cmd/dolt/commands/engine"
eventsapi "github.com/dolthub/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/remotesrv"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle"
@@ -48,6 +51,8 @@ import (
_ "github.com/dolthub/dolt/go/libraries/doltcore/sqle/dfunctions"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/libraries/doltcore/sqlserver"
"github.com/dolthub/dolt/go/libraries/events"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/libraries/utils/svcs"
)
@@ -56,6 +61,9 @@ const (
ApiSqleContextKey = "__sqle_context__"
)
// sqlServerHeartbeatIntervalEnvVar names the environment variable that overrides the interval between heartbeats sent to the remote server; used for testing
const sqlServerHeartbeatIntervalEnvVar = "DOLT_SQL_SERVER_HEARTBEAT_INTERVAL"
// ExternalDisableUsers is called by implementing applications to disable users. This is not used by Dolt itself,
// but will break compatibility with implementing applications that do not yet support users.
var ExternalDisableUsers bool = false
@@ -124,6 +132,8 @@ func Serve(
}
controller.Register(InitLogging)
controller.Register(newHeartbeatService(version, dEnv))
fs := dEnv.FS
InitDataDir := &svcs.AnonService{
InitF: func(context.Context) (err error) {
@@ -561,6 +571,83 @@ func Serve(
return nil, controller.WaitForStop()
}
// heartbeatService is a service that sends a heartbeat event to the metrics server once a day
type heartbeatService struct {
version string
eventEmitter events.Emitter
interval time.Duration
}
func newHeartbeatService(version string, dEnv *env.DoltEnv) *heartbeatService {
metricsDisabled := dEnv.Config.GetStringOrDefault(config.MetricsDisabled, "false")
disabled, err := strconv.ParseBool(metricsDisabled)
if err != nil || disabled {
return &heartbeatService{} // will be defunct on Run()
}
emitterType, ok := os.LookupEnv(events.EmitterTypeEnvVar)
if !ok {
emitterType = events.EmitterTypeGrpc
}
interval, ok := os.LookupEnv(sqlServerHeartbeatIntervalEnvVar)
if !ok {
interval = "24h"
}
duration, err := time.ParseDuration(interval)
if err != nil {
return &heartbeatService{} // will be defunct on Run()
}
emitter, err := commands.NewEmitter(emitterType, dEnv)
if err != nil {
return &heartbeatService{} // will be defunct on Run()
}
return &heartbeatService{
version: version,
eventEmitter: emitter,
interval: duration,
}
}
func (h *heartbeatService) Init(ctx context.Context) error { return nil }
func (h *heartbeatService) Stop() error { return nil }
func (h *heartbeatService) Run(ctx context.Context) {
// Faulty config settings or disabled metrics can cause us to not have a valid event emitter
if h.eventEmitter == nil {
return
}
ticker := time.NewTicker(h.interval)
defer ticker.Stop()
for {
select {
case <-ctx.Done():
return
case <-ticker.C:
t := events.NowTimestamp()
err := h.eventEmitter.LogEvents(h.version, []*eventsapi.ClientEvent{
{
Id: uuid.New().String(),
StartTime: t,
EndTime: t,
Type: eventsapi.ClientEventType_SQL_SERVER_HEARTBEAT,
},
})
if err != nil {
logrus.Debugf("failed to send heartbeat event: %v", err)
}
}
}
}
var _ svcs.Service = &heartbeatService{}
func persistServerLocalCreds(port int, dEnv *env.DoltEnv) (*LocalCreds, error) {
creds := NewLocalCreds(port)
err := WriteLocalCreds(dEnv.FS, creds)
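A small, hypothetical sketch of how the heartbeat service above could be exercised quickly (say, from a test in this package), using the environment hooks it reads; dEnv stands in for a loaded *env.DoltEnv:

// Shorten the 24h default interval and print heartbeats to stdout instead of sending them over grpc.
os.Setenv(sqlServerHeartbeatIntervalEnvVar, "5s")
os.Setenv(events.EmitterTypeEnvVar, events.EmitterTypeStdout)

svc := newHeartbeatService("1.29.1", dEnv)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
go svc.Run(ctx) // emits a SQL_SERVER_HEARTBEAT event every 5s until ctx is cancelled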

View File

@@ -25,6 +25,7 @@ import (
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/cmd/dolt/errhand"
eventsapi "github.com/dolthub/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
@@ -72,6 +73,7 @@ func (cmd StatusCmd) RequiresRepo() bool {
}
var _ cli.RepoNotRequiredCommand = StatusCmd{}
var _ cli.EventMonitoredCommand = StatusCmd{}
// Name returns the name of the Dolt cli command. This is what is used on the command line to invoke the command
func (cmd StatusCmd) Name() string {
@@ -94,6 +96,10 @@ func (cmd StatusCmd) ArgParser() *argparser.ArgParser {
return ap
}
func (cmd StatusCmd) EventType() eventsapi.ClientEventType {
return eventsapi.ClientEventType_STATUS
}
// Exec executes the command
func (cmd StatusCmd) Exec(ctx context.Context, commandStr string, args []string, _ *env.DoltEnv, cliCtx cli.CliContext) int {
// parse arguments

View File

@@ -16,9 +16,18 @@ package commands
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/fatih/color"
"github.com/google/go-github/v57/github"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/cmd/dolt/errhand"
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dfunctions"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
@@ -27,8 +36,17 @@ import (
const (
featureVersionFlag = "feature"
verboseFlag = "verbose"
versionCheckFile = "version_check.txt"
)
var versionDocs = cli.CommandDocumentationContent{
ShortDesc: "Displays the version for the Dolt binary.",
LongDesc: `Displays the version for the Dolt binary.`,
Synopsis: []string{
`[--verbose] [--feature]`,
},
}
type VersionCmd struct {
VersionStr string
}
@@ -40,7 +58,7 @@ func (cmd VersionCmd) Name() string {
// Description returns a description of the command
func (cmd VersionCmd) Description() string {
return "Displays the current Dolt cli version."
return versionDocs.ShortDesc
}
// RequiresRepo should return false if this interface is implemented, and the command does not have the requirement
@@ -50,7 +68,8 @@ func (cmd VersionCmd) RequiresRepo() bool {
}
func (cmd VersionCmd) Docs() *cli.CommandDocumentation {
return nil
ap := cmd.ArgParser()
return cli.NewCommandDocumentation(versionDocs, ap)
}
func (cmd VersionCmd) ArgParser() *argparser.ArgParser {
@@ -63,11 +82,18 @@ func (cmd VersionCmd) ArgParser() *argparser.ArgParser {
// Version displays the version of the running dolt client
// Exec executes the command
func (cmd VersionCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, cliCtx cli.CliContext) int {
ap := cmd.ArgParser()
help, usage := cli.HelpAndUsagePrinters(cli.CommandDocsForCommandString(commandStr, versionDocs, ap))
apr := cli.ParseArgsOrDie(ap, args, help)
cli.Println("dolt version", cmd.VersionStr)
usage := func() {}
ap := cmd.ArgParser()
apr := cli.ParseArgsOrDie(ap, args, usage)
var verr errhand.VerboseError
verr = checkAndPrintVersionOutOfDateWarning(cmd.VersionStr, dEnv)
if verr != nil {
// print error but don't fail
cli.PrintErrf(color.YellowString(verr.Verbose()))
}
if apr.Contains(verboseFlag) {
if dEnv.HasDoltDir() && dEnv.RSLoadErr == nil && !cli.CheckEnvIsValid(dEnv) {
@@ -78,7 +104,6 @@ func (cmd VersionCmd) Exec(ctx context.Context, commandStr string, args []string
}
}
var verr errhand.VerboseError
if apr.Contains(featureVersionFlag) {
if !cli.CheckEnvIsValid(dEnv) {
return 2
@@ -92,12 +117,84 @@ func (cmd VersionCmd) Exec(ctx context.Context, commandStr string, args []string
fv, ok, err := wr.GetFeatureVersion(ctx)
if err != nil {
verr = errhand.BuildDError("error reading feature version").AddCause(err).Build()
return HandleVErrAndExitCode(verr, usage)
} else if !ok {
verr = errhand.BuildDError("the current head does not have a feature version").Build()
return HandleVErrAndExitCode(verr, usage)
} else {
cli.Println("feature version:", fv)
}
}
return HandleVErrAndExitCode(verr, usage)
return HandleVErrAndExitCode(nil, usage)
}
// checkAndPrintVersionOutOfDateWarning checks if the current version of Dolt is out of date and prints a warning if it
// is. Restricts this check to at most once per week.
func checkAndPrintVersionOutOfDateWarning(curVersion string, dEnv *env.DoltEnv) errhand.VerboseError {
var latestRelease string
var verr errhand.VerboseError
homeDir, err := dEnv.GetUserHomeDir()
if err != nil {
return errhand.BuildDError("error: failed to get user home directory").AddCause(err).Build()
}
path := filepath.Join(homeDir, dbfactory.DoltDir, versionCheckFile)
if exists, _ := dEnv.FS.Exists(path); exists {
vCheck, err := dEnv.FS.ReadFile(path)
if err != nil {
return errhand.BuildDError("error: failed to read version check file").AddCause(err).Build()
}
vCheckData := strings.Split(string(vCheck), ",")
if len(vCheckData) != 2 {
// formatting or data is wrong, so just overwrite
latestRelease, verr = getLatestDoltReleaseAndRecord(path, dEnv)
if verr != nil {
return verr
}
} else {
latestRelease = vCheckData[0]
lastCheckDate, err := time.Parse(time.DateOnly, vCheckData[1])
if err != nil {
return errhand.BuildDError("error: failed to parse version check file").AddCause(err).Build()
}
if lastCheckDate.Before(time.Now().AddDate(0, 0, -7)) {
latestRelease, verr = getLatestDoltReleaseAndRecord(path, dEnv)
if verr != nil {
return verr
}
}
}
} else {
latestRelease, verr = getLatestDoltReleaseAndRecord(path, dEnv)
if verr != nil {
return verr
}
}
if curVersion != latestRelease {
cli.Printf(color.YellowString("Warning: you are on an old version of Dolt. The newest version is %s.\n", latestRelease))
}
return nil
}
// getLatestDoltReleaseAndRecord returns the latest release of Dolt from GitHub and records the release and current date in the
// version check file.
func getLatestDoltReleaseAndRecord(path string, dEnv *env.DoltEnv) (string, errhand.VerboseError) {
client := github.NewClient(nil)
release, resp, err := client.Repositories.GetLatestRelease(context.Background(), "dolthub", "dolt")
if err != nil || resp.StatusCode != 200 {
return "", errhand.BuildDError("error: failed to verify latest release").AddCause(err).Build()
}
releaseName := strings.TrimPrefix(*release.TagName, "v")
err = dEnv.FS.WriteFile(path, []byte(fmt.Sprintf("%s,%s", releaseName, time.Now().UTC().Format(time.DateOnly))), os.ModePerm)
if err != nil {
return "", errhand.BuildDError("error: failed to update version check file").AddCause(err).Build()
}
return releaseName, nil
}
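For illustration, with the logic above the version check file under the user's Dolt directory holds a single line such as 1.29.1,2023-12-07: the latest release tag with the leading v stripped, a comma, and the UTC date of the last check. The GitHub lookup is repeated only when that date is more than seven days old, or when the file is missing or malformed.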

View File

@@ -24,6 +24,7 @@ import (
_ "net/http/pprof"
"os"
"os/exec"
"path/filepath"
"strconv"
"time"
@@ -64,7 +65,7 @@ import (
)
const (
Version = "1.29.0"
Version = "1.29.1"
)
var dumpDocsCommand = &commands.DumpDocsCmd{}
@@ -207,12 +208,19 @@ Dolt subcommands are in transition to using the flags listed below as global fla
Not all subcommands use these flags. If your command accepts these flags without error, then they are supported.
`
const disableEventFlushEnvVar = "DOLT_DISABLE_EVENT_FLUSH"
var eventFlushDisabled = false
func init() {
dumpDocsCommand.DoltCommand = doltCommand
dumpDocsCommand.GlobalDocs = globalDocs
dumpDocsCommand.GlobalSpecialMsg = globalSpecialMsg
dumpZshCommand.DoltCommand = doltCommand
dfunctions.VersionString = Version
if _, ok := os.LookupEnv(disableEventFlushEnvVar); ok {
eventFlushDisabled = true
}
}
const pprofServerFlag = "--pprof-server"
@@ -437,7 +445,7 @@ func runMain() int {
fs = filesys.LocalFS
dEnv := env.Load(ctx, env.GetCurrentUserHomeDir, fs, doltdb.LocalDirDoltDB, Version)
root, err := env.GetCurrentUserHomeDir()
homeDir, err := env.GetCurrentUserHomeDir()
if err != nil {
cli.PrintErrln(color.RedString("Failed to load the HOME directory: %v", err))
return 1
@@ -465,32 +473,7 @@ func runMain() int {
return 1
}
emitter := events.NewFileEmitter(root, dbfactory.DoltDir)
defer func() {
ces := events.GlobalCollector.Close()
// events.WriterEmitter{cli.CliOut}.LogEvents(Version, ces)
metricsDisabled := dEnv.Config.GetStringOrDefault(env.MetricsDisabled, "false")
disabled, err := strconv.ParseBool(metricsDisabled)
if err != nil {
// log.Print(err)
return
}
if disabled {
return
}
// write events
_ = emitter.LogEvents(Version, ces)
// flush events
if err := processEventsDir(args, dEnv); err != nil {
// log.Print(err)
}
}()
defer emitUsageEvents(dEnv, homeDir, args)
if needsWriteAccess(subcommandName) {
err = reconfigIfTempFileMoveFails(dEnv)
@@ -760,34 +743,60 @@ func seedGlobalRand() {
rand.Seed(int64(binary.LittleEndian.Uint64(bs)))
}
// processEventsDir runs the dolt send-metrics command in a new process
func processEventsDir(args []string, dEnv *env.DoltEnv) error {
if len(args) > 0 {
ignoreCommands := map[string]struct{}{
commands.SendMetricsCommand: {},
"init": {},
"config": {},
}
// emitUsageEvents is called after a command is run to emit usage events and send them to metrics servers.
// Two controls of this behavior are possible:
// 1. The config key |metrics.disabled|, when set to |true|, disables all metrics emission
// 2. The environment key |DOLT_DISABLE_EVENT_FLUSH| allows writing events to disk but not sending them to the server.
// This is mostly used for testing.
func emitUsageEvents(dEnv *env.DoltEnv, homeDir string, args []string) {
metricsDisabled := dEnv.Config.GetStringOrDefault(config.MetricsDisabled, "false")
disabled, err := strconv.ParseBool(metricsDisabled)
if err != nil || disabled {
return
}
_, ok := ignoreCommands[args[0]]
// write events
emitter := events.NewFileEmitter(homeDir, dbfactory.DoltDir)
_ = emitter.LogEvents(Version, events.GlobalCollector.Close())
if ok {
return nil
}
// flush events
if !eventFlushDisabled && len(args) > 0 && shouldFlushEvents(args[0]) {
_ = flushEventsDir()
}
}
cmd := exec.Command("dolt", commands.SendMetricsCommand)
// flushEventsDir flushes all logged events in a separate process.
// This is done without blocking so that the main process can exit immediately in the case of a slow network.
func flushEventsDir() error {
path, err := os.Executable()
if err != nil {
return err
}
if err := cmd.Start(); err != nil {
// log.Print(err)
return err
}
absPath, err := filepath.Abs(path)
if err != nil {
return err
}
return nil
cmd := exec.Command(absPath, commands.SendMetricsCommand)
if err := cmd.Start(); err != nil {
return err
}
return nil
}
func shouldFlushEvents(command string) bool {
ignoreCommands := map[string]struct{}{
commands.SendMetricsCommand: {},
"init": {},
"config": {},
}
_, ok := ignoreCommands[command]
return !ok
}
func interceptSendMetrics(ctx context.Context, args []string) (bool, int) {
if len(args) < 1 || args[0] != commands.SendMetricsCommand {
return false, 0
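Putting the two controls together: per the comment on emitUsageEvents, setting the metrics.disabled config key to true (for example with the dolt config --global --add form shown in the error messages earlier in this commit) suppresses usage metrics entirely, while exporting DOLT_DISABLE_EVENT_FLUSH still writes events to disk but skips spawning the background send-metrics process.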

View File

@@ -156,6 +156,7 @@ const (
ClientEventType_SHOW ClientEventType = 61
ClientEventType_PROFILE ClientEventType = 62
ClientEventType_REFLOG ClientEventType = 63
ClientEventType_SQL_SERVER_HEARTBEAT ClientEventType = 64
)
// Enum value maps for ClientEventType.
@@ -224,6 +225,8 @@ var (
60: "STASH_POP",
61: "SHOW",
62: "PROFILE",
63: "REFLOG",
64: "SQL_SERVER_HEARTBEAT",
}
ClientEventType_value = map[string]int32{
"TYPE_UNSPECIFIED": 0,
@@ -289,6 +292,8 @@ var (
"STASH_POP": 60,
"SHOW": 61,
"PROFILE": 62,
"REFLOG": 63,
"SQL_SERVER_HEARTBEAT": 64,
}
)
@@ -422,6 +427,7 @@ type AppID int32
const (
AppID_APP_ID_UNSPECIFIED AppID = 0
AppID_APP_DOLT AppID = 1
AppID_APP_DOLTGRES AppID = 2
)
// Enum value maps for AppID.
@@ -429,10 +435,12 @@ var (
AppID_name = map[int32]string{
0: "APP_ID_UNSPECIFIED",
1: "APP_DOLT",
2: "APP_DOLTGRES",
}
AppID_value = map[string]int32{
"APP_ID_UNSPECIFIED": 0,
"APP_DOLT": 1,
"APP_DOLTGRES": 2,
}
)
@@ -476,7 +484,7 @@ var file_dolt_services_eventsapi_v1alpha1_event_constants_proto_rawDesc = []byte
0x52, 0x4d, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00,
0x12, 0x09, 0x0a, 0x05, 0x4c, 0x49, 0x4e, 0x55, 0x58, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x57,
0x49, 0x4e, 0x44, 0x4f, 0x57, 0x53, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x41, 0x52, 0x57,
0x49, 0x4e, 0x10, 0x03, 0x2a, 0xfd, 0x07, 0x0a, 0x0f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x45,
0x49, 0x4e, 0x10, 0x03, 0x2a, 0xa3, 0x08, 0x0a, 0x0f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x45,
0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08,
0x0a, 0x04, 0x49, 0x4e, 0x49, 0x54, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x41, 0x54,
@@ -540,27 +548,30 @@ var file_dolt_services_eventsapi_v1alpha1_event_constants_proto_rawDesc = []byte
0x0a, 0x0a, 0x53, 0x54, 0x41, 0x53, 0x48, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x3b, 0x12, 0x0d,
0x0a, 0x09, 0x53, 0x54, 0x41, 0x53, 0x48, 0x5f, 0x50, 0x4f, 0x50, 0x10, 0x3c, 0x12, 0x08, 0x0a,
0x04, 0x53, 0x48, 0x4f, 0x57, 0x10, 0x3d, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x52, 0x4f, 0x46, 0x49,
0x4c, 0x45, 0x10, 0x3e, 0x2a, 0x6a, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x49, 0x44,
0x12, 0x16, 0x0a, 0x12, 0x4d, 0x45, 0x54, 0x52, 0x49, 0x43, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45,
0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x42, 0x59, 0x54, 0x45,
0x53, 0x5f, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44, 0x45, 0x44, 0x10, 0x01, 0x12, 0x17,
0x0a, 0x13, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x4d, 0x53, 0x5f, 0x45, 0x4c,
0x41, 0x50, 0x53, 0x45, 0x44, 0x10, 0x02, 0x12, 0x17, 0x0a, 0x13, 0x52, 0x45, 0x4d, 0x4f, 0x54,
0x45, 0x41, 0x50, 0x49, 0x5f, 0x52, 0x50, 0x43, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x03,
0x2a, 0x45, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x49, 0x44, 0x12,
0x19, 0x0a, 0x15, 0x41, 0x54, 0x54, 0x52, 0x49, 0x42, 0x55, 0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53,
0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x15, 0x0a, 0x11, 0x52, 0x45,
0x4d, 0x4f, 0x54, 0x45, 0x5f, 0x55, 0x52, 0x4c, 0x5f, 0x53, 0x43, 0x48, 0x45, 0x4d, 0x45, 0x10,
0x02, 0x22, 0x04, 0x08, 0x01, 0x10, 0x01, 0x2a, 0x2d, 0x0a, 0x05, 0x41, 0x70, 0x70, 0x49, 0x44,
0x12, 0x16, 0x0a, 0x12, 0x41, 0x50, 0x50, 0x5f, 0x49, 0x44, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45,
0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x41, 0x50, 0x50, 0x5f,
0x44, 0x4f, 0x4c, 0x54, 0x10, 0x01, 0x42, 0x51, 0x5a, 0x4f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x68, 0x75, 0x62, 0x2f, 0x64, 0x6f, 0x6c,
0x74, 0x2f, 0x67, 0x6f, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x64,
0x6f, 0x6c, 0x74, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x65, 0x76, 0x65,
0x6e, 0x74, 0x73, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b,
0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x33,
0x4c, 0x45, 0x10, 0x3e, 0x12, 0x0a, 0x0a, 0x06, 0x52, 0x45, 0x46, 0x4c, 0x4f, 0x47, 0x10, 0x3f,
0x12, 0x18, 0x0a, 0x14, 0x53, 0x51, 0x4c, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x45, 0x52, 0x5f, 0x48,
0x45, 0x41, 0x52, 0x54, 0x42, 0x45, 0x41, 0x54, 0x10, 0x40, 0x2a, 0x6a, 0x0a, 0x08, 0x4d, 0x65,
0x74, 0x72, 0x69, 0x63, 0x49, 0x44, 0x12, 0x16, 0x0a, 0x12, 0x4d, 0x45, 0x54, 0x52, 0x49, 0x43,
0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x14,
0x0a, 0x10, 0x42, 0x59, 0x54, 0x45, 0x53, 0x5f, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44,
0x45, 0x44, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44,
0x5f, 0x4d, 0x53, 0x5f, 0x45, 0x4c, 0x41, 0x50, 0x53, 0x45, 0x44, 0x10, 0x02, 0x12, 0x17, 0x0a,
0x13, 0x52, 0x45, 0x4d, 0x4f, 0x54, 0x45, 0x41, 0x50, 0x49, 0x5f, 0x52, 0x50, 0x43, 0x5f, 0x45,
0x52, 0x52, 0x4f, 0x52, 0x10, 0x03, 0x2a, 0x45, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62,
0x75, 0x74, 0x65, 0x49, 0x44, 0x12, 0x19, 0x0a, 0x15, 0x41, 0x54, 0x54, 0x52, 0x49, 0x42, 0x55,
0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00,
0x12, 0x15, 0x0a, 0x11, 0x52, 0x45, 0x4d, 0x4f, 0x54, 0x45, 0x5f, 0x55, 0x52, 0x4c, 0x5f, 0x53,
0x43, 0x48, 0x45, 0x4d, 0x45, 0x10, 0x02, 0x22, 0x04, 0x08, 0x01, 0x10, 0x01, 0x2a, 0x3f, 0x0a,
0x05, 0x41, 0x70, 0x70, 0x49, 0x44, 0x12, 0x16, 0x0a, 0x12, 0x41, 0x50, 0x50, 0x5f, 0x49, 0x44,
0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0c,
0x0a, 0x08, 0x41, 0x50, 0x50, 0x5f, 0x44, 0x4f, 0x4c, 0x54, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c,
0x41, 0x50, 0x50, 0x5f, 0x44, 0x4f, 0x4c, 0x54, 0x47, 0x52, 0x45, 0x53, 0x10, 0x02, 0x42, 0x51,
0x5a, 0x4f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x6f, 0x6c,
0x74, 0x68, 0x75, 0x62, 0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x2f, 0x67, 0x6f, 0x2f, 0x67, 0x65, 0x6e,
0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x2f, 0x73, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x73, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x61, 0x70, 0x69, 0x2f, 0x76,
0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x61, 0x70,
0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (

View File

@@ -15,18 +15,17 @@ require (
github.com/dolthub/fslock v0.0.3
github.com/dolthub/ishell v0.0.0-20221214210346-d7db0b066488
github.com/dolthub/sqllogictest/go v0.0.0-20201107003712-816f3ae12d81
github.com/dolthub/vitess v0.0.0-20231127171856-2466012fb61f
github.com/dustin/go-humanize v1.0.0
github.com/dolthub/vitess v0.0.0-20231207010700-88fb35413580
github.com/dustin/go-humanize v1.0.1
github.com/fatih/color v1.13.0
github.com/flynn-archive/go-shlex v0.0.0-20150515145356-3f9db97f8568
github.com/go-sql-driver/mysql v1.7.2-0.20230713085235-0b18dac46f7f
github.com/gocraft/dbr/v2 v2.7.2
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/snappy v0.0.4
github.com/google/uuid v1.3.0
github.com/jpillora/backoff v1.0.0
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d
github.com/mattn/go-isatty v0.0.16
github.com/mattn/go-isatty v0.0.17
github.com/mattn/go-runewidth v0.0.13
github.com/pkg/errors v0.9.1
github.com/pkg/profile v1.5.0
@@ -36,9 +35,8 @@ require (
github.com/silvasur/buzhash v0.0.0-20160816060738-9bdec3dec7c6
github.com/sirupsen/logrus v1.8.1
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966
github.com/stretchr/testify v1.8.2
github.com/stretchr/testify v1.8.3
github.com/tealeg/xlsx v1.0.5
github.com/tklauser/go-sysconf v0.3.9 // indirect
go.uber.org/zap v1.24.0
golang.org/x/crypto v0.14.0
golang.org/x/net v0.17.0
@@ -59,9 +57,10 @@ require (
github.com/cespare/xxhash v1.1.0
github.com/creasty/defaults v1.6.0
github.com/dolthub/flatbuffers/v23 v23.3.3-dh.2
github.com/dolthub/go-mysql-server v0.17.1-0.20231206010302-ece3651a5032
github.com/dolthub/go-mysql-server v0.17.1-0.20231207014254-4bbb22ce2d4d
github.com/dolthub/swiss v0.1.0
github.com/goccy/go-json v0.10.2
github.com/google/go-github/v57 v57.0.0
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510
github.com/hashicorp/golang-lru/v2 v2.0.2
github.com/jmoiron/sqlx v1.3.4
@@ -114,14 +113,16 @@ require (
github.com/go-pdf/fpdf v0.6.0 // indirect
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/google/go-cmp v0.5.9 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/s2a-go v0.1.4 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect
github.com/googleapis/gax-go/v2 v2.11.0 // indirect
github.com/gorilla/mux v1.8.0 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/klauspost/compress v1.10.10 // indirect
github.com/klauspost/compress v1.10.5 // indirect
github.com/klauspost/cpuid/v2 v2.0.12 // indirect
github.com/lestrrat-go/strftime v1.0.4 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
@@ -138,6 +139,7 @@ require (
github.com/tetratelabs/wazero v1.1.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tklauser/go-sysconf v0.3.9 // indirect
github.com/tklauser/numcpus v0.3.0 // indirect
github.com/yusufpapurcu/wmi v1.2.2 // indirect
go.opencensus.io v0.24.0 // indirect
@@ -147,7 +149,7 @@ require (
golang.org/x/mod v0.12.0 // indirect
golang.org/x/oauth2 v0.8.0 // indirect
golang.org/x/term v0.13.0 // indirect
golang.org/x/time v0.1.0 // indirect
golang.org/x/time v0.0.0-20191024005414-555d28b269f0 // indirect
golang.org/x/tools v0.13.0 // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
google.golang.org/appengine v1.6.7 // indirect

View File

@@ -181,8 +181,8 @@ github.com/dolthub/fslock v0.0.3 h1:iLMpUIvJKMKm92+N1fmHVdxJP5NdyDK5bK7z7Ba2s2U=
github.com/dolthub/fslock v0.0.3/go.mod h1:QWql+P17oAAMLnL4HGB5tiovtDuAjdDTPbuqx7bYfa0=
github.com/dolthub/go-icu-regex v0.0.0-20230524105445-af7e7991c97e h1:kPsT4a47cw1+y/N5SSCkma7FhAPw7KeGmD6c9PBZW9Y=
github.com/dolthub/go-icu-regex v0.0.0-20230524105445-af7e7991c97e/go.mod h1:KPUcpx070QOfJK1gNe0zx4pA5sicIK1GMikIGLKC168=
github.com/dolthub/go-mysql-server v0.17.1-0.20231205222834-2eb85072ed9d h1:DBMlz2ONWPx6qZhUps8qwlvGa2QwsDSBKbOHxhr55Gc=
github.com/dolthub/go-mysql-server v0.17.1-0.20231205222834-2eb85072ed9d/go.mod h1:vXlRKS39WHav9N51VsfYphKhmSA2t5FkhHmW3BtwH5I=
github.com/dolthub/go-mysql-server v0.17.1-0.20231207014254-4bbb22ce2d4d h1:qesX3+3ll8CiqUbCXYxHemsC6q2i6lxIs9fkYX+8i/g=
github.com/dolthub/go-mysql-server v0.17.1-0.20231207014254-4bbb22ce2d4d/go.mod h1:zJCyPiYe9VZ9xIQTv7S1OFKwyoVQoeGxZXNtkFxTcOI=
github.com/dolthub/go-mysql-server v0.17.1-0.20231205233134-99c5d6d59ff2 h1:u7JyFBPn9przz/X4e6212BaeXJa875vDtUZmXMyqniI=
github.com/dolthub/go-mysql-server v0.17.1-0.20231205233134-99c5d6d59ff2/go.mod h1:vXlRKS39WHav9N51VsfYphKhmSA2t5FkhHmW3BtwH5I=
github.com/dolthub/go-mysql-server v0.17.1-0.20231206010302-ece3651a5032 h1:71BolPv10jFNhpwycyT4toCTizOaBwbLE2VdZixrThI=
@@ -197,11 +197,11 @@ github.com/dolthub/sqllogictest/go v0.0.0-20201107003712-816f3ae12d81 h1:7/v8q9X
github.com/dolthub/sqllogictest/go v0.0.0-20201107003712-816f3ae12d81/go.mod h1:siLfyv2c92W1eN/R4QqG/+RjjX5W2+gCTRjZxBjI3TY=
github.com/dolthub/swiss v0.1.0 h1:EaGQct3AqeP/MjASHLiH6i4TAmgbG/c4rA6a1bzCOPc=
github.com/dolthub/swiss v0.1.0/go.mod h1:BeucyB08Vb1G9tumVN3Vp/pyY4AMUnr9p7Rz7wJ7kAQ=
github.com/dolthub/vitess v0.0.0-20231127171856-2466012fb61f h1:I480LKHhb4usnF3dYhp6J4ORKMrncNKaWYZvIZwlK+U=
github.com/dolthub/vitess v0.0.0-20231127171856-2466012fb61f/go.mod h1:IwjNXSQPymrja5pVqmfnYdcy7Uv7eNJNBPK/MEh9OOw=
github.com/dolthub/vitess v0.0.0-20231207010700-88fb35413580 h1:OSp1g3tRBMVIyxza4LN20rZ6yYEKqjf5hNNisVg/Lns=
github.com/dolthub/vitess v0.0.0-20231207010700-88fb35413580/go.mod h1:IwjNXSQPymrja5pVqmfnYdcy7Uv7eNJNBPK/MEh9OOw=
github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
@@ -325,13 +325,18 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs=
github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
@@ -436,9 +441,8 @@ github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvW
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.10.5 h1:7q6vHIqubShURwQz8cQK6yIe/xC3IF0Vm7TGfqjewrc=
github.com/klauspost/compress v1.10.5/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
github.com/klauspost/compress v1.10.10 h1:a/y8CglcM7gLGYmlbP/stPE5sR3hbhFRUjCBfd/0B3I=
github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
github.com/klauspost/cpuid/v2 v2.0.12 h1:p9dKCg8i4gmOxtv35DvrYoWqYzQrvEVdjQ762Y0OqZE=
github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -480,8 +484,9 @@ github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
@@ -652,8 +657,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/tealeg/xlsx v1.0.5 h1:+f8oFmvY8Gw1iUXzPk+kz+4GpbDZPK1FhPiQRd+ypgE=
github.com/tealeg/xlsx v1.0.5/go.mod h1:btRS8dz54TDnvKNosuAqxrM1QgN1udgk9O34bDCnORM=
github.com/tetratelabs/wazero v1.1.0 h1:EByoAhC+QcYpwSZJSs/aV0uokxPwBgKxfiokSUwAknQ=
@@ -965,9 +970,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.1.0 h1:xYY+Bajn2a7VBmTM5GikTmnK8ZuX8YgnQCqZpbBNtmA=
golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

View File

@@ -0,0 +1,293 @@
// Copyright 2023 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cherry_pick
import (
"errors"
"fmt"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/dolt/go/libraries/doltcore/diff"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/doltcore/merge"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
)
// ErrCherryPickUncommittedChanges is returned when a cherry-pick is attempted without a clean working set.
var ErrCherryPickUncommittedChanges = errors.New("cannot cherry-pick with uncommitted changes")
// CherryPickOptions specifies optional parameters that control how a cherry-pick is performed.
type CherryPickOptions struct {
// Amend controls whether the commit at HEAD is amended and combined with the commit to be cherry-picked.
Amend bool
// CommitMessage is optional, and controls the message for the new commit.
CommitMessage string
}
// CherryPick replays the commit specified by |commit| and applies it as a new commit to the current HEAD. If
// successful, the hash of the new commit is returned. If the cherry-pick results in merge conflicts, the merge result
// is returned. If any unexpected error occurs, it is returned.
func CherryPick(ctx *sql.Context, commit string, options CherryPickOptions) (string, *merge.Result, error) {
doltSession := dsess.DSessFromSess(ctx.Session)
dbName := ctx.GetCurrentDatabase()
roots, ok := doltSession.GetRoots(ctx, dbName)
if !ok {
return "", nil, fmt.Errorf("failed to get roots for current session")
}
mergeResult, commitMsg, err := cherryPick(ctx, doltSession, roots, dbName, commit)
if err != nil {
return "", nil, err
}
newWorkingRoot := mergeResult.Root
err = doltSession.SetRoot(ctx, dbName, newWorkingRoot)
if err != nil {
return "", nil, err
}
err = stageCherryPickedTables(ctx, mergeResult.Stats)
if err != nil {
return "", nil, err
}
// If there were merge conflicts, just return the merge result.
if mergeResult.HasMergeArtifacts() {
return "", mergeResult, nil
}
commitProps := actions.CommitStagedProps{
Date: ctx.QueryTime(),
Name: ctx.Client().User,
Email: fmt.Sprintf("%s@%s", ctx.Client().User, ctx.Client().Address),
Message: commitMsg,
}
if options.CommitMessage != "" {
commitProps.Message = options.CommitMessage
}
if options.Amend {
commitProps.Amend = true
}
// NOTE: roots are old here (after staging the tables) and need to be refreshed
roots, ok = doltSession.GetRoots(ctx, dbName)
if !ok {
return "", nil, fmt.Errorf("failed to get roots for current session")
}
pendingCommit, err := doltSession.NewPendingCommit(ctx, dbName, roots, commitProps)
if err != nil {
return "", nil, err
}
if pendingCommit == nil {
return "", nil, errors.New("nothing to commit")
}
newCommit, err := doltSession.DoltCommit(ctx, dbName, doltSession.GetTransaction(), pendingCommit)
if err != nil {
return "", nil, err
}
h, err := newCommit.HashOf()
if err != nil {
return "", nil, err
}
return h.String(), nil, nil
}
// AbortCherryPick aborts a cherry-pick merge, if one is in progress. If unable to abort for any reason
// (e.g. if there is no cherry-pick merge in progress), an error is returned.
func AbortCherryPick(ctx *sql.Context, dbName string) error {
doltSession := dsess.DSessFromSess(ctx.Session)
ws, err := doltSession.WorkingSet(ctx, dbName)
if err != nil {
return fmt.Errorf("fatal: unable to load working set: %v", err)
}
if !ws.MergeActive() {
return fmt.Errorf("error: There is no cherry-pick merge to abort")
}
roots, ok := doltSession.GetRoots(ctx, dbName)
if !ok {
return fmt.Errorf("fatal: unable to load roots for %s", dbName)
}
newWs, err := merge.AbortMerge(ctx, ws, roots)
if err != nil {
return fmt.Errorf("fatal: unable to abort merge: %v", err)
}
return doltSession.SetWorkingSet(ctx, dbName, newWs)
}
// cherryPick checks that the current working set is clean, verifies that the commit to cherry-pick is neither a merge
// commit nor a commit without a parent, performs the merge, and returns the merge result along with the message of the
// cherry-picked commit, which is used as the commit message for the new commit created by this command.
func cherryPick(ctx *sql.Context, dSess *dsess.DoltSession, roots doltdb.Roots, dbName, cherryStr string) (*merge.Result, string, error) {
// check for clean working set
wsOnlyHasIgnoredTables, err := diff.WorkingSetContainsOnlyIgnoredTables(ctx, roots)
if err != nil {
return nil, "", err
}
if !wsOnlyHasIgnoredTables {
return nil, "", ErrCherryPickUncommittedChanges
}
headRootHash, err := roots.Head.HashOf()
if err != nil {
return nil, "", err
}
workingRootHash, err := roots.Working.HashOf()
if err != nil {
return nil, "", err
}
doltDB, ok := dSess.GetDoltDB(ctx, dbName)
if !ok {
return nil, "", fmt.Errorf("failed to get DoltDB")
}
dbData, ok := dSess.GetDbData(ctx, dbName)
if !ok {
return nil, "", fmt.Errorf("failed to get dbData")
}
cherryCommitSpec, err := doltdb.NewCommitSpec(cherryStr)
if err != nil {
return nil, "", err
}
headRef, err := dbData.Rsr.CWBHeadRef()
if err != nil {
return nil, "", err
}
cherryCommit, err := doltDB.Resolve(ctx, cherryCommitSpec, headRef)
if err != nil {
return nil, "", err
}
if len(cherryCommit.DatasParents()) > 1 {
return nil, "", fmt.Errorf("cherry-picking a merge commit is not supported")
}
if len(cherryCommit.DatasParents()) == 0 {
return nil, "", fmt.Errorf("cherry-picking a commit without parents is not supported")
}
cherryRoot, err := cherryCommit.GetRootValue(ctx)
if err != nil {
return nil, "", err
}
// When cherry-picking, we need to use the parent of the cherry-picked commit as the ancestor. This
// ensures that only the delta from the cherry-pick commit is applied.
parentCommit, err := doltDB.ResolveParent(ctx, cherryCommit, 0)
if err != nil {
return nil, "", err
}
parentRoot, err := parentCommit.GetRootValue(ctx)
if err != nil {
return nil, "", err
}
dbState, ok, err := dSess.LookupDbState(ctx, dbName)
if err != nil {
return nil, "", err
} else if !ok {
return nil, "", sql.ErrDatabaseNotFound.New(dbName)
}
mo := merge.MergeOpts{
IsCherryPick: true,
KeepSchemaConflicts: false,
}
result, err := merge.MergeRoots(ctx, roots.Working, cherryRoot, parentRoot, cherryCommit, parentCommit, dbState.EditOpts(), mo)
if err != nil {
return nil, "", err
}
workingRootHash, err = result.Root.HashOf()
if err != nil {
return nil, "", err
}
if headRootHash.Equal(workingRootHash) {
return nil, "", fmt.Errorf("no changes were made, nothing to commit")
}
cherryCommitMeta, err := cherryCommit.GetCommitMeta(ctx)
if err != nil {
return nil, "", err
}
// If any of the merge stats show a data or schema conflict or a constraint
// violation, record that a merge is in progress.
for _, stats := range result.Stats {
if stats.HasArtifacts() {
ws, err := dSess.WorkingSet(ctx, dbName)
if err != nil {
return nil, "", err
}
newWorkingSet := ws.StartCherryPick(cherryCommit, cherryStr)
err = dSess.SetWorkingSet(ctx, dbName, newWorkingSet)
if err != nil {
return nil, "", err
}
break
}
}
return result, cherryCommitMeta.Description, nil
}
// stageCherryPickedTables stages the tables from |mergeStats| that don't have any merge artifacts, i.e.
// tables that don't have any data or schema conflicts and don't have any constraint violations.
func stageCherryPickedTables(ctx *sql.Context, mergeStats map[string]*merge.MergeStats) (err error) {
tablesToAdd := make([]string, 0, len(mergeStats))
for tableName, mergeStats := range mergeStats {
if mergeStats.HasArtifacts() {
continue
}
// Find any tables being deleted and make sure we stage those tables first
if mergeStats.Operation == merge.TableRemoved {
tablesToAdd = append([]string{tableName}, tablesToAdd...)
} else {
tablesToAdd = append(tablesToAdd, tableName)
}
}
doltSession := dsess.DSessFromSess(ctx.Session)
dbName := ctx.GetCurrentDatabase()
roots, ok := doltSession.GetRoots(ctx, dbName)
if !ok {
return fmt.Errorf("unable to get roots for database '%s' from session", dbName)
}
roots, err = actions.StageTables(ctx, roots, tablesToAdd, true)
if err != nil {
return err
}
return doltSession.SetRoots(ctx, dbName, roots)
}
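For reference, a minimal usage sketch of the new package's exported API, as it might appear in a hypothetical caller; the wrapper function and package name below are illustrative, and only cherry_pick.CherryPick, CherryPickOptions, and AbortCherryPick come from the file above:
package example // illustrative only; not part of this change
import (
	"fmt"
	"github.com/dolthub/go-mysql-server/sql"
	"github.com/dolthub/dolt/go/libraries/doltcore/cherry_pick"
)
// replayCommit cherry-picks |commitSpec| onto the current HEAD and reports whether the
// caller still has conflicts to resolve.
func replayCommit(ctx *sql.Context, commitSpec string) error {
	newHash, mergeResult, err := cherry_pick.CherryPick(ctx, commitSpec, cherry_pick.CherryPickOptions{})
	if err != nil {
		return err
	}
	if mergeResult != nil {
		// Conflicts or constraint violations were recorded and the merge was left in progress;
		// resolve them, or call cherry_pick.AbortCherryPick(ctx, ctx.GetCurrentDatabase()) to back out.
		return fmt.Errorf("cherry-pick of %s produced conflicts in %d table(s)",
			commitSpec, mergeResult.CountOfTablesWithDataConflicts())
	}
	fmt.Printf("created commit %s\n", newHash)
	return nil
}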

View File

@@ -21,6 +21,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/store/types"
)
@@ -100,8 +101,8 @@ func createTestEnvWithNameAndFilesystem(envName string, fs filesys.Filesys, home
dEnv := env.Load(context.Background(), homeDirFunc, fs, urlStr, "test")
cfg, _ := dEnv.Config.GetConfig(env.GlobalConfig)
cfg.SetStrings(map[string]string{
env.UserNameKey: name,
env.UserEmailKey: email,
config.UserNameKey: name,
config.UserEmailKey: email,
})
err := dEnv.InitRepo(context.Background(), types.Format_Default, name, email, env.DefaultInitBranch)

View File

@@ -22,6 +22,7 @@ import (
cmd "github.com/dolthub/dolt/go/cmd/dolt/commands"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dprocedures"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
@@ -121,8 +122,8 @@ func (mr *MultiRepoTestSetup) NewDB(dbName string) {
}
cfg, _ := dEnv.Config.GetConfig(env.GlobalConfig)
cfg.SetStrings(map[string]string{
env.UserNameKey: name,
env.UserEmailKey: email,
config.UserNameKey: name,
config.UserEmailKey: email,
})
err = dEnv.InitRepo(context.Background(), types.Format_Default, name, email, defaultBranch)
if err != nil {
@@ -195,7 +196,7 @@ func (mr *MultiRepoTestSetup) CloneDB(fromRemote, dbName string) {
mr.Errhand(err)
}
err = actions.CloneRemote(ctx, srcDB, r.Name, "", dEnv)
err = actions.CloneRemote(ctx, srcDB, r.Name, "", false, dEnv)
if err != nil {
mr.Errhand(err)
}

View File

@@ -29,6 +29,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/ref"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/libraries/utils/iohelp"
"github.com/dolthub/dolt/go/libraries/utils/strhelp"
@@ -156,7 +157,7 @@ func sortedKeys(m map[string]iohelp.ReadStats) []string {
return keys
}
func CloneRemote(ctx context.Context, srcDB *doltdb.DoltDB, remoteName, branch string, dEnv *env.DoltEnv) error {
func CloneRemote(ctx context.Context, srcDB *doltdb.DoltDB, remoteName, branch string, singleBranch bool, dEnv *env.DoltEnv) error {
eventCh := make(chan pull.TableFileEvent, 128)
wg := &sync.WaitGroup{}
@@ -214,25 +215,26 @@ func CloneRemote(ctx context.Context, srcDB *doltdb.DoltDB, remoteName, branch s
// every branch in the remote. We iterate through local branches and
// create remote refs corresponding to each of them. We delete all of
// the local branches except for the one corresponding to |branch|.
for _, brnch := range branches {
cs, _ := doltdb.NewCommitSpec(brnch.GetPath())
cm, err := dEnv.DoltDB.Resolve(ctx, cs, nil)
if err != nil {
return fmt.Errorf("%w: %s; %s", ErrFailedToResolveBranchRef, brnch.String(), err.Error())
}
remoteRef := ref.NewRemoteRef(remoteName, brnch.GetPath())
err = dEnv.DoltDB.SetHeadToCommit(ctx, remoteRef, cm)
if err != nil {
return fmt.Errorf("%w: %s; %s", ErrFailedToCreateRemoteRef, remoteRef.String(), err.Error())
}
if brnch.GetPath() != branch {
err := dEnv.DoltDB.DeleteBranch(ctx, brnch, nil)
for _, br := range branches {
if !singleBranch || br.GetPath() == branch {
cs, _ := doltdb.NewCommitSpec(br.GetPath())
cm, err := dEnv.DoltDB.Resolve(ctx, cs, nil)
if err != nil {
return fmt.Errorf("%w: %s; %s", ErrFailedToDeleteBranch, brnch.String(), err.Error())
return fmt.Errorf("%w: %s; %s", ErrFailedToResolveBranchRef, br.String(), err.Error())
}
remoteRef := ref.NewRemoteRef(remoteName, br.GetPath())
err = dEnv.DoltDB.SetHeadToCommit(ctx, remoteRef, cm)
if err != nil {
return fmt.Errorf("%w: %s; %s", ErrFailedToCreateRemoteRef, remoteRef.String(), err.Error())
}
}
if br.GetPath() != branch {
err := dEnv.DoltDB.DeleteBranch(ctx, br, nil)
if err != nil {
return fmt.Errorf("%w: %s; %s", ErrFailedToDeleteBranch, br.String(), err.Error())
}
}
}
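A hypothetical call site (not part of this diff) illustrating the new singleBranch parameter; ctx, srcDB, remoteName, and dEnv are assumed to be prepared by the caller as elsewhere in this file:
// Clone only the branch being checked out: with singleBranch=true, the loop above creates a
// remote tracking ref solely for |branch| rather than one per branch.
if err := actions.CloneRemote(ctx, srcDB, remoteName, "main", true, dEnv); err != nil {
	return err
}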
@@ -267,8 +269,8 @@ func CloneRemote(ctx context.Context, srcDB *doltdb.DoltDB, remoteName, branch s
// InitEmptyClonedRepo inits an empty, newly cloned repo. This would be unnecessary if we properly initialized the
// storage for a repository when we created it on dolthub. If we do that, this code can be removed.
func InitEmptyClonedRepo(ctx context.Context, dEnv *env.DoltEnv) error {
name := dEnv.Config.GetStringOrDefault(env.UserNameKey, "")
email := dEnv.Config.GetStringOrDefault(env.UserEmailKey, "")
name := dEnv.Config.GetStringOrDefault(config.UserNameKey, "")
email := dEnv.Config.GetStringOrDefault(config.UserEmailKey, "")
initBranch := env.GetDefaultInitBranch(dEnv.Config)
if name == "" {

View File

@@ -22,7 +22,6 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/libraries/utils/set"
"github.com/dolthub/dolt/go/store/datas"
)
@@ -30,33 +29,10 @@ const (
localConfigName = "local"
globalConfigName = "global"
UserEmailKey = "user.email"
UserNameKey = "user.name"
// should be able to have remote specific creds?
UserCreds = "user.creds"
DoltEditor = "core.editor"
InitBranchName = "init.defaultbranch"
RemotesApiHostKey = "remotes.default_host"
RemotesApiHostPortKey = "remotes.default_port"
AddCredsUrlKey = "creds.add_url"
DoltLabInsecureKey = "doltlab.insecure"
MetricsDisabled = "metrics.disabled"
MetricsHost = "metrics.host"
MetricsPort = "metrics.port"
MetricsInsecure = "metrics.insecure"
PushAutoSetupRemote = "push.autosetupremote"
)
var LocalConfigWhitelist = set.NewStrSet([]string{UserNameKey, UserEmailKey})
var GlobalConfigWhitelist = set.NewStrSet([]string{UserNameKey, UserEmailKey})
// ConfigScope is an enum representing the elements that make up the ConfigHierarchy
type ConfigScope int
@@ -206,7 +182,7 @@ func GetStringOrDefault(cfg config.ReadableConfig, key, defStr string) string {
// GetNameAndEmail returns the name and email from the supplied config
func GetNameAndEmail(cfg config.ReadableConfig) (string, string, error) {
name, err := cfg.GetString(UserNameKey)
name, err := cfg.GetString(config.UserNameKey)
if err == config.ErrConfigParamNotFound {
return "", "", datas.ErrNameNotConfigured
@@ -214,7 +190,7 @@ func GetNameAndEmail(cfg config.ReadableConfig) (string, string, error) {
return "", "", err
}
email, err := cfg.GetString(UserEmailKey)
email, err := cfg.GetString(config.UserEmailKey)
if err == config.ErrConfigParamNotFound {
return "", "", datas.ErrEmailNotConfigured
@@ -258,8 +234,8 @@ const (
)
var DefaultFailsafeConfig = map[string]string{
UserEmailKey: DefaultEmail,
UserNameKey: DefaultName,
config.UserEmailKey: DefaultEmail,
config.UserNameKey: DefaultName,
}
func (w writeableLocalDoltCliConfig) SetStrings(updates map[string]string) error {

View File

@@ -34,15 +34,15 @@ func TestConfig(t *testing.T) {
lCfg, _ := dEnv.Config.GetConfig(LocalConfig)
gCfg, _ := dEnv.Config.GetConfig(GlobalConfig)
lCfg.SetStrings(map[string]string{UserEmailKey: email, UserNameKey: "local_override"})
gCfg.SetStrings(map[string]string{UserNameKey: name})
lCfg.SetStrings(map[string]string{config.UserEmailKey: email, config.UserNameKey: "local_override"})
gCfg.SetStrings(map[string]string{config.UserNameKey: name})
assert.Equal(t, email, dEnv.Config.GetStringOrDefault(UserEmailKey, "no"))
assert.Equal(t, "local_override", dEnv.Config.GetStringOrDefault(UserNameKey, "no"))
assert.Equal(t, email, dEnv.Config.GetStringOrDefault(config.UserEmailKey, "no"))
assert.Equal(t, "local_override", dEnv.Config.GetStringOrDefault(config.UserNameKey, "no"))
assert.Equal(t, "yes", dEnv.Config.GetStringOrDefault("bad_key", "yes"))
assert.Equal(t, email, dEnv.Config.IfEmptyUseConfig("", UserEmailKey))
assert.Equal(t, "not empty", dEnv.Config.IfEmptyUseConfig("not empty", UserEmailKey))
assert.Equal(t, email, dEnv.Config.IfEmptyUseConfig("", config.UserEmailKey))
assert.Equal(t, "not empty", dEnv.Config.IfEmptyUseConfig("not empty", config.UserEmailKey))
assert.Equal(t, "", dEnv.Config.IfEmptyUseConfig("", "missing"))
@@ -54,17 +54,17 @@ func TestFailsafes(t *testing.T) {
dEnv, _ := createTestEnv(true, true)
lCfg, _ := dEnv.Config.GetConfig(LocalConfig)
require.NoError(t, lCfg.Unset([]string{UserNameKey}))
require.NoError(t, lCfg.Unset([]string{config.UserNameKey}))
dEnv.Config.SetFailsafes(DefaultFailsafeConfig)
assert.Equal(t, DefaultEmail, dEnv.Config.GetStringOrDefault(UserEmailKey, "none"))
assert.Equal(t, DefaultName, dEnv.Config.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, DefaultEmail, dEnv.Config.GetStringOrDefault(config.UserEmailKey, "none"))
assert.Equal(t, DefaultName, dEnv.Config.GetStringOrDefault(config.UserNameKey, "none"))
dEnv.Config.SetFailsafes(map[string]string{UserEmailKey: "new", "abc": "def"})
dEnv.Config.SetFailsafes(map[string]string{config.UserEmailKey: "new", "abc": "def"})
assert.Equal(t, "new", dEnv.Config.GetStringOrDefault(UserEmailKey, "none"))
assert.Equal(t, DefaultName, dEnv.Config.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, "new", dEnv.Config.GetStringOrDefault(config.UserEmailKey, "none"))
assert.Equal(t, DefaultName, dEnv.Config.GetStringOrDefault(config.UserNameKey, "none"))
assert.Equal(t, "def", dEnv.Config.GetStringOrDefault("abc", "none"))
}
@@ -75,23 +75,23 @@ func TestWritableDoltConfig(t *testing.T) {
gCfg, _ := dEnv.Config.GetConfig(GlobalConfig)
lCfg, _ := dEnv.Config.GetConfig(LocalConfig)
require.NoError(t, gCfg.SetStrings(map[string]string{UserNameKey: name}))
require.NoError(t, lCfg.SetStrings(map[string]string{UserNameKey: localName}))
require.NoError(t, gCfg.SetStrings(map[string]string{config.UserNameKey: name}))
require.NoError(t, lCfg.SetStrings(map[string]string{config.UserNameKey: localName}))
cfg := dEnv.Config.WriteableConfig()
assert.Equal(t, localName, cfg.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, localName, cfg.GetStringOrDefault(config.UserNameKey, "none"))
require.NoError(t, cfg.SetStrings(map[string]string{"test": "abc"}))
require.NoError(t, cfg.Unset([]string{UserNameKey}))
require.NoError(t, cfg.Unset([]string{config.UserNameKey}))
assert.Equal(t, name, cfg.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, name, cfg.GetStringOrDefault(config.UserNameKey, "none"))
assert.Equal(t, "abc", cfg.GetStringOrDefault("test", "none"))
_, err := lCfg.GetString(UserNameKey)
_, err := lCfg.GetString(config.UserNameKey)
assert.Equal(t, config.ErrConfigParamNotFound, err)
assert.Equal(t, name, gCfg.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, name, gCfg.GetStringOrDefault(config.UserNameKey, "none"))
_, err = gCfg.GetString("test")
assert.Equal(t, config.ErrConfigParamNotFound, err)
}
@@ -102,22 +102,22 @@ func TestWritableDoltConfigNoLocal(t *testing.T) {
newName := "Willy"
gCfg, _ := dEnv.Config.GetConfig(GlobalConfig)
require.NoError(t, gCfg.SetStrings(map[string]string{UserNameKey: name, "test": "abc"}))
require.NoError(t, gCfg.SetStrings(map[string]string{config.UserNameKey: name, "test": "abc"}))
cfg := dEnv.Config.WriteableConfig()
assert.Equal(t, name, cfg.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, name, cfg.GetStringOrDefault(config.UserNameKey, "none"))
assert.Equal(t, "abc", cfg.GetStringOrDefault("test", "none"))
require.NoError(t, cfg.SetStrings(map[string]string{UserNameKey: newName}))
require.NoError(t, cfg.SetStrings(map[string]string{config.UserNameKey: newName}))
require.NoError(t, cfg.Unset([]string{"test"}))
assert.Equal(t, newName, cfg.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, newName, cfg.GetStringOrDefault(config.UserNameKey, "none"))
_, err := cfg.GetString("test")
assert.Equal(t, config.ErrConfigParamNotFound, err)
assert.Equal(t, newName, gCfg.GetStringOrDefault(UserNameKey, "none"))
assert.Equal(t, newName, gCfg.GetStringOrDefault(config.UserNameKey, "none"))
_, err = gCfg.GetString("test")
assert.Equal(t, config.ErrConfigParamNotFound, err)

View File

@@ -46,9 +46,6 @@ const (
DefaultLoginUrl = "https://dolthub.com/settings/credentials"
DefaultMetricsHost = "eventsapi.dolthub.com"
DefaultMetricsPort = "443"
DefaultRemotesApiHost = "doltremoteapi.dolthub.com"
DefaultRemotesApiPort = "443"
@@ -105,6 +102,10 @@ func (dEnv *DoltEnv) GetRemoteDB(ctx context.Context, format *types.NomsBinForma
}
}
func (dEnv *DoltEnv) GetConfig() config.ReadableConfig {
return dEnv.Config
}
func createRepoState(fs filesys.Filesys) (*RepoState, error) {
repoState, rsErr := LoadRepoState(fs)
@@ -198,7 +199,7 @@ func Load(ctx context.Context, hdp HomeDirProvider, fs filesys.Filesys, urlStr s
}
func GetDefaultInitBranch(cfg config.ReadableConfig) string {
return GetStringOrDefault(cfg, InitBranchName, DefaultInitBranch)
return GetStringOrDefault(cfg, config.InitBranchName, DefaultInitBranch)
}
// Valid returns whether this environment has been properly initialized. This is useful because although every command
@@ -810,8 +811,8 @@ func (dEnv *DoltEnv) workingSetMeta() *datas.WorkingSetMeta {
func (dEnv *DoltEnv) NewWorkingSetMeta(message string) *datas.WorkingSetMeta {
return &datas.WorkingSetMeta{
Name: dEnv.Config.GetStringOrDefault(UserNameKey, ""),
Email: dEnv.Config.GetStringOrDefault(UserEmailKey, ""),
Name: dEnv.Config.GetStringOrDefault(config.UserNameKey, ""),
Email: dEnv.Config.GetStringOrDefault(config.UserEmailKey, ""),
Timestamp: uint64(time.Now().Unix()),
Description: message,
}
@@ -822,7 +823,7 @@ func (dEnv *DoltEnv) CredsDir() (string, error) {
}
func (dEnv *DoltEnv) UserDoltCreds() (creds.DoltCreds, bool, error) {
kid, err := dEnv.Config.GetString(UserCreds)
kid, err := dEnv.Config.GetString(config.UserCreds)
if err == nil && kid != "" {
dir, err := dEnv.CredsDir()

View File

@@ -25,6 +25,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/dbfactory"
"github.com/dolthub/dolt/go/libraries/doltcore/dconfig"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/libraries/utils/earl"
"github.com/dolthub/dolt/go/libraries/utils/filesys"
"github.com/dolthub/dolt/go/libraries/utils/test"
@@ -103,8 +104,8 @@ func initRepoWithRelativePath(t *testing.T, envPath string, hdp HomeDirProvider)
dEnv := Load(context.Background(), hdp, fs, urlStr, "test")
cfg, _ := dEnv.Config.GetConfig(GlobalConfig)
cfg.SetStrings(map[string]string{
UserNameKey: name,
UserEmailKey: email,
config.UserNameKey: name,
config.UserEmailKey: email,
})
err = dEnv.InitRepo(context.Background(), types.Format_Default, name, email, DefaultInitBranch)

View File

@@ -661,7 +661,7 @@ func GetAbsRemoteUrl(fs filesys2.Filesys, cfg config.ReadableConfig, urlArg stri
return dbfactory.HTTPSScheme, "https://" + urlArg, nil
}
hostName, err := cfg.GetString(RemotesApiHostKey)
hostName, err := cfg.GetString(config.RemotesApiHostKey)
if err != nil {
if err != config.ErrConfigParamNotFound {

View File

@@ -24,6 +24,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/doltcore/table/editor"
"github.com/dolthub/dolt/go/store/hash"
)
@@ -281,3 +282,49 @@ func mergedRootToWorking(
}
return
}
// AbortMerge returns a new WorkingSet instance with the active merge aborted: it clears and resets the
// merge state in |workingSet| and uses |roots| to identify the existing tables and reset them, excluding
// any ignored tables. The caller must then set the returned WorkingSet on the session for the aborted
// merge to take effect. If no merge is in progress, this function returns an error.
func AbortMerge(ctx *sql.Context, workingSet *doltdb.WorkingSet, roots doltdb.Roots) (*doltdb.WorkingSet, error) {
if !workingSet.MergeActive() {
return nil, fmt.Errorf("there is no merge to abort")
}
tbls, err := doltdb.UnionTableNames(ctx, roots.Working, roots.Staged, roots.Head)
if err != nil {
return nil, err
}
roots, err = actions.MoveTablesFromHeadToWorking(ctx, roots, tbls)
if err != nil {
return nil, err
}
preMergeWorkingRoot := workingSet.MergeState().PreMergeWorkingRoot()
preMergeWorkingTables, err := preMergeWorkingRoot.GetTableNames(ctx)
if err != nil {
return nil, err
}
nonIgnoredTables, err := doltdb.ExcludeIgnoredTables(ctx, roots, preMergeWorkingTables)
if err != nil {
return nil, err
}
someTablesAreIgnored := len(nonIgnoredTables) != len(preMergeWorkingTables)
if someTablesAreIgnored {
newWorking, err := actions.MoveTablesBetweenRoots(ctx, nonIgnoredTables, preMergeWorkingRoot, roots.Working)
if err != nil {
return nil, err
}
workingSet = workingSet.WithWorkingRoot(newWorking)
} else {
workingSet = workingSet.WithWorkingRoot(preMergeWorkingRoot)
}
workingSet = workingSet.WithStagedRoot(workingSet.WorkingRoot())
workingSet = workingSet.ClearMerge()
return workingSet, nil
}
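A minimal sketch of the follow-up step the comment above calls out, as an external caller would write it; doltSession, dbName, ws, and roots are assumed to come from the surrounding session, as in the cherry-pick and dolt_merge code in this change:
newWs, err := merge.AbortMerge(ctx, ws, roots)
if err != nil {
	return err
}
// The abort only takes effect once the returned working set is stored back on the session.
if err := doltSession.SetWorkingSet(ctx, dbName, newWs); err != nil {
	return err
}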

View File

@@ -485,7 +485,7 @@ func (p *DoltDatabaseProvider) CloneDatabaseFromRemote(
return fmt.Errorf("cannot create DB, file exists at %s", dbName)
}
dEnv, err := p.cloneDatabaseFromRemote(ctx, dbName, remoteName, branch, remoteUrl, remoteParams)
err := p.cloneDatabaseFromRemote(ctx, dbName, remoteName, branch, remoteUrl, remoteParams)
if err != nil {
// Make a best effort to clean up any artifacts on disk from a failed clone
// before we return the error
@@ -499,7 +499,7 @@ func (p *DoltDatabaseProvider) CloneDatabaseFromRemote(
return err
}
return ConfigureReplicationDatabaseHook(ctx, p, dbName, dEnv)
return nil
}
// cloneDatabaseFromRemote encapsulates the inner logic for cloning a database so that if any error
@@ -510,26 +510,26 @@ func (p *DoltDatabaseProvider) cloneDatabaseFromRemote(
ctx *sql.Context,
dbName, remoteName, branch, remoteUrl string,
remoteParams map[string]string,
) (*env.DoltEnv, error) {
) error {
if p.remoteDialer == nil {
return nil, fmt.Errorf("unable to clone remote database; no remote dialer configured")
return fmt.Errorf("unable to clone remote database; no remote dialer configured")
}
// TODO: params for AWS, others that need them
r := env.NewRemote(remoteName, remoteUrl, nil)
srcDB, err := r.GetRemoteDB(ctx, types.Format_Default, p.remoteDialer)
if err != nil {
return nil, err
return err
}
dEnv, err := actions.EnvForClone(ctx, srcDB.ValueReadWriter().Format(), r, dbName, p.fs, "VERSION", env.GetCurrentUserHomeDir)
if err != nil {
return nil, err
return err
}
err = actions.CloneRemote(ctx, srcDB, remoteName, branch, dEnv)
err = actions.CloneRemote(ctx, srcDB, remoteName, branch, false, dEnv)
if err != nil {
return nil, err
return err
}
err = dEnv.RepoStateWriter().UpdateBranch(dEnv.RepoState.CWBHeadRef().GetPath(), env.BranchConfig{
@@ -537,33 +537,7 @@ func (p *DoltDatabaseProvider) cloneDatabaseFromRemote(
Remote: remoteName,
})
fkChecks, err := ctx.GetSessionVariable(ctx, "foreign_key_checks")
if err != nil {
return nil, err
}
opts := editor.Options{
Deaf: dEnv.DbEaFactory(),
// TODO: this doesn't seem right, why is this getting set in the constructor to the DB
ForeignKeyChecksDisabled: fkChecks.(int8) == 0,
}
db, err := NewDatabase(ctx, dbName, dEnv.DbData(), opts)
if err != nil {
return nil, err
}
// If we have an initialization hook, invoke it. By default, this will
// be ConfigureReplicationDatabaseHook, which will setup replication
// for the new database if a remote url template is set.
err = p.InitDatabaseHook(ctx, p, dbName, dEnv)
if err != nil {
return nil, err
}
p.databases[formatDbMapKeyName(db.Name())] = db
return dEnv, nil
return p.registerNewDatabase(ctx, dbName, dEnv)
}
// DropDatabase implements the sql.MutableDatabaseProvider interface

View File

@@ -71,9 +71,14 @@ func doDoltCheckout(ctx *sql.Context, args []string) (statusCode int, successMes
return 1, "", err
}
branchOrTrack := apr.Contains(cli.CheckoutCreateBranch) || apr.Contains(cli.TrackFlag)
newBranch, _, err := parseBranchArgs(apr)
if err != nil {
return 1, "", err
}
branchOrTrack := newBranch != "" || apr.Contains(cli.TrackFlag)
if apr.Contains(cli.TrackFlag) && apr.NArg() > 0 {
return 1, "", errors.New("Improper usage.")
return 1, "", errors.New("Improper usage. Too many arguments provided.")
}
if (branchOrTrack && apr.NArg() > 1) || (!branchOrTrack && apr.NArg() == 0) {
return 1, "", errors.New("Improper usage.")
@@ -90,7 +95,7 @@ func doDoltCheckout(ctx *sql.Context, args []string) (statusCode int, successMes
if err != nil {
return 1, "", err
}
if apr.Contains(cli.CheckoutCreateBranch) && readOnlyDatabase {
if newBranch != "" && readOnlyDatabase {
return 1, "", fmt.Errorf("unable to create new branch in a read-only database")
}
@@ -199,6 +204,30 @@ func doDoltCheckout(ctx *sql.Context, args []string) (statusCode int, successMes
return 0, successMessage, nil
}
// parseBranchArgs returns the name of the new branch and whether it should be created forcibly (-B). If -b or -B is
// provided with an empty branch name, an error is returned; if neither flag is provided, the returned branch name is empty.
func parseBranchArgs(apr *argparser.ArgParseResults) (newBranch string, createBranchForcibly bool, err error) {
if apr.Contains(cli.CheckoutCreateBranch) && apr.Contains(cli.CreateResetBranch) {
return "", false, errors.New("Improper usage. Cannot use both -b and -B.")
}
if newBranch, ok := apr.GetValue(cli.CheckoutCreateBranch); ok {
if len(newBranch) == 0 {
return "", false, ErrEmptyBranchName
}
return newBranch, false, nil
}
if newBranch, ok := apr.GetValue(cli.CreateResetBranch); ok {
if len(newBranch) == 0 {
return "", false, ErrEmptyBranchName
}
return newBranch, true, nil
}
return "", false, nil
}
// isReadOnlyDatabase returns true if the named database is a read-only database. An error is returned
// if any issues are encountered while looking up the named database.
func isReadOnlyDatabase(ctx *sql.Context, dbName string) (bool, error) {
@@ -346,14 +375,20 @@ func checkoutNewBranch(ctx *sql.Context, dbName string, dbData env.DbData, apr *
newBranchName = remoteBranchName
}
if newBranch, ok := apr.GetValue(cli.CheckoutCreateBranch); ok {
if len(newBranch) == 0 {
return "", "", ErrEmptyBranchName
}
newBranchName = newBranch
// A little wonky behavior here: parseBranchArgs ends up being called twice, since this procedure passes around the
// parse results but the main procedure also needed to parse the -b and -B flags. It was a little cleaner to just
// call it again here than to pass its results around.
var createBranchForcibly bool
var optionBBranch string
optionBBranch, createBranchForcibly, err = parseBranchArgs(apr)
if err != nil {
return "", "", err
}
if optionBBranch != "" {
newBranchName = optionBBranch
}
err = actions.CreateBranchWithStartPt(ctx, dbData, newBranchName, startPt, false, rsc)
err = actions.CreateBranchWithStartPt(ctx, dbData, newBranchName, startPt, createBranchForcibly, rsc)
if err != nil {
return "", "", err
}
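A hedged sketch of how the -b/-B parsing above feeds branch creation; variable names follow the surrounding procedure, and the snippet is illustrative rather than part of the diff:
newBranch, createForcibly, err := parseBranchArgs(apr)
if err != nil {
	return "", "", err // e.g. -b and -B used together, or an empty branch name
}
if newBranch != "" {
	// createForcibly is true only for -B, which resets an existing branch to the start point,
	// as exercised by the new DoltCheckoutScripts tests below.
	err = actions.CreateBranchWithStartPt(ctx, dbData, newBranch, startPt, createForcibly, rsc)
}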

View File

@@ -23,14 +23,10 @@ import (
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/libraries/doltcore/branch_control"
"github.com/dolthub/dolt/go/libraries/doltcore/diff"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/libraries/doltcore/merge"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/libraries/doltcore/cherry_pick"
)
var ErrEmptyCherryPick = errors.New("cannot cherry-pick empty string")
var ErrCherryPickUncommittedChanges = errors.New("cannot cherry-pick with uncommitted changes")
var cherryPickSchema = []*sql.Column{
{
@@ -83,29 +79,8 @@ func doDoltCherryPick(ctx *sql.Context, args []string) (string, int, int, int, e
return "", 0, 0, 0, err
}
dSess := dsess.DSessFromSess(ctx.Session)
if apr.Contains(cli.AbortParam) {
ws, err := dSess.WorkingSet(ctx, dbName)
if err != nil {
return "", 0, 0, 0, fmt.Errorf("fatal: unable to load working set: %v", err)
}
if !ws.MergeActive() {
return "", 0, 0, 0, fmt.Errorf("error: There is no cherry-pick merge to abort")
}
roots, ok := dSess.GetRoots(ctx, dbName)
if !ok {
return "", 0, 0, 0, fmt.Errorf("fatal: unable to load roots for %s", dbName)
}
newWs, err := abortMerge(ctx, ws, roots)
if err != nil {
return "", 0, 0, 0, fmt.Errorf("fatal: unable to abort merge: %v", err)
}
return "", 0, 0, 0, dSess.SetWorkingSet(ctx, dbName, newWs)
return "", 0, 0, 0, cherry_pick.AbortCherryPick(ctx, dbName)
}
// we only support cherry-picking a single commit for now.
@@ -120,182 +95,18 @@ func doDoltCherryPick(ctx *sql.Context, args []string) (string, int, int, int, e
return "", 0, 0, 0, ErrEmptyCherryPick
}
roots, ok := dSess.GetRoots(ctx, dbName)
if !ok {
return "", 0, 0, 0, sql.ErrDatabaseNotFound.New(dbName)
}
mergeResult, commitMsg, err := cherryPick(ctx, dSess, roots, dbName, cherryStr)
commit, mergeResult, err := cherry_pick.CherryPick(ctx, cherryStr, cherry_pick.CherryPickOptions{})
if err != nil {
return "", 0, 0, 0, err
}
newWorkingRoot := mergeResult.Root
err = dSess.SetRoot(ctx, dbName, newWorkingRoot)
if err != nil {
return "", 0, 0, 0, err
if mergeResult != nil {
return "",
mergeResult.CountOfTablesWithDataConflicts(),
mergeResult.CountOfTablesWithSchemaConflicts(),
mergeResult.CountOfTablesWithConstraintViolations(),
nil
}
err = stageCherryPickedTables(ctx, mergeResult.Stats)
if err != nil {
return "", 0, 0, 0, err
}
if mergeResult.HasMergeArtifacts() {
return "", mergeResult.CountOfTablesWithDataConflicts(),
mergeResult.CountOfTablesWithSchemaConflicts(), mergeResult.CountOfTablesWithConstraintViolations(), nil
} else {
commitHash, _, err := doDoltCommit(ctx, []string{"-m", commitMsg})
return commitHash, 0, 0, 0, err
}
}
// stageCherryPickedTables stages the tables from |mergeStats| that don't have any merge artifacts i.e.
// tables that don't have any data or schema conflicts and don't have any constraint violations.
func stageCherryPickedTables(ctx *sql.Context, mergeStats map[string]*merge.MergeStats) error {
tablesToAdd := make([]string, 0, len(mergeStats))
for tableName, mergeStats := range mergeStats {
if mergeStats.HasArtifacts() {
continue
}
// Find any tables being deleted and make sure we stage those tables first
if mergeStats.Operation == merge.TableRemoved {
tablesToAdd = append([]string{tableName}, tablesToAdd...)
} else {
tablesToAdd = append(tablesToAdd, tableName)
}
}
for _, tableName := range tablesToAdd {
res, err := doDoltAdd(ctx, []string{tableName})
if err != nil {
return err
}
if res != 0 {
return fmt.Errorf("dolt add failed")
}
}
return nil
}
// cherryPick checks that the current working set is clean, verifies the cherry-pick commit is not a merge commit
// or a commit without parent commit, performs merge and returns the new working set root value and
// the commit message of cherry-picked commit as the commit message of the new commit created during this command.
func cherryPick(ctx *sql.Context, dSess *dsess.DoltSession, roots doltdb.Roots, dbName, cherryStr string) (*merge.Result, string, error) {
// check for clean working set
wsOnlyHasIgnoredTables, err := diff.WorkingSetContainsOnlyIgnoredTables(ctx, roots)
if err != nil {
return nil, "", err
}
if !wsOnlyHasIgnoredTables {
return nil, "", ErrCherryPickUncommittedChanges
}
headRootHash, err := roots.Head.HashOf()
if err != nil {
return nil, "", err
}
workingRootHash, err := roots.Working.HashOf()
if err != nil {
return nil, "", err
}
doltDB, ok := dSess.GetDoltDB(ctx, dbName)
if !ok {
return nil, "", fmt.Errorf("failed to get DoltDB")
}
dbData, ok := dSess.GetDbData(ctx, dbName)
if !ok {
return nil, "", fmt.Errorf("failed to get dbData")
}
cherryCommitSpec, err := doltdb.NewCommitSpec(cherryStr)
if err != nil {
return nil, "", err
}
headRef, err := dbData.Rsr.CWBHeadRef()
if err != nil {
return nil, "", err
}
cherryCommit, err := doltDB.Resolve(ctx, cherryCommitSpec, headRef)
if err != nil {
return nil, "", err
}
if len(cherryCommit.DatasParents()) > 1 {
return nil, "", fmt.Errorf("cherry-picking a merge commit is not supported")
}
if len(cherryCommit.DatasParents()) == 0 {
return nil, "", fmt.Errorf("cherry-picking a commit without parents is not supported")
}
cherryRoot, err := cherryCommit.GetRootValue(ctx)
if err != nil {
return nil, "", err
}
// When cherry-picking, we need to use the parent of the cherry-picked commit as the ancestor. This
// ensures that only the delta from the cherry-pick commit is applied.
parentCommit, err := doltDB.ResolveParent(ctx, cherryCommit, 0)
if err != nil {
return nil, "", err
}
parentRoot, err := parentCommit.GetRootValue(ctx)
if err != nil {
return nil, "", err
}
dbState, ok, err := dSess.LookupDbState(ctx, dbName)
if err != nil {
return nil, "", err
} else if !ok {
return nil, "", sql.ErrDatabaseNotFound.New(dbName)
}
mo := merge.MergeOpts{
IsCherryPick: true,
KeepSchemaConflicts: false,
}
result, err := merge.MergeRoots(ctx, roots.Working, cherryRoot, parentRoot, cherryCommit, parentCommit, dbState.EditOpts(), mo)
if err != nil {
return nil, "", err
}
workingRootHash, err = result.Root.HashOf()
if err != nil {
return nil, "", err
}
if headRootHash.Equal(workingRootHash) {
return nil, "", fmt.Errorf("no changes were made, nothing to commit")
}
cherryCommitMeta, err := cherryCommit.GetCommitMeta(ctx)
if err != nil {
return nil, "", err
}
// If any of the merge stats show a data or schema conflict or a constraint
// violation, record that a merge is in progress.
for _, stats := range result.Stats {
if stats.HasArtifacts() {
ws, err := dSess.WorkingSet(ctx, dbName)
if err != nil {
return nil, "", err
}
newWorkingSet := ws.StartCherryPick(cherryCommit, cherryStr)
err = dSess.SetWorkingSet(ctx, dbName, newWorkingSet)
if err != nil {
return nil, "", err
}
break
}
}
return result, cherryCommitMeta.Description, nil
return commit, 0, 0, 0, nil
}

View File

@@ -117,7 +117,7 @@ func doDoltMerge(ctx *sql.Context, args []string) (string, int, int, error) {
return "", noConflictsOrViolations, threeWayMerge, fmt.Errorf("fatal: There is no merge to abort")
}
ws, err = abortMerge(ctx, ws, roots)
ws, err = merge.AbortMerge(ctx, ws, roots)
if err != nil {
return "", noConflictsOrViolations, threeWayMerge, err
}
@@ -278,43 +278,6 @@ func performMerge(
return ws, commit, noConflictsOrViolations, threeWayMerge, nil
}
func abortMerge(ctx *sql.Context, workingSet *doltdb.WorkingSet, roots doltdb.Roots) (*doltdb.WorkingSet, error) {
tbls, err := doltdb.UnionTableNames(ctx, roots.Working, roots.Staged, roots.Head)
if err != nil {
return nil, err
}
roots, err = actions.MoveTablesFromHeadToWorking(ctx, roots, tbls)
if err != nil {
return nil, err
}
preMergeWorkingRoot := workingSet.MergeState().PreMergeWorkingRoot()
preMergeWorkingTables, err := preMergeWorkingRoot.GetTableNames(ctx)
if err != nil {
return nil, err
}
nonIgnoredTables, err := doltdb.ExcludeIgnoredTables(ctx, roots, preMergeWorkingTables)
if err != nil {
return nil, err
}
someTablesAreIgnored := len(nonIgnoredTables) != len(preMergeWorkingTables)
if someTablesAreIgnored {
newWorking, err := actions.MoveTablesBetweenRoots(ctx, nonIgnoredTables, preMergeWorkingRoot, roots.Working)
if err != nil {
return nil, err
}
workingSet = workingSet.WithWorkingRoot(newWorking)
} else {
workingSet = workingSet.WithWorkingRoot(preMergeWorkingRoot)
}
workingSet = workingSet.WithStagedRoot(workingSet.WorkingRoot())
workingSet = workingSet.ClearMerge()
return workingSet, nil
}
func executeMerge(
ctx *sql.Context,
sess *dsess.DoltSession,

View File

@@ -27,6 +27,7 @@ import (
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/doltcore/env/actions"
"github.com/dolthub/dolt/go/libraries/doltcore/sqle/dsess"
"github.com/dolthub/dolt/go/libraries/utils/config"
"github.com/dolthub/dolt/go/store/datas"
)
@@ -70,7 +71,7 @@ func doDoltPush(ctx *sql.Context, args []string) (int, string, error) {
return cmdFailure, "", err
}
autoSetUpRemote := loadConfig(ctx).GetStringOrDefault(env.PushAutoSetupRemote, "false")
autoSetUpRemote := loadConfig(ctx).GetStringOrDefault(config.PushAutoSetupRemote, "false")
pushAutoSetUpRemote, err := strconv.ParseBool(autoSetUpRemote)
if err != nil {
return cmdFailure, "", err

View File

@@ -95,8 +95,8 @@ func NewDoltSession(
conf config.ReadWriteConfig,
branchController *branch_control.Controller,
) (*DoltSession, error) {
username := conf.GetStringOrDefault(env.UserNameKey, "")
email := conf.GetStringOrDefault(env.UserEmailKey, "")
username := conf.GetStringOrDefault(config.UserNameKey, "")
email := conf.GetStringOrDefault(config.UserEmailKey, "")
globals := config.NewPrefixConfig(conf, env.SqlServerGlobalsPrefix)
sess := &DoltSession{

View File

@@ -89,7 +89,7 @@ func TestSingleQuery(t *testing.T) {
}
for _, q := range setupQueries {
enginetest.RunQuery(t, engine, harness, q)
enginetest.RunQueryWithContext(t, engine, harness, nil, q)
}
// engine.EngineAnalyzer().Debug = true
@@ -331,7 +331,7 @@ func TestSingleQueryPrepared(t *testing.T) {
}
for _, q := range setupQueries {
enginetest.RunQuery(t, engine, harness, q)
enginetest.RunQueryWithContext(t, engine, harness, nil, q)
}
//engine.Analyzer.Debug = true
@@ -2340,7 +2340,7 @@ func TestSystemTableIndexes(t *testing.T) {
ctx := enginetest.NewContext(harness)
for _, q := range stt.setup {
enginetest.RunQuery(t, e, harness, q)
enginetest.RunQueryWithContext(t, e, harness, ctx, q)
}
for i, c := range []string{"inner", "lookup", "hash", "merge"} {
@@ -2375,7 +2375,7 @@ func TestSystemTableIndexesPrepared(t *testing.T) {
ctx := enginetest.NewContext(harness)
for _, q := range stt.setup {
enginetest.RunQuery(t, e, harness, q)
enginetest.RunQueryWithContext(t, e, harness, ctx, q)
}
for _, tt := range stt.queries {

View File

@@ -2387,6 +2387,101 @@ var DoltCheckoutScripts = []queries.ScriptTest{
},
},
},
{
Name: "dolt_checkout with new branch forcefully",
SetUpScript: []string{
"create table t (s varchar(5) primary key);",
"insert into t values ('foo');",
"call dolt_commit('-Am', 'commit main~2');", // will be main~2
"insert into t values ('bar');",
"call dolt_commit('-Am', 'commit main~1');", // will be main~1
"insert into t values ('baz');",
"call dolt_commit('-Am', 'commit main');", // will be main~1
"call dolt_branch('testbr', 'main~1');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "call dolt_checkout('-B', 'testbr', 'main~2');",
SkipResultsCheck: true,
},
{
Query: "select active_branch();",
Expected: []sql.Row{{"testbr"}},
},
{
Query: "select * from t order by s;",
Expected: []sql.Row{{"foo"}},
},
{
Query: "call dolt_checkout('main');",
SkipResultsCheck: true,
},
{
Query: "select active_branch();",
Expected: []sql.Row{{"main"}},
},
{
Query: "select * from t order by s;",
Expected: []sql.Row{{"bar"}, {"baz"}, {"foo"}},
},
{
Query: "call dolt_checkout('-B', 'testbr', 'main~1');",
SkipResultsCheck: true,
},
{
Query: "select active_branch();",
Expected: []sql.Row{{"testbr"}},
},
{
Query: "select * from t order by s;",
Expected: []sql.Row{{"bar"}, {"foo"}},
},
},
},
{
Name: "dolt_checkout with new branch forcefully with dirty working set",
SetUpScript: []string{
"create table t (s varchar(5) primary key);",
"insert into t values ('foo');",
"call dolt_commit('-Am', 'commit main~2');", // will be main~2
"insert into t values ('bar');",
"call dolt_commit('-Am', 'commit main~1');", // will be main~1
"insert into t values ('baz');",
"call dolt_commit('-Am', 'commit main');", // will be main~1
"call dolt_checkout('-b', 'testbr', 'main~1');",
"insert into t values ('qux');",
},
Assertions: []queries.ScriptTestAssertion{
{
Query: "select active_branch();",
Expected: []sql.Row{{"testbr"}},
},
{
Query: "select * from t order by s;",
Expected: []sql.Row{{"bar"}, {"foo"}, {"qux"}}, // Dirty working set
},
{
Query: "call dolt_checkout('main');",
SkipResultsCheck: true,
},
{
Query: "select * from t order by s;",
Expected: []sql.Row{{"bar"}, {"baz"}, {"foo"}},
},
{
Query: "call dolt_checkout('-B', 'testbr', 'main~1');",
SkipResultsCheck: true,
},
{
Query: "select active_branch();",
Expected: []sql.Row{{"testbr"}},
},
{
Query: "select * from t order by s;",
Expected: []sql.Row{{"bar"}, {"foo"}}, // Dirty working set was forcefully overwritten
},
},
},
{
Name: "dolt_checkout mixed with USE statements",
SetUpScript: []string{
@@ -2768,6 +2863,15 @@ var DoltCheckoutReadOnlyScripts = []queries.ScriptTest{
},
},
},
{
Name: "dolt checkout -B returns an error for read-only databases",
Assertions: []queries.ScriptTestAssertion{
{
Query: "call dolt_checkout('-B', 'newBranch');",
ExpectedErrStr: "unable to create new branch in a read-only database",
},
},
},
}
var DoltInfoSchemaScripts = []queries.ScriptTest{

View File

@@ -399,8 +399,8 @@ func CreateTestEnvWithName(envName string) *env.DoltEnv {
dEnv := env.Load(context.Background(), homeDirFunc, fs, doltdb.InMemDoltDB+envName, "test")
cfg, _ := dEnv.Config.GetConfig(env.GlobalConfig)
cfg.SetStrings(map[string]string{
env.UserNameKey: name,
env.UserEmailKey: email,
config2.UserNameKey: name,
config2.UserEmailKey: email,
})
err := dEnv.InitRepo(context.Background(), types.Format_Default, name, email, env.DefaultInitBranch)

View File

@@ -22,6 +22,8 @@ import (
"strings"
"time"
"github.com/fatih/color"
"github.com/sirupsen/logrus"
"google.golang.org/grpc"
"google.golang.org/protobuf/encoding/prototext"
@@ -30,10 +32,32 @@ import (
"github.com/dolthub/dolt/go/libraries/utils/iohelp"
)
// Application is the application ID used for all events emitted by this application. Other applications (not dolt)
// should set this once at initialization.
var Application = eventsapi.AppID_APP_DOLT
// EmitterTypeEnvVar is the environment variable DOLT_EVENTS_EMITTER, which you can set to one of the values below
// to change how event emission occurs. Currently only used for sql-server heartbeat events.
const EmitterTypeEnvVar = "DOLT_EVENTS_EMITTER"
// Types of emitters. These strings are accepted by the --output-format flag for the send-metrics command.
const (
EmitterTypeNull = "null" // no output
EmitterTypeStdout = "stdout" // output to stdout, used in testing
EmitterTypeGrpc = "grpc" // output to a grpc server, the default for send-metrics
EmitterTypeFile = "file" // output to a file, used to log events during normal execution
EmitterTypeLogger = "logger" // output to a logger, used in testing
)
const DefaultMetricsHost = "eventsapi.dolthub.com"
const DefaultMetricsPort = "443"
// Emitter is an interface used for processing a batch of events
type Emitter interface {
// LogEvents takes a batch of events and processes them
// LogEvents emits a batch of events
LogEvents(version string, evts []*eventsapi.ClientEvent) error
// LogEventsRequest emits a batch of events wrapped in a request object, with other metadata
LogEventsRequest(ctx context.Context, req *eventsapi.LogEventsRequest) error
}
// NullEmitter is an emitter that drops events
@@ -44,6 +68,10 @@ func (ne NullEmitter) LogEvents(version string, evts []*eventsapi.ClientEvent) e
return nil
}
func (ne NullEmitter) LogEventsRequest(ctx context.Context, req *eventsapi.LogEventsRequest) error {
return nil
}
// WriterEmitter is an emitter that writes the text encoding of the events to its writer
type WriterEmitter struct {
// Wr the writer to log events to
@@ -80,6 +108,11 @@ func (we WriterEmitter) LogEvents(version string, evts []*eventsapi.ClientEvent)
return nil
}
func (we WriterEmitter) LogEventsRequest(ctx context.Context, req *eventsapi.LogEventsRequest) error {
_, err := fmt.Fprintf(color.Output, "%+v\n", req)
return err
}
// GrpcEmitter sends events to a GRPC service implementing the eventsapi
type GrpcEmitter struct {
client eventsapi.ClientEventsServiceClient
@@ -110,7 +143,7 @@ func (em *GrpcEmitter) LogEvents(version string, evts []*eventsapi.ClientEvent)
Version: version,
Platform: plat,
Events: evts,
App: eventsapi.AppID_APP_DOLT,
App: Application,
}
_, err := em.client.LogEvents(ctx, &req)
@@ -118,6 +151,10 @@ func (em *GrpcEmitter) LogEvents(version string, evts []*eventsapi.ClientEvent)
return err
}
func (em *GrpcEmitter) LogEventsRequest(ctx context.Context, req *eventsapi.LogEventsRequest) error {
return em.SendLogEventsRequest(ctx, req)
}
// SendLogEventsRequest sends a request using the grpc client
func (em *GrpcEmitter) SendLogEventsRequest(ctx context.Context, req *eventsapi.LogEventsRequest) error {
_, err := em.client.LogEvents(ctx, req)
@@ -147,3 +184,68 @@ func (fe *FileEmitter) LogEvents(version string, evts []*eventsapi.ClientEvent)
return nil
}
func (fe *FileEmitter) LogEventsRequest(ctx context.Context, req *eventsapi.LogEventsRequest) error {
// TODO: we are losing some information here, like the machine id
if err := fe.fbp.WriteEvents(req.Version, req.Events); err != nil {
return err
}
return nil
}
type LoggerEmitter struct {
logLevel logrus.Level
}
func (l LoggerEmitter) LogEvents(version string, evts []*eventsapi.ClientEvent) error {
sb := &strings.Builder{}
wr := WriterEmitter{Wr: sb}
err := wr.LogEvents(version, evts)
if err != nil {
return err
}
eventString := sb.String()
return l.logEventString(eventString)
}
func (l LoggerEmitter) LogEventsRequest(ctx context.Context, req *eventsapi.LogEventsRequest) error {
sb := &strings.Builder{}
wr := WriterEmitter{Wr: sb}
err := wr.LogEventsRequest(ctx, req)
if err != nil {
return err
}
eventString := sb.String()
return l.logEventString(eventString)
}
func (l LoggerEmitter) logEventString(eventString string) error {
switch l.logLevel {
case logrus.DebugLevel:
logrus.Debug(eventString)
case logrus.ErrorLevel:
logrus.Error(eventString)
case logrus.FatalLevel:
logrus.Fatal(eventString)
case logrus.InfoLevel:
logrus.Info(eventString)
case logrus.PanicLevel:
logrus.Panic(eventString)
case logrus.TraceLevel:
logrus.Trace(eventString)
case logrus.WarnLevel:
logrus.Warn(eventString)
default:
return fmt.Errorf("unknown log level %v", l.logLevel)
}
return nil
}
func NewLoggerEmitter(level logrus.Level) *LoggerEmitter {
return &LoggerEmitter{
logLevel: level,
}
}
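
As a rough illustration of how these emitter types fit together (not part of this change), an emitter could be chosen from DOLT_EVENTS_EMITTER roughly as follows. The helper name is hypothetical; it assumes the os import alongside the color and logrus imports already present in this file, and it omits the grpc and file emitters, which need extra construction arguments.

package events

import (
	"os"

	"github.com/fatih/color"
	"github.com/sirupsen/logrus"
)

// emitterFromEnv is a hypothetical helper that maps the DOLT_EVENTS_EMITTER
// value onto one of the emitters defined above.
func emitterFromEnv() Emitter {
	switch os.Getenv(EmitterTypeEnvVar) {
	case EmitterTypeStdout:
		return WriterEmitter{Wr: color.Output}
	case EmitterTypeLogger:
		return NewLoggerEmitter(logrus.DebugLevel)
	default:
		// EmitterTypeNull, an empty value, or anything unrecognized drops events.
		return NullEmitter{}
	}
}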

View File

@@ -17,10 +17,9 @@ package events
import (
"context"
"errors"
"fmt"
"io/fs"
"github.com/dolthub/fslock"
"github.com/fatih/color"
"google.golang.org/protobuf/proto"
eventsapi "github.com/dolthub/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
@@ -47,74 +46,45 @@ type Flusher interface {
Flush(ctx context.Context) error
}
// lockAndFlush locks the given lockPath and passes the flushCB to the filesys' Iter method
func lockAndFlush(ctx context.Context, fs filesys.Filesys, dirPath string, lockPath string, fcb flushCB) error {
fsLock := filesys.CreateFilesysLock(fs, lockPath)
isUnlocked, err := fsLock.TryLock()
defer func() error {
err := fsLock.Unlock()
if err != nil {
return err
}
return nil
}()
if err != nil {
if err == fslock.ErrLocked {
return ErrFileLocked
}
return err
}
if isUnlocked && err == nil {
err := fs.Iter(dirPath, false, func(path string, size int64, isDir bool) (stop bool) {
if err := fcb(ctx, path); err != nil {
// log.Print(err)
return false
}
return false
})
if err != nil {
return err
}
return nil
}
return nil
type FileFlusher struct {
emitter Emitter
fbp *FileBackedProc
}
// GrpcEventFlusher parses dolt event logs sends the events to the events server
type GrpcEventFlusher struct {
em *GrpcEmitter
fbp *FileBackedProc
}
// NewGrpcEventFlusher creates a new GrpcEventFlusher
func NewGrpcEventFlusher(fs filesys.Filesys, userHomeDir string, doltDir string, grpcEmitter *GrpcEmitter) *GrpcEventFlusher {
func NewFileFlusher(fs filesys.Filesys, userHomeDir string, doltDir string, emitter Emitter) *FileFlusher {
fbp := NewFileBackedProc(fs, userHomeDir, doltDir, MD5FileNamer, CheckFilenameMD5)
if exists := fbp.EventsDirExists(); !exists {
panic(ErrEventsDataDir)
}
return &GrpcEventFlusher{em: grpcEmitter, fbp: fbp}
return &FileFlusher{emitter: emitter, fbp: fbp}
}
func (f FileFlusher) Flush(ctx context.Context) error {
fs := f.fbp.GetFileSys()
evtsDir := f.fbp.GetEventsDirPath()
err := f.lockAndFlush(ctx, fs, evtsDir, f.fbp.LockPath)
if err != nil {
return err
}
return nil
}
// flush has the function signature of the flushCb type
// and sends events data to the events server
func (egf *GrpcEventFlusher) flush(ctx context.Context, path string) error {
fs := egf.fbp.GetFileSys()
func (f FileFlusher) flush(ctx context.Context, path string) error {
fs := f.fbp.GetFileSys()
data, err := fs.ReadFile(path)
if err != nil {
return err
}
isFileValid, err := egf.fbp.CheckingFunc(data, path)
isFileValid, err := f.fbp.CheckingFunc(data, path)
if isFileValid && err == nil {
req := &eventsapi.LogEventsRequest{}
@@ -123,7 +93,7 @@ func (egf *GrpcEventFlusher) flush(ctx context.Context, path string) error {
return err
}
if err := egf.em.SendLogEventsRequest(ctx, req); err != nil {
if err := f.emitter.LogEventsRequest(ctx, req); err != nil {
return err
}
@@ -137,72 +107,55 @@ func (egf *GrpcEventFlusher) flush(ctx context.Context, path string) error {
return errInvalidFile
}
// Flush satisfies the Flusher interface and calls this Flusher's flush method on each events file
func (egf *GrpcEventFlusher) Flush(ctx context.Context) error {
fs := egf.fbp.GetFileSys()
var _ Flusher = &FileFlusher{}
evtsDir := egf.fbp.GetEventsDirPath()
// lockAndFlush locks the given lockPath and passes the flushCB to the filesys' Iter method
func (f FileFlusher) lockAndFlush(ctx context.Context, fsys filesys.Filesys, dirPath string, lockPath string) error {
fsLock := filesys.CreateFilesysLock(fsys, lockPath)
isUnlocked, err := fsLock.TryLock()
defer func() error {
err := fsLock.Unlock()
if err != nil {
return err
}
return nil
}()
err := lockAndFlush(ctx, fs, evtsDir, egf.fbp.LockPath, egf.flush)
if err != nil {
return err
}
return nil
}
// IOFlusher parses event files and writes them to stdout
type IOFlusher struct {
fbp *FileBackedProc
}
// NewIOFlusher creates a new IOFlusher
func NewIOFlusher(fs filesys.Filesys, userHomeDir string, doltDir string) *IOFlusher {
fbp := NewFileBackedProc(fs, userHomeDir, doltDir, MD5FileNamer, CheckFilenameMD5)
if exists := fbp.EventsDirExists(); !exists {
panic(ErrEventsDataDir)
}
return &IOFlusher{fbp: fbp}
}
// flush has the function signature of the flushCb type
// and writes data to stdout
func (iof *IOFlusher) flush(ctx context.Context, path string) error {
fs := iof.fbp.GetFileSys()
data, err := fs.ReadFile(path)
if err != nil {
return err
}
req := &eventsapi.LogEventsRequest{}
if err := proto.Unmarshal(data, req); err != nil {
return err
}
// needed for bats test
fmt.Fprintf(color.Output, "%+v\n", req)
if err := fs.DeleteFile(path); err != nil {
return err
}
return nil
}
// Flush satisfies the Flusher interface and calls this Flusher's flush method on each events file
func (iof *IOFlusher) Flush(ctx context.Context) error {
fs := iof.fbp.GetFileSys()
evtsDir := iof.fbp.GetEventsDirPath()
err := lockAndFlush(ctx, fs, evtsDir, iof.fbp.LockPath, iof.flush)
if err != nil {
if errors.Is(err, fslock.ErrLocked) {
return ErrFileLocked
}
return err
}
if !isUnlocked {
return nil
}
var returnErr error
iterErr := fsys.Iter(dirPath, false, func(path string, size int64, isDir bool) (stop bool) {
if err := f.flush(ctx, path); err != nil {
if errors.Is(err, errInvalidFile) {
// ignore invalid files found in the events directory
return false
} else if _, isPathError := err.(*fs.PathError); isPathError {
// The lock file on Windows can surface as a path error here, so skip this file
// We can't use errors.Is because fs.PathError doesn't implement Is
return false
}
returnErr = err
return true
}
return false
})
if iterErr != nil {
return iterErr
} else if returnErr != nil {
return returnErr
}
return nil
}
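
The skip-or-abort decision inside the new lockAndFlush iteration can be isolated into a small standalone sketch. This is not dolt code: errInvalidFile stands in for the package-level error of the same name, and the example only demonstrates which errors are skipped versus which stop the walk.

package main

import (
	"errors"
	"fmt"
	"io/fs"
)

// errInvalidFile stands in for the events package's error of the same name.
var errInvalidFile = errors.New("invalid events file")

// shouldSkip mirrors the decision above: invalid event files and bare
// *fs.PathError values (e.g. the lock file on Windows) are skipped; any other
// error aborts the iteration.
func shouldSkip(err error) bool {
	if errors.Is(err, errInvalidFile) {
		return true
	}
	_, isPathError := err.(*fs.PathError) // direct type check, as in lockAndFlush
	return isPathError
}

func main() {
	fmt.Println(shouldSkip(errInvalidFile))                                                 // true
	fmt.Println(shouldSkip(&fs.PathError{Op: "open", Path: "LOCK", Err: fs.ErrPermission})) // true
	fmt.Println(shouldSkip(errors.New("disk full")))                                        // false
}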

View File

@@ -52,7 +52,7 @@ func NewTestClient() *TestClient {
type flushTester struct {
Client *TestClient
Fbp *FileBackedProc
Flusher *GrpcEventFlusher
Flusher *FileFlusher
}
func createFlushTester(fs filesys.Filesys, hdir string, ddir string) *flushTester {
@@ -62,7 +62,7 @@ func createFlushTester(fs filesys.Filesys, hdir string, ddir string) *flushTeste
fbp := NewFileBackedProc(fs, hdir, ddir, sn.Name, sn.Check)
gef := &GrpcEventFlusher{em: &GrpcEmitter{client}, fbp: fbp}
gef := &FileFlusher{emitter: &GrpcEmitter{client}, fbp: fbp}
return &flushTester{Client: client, Fbp: fbp, Flusher: gef}
}
@@ -89,43 +89,45 @@ func TestEventFlushing(t *testing.T) {
filesystems := []string{"inMemFS", "local"}
for _, fsName := range filesystems {
for _, test := range tests {
t.Run(fsName, func(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
ctx := context.Background()
t.Run(test.name, func(t *testing.T) {
ctx := context.Background()
var ft *flushTester
var ft *flushTester
if fsName == "inMemFS" {
fs := filesys.NewInMemFS([]string{tempEvtsDir}, nil, tempEvtsDir)
if fsName == "inMemFS" {
fs := filesys.NewInMemFS([]string{tempEvtsDir}, nil, tempEvtsDir)
ft = createFlushTester(fs, homeDir, doltTestDir)
} else {
fs := filesys.LocalFS
ft = createFlushTester(fs, homeDir, doltTestDir)
} else {
fs := filesys.LocalFS
path := filepath.Join(dPath, evtPath)
dDir := testLib.TestDir(path)
path := filepath.Join(dPath, evtPath)
dDir := testLib.TestDir(path)
ft = createFlushTester(fs, "", dDir)
}
ft = createFlushTester(fs, "", dDir)
}
ces := make([]*eventsapi.ClientEvent, 0)
ces := make([]*eventsapi.ClientEvent, 0)
for i := 0; i < test.numEvents; i++ {
ce := &eventsapi.ClientEvent{}
ces = append(ces, ce)
}
for i := 0; i < test.numEvents; i++ {
ce := &eventsapi.ClientEvent{}
ces = append(ces, ce)
}
assert.Equal(t, len(ces), test.numEvents)
assert.Equal(t, len(ces), test.numEvents)
err := ft.Fbp.WriteEvents(testVersion, ces)
assert.Equal(t, err, nil)
err := ft.Fbp.WriteEvents(testVersion, ces)
assert.Equal(t, err, nil)
err = ft.Flusher.Flush(ctx)
err = ft.Flusher.Flush(ctx)
assert.Equal(t, err, nil)
assert.Equal(t, len(ft.Client.CES), len(ces))
})
}
assert.NoError(t, err)
assert.Equal(t, len(ft.Client.CES), len(ces))
})
}
})
}
}

View File

@@ -27,7 +27,7 @@ import (
// EventNowFunc function is used to get the current time and can be overridden for testing.
var EventNowFunc = time.Now
func nowTimestamp() *timestamppb.Timestamp {
func NowTimestamp() *timestamppb.Timestamp {
now := EventNowFunc()
ts := timestamppb.New(now)
err := ts.CheckValid()
@@ -51,7 +51,7 @@ func NewEvent(ceType eventsapi.ClientEventType) *Event {
return &Event{
ce: &eventsapi.ClientEvent{
Id: uuid.New().String(),
StartTime: nowTimestamp(),
StartTime: NowTimestamp(),
Type: ceType,
},
m: &sync.Mutex{},
@@ -83,7 +83,7 @@ func (evt *Event) close() *eventsapi.ClientEvent {
evt.m.Lock()
defer evt.m.Unlock()
evt.ce.EndTime = nowTimestamp()
evt.ce.EndTime = NowTimestamp()
for k, v := range evt.attributes {
evt.ce.Attributes = append(evt.ce.Attributes, &eventsapi.ClientEventAttribute{Id: k, Value: v})

View File

@@ -210,7 +210,7 @@ func (fbp *FileBackedProc) WriteEvents(version string, evts []*eventsapi.ClientE
Version: version,
Platform: plat,
Events: evts,
App: eventsapi.AppID_APP_DOLT,
App: Application,
}
data, err := proto.Marshal(req)

View File

@@ -42,8 +42,10 @@ func (namer *SequentialNamer) Name(bytes []byte) string {
}
func (namer *SequentialNamer) Check(data []byte, path string) (bool, error) {
// todo
return true, nil
filename := filepath.Base(path)
ext := filepath.Ext(filename)
return ext == evtDataExt, nil
}
func (namer *SequentialNamer) GetIdx() int {
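
A minimal sketch of the new Check behavior, outside the dolt codebase: filepath.Ext keeps the final dot, so only files carrying the events data extension pass and stray files (such as the lock file) are rejected. The extension value below is an assumption for illustration; the real evtDataExt constant lives elsewhere in the events package.

package main

import (
	"fmt"
	"path/filepath"
)

// evtDataExt is an assumed value for illustration only.
const evtDataExt = ".devts"

// check mirrors the new SequentialNamer.Check logic shown above.
func check(path string) bool {
	return filepath.Ext(filepath.Base(path)) == evtDataExt
}

func main() {
	fmt.Println(check("/home/u/.dolt/eventsData/0000.devts")) // true
	fmt.Println(check("/home/u/.dolt/eventsData/LOCK"))       // false
}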

View File

@@ -0,0 +1,43 @@
// Copyright 2023 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
const UserEmailKey = "user.email"
const UserNameKey = "user.name"
const UserCreds = "user.creds"
const DoltEditor = "core.editor"
const InitBranchName = "init.defaultbranch"
const RemotesApiHostKey = "remotes.default_host"
const RemotesApiHostPortKey = "remotes.default_port"
const AddCredsUrlKey = "creds.add_url"
const DoltLabInsecureKey = "doltlab.insecure"
const MetricsDisabled = "metrics.disabled"
const MetricsHost = "metrics.host"
const MetricsPort = "metrics.port"
const MetricsInsecure = "metrics.insecure"
const PushAutoSetupRemote = "push.autosetupremote"
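
These keys are used in place of the former env.* keys elsewhere in this diff. As a rough sketch of the read-then-parse pattern that doDoltPush follows above, a caller might wrap a key like this; the wrapper and its package name are hypothetical, while config.ReadWriteConfig and GetStringOrDefault appear in the DoltSession change earlier in this diff.

package example

import (
	"strconv"

	"github.com/dolthub/dolt/go/libraries/utils/config"
)

// pushAutoSetupRemote reads the push.autosetupremote key with a default of
// "false" and parses it into a bool, mirroring the doDoltPush change.
func pushAutoSetupRemote(conf config.ReadWriteConfig) (bool, error) {
	v := conf.GetStringOrDefault(config.PushAutoSetupRemote, "false")
	return strconv.ParseBool(v)
}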

View File

@@ -3,9 +3,9 @@
# Assert that all bash test constructs will correctly fail on Mac OS's older bash version
# by ending them with '|| false'.
# https://github.com/sstephenson/bats/issues/49
@test "bats: all bash test constructs end with '|| false' {
@test "bats: all bash test constructs end with '|| false'" {
run grep -E ' *\]\][[:space:]]*$' -n *.bats
# grep returns 0 if matches are found, 1 if matches are NOT found, and 2 if no input files were found
echo -e "Incorrect bash test constructs: \n$output"
[ $status -eq 1 ]
}
}

View File

@@ -314,6 +314,46 @@ SQL
[[ ! "$output" =~ "4" ]] || false
}
@test "checkout: -B flag will forcefully reset an existing branch" {
dolt sql -q 'create table test (id int primary key);'
dolt sql -q 'insert into test (id) values (89012);'
dolt commit -Am 'first change.'
dolt sql -q 'insert into test (id) values (76543);'
dolt commit -Am 'second change.'
dolt checkout -b testbr main~1
run dolt sql -q "select * from test;"
[[ "$output" =~ "89012" ]] || false
[[ ! "$output" =~ "76543" ]] || false
# make a change to the branch which we'll lose
dolt sql -q 'insert into test (id) values (19283);'
dolt commit -Am 'change to testbr.'
dolt checkout main
dolt checkout -B testbr main
run dolt sql -q "select * from test;"
[[ "$output" =~ "89012" ]] || false
[[ "$output" =~ "76543" ]] || false
[[ ! "$output" =~ "19283" ]] || false
}
@test "checkout: -B will create a branch that does not exist" {
dolt sql -q 'create table test (id int primary key);'
dolt sql -q 'insert into test (id) values (89012);'
dolt commit -Am 'first change.'
dolt sql -q 'insert into test (id) values (76543);'
dolt commit -Am 'second change.'
dolt checkout -B testbr main~1
run dolt sql -q "select * from test;"
[[ "$output" =~ "89012" ]] || false
[[ ! "$output" =~ "76543" ]] || false
}
@test "checkout: attempting to checkout a detached head shows a suggestion instead" {
dolt sql -q "create table test (id int primary key);"
dolt add .

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bats
load $BATS_TEST_DIRNAME/helper/common.bash
setup() {
setup_no_dolt_init
}
teardown() {
stop_sql_server
assert_feature_version
teardown_common
}
@test "clone-drop: clone a database and then drop it" {
mkdir repo
cd repo
dolt init
dolt remote add pushed 'file://../pushed'
dolt push pushed main:main
dolt sql -q 'call dolt_clone("file://../pushed", "cloned"); drop database cloned;'
}
@test "clone-drop: sql-server: clone a database and then drop it" {
mkdir repo
cd repo
dolt init
dolt remote add pushed 'file://../pushed'
dolt push pushed main:main
start_sql_server
dolt sql -q 'call dolt_clone("file://../pushed", "cloned"); drop database cloned;'
}

View File

@@ -36,7 +36,6 @@ teardown() {
[ "$status" -eq 0 ]
# Need to make this a regex because of the coloring
[[ "$output" =~ "Config successfully updated" ]] || false
[ -f `nativepath ~/.dolt/config_global.json` ]
run dolt config --list
[ "$status" -eq 0 ]
[ "$output" = "test = test" ]

View File

@@ -151,7 +151,7 @@ teardown() {
[[ "$output" =~ "3,Table" ]] || false
}
@test "dump: SQL type - compare tables in database with tables imported file " {
@test "dump: SQL type - compare tables in database with tables imported file" {
dolt branch new_branch
dolt sql -q "CREATE TABLE new_table(pk int primary key);"
dolt sql -q "INSERT INTO new_table VALUES (1);"
@@ -176,7 +176,7 @@ teardown() {
[[ "$output" = "" ]] || false
}
@test "dump: SQL type (no-batch) - compare tables in database with tables imported file " {
@test "dump: SQL type (no-batch) - compare tables in database with tables imported file" {
dolt branch new_branch
dolt sql -q "CREATE TABLE new_table(pk int primary key);"
@@ -207,7 +207,7 @@ teardown() {
[[ "$output" = "" ]] || false
}
@test "dump: SQL type (batch is no-op) - compare tables in database with tables imported file " {
@test "dump: SQL type (batch is no-op) - compare tables in database with tables imported file" {
dolt branch new_branch
dolt sql -q "CREATE TABLE warehouse(warehouse_id int primary key, warehouse_name longtext);"
@@ -520,7 +520,7 @@ SQL
[ -f doltdump/warehouse.csv ]
}
@test "dump: CSV type - compare tables in database with tables imported from corresponding files " {
@test "dump: CSV type - compare tables in database with tables imported from corresponding files" {
create_tables
dolt add .
@@ -660,7 +660,7 @@ SQL
[ -f doltdump/warehouse.json ]
}
@test "dump: JSON type - compare tables in database with tables imported from corresponding files " {
@test "dump: JSON type - compare tables in database with tables imported from corresponding files" {
create_tables
dolt add .

View File

@@ -419,7 +419,9 @@ print(df)
table = pq.read_table('dt.parquet')
print(table.to_pandas())
" > arrow_test.py
run python3 arrow_test.py
[ "$output" = "$panda_result" ]
echo "import pandas as pd

View File

@@ -22,6 +22,7 @@ SKIP_SERVER_TESTS=$(cat <<-EOM
~1pk5col-strings.bats~
~sql-tags.bats~
~empty-repo.bats~
~clone-drops.bats~
~verify-constraints.bats~
~db-revision-specifiers.bats~
~ignore.bats~

View File

@@ -710,6 +710,7 @@ teardown() {
[[ "$output" =~ "invalid --decorate option" ]] || false
}
# bats test_tags=no_lambda
@test "log: check pager" {
skiponwindows "Need to install expect and make this script work on windows."
dolt commit --allow-empty -m "commit 1"
@@ -747,4 +748,4 @@ teardown() {
[[ ! "$output" =~ "HEAD" ]] || false
run dolt log commit2
[[ "$output" =~ "HEAD" ]] || false
}
}

View File

@@ -59,7 +59,7 @@ teardown() {
[[ "$output" =~ "gc - Cleans up unreferenced data from the repository." ]] || false
[[ "$output" =~ "filter-branch - Edits the commit history using the provided query." ]] || false
[[ "$output" =~ "merge-base - Find the common ancestor of two commits." ]] || false
[[ "$output" =~ "version - Displays the current Dolt cli version." ]] || false
[[ "$output" =~ "version - Displays the version for the Dolt binary." ]] || false
[[ "$output" =~ "dump - Export all tables in the working set into a file." ]] || false
}

View File

@@ -86,7 +86,7 @@ setup() {
BATS_TEST_TIMEOUT=1
}
# bats test_tags=no_lambda
@test "performance: merge with schema change and no conflict" {
dolt checkout full
dolt checkout -b mod2
@@ -118,6 +118,7 @@ setup() {
log_status_eq 0
}
# bats test_tags=no_lambda
@test "performance: merge with schema change and conflict" {
dolt checkout full
dolt checkout -b mod2

View File

@@ -52,6 +52,7 @@ SQL
[[ "${#lines[@]}" = "2" ]] || false
}
# bats test_tags=no_lambda
@test "regression-tests: UNIQUE index violations do not break future INSERTs" {
skiponwindows "Need to install expect and make this script work on windows."
mkdir doltsql

View File

@@ -1008,6 +1008,51 @@ create_five_remote_branches_main_and_master() {
[[ "$output" =~ "remotes/origin/branch-two" ]] || false
}
@test "remotes: clone --single-branch does not create remote refs for all remote branches" {
create_three_remote_branches
cd dolt-repo-clones
dolt clone --single-branch http://localhost:50051/test-org/test-repo
cd test-repo
run dolt branch -a
[ "$status" -eq 0 ]
[[ "$output" =~ "* main" ]] || false
[[ ! "$output" =~ " branch-one" ]] || false
[[ ! "$output" =~ " branch-two" ]] || false
[[ "$output" =~ "remotes/origin/main" ]] || false
[[ ! "$output" =~ "remotes/origin/branch-one" ]] || false
[[ ! "$output" =~ "remotes/origin/branch-two" ]] || false
}
@test "remotes: clone --branch specifies which branch to clone" {
create_three_remote_branches
cd dolt-repo-clones
dolt clone --branch branch-one http://localhost:50051/test-org/test-repo
cd test-repo
run dolt branch -a
[ "$status" -eq 0 ]
[[ "$output" =~ "* branch-one" ]] || false
[[ ! "$output" =~ " main" ]] || false
[[ ! "$output" =~ " branch-two" ]] || false
[[ "$output" =~ "remotes/origin/main" ]] || false
[[ "$output" =~ "remotes/origin/branch-one" ]] || false
[[ "$output" =~ "remotes/origin/branch-two" ]] || false
}
@test "remotes: clone --single-branch --branch does not create all remote refs" {
create_three_remote_branches
cd dolt-repo-clones
dolt clone --branch branch-one --single-branch http://localhost:50051/test-org/test-repo
cd test-repo
run dolt branch -a
[ "$status" -eq 0 ]
[[ "$output" =~ "* branch-one" ]] || false
[[ ! "$output" =~ " main" ]] || false
[[ ! "$output" =~ " branch-two" ]] || false
[[ ! "$output" =~ "remotes/origin/main" ]] || false
[[ "$output" =~ "remotes/origin/branch-one" ]] || false
[[ ! "$output" =~ "remotes/origin/branch-two" ]] || false
}
@test "remotes: fetch creates new remote refs for new remote branches" {
create_main_remote_branch

View File

@@ -17,9 +17,9 @@ teardown() {
cp -a $BATS_TEST_DIRNAME/helper/testEvents/* $BATS_TMPDIR/config-$$/.dolt/eventsData/
# kick off two child processes, one should lock the other out of the events dir
dolt send-metrics -output >file1.txt &
dolt send-metrics --output-format stdout >file1.txt &
pid1=$!
dolt send-metrics -output >file2.txt &
dolt send-metrics --output-format stdout >file2.txt &
pid2=$!
# wait for processes to finish
@@ -66,3 +66,73 @@ teardown() {
echo this block should not fire
return 1
}
@test "send-metrics: test event logging" {
DOLT_DISABLE_EVENT_FLUSH=true dolt sql -q "create table t1 (a int primary key, b int)"
DOLT_DISABLE_EVENT_FLUSH=true dolt sql -q "insert into t1 values (1, 2)"
DOLT_DISABLE_EVENT_FLUSH=true dolt ls
DOLT_DISABLE_EVENT_FLUSH=true dolt status
# output all the metrics data to stdout for examination
run dolt send-metrics --output-format stdout
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 4 ]
# output is randomly ordered, so we have to search for each line in it
sql_count=$(echo "$output" | grep -o "type:SQL" | wc -l)
ls_count=$(echo "$output" | grep -o "type:LS" | wc -l)
status_count=$(echo "$output" | grep -o "type:STATUS" | wc -l)
[ "$sql_count" -eq 2 ]
[ "$ls_count" -eq 1 ]
[ "$status_count" -eq 1 ]
# send metrics should be empty after this, since it deletes all old metrics
run dolt send-metrics --output-format stdout
[ "$status" -eq 0 ]
[ "$output" == "" ]
}
# TODO: we need a local metrics server here that we can spin up to verify the send actually works
# end-to-end
@test "send-metrics: grpc smoke test" {
DOLT_DISABLE_EVENT_FLUSH=true dolt sql -q "create table t1 (a int primary key, b int)"
DOLT_DISABLE_EVENT_FLUSH=true dolt sql -q "insert into t1 values (1, 2)"
DOLT_DISABLE_EVENT_FLUSH=true dolt ls
DOLT_DISABLE_EVENT_FLUSH=true dolt status
# point metrics at an unreachable host so the grpc send fails
dolt config --global --add metrics.host "fake.server"
run dolt send-metrics
[ "$status" -eq 1 ]
[[ "$output" =~ "Error flushing events" ]] || false
[[ "$output" =~ "fake.server" ]] || false
}
@test "send-metrics: sql-server heartbeat" {
# use the logger emitter so heartbeat events land in the server log for examination
DOLT_EVENTS_EMITTER=logger DOLT_SQL_SERVER_HEARTBEAT_INTERVAL=1s dolt sql-server -l debug > heartbeats.out 2>&1 &
server_pid=$!
sleep 5
kill $server_pid
cat heartbeats.out
wc=`grep SQL_SERVER_HEARTBEAT heartbeats.out | wc -l`
[ $wc -gt 0 ]
}
# TODO: we need a local metrics server here that we can spin up to verify the send actually works
# end-to-end
@test "send-metrics: sql-server grpc heartbeat smoketest" {
dolt config --global --add metrics.host "fake.server"
DOLT_SQL_SERVER_HEARTBEAT_INTERVAL=1s dolt sql-server -l debug > heartbeats.out 2>&1 &
server_pid=$!
sleep 5
kill $server_pid
wc=`grep 'failed to send heartbeat event' heartbeats.out | wc -l`
[ $wc -gt 0 ]
wc=`grep 'fake.server' heartbeats.out | wc -l`
[ $wc -gt 0 ]
}

View File

@@ -130,7 +130,7 @@ SQL
[ "$output" = "" ]
}
@test "sql-diff: output reconciles change to PRIMARY KEY field in row " {
@test "sql-diff: output reconciles change to PRIMARY KEY field in row" {
dolt checkout -b firstbranch
dolt sql <<SQL
CREATE TABLE test (

View File

@@ -1370,6 +1370,7 @@ data_dir: $DATA_DIR
dolt --port $PORT --host 0.0.0.0 --no-tls -u dolt --use-db repo1 sql -q "call dolt_fetch()"
}
# bats test_tags=no_lambda
@test "sql-server: run mysql from shell" {
skiponwindows "Has dependencies that are not installed on Windows CI"
if [[ `uname` == 'Darwin' ]]; then
@@ -1471,12 +1472,14 @@ data_dir: $DATA_DIR
SERVER_PID=$!
wait_for_connection $PORT 5000
cat log.txt
run dolt sql -q "select 1 as col1"
[ $status -eq 0 ]
[[ $output =~ col1 ]] || false
[[ $output =~ " 1 " ]] || false
run grep '\"/tmp/mysql.sock\"' log.txt
run grep '"/tmp/mysql.sock"' log.txt
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 1 ]
@@ -1504,7 +1507,7 @@ data_dir: $DATA_DIR
[[ $output =~ col1 ]] || false
[[ $output =~ " 1 " ]] || false
run grep '\"/tmp/mysql.sock\"' log.txt
run grep '"/tmp/mysql.sock"' log.txt
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 1 ]
}

View File

@@ -52,6 +52,7 @@ teardown() {
[[ "$output" =~ "test" ]] || false
}
# bats test_tags=no_lambda
@test "sql-shell: sql shell writes to disk after every iteration (autocommit)" {
skiponwindows "Need to install expect and make this script work on windows."
run $BATS_TEST_DIRNAME/sql-shell.expect
@@ -66,11 +67,13 @@ teardown() {
[[ "$output" =~ "+---------------------" ]] || false
}
# bats test_tags=no_lambda
@test "sql-shell: shell works after failing query" {
skiponwindows "Need to install expect and make this script work on windows."
$BATS_TEST_DIRNAME/sql-works-after-failing-query.expect
}
# bats test_tags=no_lambda
@test "sql-shell: empty DB in prompt is OK" {
skiponwindows "Need to install expect and make this script work on windows."
if [ "$SQL_ENGINE" = "remote-engine" ]; then
@@ -128,6 +131,7 @@ SQL
[[ $output =~ "112,111" ]] || false
}
# bats test_tags=no_lambda
@test "sql-shell: delimiter" {
skiponwindows "Need to install expect and make this script work on windows."
mkdir doltsql
@@ -154,6 +158,7 @@ SQL
rm -rf doltsql
}
# bats test_tags=no_lambda
@test "sql-shell: use databases" {
skiponwindows "Need to install expect and make this script work on windows."
mkdir doltsql

View File

@@ -2701,6 +2701,7 @@ SQL
[[ "$output" =~ "*************************** 14. row ***************************" ]] || false
}
# bats test_tags=no_lambda
@test "sql: vertical query format in sql shell" {
skiponwindows "Need to install expect and make this script work on windows."

View File

@@ -205,7 +205,7 @@ SQL
[[ "$output" =~ "3" ]] || false
}
@test "status: dolt reset --hard with more than one additional arg throws an error " {
@test "status: dolt reset --hard with more than one additional arg throws an error" {
run dolt reset --hard HEAD HEAD2
[ "$status" -eq 1 ]
}

View File

@@ -18,5 +18,5 @@ teardown() {
}
grep_for_testify() {
strings `which dolt` | grep testify
strings `type -p dolt` | grep testify
}

View File

@@ -5,7 +5,7 @@ go 1.21
require (
github.com/dolthub/dolt/go v0.40.4
github.com/google/uuid v1.3.0
github.com/stretchr/testify v1.8.2
github.com/stretchr/testify v1.8.3
golang.org/x/sync v0.3.0
gopkg.in/square/go-jose.v2 v2.5.1
gopkg.in/yaml.v3 v3.0.1

View File

@@ -1,6 +1,5 @@
github.com/creasty/defaults v1.6.0 h1:ltuE9cfphUtlrBeomuu8PEyISTXnxqkBIoQfXgv7BSc=
github.com/creasty/defaults v1.6.0/go.mod h1:iGzKe6pbEHnpMPtfDXZEr0NVxWnPTjb1bbDy08fPzYM=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/go-sql-driver/mysql v1.7.2-0.20230713085235-0b18dac46f7f h1:4+t8Qb99xUG/Ea00cQAiQl+gsjpK8ZYtAO8E76gRzQI=
@@ -11,13 +10,8 @@ github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
@@ -28,6 +22,5 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w=
gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -197,7 +197,7 @@ export const diffTests = [
to_name: "myview",
to_fragment: "CREATE VIEW `myview` AS SELECT * FROM test",
to_extra: { CreatedAt: 0 },
to_sql_mode: 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION,ONLY_FULL_GROUP_BY',
to_sql_mode: 'NO_ENGINE_SUBSTITUTION,ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES',
to_commit: "WORKING",
to_commit_date: "2023-03-09T07:56:29.035Z",
from_type: null,

View File

@@ -34,7 +34,7 @@ export const viewsTests = [
name: "myview",
fragment: "CREATE VIEW `myview` AS SELECT * FROM test",
extra: { CreatedAt: 0 },
sql_mode: 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION,ONLY_FULL_GROUP_BY',
sql_mode: 'NO_ENGINE_SUBSTITUTION,ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES',
},
],
},

View File

@@ -93,6 +93,7 @@ enum ClientEventType {
SHOW = 61;
PROFILE = 62;
REFLOG = 63;
SQL_SERVER_HEARTBEAT = 64;
}
enum MetricID {
@@ -111,4 +112,5 @@ enum AttributeID {
enum AppID {
APP_ID_UNSPECIFIED = 0;
APP_DOLT = 1;
APP_DOLTGRES = 2;
}