Merge main, resolve new event conflict

Zach Musgrave
2023-12-04 10:25:52 -08:00
37 changed files with 695 additions and 348 deletions

View File

@@ -280,6 +280,12 @@ func CreateCountCommitsArgParser() *argparser.ArgParser {
return ap
}
func CreateReflogArgParser() *argparser.ArgParser {
ap := argparser.NewArgParserWithMaxArgs("reflog", 1)
ap.SupportsFlag(AllFlag, "", "Show all refs, including hidden refs, such as DoltHub workspace refs")
return ap
}
func CreateGlobalArgParser(name string) *argparser.ArgParser {
ap := argparser.NewArgParserWithVariableArgs(name)
if name == "dolt" {

View File

@@ -0,0 +1,197 @@
// Copyright 2023 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commands
import (
"context"
"fmt"
"strings"
"github.com/dolthub/go-mysql-server/sql"
"github.com/gocraft/dbr/v2"
"github.com/gocraft/dbr/v2/dialect"
"github.com/dolthub/dolt/go/cmd/dolt/cli"
"github.com/dolthub/dolt/go/cmd/dolt/errhand"
eventsapi "github.com/dolthub/dolt/go/gen/proto/dolt/services/eventsapi/v1alpha1"
"github.com/dolthub/dolt/go/libraries/doltcore/env"
"github.com/dolthub/dolt/go/libraries/utils/argparser"
"github.com/dolthub/dolt/go/store/util/outputpager"
)
var reflogDocs = cli.CommandDocumentationContent{
ShortDesc: "Shows a history of named refs",
LongDesc: `Shows the history of named refs (e.g. branches and tags), which is useful for understanding how a branch
or tag changed over time to reference different commits, particularly for information not surfaced through {{.EmphasisLeft}}dolt log{{.EmphasisRight}}.
The data from Dolt's reflog comes from [Dolt's journaling chunk store](https://www.dolthub.com/blog/2023-03-08-dolt-chunk-journal/).
This data is local to a Dolt database and is never included when pushing, pulling, or cloning a Dolt database. This means a freshly cloned Dolt database has no reflog data until you perform operations that change which commits branches or tags reference.
Dolt's reflog is similar to [Git's reflog](https://git-scm.com/docs/git-reflog), but there are a few differences:
- The Dolt reflog currently only supports named references, such as branches and tags, and not any of Git's special refs (e.g. {{.EmphasisLeft}}HEAD{{.EmphasisRight}}, {{.EmphasisLeft}}FETCH-HEAD{{.EmphasisRight}}, {{.EmphasisLeft}}MERGE-HEAD{{.EmphasisRight}}).
- The Dolt reflog can be queried for the log of references, even after a reference has been deleted. In Git, once a branch or tag is deleted, its reflog is deleted too, and finding the last commit it pointed to requires digging through Git's special {{.EmphasisLeft}}HEAD{{.EmphasisRight}} reflog, which can be challenging. Dolt keeps the history for deleted refs, so you can easily see the last commit a branch or tag pointed to before it was deleted.`,
Synopsis: []string{
`[--all] {{.LessThan}}ref{{.GreaterThan}}`,
},
}
type ReflogCmd struct{}
// Name returns the name of the Dolt cli command. This is what is used on the command line to invoke the command.
func (cmd ReflogCmd) Name() string {
return "reflog"
}
// Description returns a description of the command
func (cmd ReflogCmd) Description() string {
return "Show history of named refs."
}
// EventType returns the type of the event to log
func (cmd ReflogCmd) EventType() eventsapi.ClientEventType {
return eventsapi.ClientEventType_REFLOG
}
func (cmd ReflogCmd) Docs() *cli.CommandDocumentation {
ap := cmd.ArgParser()
return cli.NewCommandDocumentation(reflogDocs, ap)
}
func (cmd ReflogCmd) ArgParser() *argparser.ArgParser {
return cli.CreateReflogArgParser()
}
func (cmd ReflogCmd) RequiresRepo() bool {
return false
}
// Exec executes the command
func (cmd ReflogCmd) Exec(ctx context.Context, commandStr string, args []string, dEnv *env.DoltEnv, cliCtx cli.CliContext) int {
ap := cmd.ArgParser()
help, usage := cli.HelpAndUsagePrinters(cli.CommandDocsForCommandString(commandStr, reflogDocs, ap))
apr := cli.ParseArgsOrDie(ap, args, help)
queryist, sqlCtx, closeFunc, err := cliCtx.QueryEngine(ctx)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
if closeFunc != nil {
defer closeFunc()
}
query, err := constructInterpolatedDoltReflogQuery(apr)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
rows, err := GetRowsForSql(queryist, sqlCtx, query)
if err != nil {
return HandleVErrAndExitCode(errhand.VerboseErrorFromError(err), usage)
}
return printReflog(rows, queryist, sqlCtx)
}
// constructInterpolatedDoltReflogQuery generates the SQL query necessary to call the DOLT_REFLOG() function.
// It interpolates the query to prevent SQL injection.
func constructInterpolatedDoltReflogQuery(apr *argparser.ArgParseResults) (string, error) {
var params []interface{}
var args []string
if apr.NArg() == 1 {
params = append(params, apr.Arg(0))
args = append(args, "?")
}
if apr.Contains(cli.AllFlag) {
args = append(args, "'--all'")
}
query := fmt.Sprintf("SELECT ref, commit_hash, commit_message FROM DOLT_REFLOG(%s)", strings.Join(args, ", "))
interpolatedQuery, err := dbr.InterpolateForDialect(query, params, dialect.MySQL)
if err != nil {
return "", err
}
return interpolatedQuery, nil
}
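As a rough illustration (not part of this commit), the helper above turns the parsed arguments into a parameterized query and lets dbr render it into a literal SQL string. A minimal, self-contained sketch of that behavior, assuming a ref argument of "main" plus the --all flag:
package main
import (
	"fmt"
	"github.com/gocraft/dbr/v2"
	"github.com/gocraft/dbr/v2/dialect"
)
func main() {
	// The ref is passed as a placeholder; the --all flag is appended as a literal.
	query := "SELECT ref, commit_hash, commit_message FROM DOLT_REFLOG(?, '--all')"
	interpolated, err := dbr.InterpolateForDialect(query, []interface{}{"main"}, dialect.MySQL)
	if err != nil {
		panic(err)
	}
	fmt.Println(interpolated)
	// Prints: SELECT ref, commit_hash, commit_message FROM DOLT_REFLOG('main', '--all')
}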
type ReflogInfo struct {
ref string
commitHash string
commitMessage string
}
// printReflog takes a list of SQL rows with columns ref, commit hash, and commit message, and prints the reflog to stdout.
func printReflog(rows []sql.Row, queryist cli.Queryist, sqlCtx *sql.Context) int {
var reflogInfo []ReflogInfo
// Get the hash of HEAD for the `HEAD ->` decoration
headHash := ""
res, err := GetRowsForSql(queryist, sqlCtx, "SELECT hashof('HEAD')")
if err == nil {
// if this fails, we still print the reflog, just without the HEAD decoration
headHash = res[0][0].(string)
}
for _, row := range rows {
ref := row[0].(string)
commitHash := row[1].(string)
commitMessage := row[2].(string)
reflogInfo = append(reflogInfo, ReflogInfo{ref, commitHash, commitMessage})
}
reflogToStdOut(reflogInfo, headHash)
return 0
}
// reflogToStdOut takes a list of ReflogInfo and prints the reflog to stdout
func reflogToStdOut(reflogInfo []ReflogInfo, headHash string) {
if cli.ExecuteWithStdioRestored == nil {
return
}
cli.ExecuteWithStdioRestored(func() {
pager := outputpager.Start()
defer pager.Stop()
for _, info := range reflogInfo {
// TODO: use short hash instead
line := []string{fmt.Sprintf("\033[33m%s\033[0m", info.commitHash)} // commit hash in yellow (33m)
processedRef := processRefForReflog(info.ref)
if headHash != "" && headHash == info.commitHash {
line = append(line, fmt.Sprintf("\033[33m(\033[36;1mHEAD -> %s\033[33m)\033[0m", processedRef)) // HEAD in cyan (36;1)
} else {
line = append(line, fmt.Sprintf("\033[33m(%s\033[33m)\033[0m", processedRef)) // () in yellow (33m)
}
line = append(line, fmt.Sprintf("%s\n", info.commitMessage))
pager.Writer.Write([]byte(strings.Join(line, " ")))
}
})
}
// processRefForReflog takes a full ref (e.g. refs/heads/master) or tag name and returns the ref name (e.g. master) with relevant decoration.
func processRefForReflog(fullRef string) string {
if strings.HasPrefix(fullRef, "refs/heads/") {
return fmt.Sprintf("\033[32;1m%s\033[0m", strings.TrimPrefix(fullRef, "refs/heads/")) // branch in green (32;1m)
} else if strings.HasPrefix(fullRef, "refs/tags/") {
return fmt.Sprintf("\033[33mtag: %s\033[0m", strings.TrimPrefix(fullRef, "refs/tags/")) // tag in yellow (33m)
} else if strings.HasPrefix(fullRef, "refs/remotes/") {
return fmt.Sprintf("\033[31;1m%s\033[0m", strings.TrimPrefix(fullRef, "refs/remotes/")) // remote in red (31;1m)
} else if strings.HasPrefix(fullRef, "refs/workspaces/") {
return fmt.Sprintf("\033[35;1mworkspace: %s\033[0m", strings.TrimPrefix(fullRef, "refs/workspaces/")) // workspace in magenta (35;1m)
} else {
return fullRef
}
}
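Put together, each line the pager emits is the commit hash, a decorated ref, and the commit message. With the ANSI color codes stripped (as the bats tests later in this commit do with sed), a line looks roughly like:
<commit hash> (HEAD -> main) initial commit
<commit hash> (tag: tag1) inserting row 3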

View File

@@ -65,7 +65,7 @@ import (
)
const (
Version = "1.28.2"
Version = "1.29.0"
)
var dumpDocsCommand = &commands.DumpDocsCmd{}
@@ -123,6 +123,7 @@ var doltSubCommands = []cli.Command{
&commands.Assist{},
commands.ProfileCmd{},
commands.QueryDiff{},
commands.ReflogCmd{},
}
var commandsWithoutCliCtx = []cli.Command{

View File

@@ -23,11 +23,10 @@
package eventsapi
import (
reflect "reflect"
sync "sync"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
@@ -155,7 +154,8 @@ const (
ClientEventType_STASH_POP ClientEventType = 60
ClientEventType_SHOW ClientEventType = 61
ClientEventType_PROFILE ClientEventType = 62
ClientEventType_SQL_SERVER_HEARTBEAT ClientEventType = 63
ClientEventType_REFLOG ClientEventType = 63
ClientEventType_SQL_SERVER_HEARTBEAT ClientEventType = 64
)
// Enum value maps for ClientEventType.
@@ -224,7 +224,8 @@ var (
60: "STASH_POP",
61: "SHOW",
62: "PROFILE",
63: "SQL_SERVER_HEARTBEAT",
63: "REFLOG",
64: "SQL_SERVER_HEARTBEAT",
}
ClientEventType_value = map[string]int32{
"TYPE_UNSPECIFIED": 0,
@@ -290,7 +291,8 @@ var (
"STASH_POP": 60,
"SHOW": 61,
"PROFILE": 62,
"SQL_SERVER_HEARTBEAT": 63,
"REFLOG": 63,
"SQL_SERVER_HEARTBEAT": 64,
}
)
@@ -481,7 +483,7 @@ var file_dolt_services_eventsapi_v1alpha1_event_constants_proto_rawDesc = []byte
0x52, 0x4d, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00,
0x12, 0x09, 0x0a, 0x05, 0x4c, 0x49, 0x4e, 0x55, 0x58, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x57,
0x49, 0x4e, 0x44, 0x4f, 0x57, 0x53, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x41, 0x52, 0x57,
0x49, 0x4e, 0x10, 0x03, 0x2a, 0x97, 0x08, 0x0a, 0x0f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x45,
0x49, 0x4e, 0x10, 0x03, 0x2a, 0xa3, 0x08, 0x0a, 0x0f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x45,
0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45,
0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08,
0x0a, 0x04, 0x49, 0x4e, 0x49, 0x54, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x41, 0x54,
@@ -545,29 +547,30 @@ var file_dolt_services_eventsapi_v1alpha1_event_constants_proto_rawDesc = []byte
0x0a, 0x0a, 0x53, 0x54, 0x41, 0x53, 0x48, 0x5f, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x3b, 0x12, 0x0d,
0x0a, 0x09, 0x53, 0x54, 0x41, 0x53, 0x48, 0x5f, 0x50, 0x4f, 0x50, 0x10, 0x3c, 0x12, 0x08, 0x0a,
0x04, 0x53, 0x48, 0x4f, 0x57, 0x10, 0x3d, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x52, 0x4f, 0x46, 0x49,
0x4c, 0x45, 0x10, 0x3e, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x51, 0x4c, 0x5f, 0x53, 0x45, 0x52, 0x56,
0x45, 0x52, 0x5f, 0x48, 0x45, 0x41, 0x52, 0x54, 0x42, 0x45, 0x41, 0x54, 0x10, 0x3f, 0x2a, 0x6a,
0x0a, 0x08, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x49, 0x44, 0x12, 0x16, 0x0a, 0x12, 0x4d, 0x45,
0x54, 0x52, 0x49, 0x43, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44,
0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x42, 0x59, 0x54, 0x45, 0x53, 0x5f, 0x44, 0x4f, 0x57, 0x4e,
0x4c, 0x4f, 0x41, 0x44, 0x45, 0x44, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x44, 0x4f, 0x57, 0x4e,
0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x4d, 0x53, 0x5f, 0x45, 0x4c, 0x41, 0x50, 0x53, 0x45, 0x44, 0x10,
0x02, 0x12, 0x17, 0x0a, 0x13, 0x52, 0x45, 0x4d, 0x4f, 0x54, 0x45, 0x41, 0x50, 0x49, 0x5f, 0x52,
0x50, 0x43, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x03, 0x2a, 0x45, 0x0a, 0x0b, 0x41, 0x74,
0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x49, 0x44, 0x12, 0x19, 0x0a, 0x15, 0x41, 0x54, 0x54,
0x52, 0x49, 0x42, 0x55, 0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49,
0x45, 0x44, 0x10, 0x00, 0x12, 0x15, 0x0a, 0x11, 0x52, 0x45, 0x4d, 0x4f, 0x54, 0x45, 0x5f, 0x55,
0x52, 0x4c, 0x5f, 0x53, 0x43, 0x48, 0x45, 0x4d, 0x45, 0x10, 0x02, 0x22, 0x04, 0x08, 0x01, 0x10,
0x01, 0x2a, 0x3f, 0x0a, 0x05, 0x41, 0x70, 0x70, 0x49, 0x44, 0x12, 0x16, 0x0a, 0x12, 0x41, 0x50,
0x50, 0x5f, 0x49, 0x44, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44,
0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x41, 0x50, 0x50, 0x5f, 0x44, 0x4f, 0x4c, 0x54, 0x10, 0x01,
0x12, 0x10, 0x0a, 0x0c, 0x41, 0x50, 0x50, 0x5f, 0x44, 0x4f, 0x4c, 0x54, 0x47, 0x52, 0x45, 0x53,
0x10, 0x02, 0x42, 0x51, 0x5a, 0x4f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d,
0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x68, 0x75, 0x62, 0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x2f, 0x67, 0x6f,
0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x2f,
0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x61,
0x70, 0x69, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x76, 0x65, 0x6e,
0x74, 0x73, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x4c, 0x45, 0x10, 0x3e, 0x12, 0x0a, 0x0a, 0x06, 0x52, 0x45, 0x46, 0x4c, 0x4f, 0x47, 0x10, 0x3f,
0x12, 0x18, 0x0a, 0x14, 0x53, 0x51, 0x4c, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x45, 0x52, 0x5f, 0x48,
0x45, 0x41, 0x52, 0x54, 0x42, 0x45, 0x41, 0x54, 0x10, 0x40, 0x2a, 0x6a, 0x0a, 0x08, 0x4d, 0x65,
0x74, 0x72, 0x69, 0x63, 0x49, 0x44, 0x12, 0x16, 0x0a, 0x12, 0x4d, 0x45, 0x54, 0x52, 0x49, 0x43,
0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x14,
0x0a, 0x10, 0x42, 0x59, 0x54, 0x45, 0x53, 0x5f, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44,
0x45, 0x44, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44,
0x5f, 0x4d, 0x53, 0x5f, 0x45, 0x4c, 0x41, 0x50, 0x53, 0x45, 0x44, 0x10, 0x02, 0x12, 0x17, 0x0a,
0x13, 0x52, 0x45, 0x4d, 0x4f, 0x54, 0x45, 0x41, 0x50, 0x49, 0x5f, 0x52, 0x50, 0x43, 0x5f, 0x45,
0x52, 0x52, 0x4f, 0x52, 0x10, 0x03, 0x2a, 0x45, 0x0a, 0x0b, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62,
0x75, 0x74, 0x65, 0x49, 0x44, 0x12, 0x19, 0x0a, 0x15, 0x41, 0x54, 0x54, 0x52, 0x49, 0x42, 0x55,
0x54, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00,
0x12, 0x15, 0x0a, 0x11, 0x52, 0x45, 0x4d, 0x4f, 0x54, 0x45, 0x5f, 0x55, 0x52, 0x4c, 0x5f, 0x53,
0x43, 0x48, 0x45, 0x4d, 0x45, 0x10, 0x02, 0x22, 0x04, 0x08, 0x01, 0x10, 0x01, 0x2a, 0x3f, 0x0a,
0x05, 0x41, 0x70, 0x70, 0x49, 0x44, 0x12, 0x16, 0x0a, 0x12, 0x41, 0x50, 0x50, 0x5f, 0x49, 0x44,
0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0c,
0x0a, 0x08, 0x41, 0x50, 0x50, 0x5f, 0x44, 0x4f, 0x4c, 0x54, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c,
0x41, 0x50, 0x50, 0x5f, 0x44, 0x4f, 0x4c, 0x54, 0x47, 0x52, 0x45, 0x53, 0x10, 0x02, 0x42, 0x51,
0x5a, 0x4f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x6f, 0x6c,
0x74, 0x68, 0x75, 0x62, 0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x2f, 0x67, 0x6f, 0x2f, 0x67, 0x65, 0x6e,
0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x64, 0x6f, 0x6c, 0x74, 0x2f, 0x73, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x73, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x61, 0x70, 0x69, 0x2f, 0x76,
0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x61, 0x70,
0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (

View File

@@ -57,7 +57,7 @@ require (
github.com/cespare/xxhash v1.1.0
github.com/creasty/defaults v1.6.0
github.com/dolthub/flatbuffers/v23 v23.3.3-dh.2
github.com/dolthub/go-mysql-server v0.17.1-0.20231201021351-97a2867b8225
github.com/dolthub/go-mysql-server v0.17.1-0.20231201211641-8889517a6d60
github.com/dolthub/swiss v0.1.0
github.com/goccy/go-json v0.10.2
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510

View File

@@ -181,8 +181,8 @@ github.com/dolthub/fslock v0.0.3 h1:iLMpUIvJKMKm92+N1fmHVdxJP5NdyDK5bK7z7Ba2s2U=
github.com/dolthub/fslock v0.0.3/go.mod h1:QWql+P17oAAMLnL4HGB5tiovtDuAjdDTPbuqx7bYfa0=
github.com/dolthub/go-icu-regex v0.0.0-20230524105445-af7e7991c97e h1:kPsT4a47cw1+y/N5SSCkma7FhAPw7KeGmD6c9PBZW9Y=
github.com/dolthub/go-icu-regex v0.0.0-20230524105445-af7e7991c97e/go.mod h1:KPUcpx070QOfJK1gNe0zx4pA5sicIK1GMikIGLKC168=
github.com/dolthub/go-mysql-server v0.17.1-0.20231201021351-97a2867b8225 h1:6+FqniHHZRWNfuXtIlvSLUWoZhJEixyWYcgSsPlT69U=
github.com/dolthub/go-mysql-server v0.17.1-0.20231201021351-97a2867b8225/go.mod h1:vXlRKS39WHav9N51VsfYphKhmSA2t5FkhHmW3BtwH5I=
github.com/dolthub/go-mysql-server v0.17.1-0.20231201211641-8889517a6d60 h1:4oj5xEB5anIEfFa2PXSMnb2jELIcnB4mexIeozmKyZM=
github.com/dolthub/go-mysql-server v0.17.1-0.20231201211641-8889517a6d60/go.mod h1:vXlRKS39WHav9N51VsfYphKhmSA2t5FkhHmW3BtwH5I=
github.com/dolthub/ishell v0.0.0-20221214210346-d7db0b066488 h1:0HHu0GWJH0N6a6keStrHhUAK5/o9LVfkh44pvsV4514=
github.com/dolthub/ishell v0.0.0-20221214210346-d7db0b066488/go.mod h1:ehexgi1mPxRTk0Mok/pADALuHbvATulTh6gzr7NzZto=
github.com/dolthub/jsonpath v0.0.2-0.20230525180605-8dc13778fd72 h1:NfWmngMi1CYUWU4Ix8wM+USEhjc+mhPlT9JUR/anvbQ=

View File

@@ -154,10 +154,10 @@ func testGarbageCollection(t *testing.T, test gcTest) {
// In September 2023, we found a failure to handle the `hasCache` in
// `*NomsBlockStore` appropriately while cleaning up a memtable into which
// dangling references had been written could result in writing chunks to a
// database which referenced non-existant chunks.
// database which referenced non-existent chunks.
//
// The general pattern was to get new chunk addresses into the hasCache, but
// not written to the store, and then to have an incoming chunk add a refenece
// not written to the store, and then to have an incoming chunk add a reference
// to missing chunk. At that time, we would clear the memtable, since it had
// invalid chunks in it, but we wouldn't purge the hasCache. Later writes which
// attempted to reference the chunks which had made it into the hasCache would

View File

@@ -79,15 +79,13 @@ func (si *ServerInterceptor) authenticate(ctx context.Context) error {
ctx, err := si.AccessController.ApiAuthenticate(ctx)
if err != nil {
si.Lgr.Warnf("authentication failed: %s", err.Error())
status.Error(codes.Unauthenticated, "unauthenticated")
return err
return status.Error(codes.Unauthenticated, err.Error())
}
// Have a valid user in the context. Check authorization.
if authorized, err := si.AccessController.ApiAuthorize(ctx); !authorized {
si.Lgr.Warnf("authorization failed: %s", err.Error())
status.Error(codes.PermissionDenied, "unauthorized")
return err
return status.Error(codes.PermissionDenied, err.Error())
}
// Access Granted.
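The fix above matters because the interceptor previously constructed a gRPC status error and then discarded it, returning the raw error instead; callers now receive codes.Unauthenticated or codes.PermissionDenied on the wire. A hypothetical client-side sketch (not from this repository) of why the propagated code is useful:
package main
import (
	"fmt"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)
// handleRPCErr shows how a client can branch on the status code that the
// interceptor now propagates.
func handleRPCErr(err error) {
	st, ok := status.FromError(err)
	if !ok {
		fmt.Println("non-gRPC error:", err)
		return
	}
	switch st.Code() {
	case codes.Unauthenticated:
		fmt.Println("credentials missing or invalid:", st.Message())
	case codes.PermissionDenied:
		fmt.Println("authenticated but not authorized:", st.Message())
	default:
		fmt.Println("rpc failed:", st.Code(), st.Message())
	}
}
func main() {
	handleRPCErr(status.Error(codes.Unauthenticated, "token expired"))
}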

View File

@@ -1015,9 +1015,11 @@ func (dcs *DoltChunkStore) uploadTableFileWithRetries(ctx context.Context, table
req := &remotesapi.GetUploadLocsRequest{RepoId: id, RepoToken: token, RepoPath: dcs.repoPath, TableFileDetails: []*remotesapi.TableFileDetails{tbfd}}
resp, err := dcs.csClient.GetUploadLocations(ctx, req)
if err != nil {
if err != nil {
return NewRpcError(err, "GetUploadLocations", dcs.host, req)
err := NewRpcError(err, "GetUploadLocations", dcs.host, req)
if err.IsPermanent() {
return backoff.Permanent(err)
}
return err
}
if resp.RepoToken != "" {

View File

@@ -38,6 +38,10 @@ func (rpce *RpcError) Error() string {
return rpce.originalErrMsg
}
func (rpce *RpcError) IsPermanent() bool {
return statusCodeIsPermanentError(rpce.status)
}
func (rpce *RpcError) FullDetails() string {
jsonStr, _ := GetJsonEncodedRequest(rpce)
return rpce.originalErrMsg + "\nhost:" + rpce.host + "\nrpc: " + rpce.rpc + "\nparams:" + jsonStr

View File

@@ -46,12 +46,18 @@ func processHttpResp(resp *http.Response, err error) error {
// ProcessGrpcErr converts an error from a Grpc call into a RetriableCallState
func processGrpcErr(err error) error {
st, ok := status.FromError(err)
if !ok {
return err
st, _ := status.FromError(err)
if statusCodeIsPermanentError(st) {
return backoff.Permanent(err)
}
return err
}
switch st.Code() {
func statusCodeIsPermanentError(s *status.Status) bool {
if s == nil {
return false
}
switch s.Code() {
case codes.InvalidArgument,
codes.NotFound,
codes.AlreadyExists,
@@ -60,8 +66,7 @@ func processGrpcErr(err error) error {
codes.Unimplemented,
codes.OutOfRange,
codes.Unauthenticated:
return backoff.Permanent(err)
return true
}
return err
return false
}
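statusCodeIsPermanentError centralizes the classification so that both processGrpcErr and RpcError.IsPermanent agree on which gRPC codes are not worth retrying. A minimal sketch of how backoff.Permanent interacts with a retry loop, assuming the github.com/cenkalti/backoff/v4 module that supplies backoff.Permanent in this diff:
package main
import (
	"errors"
	"fmt"
	"github.com/cenkalti/backoff/v4"
)
func main() {
	attempts := 0
	op := func() error {
		attempts++
		if attempts < 3 {
			// Transient failure: backoff.Retry waits and tries again.
			return errors.New("temporarily unavailable")
		}
		// Wrapping the error stops the retry loop immediately, which is what
		// processGrpcErr does for codes like NotFound or Unauthenticated.
		return backoff.Permanent(errors.New("unauthenticated"))
	}
	err := backoff.Retry(op, backoff.NewExponentialBackOff())
	fmt.Println("attempts:", attempts, "err:", err)
}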

View File

@@ -45,8 +45,6 @@ type DiffStatTableFunction struct {
dotCommitExpr sql.Expression
tableNameExpr sql.Expression
database sql.Database
tabId sql.TableId
colset sql.ColSet
}
var diffStatTableSchema = sql.Schema{
@@ -79,26 +77,6 @@ func (ds *DiffStatTableFunction) NewInstance(ctx *sql.Context, db sql.Database,
return node, nil
}
func (ds *DiffStatTableFunction) WithId(id sql.TableId) sql.TableIdNode {
ret := *ds
ret.tabId = id
return &ret
}
func (ds *DiffStatTableFunction) Id() sql.TableId {
return ds.tabId
}
func (ds *DiffStatTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *ds
ds.colset = set
return &ret
}
func (ds *DiffStatTableFunction) Columns() sql.ColSet {
return ds.colset
}
func (ds *DiffStatTableFunction) DataLength(ctx *sql.Context) (uint64, error) {
numBytesPerRow := schema.SchemaAvgLength(ds.Schema())
numRows, _, err := ds.RowCount(ctx)

View File

@@ -42,8 +42,6 @@ type DiffSummaryTableFunction struct {
dotCommitExpr sql.Expression
tableNameExpr sql.Expression
database sql.Database
tabId sql.TableId
colset sql.ColSet
}
var diffSummaryTableSchema = sql.Schema{
@@ -69,26 +67,6 @@ func (ds *DiffSummaryTableFunction) NewInstance(ctx *sql.Context, db sql.Databas
return node, nil
}
func (ds *DiffSummaryTableFunction) WithId(id sql.TableId) sql.TableIdNode {
ret := *ds
ret.tabId = id
return &ret
}
func (ds *DiffSummaryTableFunction) Id() sql.TableId {
return ds.tabId
}
func (ds *DiffSummaryTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *ds
ret.colset = set
return &ret
}
func (ds *DiffSummaryTableFunction) Columns() sql.ColSet {
return ds.colset
}
func (ds *DiffSummaryTableFunction) DataLength(ctx *sql.Context) (uint64, error) {
numBytesPerRow := schema.SchemaAvgLength(ds.Schema())
numRows, _, err := ds.RowCount(ctx)

View File

@@ -54,29 +54,6 @@ type DiffTableFunction struct {
tableDelta diff.TableDelta
fromDate *types.Timestamp
toDate *types.Timestamp
tabId sql.TableId
colset sql.ColSet
}
func (dtf *DiffTableFunction) WithId(id sql.TableId) sql.TableIdNode {
ret := *dtf
ret.tabId = id
return &ret
}
func (dtf *DiffTableFunction) Id() sql.TableId {
return dtf.tabId
}
func (dtf *DiffTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *dtf
ret.colset = set
return &ret
}
func (dtf *DiffTableFunction) Columns() sql.ColSet {
return dtf.colset
}
// NewInstance creates a new instance of TableFunction interface

View File

@@ -48,9 +48,6 @@ type LogTableFunction struct {
showParents bool
decoration string
tabId sql.TableId
colset sql.ColSet
database sql.Database
}
@@ -77,26 +74,6 @@ func (ltf *LogTableFunction) NewInstance(ctx *sql.Context, db sql.Database, expr
return node, nil
}
func (ltf *LogTableFunction) WithId(id sql.TableId) sql.TableIdNode {
ret := *ltf
ret.tabId = id
return &ret
}
func (ltf *LogTableFunction) Id() sql.TableId {
return ltf.tabId
}
func (ltf *LogTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *ltf
ret.colset = set
return &ret
}
func (ltf *LogTableFunction) Columns() sql.ColSet {
return ltf.colset
}
// Database implements the sql.Databaser interface
func (ltf *LogTableFunction) Database() sql.Database {
return ltf.database

View File

@@ -74,28 +74,6 @@ type PatchTableFunction struct {
dotCommitExpr sql.Expression
tableNameExpr sql.Expression
database sql.Database
tabId sql.TableId
colset sql.ColSet
}
func (p *PatchTableFunction) WithId(id sql.TableId) sql.TableIdNode {
ret := *p
ret.tabId = id
return &ret
}
func (p *PatchTableFunction) Id() sql.TableId {
return p.tabId
}
func (p *PatchTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *p
ret.colset = set
return &ret
}
func (p *PatchTableFunction) Columns() sql.ColSet {
return p.colset
}
func (p *PatchTableFunction) DataLength(ctx *sql.Context) (uint64, error) {

View File

@@ -45,29 +45,6 @@ type QueryDiffTableFunction struct {
rowIter2 sql.RowIter
schema1 sql.Schema
schema2 sql.Schema
tabId sql.TableId
colset sql.ColSet
}
func (tf *QueryDiffTableFunction) WithId(id sql.TableId) sql.TableIdNode {
ret := *tf
ret.tabId = id
return &ret
}
func (tf *QueryDiffTableFunction) Id() sql.TableId {
return tf.tabId
}
func (tf *QueryDiffTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *tf
ret.colset = set
return &ret
}
func (tf *QueryDiffTableFunction) Columns() sql.ColSet {
return tf.colset
}
// NewInstance creates a new instance of TableFunction interface

View File

@@ -60,9 +60,6 @@ type SchemaDiffTableFunction struct {
tableNameExpr sql.Expression
database sql.Database
tabId sql.TableId
colset sql.ColSet
}
var schemaDiffTableSchema = sql.Schema{
@@ -87,25 +84,6 @@ func (ds *SchemaDiffTableFunction) NewInstance(ctx *sql.Context, db sql.Database
return node, nil
}
func (ds *SchemaDiffTableFunction) WithId(id sql.TableId) sql.TableIdNode {
//TODO implement me
panic("implement me")
}
func (ds *SchemaDiffTableFunction) Id() sql.TableId {
return ds.tabId
}
func (ds *SchemaDiffTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *ds
ds.colset = set
return &ret
}
func (ds *SchemaDiffTableFunction) Columns() sql.ColSet {
return ds.colset
}
func (ds *SchemaDiffTableFunction) DataLength(ctx *sql.Context) (uint64, error) {
numBytesPerRow := schema.SchemaAvgLength(ds.Schema())
numRows, _, err := ds.RowCount(ctx)

View File

@@ -1984,7 +1984,7 @@ func TestBrokenSystemTableQueries(t *testing.T) {
}
func TestHistorySystemTable(t *testing.T) {
harness := newDoltHarness(t).WithParallelism(1)
harness := newDoltHarness(t).WithParallelism(2)
defer harness.Close()
harness.Setup(setup.MydbData)
for _, test := range HistorySystemTableScriptTests {

View File

@@ -4317,7 +4317,7 @@ var DoltReflogTestScripts = []queries.ScriptTest{
Assertions: []queries.ScriptTestAssertion{
{
Query: "select * from dolt_reflog('foo', 'bar');",
ExpectedErrStr: "function 'dolt_reflog' expected 0 or 1 arguments, 2 received",
ExpectedErrStr: "error: dolt_reflog has too many positional arguments. Expected at most 1, found 2: ['foo' 'bar']",
},
{
Query: "select * from dolt_reflog(NULL);",

View File

@@ -419,7 +419,6 @@ func (ht *HistoryTable) Collation() sql.CollationID {
// Partitions returns a PartitionIter which will be used in getting partitions each of which is used to create RowIter.
func (ht *HistoryTable) Partitions(ctx *sql.Context) (sql.PartitionIter, error) {
// TODO reset ht.cmItr on close
iter, err := ht.filterIter(ctx, ht.cmItr)
if err != nil {
return nil, err

View File

@@ -29,11 +29,9 @@ import (
)
type ReflogTableFunction struct {
ctx *sql.Context
database sql.Database
refExpr sql.Expression
tabId sql.TableId
colset sql.ColSet
ctx *sql.Context
database sql.Database
refAndArgExprs []sql.Expression
}
var _ sql.TableFunction = (*ReflogTableFunction)(nil)
@@ -60,26 +58,6 @@ func (rltf *ReflogTableFunction) NewInstance(ctx *sql.Context, database sql.Data
return node, nil
}
func (rltf *ReflogTableFunction) WithId(id sql.TableId) sql.TableIdNode {
ret := *rltf
ret.tabId = id
return &ret
}
func (rltf *ReflogTableFunction) Id() sql.TableId {
return rltf.tabId
}
func (rltf *ReflogTableFunction) WithColumns(set sql.ColSet) sql.TableIdNode {
ret := *rltf
ret.colset = set
return &ret
}
func (rltf *ReflogTableFunction) Columns() sql.ColSet {
return rltf.colset
}
func (rltf *ReflogTableFunction) RowIter(ctx *sql.Context, row sql.Row) (sql.RowIter, error) {
sqlDb, ok := rltf.database.(dsess.SqlDatabase)
if !ok {
@@ -87,17 +65,30 @@ func (rltf *ReflogTableFunction) RowIter(ctx *sql.Context, row sql.Row) (sql.Row
}
var refName string
if rltf.refExpr != nil {
target, err := rltf.refExpr.Eval(ctx, row)
showAll := false
for _, expr := range rltf.refAndArgExprs {
target, err := expr.Eval(ctx, row)
if err != nil {
return nil, fmt.Errorf("error evaluating expression (%s): %s",
rltf.refExpr.String(), err.Error())
expr.String(), err.Error())
}
refName, ok = target.(string)
targetStr, ok := target.(string)
if !ok {
return nil, fmt.Errorf("argument (%v) is not a string value, but a %T", target, target)
}
if targetStr == "--all" {
if showAll {
return nil, fmt.Errorf("error: multiple values provided for `all`")
}
showAll = true
} else {
if refName != "" {
return nil, fmt.Errorf("error: %s has too many positional arguments. Expected at most %d, found %d: %s",
rltf.Name(), 1, 2, rltf.refAndArgExprs)
}
refName = targetStr
}
}
ddb := sqlDb.DbData().Ddb
@@ -131,9 +122,15 @@ func (rltf *ReflogTableFunction) RowIter(ctx *sql.Context, row sql.Row) (sql.Row
if doltRef.GetType() == ref.InternalRefType {
return nil
}
// skip workspace refs by default
if doltRef.GetType() == ref.WorkspaceRefType {
if !showAll {
return nil
}
}
// If a ref expression to filter on was specified, see if we match the current ref
if rltf.refExpr != nil {
if refName != "" {
// If the caller has supplied a branch or tag name, without the fully qualified ref path,
// take the first match and use that as the canonical ref to filter on
if strings.HasSuffix(strings.ToLower(id), "/"+strings.ToLower(refName)) {
@@ -194,14 +191,21 @@ func (rltf *ReflogTableFunction) Schema() sql.Schema {
}
func (rltf *ReflogTableFunction) Resolved() bool {
if rltf.refExpr != nil {
return rltf.refExpr.Resolved()
for _, expr := range rltf.refAndArgExprs {
if !expr.Resolved() {
return false
}
}
return true
}
func (rltf *ReflogTableFunction) String() string {
return fmt.Sprintf("DOLT_REFLOG(%s)", rltf.refExpr.String())
var args []string
for _, expr := range rltf.refAndArgExprs {
args = append(args, expr.String())
}
return fmt.Sprintf("DOLT_REFLOG(%s)", strings.Join(args, ", "))
}
func (rltf *ReflogTableFunction) Children() []sql.Node {
@@ -226,21 +230,17 @@ func (rltf *ReflogTableFunction) IsReadOnly() bool {
}
func (rltf *ReflogTableFunction) Expressions() []sql.Expression {
if rltf.refExpr != nil {
return []sql.Expression{rltf.refExpr}
}
return []sql.Expression{}
return rltf.refAndArgExprs
}
func (rltf *ReflogTableFunction) WithExpressions(expression ...sql.Expression) (sql.Node, error) {
if len(expression) > 1 {
return nil, sql.ErrInvalidArgumentNumber.New(rltf.Name(), "0 or 1", len(expression))
if len(expression) > 2 {
return nil, sql.ErrInvalidArgumentNumber.New(rltf.Name(), "0 to 2", len(expression))
}
new := *rltf
if len(expression) > 0 {
new.refExpr = expression[0]
}
new.refAndArgExprs = expression
return &new, nil
}

View File

@@ -48,7 +48,7 @@ func TestStrSet(t *testing.T) {
t.Error("Set doesn't match expectation after removes", strSet.AsSlice())
}
strSet.Remove("non-existant string")
strSet.Remove("non-existent string")
if !isAsExpected(strSet, []string{"a", "c", "e"}) {
t.Error("Set doesn't match expectation after noop remove", strSet.AsSlice())

View File

@@ -94,7 +94,7 @@ type AppliedEditStats struct {
// Deletions counts the number of items deleted from the map
Deletions int64
// NonexistantDeletes counts the number of items where a deletion was attempted, but the key didn't exist in the map
// NonexistentDeletes counts the number of items where a deletion was attempted, but the key didn't exist in the map
// so there was no impact
NonExistentDeletes int64
}

View File

@@ -680,7 +680,7 @@ DELIM
[[ "$output" =~ "PRIMARY KEY (\`pk\`)" ]] || false
}
@test "1pk5col-ints: dolt schema show on non existant table" {
@test "1pk5col-ints: dolt schema show on non existent table" {
run dolt schema show foo
[ "$status" -eq 0 ]
[ "$output" = "foo not found" ]

View File

@@ -9,7 +9,7 @@ teardown() {
teardown_common
}
@test "conflict-detection: merge non-existant branch errors" {
@test "conflict-detection: merge non-existent branch errors" {
run dolt merge batmans-parents
[ $status -eq 1 ]
[[ "$output" =~ "branch not found" ]] || false

View File

@@ -139,7 +139,7 @@ SQL
[[ "${lines[3]}" =~ ' 4 ' ]] || false
}
@test "create-views: cannot create view referencing non-existant table" {
@test "create-views: cannot create view referencing non-existent table" {
run dolt sql <<SQL
create view broken as select id from my_users;
SQL

View File

@@ -132,7 +132,7 @@ teardown() {
[ "$output" = "Already on branch 'main'" ]
}
@test "empty-repo: dolt checkout non-existant branch" {
@test "empty-repo: dolt checkout non-existent branch" {
run dolt checkout foo
[ "$status" -ne 0 ]
[ "$output" = "error: could not find foo" ]

View File

@@ -135,6 +135,7 @@ SKIP_SERVER_TESTS=$(cat <<-EOM
~cli-hosted.bats~
~profile.bats~
~ls.bats~
~reflog.bats~
EOM
)

View File

@@ -227,7 +227,7 @@ CSV
[[ "$output" =~ "reserved" ]] || false
}
@test "import-create-tables: try to table import with nonexistant --pk arg" {
@test "import-create-tables: try to table import with nonexistent --pk arg" {
run dolt table import -c -pk="batmansparents" test 1pk5col-ints.csv
[ "$status" -eq 1 ]
[[ "$output" =~ "Error determining the output schema." ]] || false
@@ -235,7 +235,7 @@ CSV
[[ "$output" =~ "column 'batmansparents' not found" ]] || false
}
@test "import-create-tables: try to table import with one valid and one nonexistant --pk arg" {
@test "import-create-tables: try to table import with one valid and one nonexistent --pk arg" {
run dolt table import -c -pk="pk,batmansparents" test 1pk5col-ints.csv
[ "$status" -eq 1 ]
[[ "$output" =~ "Error determining the output schema." ]] || false

View File

@@ -3,10 +3,10 @@ load $BATS_TEST_DIRNAME/helper/common.bash
setup() {
setup_common
dolt sql -q "create table t1 (pk int PRIMARY KEY)"
dolt commit -Am "create table t1"
dolt sql -q "create table t2 (pk int PRIMARY KEY)"
dolt commit -Am "create table t2"
dolt sql -q "create table table_one (pk int PRIMARY KEY)"
dolt commit -Am "create table table_one"
dolt sql -q "create table table_two (pk int PRIMARY KEY)"
dolt commit -Am "create table table_two"
}
teardown() {
@@ -18,42 +18,42 @@ teardown() {
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
[[ "$output" =~ "Tables in working set:" ]] || false
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "table_two" ]] || false
}
@test "ls: ls includes unstaged table" {
dolt sql -q "create table t3 (pk int PRIMARY KEY)"
dolt sql -q "create table table_three (pk int PRIMARY KEY)"
run dolt ls
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 4 ]
[[ "$output" =~ "Tables in working set:" ]] || false
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "t3" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "table_two" ]] || false
[[ "$output" =~ "table_three" ]] || false
}
@test "ls: ls includes staged table" {
dolt sql -q "create table t3 (pk int PRIMARY KEY)"
dolt sql -q "create table table_three (pk int PRIMARY KEY)"
dolt add .
run dolt ls
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 4 ]
[[ "$output" =~ "Tables in working set:" ]] || false
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "t3" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "table_two" ]] || false
[[ "$output" =~ "table_three" ]] || false
}
@test "ls: --verbose shows row count" {
dolt sql -q "insert into t1 values (1), (2), (3)"
dolt sql -q "insert into table_one values (1), (2), (3)"
run dolt ls --verbose
[ "$status" -eq 0 ]
[[ "$output" =~ "Tables in working set:" ]] || false
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "3 rows" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "table_two" ]] || false
[[ "$output" =~ "0 rows" ]] || false
}
@@ -71,24 +71,24 @@ teardown() {
[[ "$output" =~ "dolt_remotes" ]] || false
[[ "$output" =~ "dolt_branches" ]] || false
[[ "$output" =~ "dolt_remote_branches" ]] || false
[[ "$output" =~ "dolt_constraint_violations_t1" ]] || false
[[ "$output" =~ "dolt_history_t1" ]] || false
[[ "$output" =~ "dolt_conflicts_t1" ]] || false
[[ "$output" =~ "dolt_diff_t1" ]] || false
[[ "$output" =~ "dolt_commit_diff_t1" ]] || false
[[ "$output" =~ "dolt_constraint_violations_t2" ]] || false
[[ "$output" =~ "dolt_history_t2" ]] || false
[[ "$output" =~ "dolt_conflicts_t2" ]] || false
[[ "$output" =~ "dolt_diff_t2" ]] || false
[[ "$output" =~ "dolt_commit_diff_t2" ]] || false
[[ "$output" =~ "dolt_constraint_violations_table_one" ]] || false
[[ "$output" =~ "dolt_history_table_one" ]] || false
[[ "$output" =~ "dolt_conflicts_table_one" ]] || false
[[ "$output" =~ "dolt_diff_table_one" ]] || false
[[ "$output" =~ "dolt_commit_diff_table_one" ]] || false
[[ "$output" =~ "dolt_constraint_violations_table_two" ]] || false
[[ "$output" =~ "dolt_history_table_two" ]] || false
[[ "$output" =~ "dolt_conflicts_table_two" ]] || false
[[ "$output" =~ "dolt_diff_table_two" ]] || false
[[ "$output" =~ "dolt_commit_diff_table_two" ]] || false
}
@test "ls: --all shows tables in working set and system tables" {
run dolt ls --all
[ "$status" -eq 0 ]
[[ "$output" =~ "Tables in working set:" ]] || false
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "table_two" ]] || false
[[ "$output" =~ "System tables:" ]] || false
[[ "$output" =~ "dolt_status" ]] || false
[[ "$output" =~ "dolt_commits" ]] || false
@@ -99,27 +99,27 @@ teardown() {
[[ "$output" =~ "dolt_remotes" ]] || false
[[ "$output" =~ "dolt_branches" ]] || false
[[ "$output" =~ "dolt_remote_branches" ]] || false
[[ "$output" =~ "dolt_constraint_violations_t1" ]] || false
[[ "$output" =~ "dolt_history_t1" ]] || false
[[ "$output" =~ "dolt_conflicts_t1" ]] || false
[[ "$output" =~ "dolt_diff_t1" ]] || false
[[ "$output" =~ "dolt_commit_diff_t1" ]] || false
[[ "$output" =~ "dolt_constraint_violations_t2" ]] || false
[[ "$output" =~ "dolt_history_t2" ]] || false
[[ "$output" =~ "dolt_conflicts_t2" ]] || false
[[ "$output" =~ "dolt_diff_t2" ]] || false
[[ "$output" =~ "dolt_commit_diff_t2" ]] || false
[[ "$output" =~ "dolt_constraint_violations_table_one" ]] || false
[[ "$output" =~ "dolt_history_table_one" ]] || false
[[ "$output" =~ "dolt_conflicts_table_one" ]] || false
[[ "$output" =~ "dolt_diff_table_one" ]] || false
[[ "$output" =~ "dolt_commit_diff_table_one" ]] || false
[[ "$output" =~ "dolt_constraint_violations_table_two" ]] || false
[[ "$output" =~ "dolt_history_table_two" ]] || false
[[ "$output" =~ "dolt_conflicts_table_two" ]] || false
[[ "$output" =~ "dolt_diff_table_two" ]] || false
[[ "$output" =~ "dolt_commit_diff_table_two" ]] || false
}
@test "ls: --all and --verbose shows row count for tables in working set" {
dolt sql -q "insert into t1 values (1), (2), (3)"
dolt sql -q "insert into table_one values (1), (2), (3)"
run dolt ls --all --verbose
[ "$status" -eq 0 ]
[[ "$output" =~ "Tables in working set:" ]] || false
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "3 rows" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "table_two" ]] || false
[[ "$output" =~ "0 rows" ]] || false
[[ "$output" =~ "System tables:" ]] || false
[[ "$output" =~ "dolt_status" ]] || false
@@ -131,16 +131,16 @@ teardown() {
[[ "$output" =~ "dolt_remotes" ]] || false
[[ "$output" =~ "dolt_branches" ]] || false
[[ "$output" =~ "dolt_remote_branches" ]] || false
[[ "$output" =~ "dolt_constraint_violations_t1" ]] || false
[[ "$output" =~ "dolt_history_t1" ]] || false
[[ "$output" =~ "dolt_conflicts_t1" ]] || false
[[ "$output" =~ "dolt_diff_t1" ]] || false
[[ "$output" =~ "dolt_commit_diff_t1" ]] || false
[[ "$output" =~ "dolt_constraint_violations_t2" ]] || false
[[ "$output" =~ "dolt_history_t2" ]] || false
[[ "$output" =~ "dolt_conflicts_t2" ]] || false
[[ "$output" =~ "dolt_diff_t2" ]] || false
[[ "$output" =~ "dolt_commit_diff_t2" ]] || false
[[ "$output" =~ "dolt_constraint_violations_table_one" ]] || false
[[ "$output" =~ "dolt_history_table_one" ]] || false
[[ "$output" =~ "dolt_conflicts_table_one" ]] || false
[[ "$output" =~ "dolt_diff_table_one" ]] || false
[[ "$output" =~ "dolt_commit_diff_table_one" ]] || false
[[ "$output" =~ "dolt_constraint_violations_table_two" ]] || false
[[ "$output" =~ "dolt_history_table_two" ]] || false
[[ "$output" =~ "dolt_conflicts_table_two" ]] || false
[[ "$output" =~ "dolt_diff_table_two" ]] || false
[[ "$output" =~ "dolt_commit_diff_table_two" ]] || false
}
@test "ls: --system and --all are mutually exclusive" {
@@ -153,33 +153,33 @@ teardown() {
run dolt ls HEAD
[ "$status" -eq 0 ]
[[ "$output" =~ "Tables in " ]] || false # Tables in [hash]:
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "table_two" ]] || false
run dolt ls HEAD~1
[ "$status" -eq 0 ]
[[ "$output" =~ "Tables in " ]] || false # Tables in [hash]:
[[ "$output" =~ "t1" ]] || false
! [[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "table_one" ]] || false
! [[ "$output" =~ "table_two" ]] || false
}
@test "ls: ls with branch" {
dolt checkout -b branch1
dolt sql -q "create table t3 (pk int primary key)"
dolt commit -Am "create table t3"
dolt sql -q "create table table_three (pk int primary key)"
dolt commit -Am "create table table_three"
dolt checkout main
run dolt ls branch1
[ "$status" -eq 0 ]
[[ "$output" =~ "Tables in " ]] || false # Tables in [hash]:
[[ "$output" =~ "t1" ]] || false
[[ "$output" =~ "t2" ]] || false
[[ "$output" =~ "t3" ]] || false
[[ "$output" =~ "table_one" ]] || false
[[ "$output" =~ "table_two" ]] || false
[[ "$output" =~ "table_three" ]] || false
}
@test "ls: no tables in working set" {
dolt sql -q "drop table t1"
dolt sql -q "drop table t2"
dolt sql -q "drop table table_one"
dolt sql -q "drop table table_two"
run dolt ls
[ "$status" -eq 0 ]

244 integration-tests/bats/reflog.bats Normal file → Executable file
View File

@@ -6,40 +6,27 @@ teardown() {
teardown_common
}
# Asserts that when DOLT_DISABLE_REFLOG is set, the dolt_reflog() table
# function returns an empty result set with no error.
# Asserts that when DOLT_DISABLE_REFLOG is set, dolt reflog returns nothing with no error.
@test "reflog: disabled with DOLT_DISABLE_REFLOG" {
export DOLT_DISABLE_REFLOG=true
setup_common
setup_common # need to set env vars before setup_common for remote tests
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
dolt commit --allow-empty -m "test commit 1"
run dolt sql -q "select * from dolt_reflog();"
run dolt reflog
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 0 ]
}
# Sanity check for the most basic case of querying the Dolt reflog
@test "reflog: enabled by default" {
setup_common
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
run dolt sql -q "select * from dolt_reflog();"
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 6 ]
[[ "$output" =~ "initial commit" ]] || false
[[ "$output" =~ "Initialize data repository" ]] || false
}
# Asserts that when DOLT_REFLOG_RECORD_LIMIT has been set, the reflog only contains the
# most recent entries and is limited by the env var's value.
@test "reflog: set DOLT_REFLOG_RECORD_LIMIT" {
export DOLT_REFLOG_RECORD_LIMIT=2
setup_common
setup_common # need to set env vars before setup_common for remote tests
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
@@ -47,10 +34,227 @@ teardown() {
dolt commit --allow-empty -m "test commit 2"
# Only the most recent two ref changes should appear in the log
run dolt sql -q "select * from dolt_reflog();"
run dolt reflog
[ "$status" -eq 0 ]
[[ "$output" =~ "test commit 1" ]] || false
[[ "$output" =~ "test commit 2" ]] || false
[[ ! "$output" =~ "initial commit" ]] || false
[[ ! "$output" =~ "Initialize data repository" ]] || false
}
@test "reflog: simple reflog" {
setup_common
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
run dolt reflog
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 2 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(HEAD -> main) initial commit" ]] || false
[[ "$out" =~ "Initialize data repository" ]] || false
}
@test "reflog: reflog with ref given" {
setup_common
dolt sql <<SQL
create table t1(pk int primary key);
call dolt_commit('-Am', 'creating table t1');
insert into t1 values(1);
call dolt_commit('-Am', 'inserting row 1');
call dolt_tag('tag1');
call dolt_checkout('-b', 'branch1');
insert into t1 values(2);
call dolt_commit('-Am', 'inserting row 2');
insert into t1 values(3);
call dolt_commit('-Am', 'inserting row 3');
call dolt_tag('-d', 'tag1');
call dolt_tag('tag1');
SQL
run dolt reflog refs/heads/main
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(HEAD -> main) inserting row 1" ]] || false
[[ "$out" =~ "creating table t1" ]] || false
[[ "$out" =~ "Initialize data repository" ]] || false
# ref is case-insensitive
run dolt reflog rEFs/heAdS/MAIN
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(HEAD -> main) inserting row 1" ]] || false
[[ "$out" =~ "creating table t1" ]] || false
[[ "$out" =~ "Initialize data repository" ]] || false
run dolt reflog main
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(HEAD -> main) inserting row 1" ]] || false
[[ "$out" =~ "creating table t1" ]] || false
[[ "$out" =~ "Initialize data repository" ]] || false
# ref is case-insensitive
run dolt reflog MaIn
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(HEAD -> main) inserting row 1" ]] || false
[[ "$out" =~ "creating table t1" ]] || false
[[ "$out" =~ "Initialize data repository" ]] || false
run dolt reflog refs/heads/branch1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(branch1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 2" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
run dolt reflog branch1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(branch1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 2" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
run dolt reflog refs/tags/tag1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 2 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(tag: tag1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
# ref is case-insensitive
run dolt reflog Refs/tAGs/TaG1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 2 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(tag: tag1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
run dolt reflog tag1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 2 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(tag: tag1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
# ref is case-insensitive
run dolt reflog TAg1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 2 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(tag: tag1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
dolt branch -D branch1
run dolt reflog branch1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 3 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(branch1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 2" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
dolt tag -d tag1
run dolt reflog tag1
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 2 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(tag: tag1) inserting row 3" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
}
@test "reflog: garbage collection with no newgen data" {
setup_common
run dolt reflog
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 1 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(HEAD -> main) Initialize data repository" ]] || false
dolt gc
run dolt reflog
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 0 ]
}
@test "reflog: garbage collection with newgen data" {
setup_common
dolt sql <<SQL
create table t1(pk int primary key);
call dolt_commit('-Am', 'creating table t1');
insert into t1 values(1);
call dolt_commit('-Am', 'inserting row 1');
call dolt_tag('tag1');
insert into t1 values(2);
call dolt_commit('-Am', 'inserting row 2');
SQL
run dolt reflog main
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 4 ]
out=$(echo "$output" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$out" =~ "(HEAD -> main) inserting row 2" ]] || false
[[ "$out" =~ "inserting row 1" ]] || false
[[ "$out" =~ "creating table t1" ]] || false
[[ "$out" =~ "Initialize data repository" ]] || false
dolt gc
run dolt reflog main
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 0 ]
}
@test "reflog: too many arguments given" {
setup_common
run dolt reflog foo bar
[ "$status" -eq 1 ]
[[ "$output" =~ "error: reflog has too many positional arguments" ]] || false
}
@test "reflog: unknown ref returns nothing" {
setup_common
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
run dolt reflog foo
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 0 ]
}
@test "reflog: 'HEAD -> ' decoration only appears on HEAD entries" {
setup_common
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
run dolt reflog
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 2 ]
line1=$(echo "${lines[0]}" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
line2=$(echo "${lines[1]}" | sed -E 's/\x1b\[[0-9;]*m//g') # remove special characters for color
[[ "$line1" =~ "(HEAD -> main) initial commit" ]] || false
[[ "$line2" =~ "Initialize data repository" ]] || false
[[ ! "$line2" =~ "HEAD" ]] || false
}

View File

@@ -26,14 +26,14 @@ skip_if_no_aws_tests() {
dolt fetch origin
}
@test "remotes-aws: fetch with non-existant dynamo table fails" {
@test "remotes-aws: fetch with non-existent dynamo table fails" {
skip_if_no_aws_tests
dolt remote add origin 'aws://['"this_dynamodb_table_does_not_exist_b612c34f055f4b458"':'"$DOLT_BATS_AWS_BUCKET"']/'"$DOLT_BATS_AWS_EXISTING_REPO"
run dolt fetch origin
[ "$status" -eq 1 ]
}
@test "remotes-aws: fetch with non-existant s3 bucket fails" {
@test "remotes-aws: fetch with non-existent s3 bucket fails" {
skip_if_no_aws_tests
dolt remote add origin 'aws://['"$DOLT_BATS_AWS_TABLE"':'"this_s3_bucket_does_not_exist_5883eaaa20a4797bb"']/'"$DOLT_BATS_AWS_EXISTING_REPO"
run dolt fetch origin

View File

@@ -1270,3 +1270,27 @@ SQL
[[ "$localOutput" == "$remoteOutput" ]] || false
}
@test "sql-local-remote: verify dolt reflog behavior" {
cd altDB
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
run dolt --verbose-engine-setup reflog
[ $status -eq 0 ]
[[ "$output" =~ "starting local mode" ]] || false
[[ "$output" =~ "initial commit" ]] || false
run dolt reflog
localOutput=$output
start_sql_server altDB
run dolt --verbose-engine-setup reflog
[ $status -eq 0 ]
[[ "$output" =~ "starting remote mode" ]] || false
[[ "$output" =~ "initial commit" ]] || false
run dolt reflog
remoteOutput=$output
[[ "$localOutput" == "$remoteOutput" ]] || false
}

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bats
load $BATS_TEST_DIRNAME/helper/common.bash
teardown() {
assert_feature_version
teardown_common
}
# Asserts that when DOLT_DISABLE_REFLOG is set, the dolt_reflog() table
# function returns an empty result set with no error.
@test "sql-reflog: disabled with DOLT_DISABLE_REFLOG" {
export DOLT_DISABLE_REFLOG=true
setup_common # need to set env vars before setup_common for remote tests
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
dolt commit --allow-empty -m "test commit 1"
run dolt sql -q "select * from dolt_reflog();"
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 0 ]
}
# Sanity check for the most basic case of querying the Dolt reflog
@test "sql-reflog: enabled by default" {
setup_common
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
run dolt sql -q "select * from dolt_reflog();"
[ "$status" -eq 0 ]
[ "${#lines[@]}" -eq 6 ]
[[ "$output" =~ "initial commit" ]] || false
[[ "$output" =~ "Initialize data repository" ]] || false
}
# Asserts that when DOLT_REFLOG_RECORD_LIMIT has been set, the reflog only contains the
# most recent entries and is limited by the env var's value.
@test "sql-reflog: set DOLT_REFLOG_RECORD_LIMIT" {
export DOLT_REFLOG_RECORD_LIMIT=2
setup_common # need to set env vars before setup_common for remote tests
dolt sql -q "create table t (i int primary key, j int);"
dolt sql -q "insert into t values (1, 1), (2, 2), (3, 3)";
dolt commit -Am "initial commit"
dolt commit --allow-empty -m "test commit 1"
dolt commit --allow-empty -m "test commit 2"
# Only the most recent two ref changes should appear in the log
run dolt sql -q "select * from dolt_reflog();"
[ "$status" -eq 0 ]
[[ "$output" =~ "test commit 1" ]] || false
[[ "$output" =~ "test commit 2" ]] || false
[[ ! "$output" =~ "initial commit" ]] || false
[[ ! "$output" =~ "Initialize data repository" ]] || false
}

View File

@@ -11,7 +11,7 @@ tests:
args: ["--config", "server.yaml"]
error_matches:
- "require_secure_transport can only be `true` when a tls_key and tls_cert are provided."
- name: tls_key non-existant
- name: tls_key non-existent
repos:
- name: repo1
with_files:
@@ -28,7 +28,7 @@ tests:
args: ["--config", "server.yaml"]
error_matches:
- "no such file or directory"
- name: tls_cert non-existant
- name: tls_cert non-existent
repos:
- name: repo1
with_files:

View File

@@ -92,7 +92,8 @@ enum ClientEventType {
STASH_POP = 60;
SHOW = 61;
PROFILE = 62;
SQL_SERVER_HEARTBEAT = 63;
REFLOG = 63;
SQL_SERVER_HEARTBEAT = 64;
}
enum MetricID {